What's the equivalent in asyncio of gevent.sleep(0) - python

If I want some code to run later, I can call gevent.sleep(0) to yield from the current greenlet. How do I do the same in asyncio?
e.g.
import gevent

def spawn(f):
    def wrapper(*args, **kwargs):
        return gevent.spawn(f, *args, **kwargs)
    return wrapper

@spawn
def f(a, b):
    gevent.sleep(0)  # gevent.idle()
    print(a + b)

@spawn
def g():
    print("hello")

f(1, 3)
f(4, 5)
g()
gevent.sleep(3)
"""
Expected:
hello
4
9
"""
In this case g runs ahead of f. Is there something similar in asyncio?

The equivalent of gevent.sleep(time) in asyncio is await asyncio.sleep(time). Awaiting asyncio.sleep(time) suspends the calling task, and any other runnable tasks get a chance to execute; once the given time has elapsed, the caller becomes runnable again. In particular, await asyncio.sleep(0) suspends the task just long enough to hand control back to the event loop, which is the direct counterpart of gevent.sleep(0).
Example:
import asyncio

async def f():
    await asyncio.sleep(2)
    print('This is function f!')

async def g():
    print("This is function g!")

async def main():
    # Schedule both coroutines as tasks on the running event loop.
    asyncio.create_task(f())
    asyncio.create_task(g())
    # Give them time to finish before main() returns.
    await asyncio.sleep(10)

asyncio.run(main())
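For comparison, here is a minimal sketch of the gevent example from the question rewritten for asyncio (assuming Python 3.7+): await asyncio.sleep(0) plays the role of gevent.sleep(0), so g() gets to run before either f() prints.
import asyncio

async def f(a, b):
    await asyncio.sleep(0)  # yield control to the event loop, like gevent.sleep(0)
    print(a + b)

async def g():
    print("hello")

async def main():
    # Schedule all three coroutines, then wait for them to finish.
    await asyncio.gather(f(1, 3), f(4, 5), g())

asyncio.run(main())
# Prints: hello, 4, 9 -- g() finishes first because both f() calls yield at sleep(0).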

Related

Call an async function periodically?

I have the following function to call s(c) every 24 hours.
def schedule_next_sync():
    t = datetime.datetime.now()
    t = t.replace(hour=0) + datetime.timedelta(hours=24)
    def wrapper():
        s(c)
        schedule_next_sync()
    tornado.ioloop.IOLoop.current().add_timeout(datetime.datetime.timestamp(t), wrapper)
However, s() will be changed to an async function:
async def s(c):
How should schedule_next_sync be updated for the async version? Should I run s() synchronously, or should schedule_next_sync() itself become an async function?
Once s is async, you could use asyncio.sleep() instead of the lower-level add_timeout():
async def schedule_next_sync():
    async def call_forever():
        while True:
            await asyncio.sleep(1)
            await s(c)
    # Tornado's IOLoop runs on top of asyncio, so the task can be
    # created directly on the running asyncio loop.
    asyncio.create_task(call_forever())
If you really want to do it with timeouts, something like this should work:
def schedule_next_sync():
    t = datetime.datetime.now() + datetime.timedelta(seconds=1)
    def wrapper():
        loop = asyncio.get_running_loop()
        task = loop.create_task(s(c))
        task.add_done_callback(lambda _: schedule_next_sync())
    loop = tornado.ioloop.IOLoop.current()
    loop.add_timeout(datetime.datetime.timestamp(t), wrapper)
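For the original 24-hour schedule, a plain asyncio loop that sleeps until the next run time is another option. This is only a sketch under the question's assumptions (s and c defined elsewhere, one run per day at midnight):
import asyncio
import datetime

async def sync_daily():
    while True:
        # Compute the number of seconds until the next midnight, then wait.
        now = datetime.datetime.now()
        next_run = (now + datetime.timedelta(days=1)).replace(
            hour=0, minute=0, second=0, microsecond=0)
        await asyncio.sleep((next_run - now).total_seconds())
        await s(c)  # s and c are assumed to exist, as in the question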

How to correctly lock async generators?

I am trying to use an async generator as a wrapper for a shared connection
async def mygen():
    await init()
    connection = await open_connection()
    while True:
        data = yield
        await connection.send(data)

shared_gen = None

async def send_data(data):
    global shared_gen
    if not shared_gen:
        shared_gen = mygen()
        await shared_gen.asend(None)
    await shared_gen.asend(data)
Is the above code safe from race conditions? Is it possible for two asends to execute concurrently, or will the second one implicitly block until the generator is waiting at the yield again? Assume connection.send is not concurrency-safe.
Update:
I wrote a wrapper to help use it safely.
import asyncio
import contextlib

class Locked:
    def __init__(self, resource):
        self._resource = resource
        self._lock = asyncio.Lock()

    @contextlib.asynccontextmanager
    async def lock(self):
        async with self._lock:
            yield self._resource

async def send_data(locked_gen, data):
    async with locked_gen.lock() as gen:
        await gen.asend(data)

async def main():
    gen = mygen()
    await gen.asend(None)
    locked_gen = Locked(gen)
    ...
Is it possible for two asends to execute concurrently, or will the second one implicitly block until the generator is waiting at the yield again?
It is not possible for asend to be called concurrently, but trying to do so doesn't result in blocking. Instead, the second one will raise a RuntimeError, as demonstrated by the following example:
import asyncio

async def gen():
    while True:
        yield
        await asyncio.sleep(1)

async def main():
    ait = gen()
    await ait.asend(None)  # start the generator

    async def send():
        print('sending')
        await ait.asend(42)

    await asyncio.gather(send(), send())

asyncio.run(main())
To make the send block until the previous one finishes, you need an explicit lock around the await of asend:
async def main():
    ait = gen()
    await ait.asend(None)
    lock = asyncio.Lock()

    async def send():
        async with lock:
            print('sending')
            await ait.asend(42)

    await asyncio.gather(send(), send())
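The Locked wrapper from the question's update applies the same idea. As a rough usage sketch (assuming mygen(), Locked and send_data are defined as in the update), concurrent callers then serialize on the internal asyncio.Lock:
async def main():
    gen = mygen()
    await gen.asend(None)  # prime the generator
    locked_gen = Locked(gen)
    # Only one asend() is in flight at a time; the second call waits on the lock.
    await asyncio.gather(
        send_data(locked_gen, b"first"),
        send_data(locked_gen, b"second"),
    )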

how to understand await in coroutines?

The following example shows that we can run phase1 and then phase2, one after the other. But what I want from coroutines is to do two things concurrently rather than sequentially. I know that asyncio.get_event_loop().create_task() can achieve what I want, but then why use await? I see no difference between using await and just calling a plain function.
import asyncio

async def outer():
    print('in outer')
    print('waiting for result1')
    result1 = await phase1()
    print('waiting for result2')
    result2 = await phase2(result1)
    return (result1, result2)

async def phase1():
    print('in phase1')
    return 'result1'

async def phase2(arg):
    print('in phase2')
    return 'result2 derived from {}'.format(arg)

event_loop = asyncio.get_event_loop()
try:
    return_value = event_loop.run_until_complete(outer())
    print('return value: {!r}'.format(return_value))
finally:
    event_loop.close()
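For reference, a minimal sketch of the concurrent variant the question alludes to: the two phases are wrapped in tasks with create_task and awaited together (here phase2 is made independent of phase1's result, since it no longer waits for it):
import asyncio

async def phase1():
    await asyncio.sleep(1)
    return 'result1'

async def phase2():
    await asyncio.sleep(1)
    return 'result2'

async def outer():
    # Both phases start immediately and run concurrently;
    # gather() returns once both are done (about 1 second total, not 2).
    task1 = asyncio.create_task(phase1())
    task2 = asyncio.create_task(phase2())
    return await asyncio.gather(task1, task2)

print(asyncio.run(outer()))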

python asyncio add_done_callback with async def

I have two functions: the first one, def_a, is an asynchronous function, and the second one, def_b, is a regular function that is registered, via add_done_callback, as a callback to receive the result of def_a.
My code looks like this:
import asyncio

def def_b(result):
    next_number = result.result()
    # some work on the next_number
    print(next_number + 1)

async def def_a(number):
    await some_async_work(number)
    return number + 1

loop = asyncio.get_event_loop()
task = asyncio.ensure_future(def_a(1))
task.add_done_callback(def_b)
response = loop.run_until_complete(task)
loop.close()
And it works perfectly.
The problem began when the second function, def_b, also became asynchronous. Now it looks like this:
async def def_b(result):
    next_number = result.result()
    # some asynchronous work on the next_number
    print(next_number + 1)
But now I cannot pass it to add_done_callback, because it is not a regular function.
My question is: is it possible to pass def_b to add_done_callback when def_b is asynchronous, and if so, how?
add_done_callback is considered a "low level" interface. When working with coroutines, you can chain them in many ways, for example:
import asyncio

async def my_callback(result):
    print("my_callback got:", result)
    return "My return value is ignored"

async def coro(number):
    await asyncio.sleep(number)
    return number + 1

async def add_success_callback(fut, callback):
    result = await fut
    await callback(result)
    return result

loop = asyncio.get_event_loop()
task = asyncio.ensure_future(coro(1))
task = add_success_callback(task, my_callback)
response = loop.run_until_complete(task)
print("response:", response)
loop.close()
Keep in mind add_done_callback will still call the callback if your future raises an exception (but calling result.result() will raise it).
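If you want the coroutine-chaining approach to keep that add_done_callback behavior, a hedged variant (the name add_done_style_callback and the error handling are illustrative, not part of the answer above) could catch the exception and still invoke the callback:
async def add_done_style_callback(fut, callback):
    # Mirror add_done_callback: run the callback whether or not fut failed.
    try:
        result = await fut
    except Exception as exc:
        await callback(exc)  # hand the exception to the callback instead of a result
        raise
    await callback(result)
    return result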
This only works for one future job; if you have multiple async jobs, they will block each other. A better way is to use asyncio.as_completed() to iterate over the list of futures:
import asyncio

async def __after_done_callback(future_result):
    # await something...
    pass

async def __future_job(number):
    await some_async_work(number)
    return number + 1

async def main():
    # create 100 future jobs
    tasks = [asyncio.ensure_future(__future_job(x)) for x in range(100)]
    for f in asyncio.as_completed(tasks):
        result = await f  # await must happen inside a coroutine
        await __after_done_callback(result)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.close()
You can try the aiodag library. It's a very lightweight wrapper around asyncio that abstracts away some of the async plumbing that you usually have to think about. From this example you won't be able to tell that things are running asynchronously since it's just 1 task that depends on another, but it is all running async.
import asyncio
from aiodag import task

@task
async def def_b(result):
    # some asynchronous work on the next_number
    print(result + 1)

@task
async def def_a(number):
    await asyncio.sleep(number)
    return number + 1

async def main():
    a = def_a(1)
    b = def_b(a)  # this makes task b depend on task a
    return await b

loop = asyncio.get_event_loop()
asyncio.set_event_loop(loop)
response = loop.run_until_complete(main())

Lazy iterators (generators) with asyncio

I have blocking, non-async code like this:
def f():
    def inner():
        while True:
            yield read()
    return inner()
With this code the caller can decide when to stop the function from generating data. How do I change this to async? This solution doesn't work:
async def f():
    async def inner():
        while True:
            yield await coroutine_read()
    return inner()
... because yield can't be used in async def functions. If I remove the async from the inner() signature, I can't use await anymore.
Update:
Starting with Python 3.6 we have asynchronous generators and are able to use yield directly inside coroutines.
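On 3.6+ the attempt from the question works essentially as written; a minimal sketch, assuming coroutine_read() is the awaitable defined in the example further down:
async def f():
    async def inner():
        while True:
            yield await coroutine_read()  # allowed in 3.6+: inner() is an async generator
    return inner()

async def main():
    gen = await f()
    async for item in gen:
        print(item)
        if item >= 3:  # the caller decides when to stop
            break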
On older Python versions you can't use yield inside async functions, as noted above. If you want to create a coroutine-generator there, you have to do it manually, using the __aiter__ and __anext__ magic methods:
import asyncio

# `coroutine_read()` generates some data:
i = 0
async def coroutine_read():
    global i
    i += 1
    await asyncio.sleep(i)
    return i

# `f()` is an asynchronous iterator.
# Since we never raise `StopAsyncIteration`,
# it works "like" `while True`, until we manually break.
class f:
    def __aiter__(self):  # a plain method returning self (required since 3.5.2)
        return self

    async def __anext__(self):
        return await coroutine_read()

# Use f() as an asynchronous iterator with `async for`:
async def main():
    async for i in f():
        print(i)
        if i >= 3:
            break

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
Output:
1
2
3
[Finished in 6.2s]
You may also like to see another post, where StopAsyncIteration is used.
