Yield object inside asyncio function - Python

I'm trying to work with asyncio. I tried this code, where I process a list of elements and print the state of each element; this part works.
The problem: how can I yield the element (object)? When I try, I get this error: object async_generator can't be used in 'await' expression
import asyncio, random

async def process_element(element):
    print('starting', element)
    await asyncio.sleep(random.random())  # simulate IO-bound processing
    print('done', element)

async def do_stuff(q):
    while not q.empty():
        value = await q.get()
        await process_element(element=value)
        q.task_done()

async def main():
    jobs = asyncio.Queue()
    for i in range(20):
        await jobs.put(i)
    for i in range(5):
        asyncio.create_task(do_stuff(jobs))
    await jobs.join()

asyncio.run(main())

It is just a matter of receiving whatever you want to yield using an async for instead of a plain await:
import asyncio, random

async def process_element(element):
    print('starting', element)
    await asyncio.sleep(random.random())  # simulate IO-bound processing
    yield element
    print('done', element)

async def do_stuff(q):
    while not q.empty():
        value = await q.get()
        async for response in process_element(value):
            print(f"process yield element: {response}")
        q.task_done()

async def main():
    jobs = asyncio.Queue()
    for i in range(20):
        await jobs.put(i)
    for i in range(5):
        asyncio.create_task(do_stuff(jobs))
    await jobs.join()

asyncio.run(main())
If for some reason you don't want to use "async for", you can call
the methods __anext__ and asend on the async generator object
(which is what calling process_element returns once it contains a yield keyword).
Both .__anext__ and .asend have to be awaited, and they raise StopAsyncIteration when the generator is exhausted (in contrast to StopIteration for non-async generators).
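For illustration, here is a minimal sketch of driving an async generator by hand this way; countdown is a trivial generator invented just for this demonstration:

import asyncio

async def countdown(n):
    # a trivial async generator, made up for this demo
    for i in range(n, 0, -1):
        yield i

async def main():
    agen = countdown(3)
    try:
        while True:
            value = await agen.__anext__()  # equivalent to: await agen.asend(None)
            print('got', value)
    except StopAsyncIteration:
        print('generator exhausted')

asyncio.run(main())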


Pause all asyncio tasks in Python

I have some asyncio tasks and I need to pause all of them.
This is the relevant part of my code:
import asyncio
import random

async def workers1():
    while True:
        k = random.randint(100, 200)
        await asyncio.sleep(k)
        await my_print(k)

async def workers2():
    while True:
        k = random.randint(100, 200)
        await asyncio.sleep(k)
        await my_print(k)

async def my_print(k):
    print(k)
    if k == 122:
        # >>>>>>> suspend all of the workers here
        while k != 155:
            k = await repair()
            await asyncio.sleep(1)
        # >>>>>>> resume all of the workers here

async def main():
    tasks = [asyncio.create_task(workers1()),
             asyncio.create_task(workers2())]
    [await x for x in tasks]

if __name__ == '__main__':
    asyncio.run(main())
How can I suspend all of the workers in my code when trouble happens in the function my_print, and resume all of the tasks after the repair in my_print?
I will be glad if you give an example.
I have already seen this link, but that's not what I need.
Simply replace your call to await asyncio.sleep(1) with time.sleep(1). If your code doesn't have an await expression in it, all the other tasks are effectively blocked.
import asyncio
import random
import time

async def workers1():
    while True:
        k = random.randint(100, 200)
        await asyncio.sleep(k)
        await my_print(k)

async def workers2():
    while True:
        k = random.randint(100, 200)
        await asyncio.sleep(k)
        await my_print(k)

async def my_print(k):
    print(k)
    if k == 122:
        # >>>>>>> all of the workers are suspended here
        while k != 155:
            k = random.randint(100, 200)
            time.sleep(1.0)  # CHANGE HERE
        # >>>>>>> and resumed here

async def main():
    tasks = [asyncio.create_task(workers1()),
             asyncio.create_task(workers2())]
    [await x for x in tasks]

if __name__ == '__main__':
    asyncio.run(main())
So, first, note that the time.sleep trick can be replaced with any non-asynchronous code: anything that runs synchronously will block the loop just as well as time.sleep.
That includes setting up a second asyncio event loop in a different thread and running tasks in that loop.
The following code uses a ThreadPoolExecutor from concurrent.futures to set up a new event loop. In particular:
future = executor.submit(asyncio.run, task_3())
will set up a new thread and run task_3 in that new loop.
The next line, future.result(), blocks the entire first loop (task_1 and task_2) until task_3 exits.
Inside task_3 you can do any asyncio operations you like, and until it exits all of the existing tasks will be suspended.
import asyncio, concurrent.futures

async def task_1():
    while True:
        print('task 1 runs')
        await asyncio.sleep(1)

async def task_2():
    print('task 2 starts')
    await asyncio.sleep(5)
    print('first set of tasks suspends')
    future = executor.submit(asyncio.run, task_3())
    print('suspending existing tasks')
    future.result()
    print('resuming tasks')

async def task_3():
    print('task 3 runs')
    await asyncio.sleep(4)
    print('task 3 finishes')

async def main():
    asyncio.ensure_future(task_1())
    asyncio.ensure_future(task_2())
    await asyncio.sleep(15)

executor = concurrent.futures.ThreadPoolExecutor()
asyncio.run(main())
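A different, cooperative option (not the loop-blocking trick used above) is to have every worker wait on a shared asyncio.Event between iterations: clearing the event pauses them, setting it resumes them. A minimal sketch with made-up names (worker, gate); it requires each worker to cooperate by checking the event:

import asyncio

async def worker(name, gate):
    while True:
        await gate.wait()  # blocks here while the gate is cleared
        print(name, 'working')
        await asyncio.sleep(1)

async def main():
    gate = asyncio.Event()
    gate.set()  # start in the "running" state
    asyncio.create_task(worker('w1', gate))
    asyncio.create_task(worker('w2', gate))
    await asyncio.sleep(3)
    print('suspending workers')
    gate.clear()  # workers stop at their next wait()
    await asyncio.sleep(3)
    print('resuming workers')
    gate.set()
    await asyncio.sleep(2)  # let the workers run a bit more

asyncio.run(main())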

Python asyncio double await

For some reason I need to write a double await, but I don't exactly know why. Can someone explain this to me?
I've created a small example of the issue I ran into.
import asyncio
from random import randint

async def work():
    return randint(1, 100)

async def page():
    return asyncio.gather(*[
        work()
        for _ in range(10)
    ])

async def run():
    results = await (await page())
    return max(list(results))

result = asyncio.run(run())
It is the line results = await (await page()).
To actually execute awaitable objects, you need to await them.
Your page here is a coroutine function; when called, it returns a coroutine, which is an awaitable object.
When you write await page(), you run its body; on completion it returns another awaitable object, namely the result of calling asyncio.gather(). You need to await that too, which is why you need two awaits.
If you don't, you'd see:
RuntimeError: await wasn't used with future
You could instead perform the inner await inside the called coroutine:
import asyncio
from random import randint

async def work():
    return randint(1, 100)

async def page():
    return await asyncio.gather(*[work() for _ in range(10)])

async def run():
    results = await page()
    return max(list(results))

result = asyncio.run(run())
print(result)
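The two awaits can also be split across separate statements, which may make the mechanics clearer. A sketch of the same behavior, reusing work from the question: asyncio.gather() returns a Future immediately, and awaiting that Future yields the list of results.

import asyncio
from random import randint

async def work():
    return randint(1, 100)

async def run():
    gather_future = asyncio.gather(*[work() for _ in range(10)])
    # gather() returned a Future right away; awaiting it gives the results
    results = await gather_future
    return max(results)

print(asyncio.run(run()))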

Asynchronous, Multiple HTTP requests in a While Loop

The code below is intended to send multiple HTTP requests asynchronously in a while loop and, depending on the response from each request (request "X" always returns "XXX", "Y" always returns "YYY", and so on), do something and sleep for the interval in seconds specified for each request.
However, it throws an error...
RuntimeError: cannot reuse already awaited coroutine
Could anyone help me how I could fix the code to realise the intended behaviour?
import asyncio

class Client:
    def __init__(self):
        pass

    async def run_forever(self, coro, interval):
        while True:
            res = await coro
            await self._onresponse(res, interval)

    async def _onresponse(self, res, interval):
        if res == "XXX":
            # ... do something with the response ...
            await asyncio.sleep(interval)
        if res == "YYY":
            # ... do something with the response ...
            await asyncio.sleep(interval)
        if res == "ZZZ":
            # ... do something with the response ...
            await asyncio.sleep(interval)

async def request(something):
    # ... HTTP request using the aiohttp library ...
    return response

async def main():
    c = Client()
    await c.run_forever(request("X"), interval=1)
    await c.run_forever(request("Y"), interval=2)
    await c.run_forever(request("Z"), interval=3)
    # ... and more
As the error says, you can't await a coroutine more than once. Instead of passing a coroutine into run_forever and then awaiting it in a loop, pass the coroutine's argument(s) instead and await a new coroutine on each iteration of the loop.
class Client:
    async def run_forever(self, value, interval):
        while True:
            res = await request(value)
            await self._onresponse(res, interval)
You also need to change how you await run_forever. await is blocking, so when you await something with an infinite loop, you'll never reach the next line. Instead, you want to run multiple coroutines at once with gather.
async def main():
    c = Client()
    await asyncio.gather(
        c.run_forever("X", interval=1),
        c.run_forever("Y", interval=2),
        c.run_forever("Z", interval=3),
    )
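Putting the two pieces together, here is a self-contained sketch. The request coroutine below is a stub standing in for the aiohttp call in the question, _onresponse is reduced to a print, and the wait_for timeout exists only so the demo terminates:

import asyncio

async def request(something):
    # stub standing in for the real aiohttp request
    await asyncio.sleep(0.1)
    return something * 3  # "X" -> "XXX", "Y" -> "YYY", ...

class Client:
    async def run_forever(self, value, interval):
        while True:
            res = await request(value)  # a fresh coroutine on every iteration
            await self._onresponse(res, interval)

    async def _onresponse(self, res, interval):
        print('got', res)
        await asyncio.sleep(interval)

async def main():
    c = Client()
    try:
        # stop the demo after a few seconds; the real loops run forever
        await asyncio.wait_for(
            asyncio.gather(
                c.run_forever("X", interval=1),
                c.run_forever("Y", interval=2),
            ),
            timeout=5,
        )
    except asyncio.TimeoutError:
        pass

asyncio.run(main())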

How to correctly lock async generators?

I am trying to use an async generator as a wrapper for a shared connection
async def mygen():
    await init()
    connection = await open_connection()
    while True:
        data = yield
        await connection.send(data)

shared_gen = None

async def send_data(data):
    global shared_gen
    if not shared_gen:
        shared_gen = mygen()
        await shared_gen.asend(None)
    await shared_gen.asend(data)
Is the above code safe from race conditions? Is it possible for two asends to execute concurrently, or will the second one block implicitly until the generator is ready at the yield step? Assume connection.send is not concurrency-safe.
Update:
I wrote a wrapper to help use it safely.
import asyncio, contextlib

class Locked:
    def __init__(self, resource):
        self._resource = resource
        self._lock = asyncio.Lock()

    @contextlib.asynccontextmanager
    async def lock(self):
        async with self._lock:
            yield self._resource

async def send_data(locked_gen, data):
    async with locked_gen.lock() as gen:
        await gen.asend(data)

async def main():
    gen = mygen()
    await gen.asend(None)
    locked_gen = Locked(gen)
    ...
Is it possible for two asends to execute concurrently or the second one will block implicitly until the generator is ready in the yield step?
It is not possible for asend to be called concurrently, but trying to do so doesn't result in blocking. Instead, the second one will raise a RuntimeError, as demonstrated by the following example:
import asyncio

async def gen():
    while True:
        yield
        await asyncio.sleep(1)

async def main():
    ait = gen()
    await ait.asend(None)  # start the generator

    async def send():
        print('sending')
        await ait.asend(42)

    await asyncio.gather(send(), send())

asyncio.run(main())
To make the send block until the previous one finishes, you need an explicit lock around the await of asend:
async def main():
    ait = gen()
    await ait.asend(None)
    lock = asyncio.Lock()

    async def send():
        async with lock:
            print('sending')
            await ait.asend(42)

    await asyncio.gather(send(), send())
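For completeness, a runnable sketch of the Locked wrapper from the question's update; gen below is a stub invented to stand in for the connection-backed generator:

import asyncio, contextlib

class Locked:
    # the wrapper from the question, reproduced so the demo is self-contained
    def __init__(self, resource):
        self._resource = resource
        self._lock = asyncio.Lock()

    @contextlib.asynccontextmanager
    async def lock(self):
        async with self._lock:
            yield self._resource

async def gen():
    # stub standing in for the connection-backed generator
    while True:
        data = yield
        print('sending', data)
        await asyncio.sleep(0.1)

async def send_data(locked_gen, data):
    async with locked_gen.lock() as g:
        await g.asend(data)

async def main():
    g = gen()
    await g.asend(None)  # prime the generator up to its first yield
    locked = Locked(g)
    # concurrent sends now serialize on the lock instead of raising RuntimeError
    await asyncio.gather(send_data(locked, 1), send_data(locked, 2))

asyncio.run(main())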

Why is asyncio queue await get() blocking?

Why is await queue.get() blocking?
import asyncio

async def producer(queue, item):
    await queue.put(item)

async def consumer(queue):
    val = await queue.get()
    print("val = %d" % val)

async def main():
    queue = asyncio.Queue()
    await consumer(queue)
    await producer(queue, 1)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.close()
If I call producer() before consumer(), it works fine.
That is to say, the following works fine:
async def main():
    queue = asyncio.Queue()
    await producer(queue, 1)
    await consumer(queue)
Why isn't await queue.get() yielding control back to the event loop so that the producer coroutine can run, which would populate the queue so that queue.get() can return?
You need to start the consumer and the producer in parallel, e.g. defining main like this:
async def main():
    queue = asyncio.Queue()
    await asyncio.gather(consumer(queue), producer(queue, 1))
If for some reason you can't use gather, then you can do (the equivalent of) this:
async def main():
    queue = asyncio.Queue()
    asyncio.create_task(consumer(queue))
    asyncio.create_task(producer(queue, 1))
    await asyncio.sleep(100)  # what your program actually does
Why isn't await queue.get() yielding control back to the event loop so that the producer coroutine can run, which would populate the queue so that queue.get() can return?
await queue.get() is yielding control back to the event loop. But await means wait, so when your main coroutine says await consumer(queue), that means "resume me once consumer(queue) has completed." Since consumer(queue) is itself waiting for someone to produce something, you have a classic case of deadlock.
Reversing the order works only because your producer is one-shot, so it immediately returns to the caller. If your producer happened to await an external source (such as a socket), you would have a deadlock there as well. Starting them in parallel avoids the deadlock regardless of how producer and consumer are written.
It's because you call await consumer(queue), which means the next line (the producer) will not be called until consumer returns, which of course it never does, because nobody has produced anything yet.
Check out the example in the docs and see how they use it there: https://docs.python.org/3/library/asyncio-queue.html#examples
Another simple example:
import asyncio
import random

async def produce(queue, n):
    for x in range(1, n + 1):
        # produce an item
        print('producing {}/{}'.format(x, n))
        # simulate i/o operation using sleep
        await asyncio.sleep(random.random())
        item = str(x)
        # put the item in the queue
        await queue.put(item)
    # indicate the producer is done
    await queue.put(None)

async def consume(queue):
    while True:
        # wait for an item from the producer
        item = await queue.get()
        if item is None:
            # the producer emits None to indicate that it is done
            break
        # process the item
        print('consuming item {}...'.format(item))
        # simulate i/o operation using sleep
        await asyncio.sleep(random.random())

loop = asyncio.get_event_loop()
queue = asyncio.Queue()  # the loop argument was removed in Python 3.10
producer_coro = produce(queue, 10)
consumer_coro = consume(queue)
loop.run_until_complete(asyncio.gather(producer_coro, consumer_coro))
loop.close()
You should use .run_until_complete() with .gather()
Here is your updated code:
import asyncio

async def producer(queue, item):
    await queue.put(item)

async def consumer(queue):
    val = await queue.get()
    print("val = %d" % val)

queue = asyncio.Queue()
loop = asyncio.get_event_loop()
loop.run_until_complete(
    asyncio.gather(consumer(queue), producer(queue, 1))
)
loop.close()
Out:
val = 1
Also you could use .run_forever() with .create_task()
So your code snippet will be:
import asyncio

async def producer(queue, item):
    await queue.put(item)

async def consumer(queue):
    val = await queue.get()
    print("val = %d" % val)

queue = asyncio.Queue()
loop = asyncio.get_event_loop()
loop.create_task(consumer(queue))
loop.create_task(producer(queue, 1))

try:
    loop.run_forever()
except KeyboardInterrupt:
    loop.close()
Out:
val = 1
