Python: using futures with run_forever

Just started experimenting with asyncio, which looks really cool. I'm trying to use futures with an asyncio coroutine that runs forever, but I get this error:
Task exception was never retrieved
future: <Task finished coro=<slow_operation() done, defined at ./asynchio-test3.py:5> exception=InvalidStateError("FINISHED: <Future finished result='This is the future!'>",)>
This is my code which runs as expected if I remove the 3 lines related to futures:
import asyncio

@asyncio.coroutine
def slow_operation():
    yield from asyncio.sleep(1)
    print("This is the task!")
    future.set_result('This is the future!')
    asyncio.async(slow_operation())

def got_result(future):
    print(future.result())

loop = asyncio.get_event_loop()
future = asyncio.Future()
future.add_done_callback(got_result)
asyncio.async(slow_operation())

try:
    loop.run_forever()
finally:
    loop.close()

slow_operation is scheduled again and again, calling set_result on the same future object multiple times, which is not possible:
>>> import asyncio
>>> future = asyncio.Future()
>>> future.set_result('result')
>>> future.set_result('result')
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "C:\Python35\lib\asyncio\futures.py", line 329, in set_result
    raise InvalidStateError('{}: {!r}'.format(self._state, self))
asyncio.futures.InvalidStateError: FINISHED: <Future finished result='result'>
Create a new future for each slow_operation call. For example:
@asyncio.coroutine
def slow_operation(future):
    yield from asyncio.sleep(1)
    print("This is the task!")
    future.set_result('This is the future!')
    asyncio.async(slow_operation(new_future()))

def got_result(future):
    print(future.result())

def new_future():
    future = asyncio.Future()
    future.add_done_callback(got_result)
    return future

loop = asyncio.get_event_loop()
asyncio.async(slow_operation(new_future()))

try:
    loop.run_forever()
finally:
    loop.close()
BTW, you can use the new syntax (async/await) if you're using Python 3.5+:
async def slow_operation(future):
    await asyncio.sleep(1)
    print("This is the task!")
    future.set_result('This is the future!')
    asyncio.ensure_future(slow_operation(new_future()))

Following @falsetru's answer, here is a complete program with 3 async coroutines, each with its own got_result function. I'm using v3.4, which is why I don't use the new syntax. As an interesting side effect, the output clearly demonstrates the single-threaded nature of coroutines. I hope it's useful as a template for someone:
import asyncio

@asyncio.coroutine
def task1(future):
    yield from asyncio.sleep(1)
    print("This is operation#1")
    future.set_result('This is the result of operation #1!')
    asyncio.async(task1(new_future(got_result1)))

def got_result1(future):
    print(future.result())

@asyncio.coroutine
def task2(future):
    yield from asyncio.sleep(1)
    print("This is operation#2")
    future.set_result('This is the result of operation #2!')
    asyncio.async(task2(new_future(got_result2)))

def got_result2(future):
    print(future.result())

@asyncio.coroutine
def task3(future):
    yield from asyncio.sleep(1)
    print("This is operation#3")
    future.set_result('This is the result of operation #3!')
    asyncio.async(task3(new_future(got_result3)))

def got_result3(future):
    print(future.result())

def new_future(callback):
    future = asyncio.Future()
    future.add_done_callback(callback)
    return future

tasks = [task1(new_future(got_result1)),
         task2(new_future(got_result2)),
         task3(new_future(got_result3))]

loop = asyncio.get_event_loop()
for task in tasks:
    asyncio.async(task)

try:
    loop.run_forever()
finally:
    loop.close()
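For reference, the output looks roughly like this, repeating every second (a representative run; the exact interleaving within each batch is up to the event loop, but everything runs on the single event-loop thread):

This is operation#1
This is operation#2
This is operation#3
This is the result of operation #1!
This is the result of operation #2!
This is the result of operation #3!
This is operation#1
...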

Related

Running aioschedule with other coroutine in python

I have two coroutines, one of which uses aioschedule. This is my code:
import aioschedule as schedule
import asyncio

async def foo():
    while True:
        print('foooooo')
        await asyncio.sleep(5)

async def bar():
    while True:
        print('bar')
        await asyncio.sleep(1)

schedule.every(2).seconds.do(bar)

loop = asyncio.get_event_loop()
loop.create_task(schedule.run_pending())
loop.create_task(foo())

try:
    loop.run_forever()
except KeyboardInterrupt:
    loop.stop()
What I want is for it to print bar every n seconds while the other task is running, but the output is only foooooo. Am I missing something?
Try this:
import aioschedule as schedule
import asyncio

async def foo():
    while True:
        print('foooooo')
        await asyncio.sleep(5)

async def bar():
    while True:
        print('bar')
        await asyncio.sleep(1)

# schedule.every(2).seconds.do(bar)  # <--- removed line

loop = asyncio.get_event_loop()
loop.create_task(schedule.run_pending())
loop.create_task(foo())
loop.create_task(bar())  # <--- added line

try:
    loop.run_forever()
except KeyboardInterrupt:
    loop.stop()
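If you do want to keep aioschedule, the usual pattern (a sketch, assuming aioschedule mirrors the schedule library's API) is to register a one-shot job and await schedule.run_pending() in a polling loop; a single create_task(schedule.run_pending()) only checks for due jobs once, which is why bar never ran:

import aioschedule as schedule
import asyncio

async def bar():
    print('bar')  # one-shot job: no infinite loop inside a scheduled job

async def scheduler():
    schedule.every(2).seconds.do(bar)
    while True:
        await schedule.run_pending()  # run whatever jobs are due
        await asyncio.sleep(0.1)      # then poll again shortly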

yield object inside asyncio function

I'm trying to work with asyncio. I tried this code, where I process a list of elements and print the state of each element (it works).
The problem: how can I yield the element (object)? When I do this, I get the error: object async_generator can't be used in 'await' expression.
import asyncio, random

async def process_element(element):
    print('starting', element)
    await asyncio.sleep(random.random())  # simulate IO-bound processing
    print('done', element)

async def do_stuff(q):
    while not q.empty():
        value = await q.get()
        await process_element(element=value)
        q.task_done()

async def main():
    jobs = asyncio.Queue()
    for i in range(20):
        await jobs.put(i)
    for i in range(5):
        asyncio.create_task(do_stuff(jobs))
    await jobs.join()

asyncio.run(main())
It is just a matter of receiving whatever you want to yield using an async for instead of a plain await:
import asyncio, random

async def process_element(element):
    print('starting', element)
    await asyncio.sleep(random.random())  # simulate IO-bound processing
    yield element
    print('done', element)

async def do_stuff(q):
    while not q.empty():
        value = await q.get()
        async for response in process_element(value):
            print(f"process yield element: {response}")
        q.task_done()

async def main():
    jobs = asyncio.Queue()
    for i in range(20):
        await jobs.put(i)
    for i in range(5):
        asyncio.create_task(do_stuff(jobs))
    await jobs.join()

asyncio.run(main())
If for some reason you don't want to use async for, you can call the methods __anext__ and asend on the async generator object (which is what calling process_element returns once it contains a yield keyword). Both __anext__ and asend have to be awaited, and they raise StopAsyncIteration when the generator is exhausted (in contrast with StopIteration for non-async generators).
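For illustration, a minimal sketch of driving an async generator by hand (gen and main are placeholder names):

import asyncio

async def gen():
    yield 1
    yield 2

async def main():
    g = gen()
    print(await g.__anext__())   # 1
    print(await g.asend(None))   # 2 -- asend(None) advances like __anext__
    try:
        await g.__anext__()
    except StopAsyncIteration:
        print("generator exhausted")

asyncio.run(main())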

How to stop asyncio loop with multiple tasks

I can't figure out how to stop the loop after one task is finished. In the sample, when WsServe counts to 5, I expect the loop to close. But instead of stopping I get: RuntimeError: Cannot close a running event loop
#!/usr/bin/env python
import asyncio

async def rxer():
    i = 0
    while True:
        i += 1
        print('Rxer ', i)
        await asyncio.sleep(1)

async def WsServe():
    for i in range(5):
        print('WsServe', i)
        await asyncio.sleep(1)
    print('Finish')
    loop.stop()
    loop.close()

loop = asyncio.get_event_loop()
loop.create_task(rxer())
loop.run_until_complete(WsServe())
loop.run_forever()
The error comes from calling loop.close() from inside the loop. You don't need to bother with loop.close(); loop.stop() is quite sufficient to stop the loop. loop.close() is only relevant when you want to ensure that all the resources internally acquired by the loop are released. It is not needed when your process is about to exit anyway, and removing the call to loop.close() indeed eliminates the error.
But also, loop.stop() is incompatible with run_until_complete(). It happens to work in this code because the coroutine returns immediately after calling loop.stop(); if you added e.g. an await asyncio.sleep(1) after loop.stop(), you'd again get a (different) RuntimeError.
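A minimal sketch of that second failure mode (stopper is a placeholder name):

import asyncio

async def stopper():
    loop.stop()
    await asyncio.sleep(1)  # the loop stops before this sleep completes

loop = asyncio.get_event_loop()
# raises RuntimeError: Event loop stopped before Future completed.
loop.run_until_complete(stopper())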
To avoid such issues, I suggest that you migrate to the newer asyncio.run API and avoid both run_until_complete and stop. Instead, you can just use an event to terminate the main function, and the loop with it:
# rxer() defined as before

async def WsServe(stop_event):
    for i in range(5):
        print('WsServe', i)
        await asyncio.sleep(1)
    print('Finish')
    stop_event.set()
    await asyncio.sleep(1)

async def main():
    asyncio.get_event_loop().create_task(rxer())
    stop_event = asyncio.Event()
    asyncio.get_event_loop().create_task(WsServe(stop_event))
    await stop_event.wait()

asyncio.run(main())

# python 3.6 and older:
# asyncio.get_event_loop().run_until_complete(main())
Check the commented lines of your implementation below:
import asyncio

async def rxer():
    i = 0
    while True:
        i += 1
        print('Rxer ', i)
        await asyncio.sleep(1)

async def WsServe():
    for i in range(5):
        print('WsServe', i)
        await asyncio.sleep(1)
    print('Finish')
    # loop.stop()
    # loop.close()

loop = asyncio.get_event_loop()
loop.create_task(rxer())
loop.run_until_complete(WsServe())
# loop.run_forever()
And here is the output:
Rxer 1
WsServe 0
Rxer 2
WsServe 1
Rxer 3
WsServe 2
Rxer 4
WsServe 3
Rxer 5
WsServe 4
Rxer 6
Finish

Beginner async/await question for api requests

I want to speed up some API requests. To figure out how, I copied some code that runs, but when I try my own code it's no longer asynchronous. Maybe someone can find the mistake?
Copied code (I guess from Stack Overflow):
#!/usr/bin/env python3
import asyncio

@asyncio.coroutine
def func_normal():
    print('A')
    yield from asyncio.sleep(5)
    print('B')
    return 'saad'

@asyncio.coroutine
def func_infinite():
    for i in range(10):
        print("--%d" % i)
    return 'saad2'

loop = asyncio.get_event_loop()
tasks = func_normal(), func_infinite()
a, b = loop.run_until_complete(asyncio.gather(*tasks))
print("func_normal()={a}, func_infinite()={b}".format(**vars()))
loop.close()
My "own" code (I need at the end a list returned and merge the results of all functions):
import asyncio
import time

@asyncio.coroutine
def say_after(start, count, say, yep=True):
    retl = []
    if yep:
        time.sleep(5)
    for x in range(start, count):
        retl.append(x)
    print(say)
    return retl

def main():
    print(f"started at {time.strftime('%X')}")
    loop = asyncio.get_event_loop()
    tasks = say_after(10, 20, "a"), say_after(20, 30, "b", False)
    a, b = loop.run_until_complete(asyncio.gather(*tasks))
    print("func_normal()={a}, func_infinite()={b}".format(**vars()))
    loop.close()
    c = a + b
    # print(c)
    print(f"finished at {time.strftime('%X')}")

main()
Or am I completely wrong and should I solve this with multithreading? What would be the best way to make API requests that return lists I need to merge?
I added a comment to each section that needs improvement, and removed some code to simplify it.
In fact, I didn't find any performance uplift from wrapping range() in a coroutine and using async def; it might be worth it with heavier operations.
import asyncio
import time

# @asyncio.coroutine IS DEPRECATED since python 3.8
@asyncio.coroutine
def say_after(wait=True):
    result = []
    if wait:
        print("I'm sleeping!")
        time.sleep(5)
        print("'morning!")
        # This BLOCKs the thread, but releases the GIL so other threads can run.
        # But asyncio runs in ONE thread, so this still harms simultaneity.

    # A normal for loop is a BLOCKING operation.
    for i in range(5):
        result.append(i)
        print(i, end='')
    print()
    return result

def main():
    start = time.time()
    # The loop argument will be DEPRECATED from python 3.10.
    # Make main() a coroutine, then use asyncio.run(main()).
    # It will run in the asyncio event loop, without explicitly passing the loop.
    loop = asyncio.get_event_loop()
    tasks = say_after(), say_after(False)
    # As we will use asyncio.run(main()) from now on, this should be await-ed.
    a, b = loop.run_until_complete(asyncio.gather(*tasks))
    print(f"Took {time.time() - start:5f}")
    loop.close()

main()
Better way:
import asyncio
import time

async def say_after(wait=True):
    result = []
    if wait:
        print("I'm sleeping!")
        await asyncio.sleep(2)  # 'await' a coroutine version of it instead.
        print("'morning!")

    # wrap the iterator in a generator - or coroutine
    async def asynchronous_range(end):
        for _i in range(end):
            yield _i

    # use it with async for
    async for i in asynchronous_range(5):
        result.append(i)
        print(i, end='')
    print()
    return result

async def main():
    start = time.time()
    tasks = say_after(), say_after(False)
    a, b = await asyncio.gather(*tasks)
    print(f"Took {time.time() - start:5f}")

asyncio.run(main())
Result
Your code:
DeprecationWarning: "@coroutine" decorator is deprecated since Python 3.8, use "async def" instead
  def say_after(wait=True):
I'm sleeping!
'morning!
01234
01234
Took 5.003802
Better async code:
I'm sleeping!
01234
'morning!
01234
Took 2.013863
Note that the fixed code now finishes its job while the other task is sleeping.
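To answer the original question about API requests: below is a sketch of the fan-out/merge pattern using the third-party aiohttp library (the URLs and the fetch_list name are hypothetical placeholders, and each endpoint is assumed to return a JSON list):

import asyncio
import aiohttp

async def fetch_list(session, url):
    # requests run concurrently; await only suspends this coroutine
    async with session.get(url) as resp:
        return await resp.json()

async def main():
    urls = ["https://example.com/api/a", "https://example.com/api/b"]
    async with aiohttp.ClientSession() as session:
        results = await asyncio.gather(*(fetch_list(session, u) for u in urls))
    merged = [item for sublist in results for item in sublist]  # merge the lists
    print(merged)

asyncio.run(main())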

Asyncio task cancellation

This is a test script I created to better understand task cancellation:
import asyncio
import random
import signal
import traceback

async def shutdown(signame, loop):
    print("Shutting down")
    tasks = [task for task in asyncio.Task.all_tasks()]
    for task in tasks:
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            print("Task cancelled: %s", task)
    loop.stop()

async def another():
    await asyncio.sleep(2)

async def some_other_process():
    await asyncio.sleep(5)
    return "Me"

async def process(job, loop, i):
    print(i)
    task = loop.create_task(some_other_process())
    value = await task
    if i < 1:
        another_task = loop.create_task(another())
        await another_task
    # await some_other_process()

def pull(loop):
    i = 0
    while True:
        job = f"random-integer-{random.randint(0, 100)}"
        try:
            loop.run_until_complete(process(job, loop, i))
            i += 1
        except asyncio.CancelledError as e:
            print("Task cancelled")
            break
        except Exception:
            print(traceback.format_exc())
            # asyncio.get_event_loop().stop()

def main():
    try:
        loop = asyncio.get_event_loop()
        for signame in ['SIGINT']:
            loop.add_signal_handler(
                getattr(signal, signame),
                lambda: asyncio.ensure_future(shutdown(signame, loop))
            )
        try:
            pull(loop)
        except Exception:
            print(traceback.format_exc())
        finally:
            loop.close()
    finally:
        print("Done")

if __name__ == "__main__":
    main()
And I cannot understand why I see:
Task was destroyed but it is pending!
task: <Task cancelling coro=<shutdown() done, defined at test.py:6>>
loop.add_signal_handler(
    getattr(signal, signame),
    lambda: asyncio.ensure_future(shutdown(signame, loop))
)
Here, using asyncio.ensure_future you create a task for the shutdown coroutine, but you never await this task anywhere. Later, when you close the event loop, it warns you that this task is still pending.
Upd:
If you want to do some cleanup, the best place for it is right before loop.close(), regardless of the reason your script ended (signal, exception, etc.).
Try to alter your code this way:
# ...

async def shutdown(loop):  # remove the `signame` arg
    # ...

def main():
    try:
        loop = asyncio.get_event_loop()
        try:
            pull(loop)
        except Exception:
            print(traceback.format_exc())
        finally:
            loop.run_until_complete(shutdown(loop))  # just run until shutdown is done
            loop.close()
    finally:
        print("Done")

# ...
Upd2:
In case you still want a signal handler, you probably want to do something like this:
from functools import partial

def cb(signame, loop):
    loop.stop()
    loop.run_until_complete(shutdown(signame, loop))

loop.add_signal_handler(
    getattr(signal, signame),
    partial(cb, signame, loop)
)
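Combining both updates, a minimal sketch of the overall shape (assuming Python 3.7+, where asyncio.all_tasks() replaces asyncio.Task.all_tasks()): let the signal handler merely stop the loop, and run the cleanup coroutine right before close():

import asyncio
import signal

async def shutdown():
    # cancel whatever is still running and wait for the cancellations to finish
    tasks = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()]
    for task in tasks:
        task.cancel()
    await asyncio.gather(*tasks, return_exceptions=True)

def main():
    loop = asyncio.get_event_loop()
    loop.add_signal_handler(signal.SIGINT, loop.stop)
    try:
        loop.run_forever()  # returns once SIGINT stops the loop
    finally:
        loop.run_until_complete(shutdown())
        loop.close()

main()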
