I can't figure out how to stop the loop after one task is finished. In the sample below, when WsServe counts to 5 I expect the loop to close. But instead of stopping I get: RuntimeError: Cannot close a running event loop
#!/usr/bin/env python
import asyncio

async def rxer():
    i = 0
    while True:
        i += 1
        print('Rxer ', i)
        await asyncio.sleep(1)

async def WsServe():
    for i in range(5):
        print('WsServe', i)
        await asyncio.sleep(1)
    print('Finish')
    loop.stop()
    loop.close()

loop = asyncio.get_event_loop()
loop.create_task(rxer())
loop.run_until_complete(WsServe())
loop.run_forever()
The error comes from calling loop.close() from inside the loop. You don't need to bother with loop.close(), loop.stop() is quite sufficient to stop the loop. loop.close() is only relevant when you want to ensure that all the resources internally acquired by the loop are released. It is not needed when your process is about to exit anyway, and removing the call to loop.close() indeed eliminates the error.
But also, loop.stop() is incompatible with run_until_complete(). It happens to work in this code because the coroutine returns immediately after calling loop.stop(); if you added e.g. an await asyncio.sleep(1) after loop.stop(), you'd again get a (different) RuntimeError.
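A minimal sketch of that incompatibility (not your original code; the names are illustrative):

import asyncio

async def stopper():
    loop.stop()
    await asyncio.sleep(1)  # the coroutine is still pending when the loop stops

loop = asyncio.get_event_loop()
# raises RuntimeError: Event loop stopped before Future completed.
loop.run_until_complete(stopper())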
To avoid such issues, I suggest that you migrate to the newer asyncio.run API and avoid both run_until_complete and stop. Instead, you can just use an event to terminate the main function, and the loop with it:
# rxer() defined as before

async def WsServe(stop_event):
    for i in range(5):
        print('WsServe', i)
        await asyncio.sleep(1)
    print('Finish')
    stop_event.set()
    await asyncio.sleep(1)

async def main():
    asyncio.get_event_loop().create_task(rxer())
    stop_event = asyncio.Event()
    asyncio.get_event_loop().create_task(WsServe(stop_event))
    await stop_event.wait()

asyncio.run(main())

# python 3.6 and older:
#asyncio.get_event_loop().run_until_complete(main())
Here is your implementation with the offending lines commented out:
import asyncio

async def rxer():
    i = 0
    while True:
        i += 1
        print('Rxer ', i)
        await asyncio.sleep(1)

async def WsServe():
    for i in range(5):
        print('WsServe', i)
        await asyncio.sleep(1)
    print('Finish')
    #loop.stop()
    #loop.close()

loop = asyncio.get_event_loop()
loop.create_task(rxer())
loop.run_until_complete(WsServe())
#loop.run_forever()
And here is the output:
Rxer 1
WsServe 0
Rxer 2
WsServe 1
Rxer 3
WsServe 2
Rxer 4
WsServe 3
Rxer 5
WsServe 4
Rxer 6
Finish
I get this error:
D:\pythonstuff\demo.py:28: DeprecationWarning: The explicit passing of coroutine objects to asyncio.wait() is deprecated since Python 3.8, and scheduled for removal in Python 3.11.
await asyncio.wait([
Waited 1 second!
Waited 5 second!
Time passed: 0hour:0min:5sec
Process finished with exit code 0
When I run the code:
import asyncio
import time

class class1():
    async def function_inside_class(self):
        await asyncio.sleep(1)
        print("Waited 1 second!")

    async def function_inside_class2(self):
        await asyncio.sleep(5)
        print("Waited 5 second!")

def tic():
    global _start_time
    _start_time = time.time()

def tac():
    t_sec = round(time.time() - _start_time)
    (t_min, t_sec) = divmod(t_sec, 60)
    (t_hour, t_min) = divmod(t_min, 60)
    print('Time passed: {}hour:{}min:{}sec'.format(t_hour, t_min, t_sec))

object = class1()

async def main():
    tic()
    await asyncio.wait([
        object.function_inside_class(),
        object.function_inside_class2()
    ])
    tac()

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.close()
Are there any good alternatives to asyncio.wait? I don't want a warning in the console every time I launch my application.
Edit: I don't want to just hide the warning, that's bad practice; I'm looking for other ways to do the same or a similar thing, not another async library to restore the old functionality.
You can just call it the way the docs recommend.
Example from the docs:
async def foo():
    return 42

task = asyncio.create_task(foo())
done, pending = await asyncio.wait({task})
So your code would become:
await asyncio.wait([
    asyncio.create_task(object.function_inside_class()),
    asyncio.create_task(object.function_inside_class2())
])
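If you don't need the done/pending sets that asyncio.wait returns, asyncio.gather is another way to run both coroutines without the deprecation warning; a minimal sketch using your names:

await asyncio.gather(
    object.function_inside_class(),
    object.function_inside_class2()
)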
I would like to stop a Python asyncio task from another task and start it again when some condition in the second task happens.
Please note that I don't want to cancel the coroutine of the first task (the state of that coroutine when it stopped should be available). Also, I don't care about the exact state the first task is in; I just want the event loop to stop running the first task until told otherwise from the second.
I hope this example code helps understanding the problem:
import asyncio

async def coroutine1():
    i = 0
    while(True):
        i += 1
        print("coroutine1: " + str(i))
        await asyncio.sleep(1)

async def coroutine2(task1):
    i = 0
    while(True):
        i += 1
        if (i > 3) and (i < 10):
            pass  #TODO: stop task1 here
        else:
            pass  #TODO: Maybe check if task1 is running
                  #and start task1 again if it's not?
        print("coroutine2: " + str(i))
        await asyncio.sleep(1)

async def main_coroutine():
    loop = asyncio.get_event_loop()
    task1 = loop.create_task(coroutine1())
    task2 = loop.create_task(coroutine2(task1))
    done, pending = await asyncio.wait(
        [task1, task2]
        , return_when=asyncio.FIRST_COMPLETED,)

loop = asyncio.get_event_loop()
loop.run_until_complete(main_coroutine())
loop.close()
I would like to stop a Python asyncio task from another task and start it again when some condition in the second task happens.
I assume you control the task creation, but don't want to touch the implementation of the coroutine. In your case, you control coroutine2 and main_coroutine, but not the insides of coroutine1.
In that case you can wrap the coroutine in an __await__ that, instead of the normal yield from loop, checks your stopped flag and waits for a future that tells it when to resume.
class Stoppable:
    def __init__(self, coro):
        self._coro_iter = coro.__await__()
        self._stopped = None

    def __await__(self):
        while True:
            while self._stopped:
                print('awaiting stopped')
                yield from self._stopped.__await__()
            try:
                v = next(self._coro_iter)
            except StopIteration as e:
                return e.value
            yield v

    def stop(self):
        loop = asyncio.get_event_loop()
        self._stopped = loop.create_future()

    def start(self):
        if self._stopped is not None:
            self._stopped.set_result(None)
            self._stopped = None
You can use the wrapper to modify coroutine2 to stop and resume the execution of coroutine1 at will:
async def coroutine2(s):
    i = 0
    while True:
        i += 1
        if i == 3:
            print('stopping coroutine1')
            s.stop()
        elif i == 10:
            print('restarting coroutine1')
            s.start()
        print("coroutine2: " + str(i))
        await asyncio.sleep(1)

async def main_coroutine():
    loop = asyncio.get_event_loop()
    s = Stoppable(coroutine1())
    fut1 = asyncio.ensure_future(s)
    task2 = loop.create_task(coroutine2(s))
    done, pending = await asyncio.wait(
        [fut1, task2], return_when=asyncio.FIRST_COMPLETED)
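For completeness, a minimal entry point to run the example might look like this (a sketch in the same style as the question, not part of the original answer):

loop = asyncio.get_event_loop()
loop.run_until_complete(main_coroutine())
loop.close()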
The way the wrapper works is by unrolling the loop inherent in yield from. For example, to just delegate __await__ to another coroutine, one would write:
def __await__(self):
    yield from self._coro_iter
Written like this, you can't implement stopping because the yield from contains an implicit loop that yields all the values produced by the underlying iterator - something like:
def __await__(self):
    while True:
        try:
            v = next(self._coro_iter)
        except StopIteration as e:
            return e.value
        yield v
Taken like this, it is easy enough to add an if that checks for _stopped at each iteration pass, meaning each time we're resumed by the event loop. The remaining hurdle is that one cannot just busy-loop until _stopped is rescinded - we must yield something else to allow the event loop to resume running other coroutines. Fortunately that is easily achieved by making _stopped a future, and yielding from the future. When the future's result is set, we will be automatically resumed and continue executing the wrapped coroutine.
It seems it can't be done.
It's possible to cancel an ongoing task with task1.cancel() and it's possible to create a new task with asyncio.get_event_loop().create_task(newTask).
It's also possible to get the coroutine of a running task with task1._coro, but if we try to create a task again with a previously scheduled coroutine we will get a RuntimeError exception. This is the discussion where they decided it: https://bugs.python.org/issue25887
Finally, a possible way of accomplishing the desired effect is using an asyncio.Queue object:
import asyncio

async def coroutine1(stop_queue):
    i = 0
    while(True):
        if stop_queue.empty(): #if the queue is empty keep working.
            i += 1
            print("coroutine1: " + str(i))
        await asyncio.sleep(1)

async def coroutine2(stop_queue):
    i = 0
    while(True):
        i += 1
        if i == 3:
            await stop_queue.put("whatever..") #put something in the queue
        if i == 11:
            await stop_queue.get() #take something from the queue
        print("coroutine2: " + str(i))
        await asyncio.sleep(1)

async def main_coroutine():
    stop_queue = asyncio.Queue()
    done, pending = await asyncio.wait(
        [coroutine1(stop_queue), coroutine2(stop_queue)]
        , return_when=asyncio.ALL_COMPLETED,)

loop = asyncio.get_event_loop()
loop.run_until_complete(main_coroutine())
loop.close()
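For comparison, the same pause/resume structure can also be expressed with an asyncio.Event instead of polling a queue. This is a sketch, not part of the original answer, and the names are illustrative:

import asyncio

async def coroutine1(run_event):
    i = 0
    while True:
        await run_event.wait()  # suspends here while the event is cleared
        i += 1
        print("coroutine1: " + str(i))
        await asyncio.sleep(1)

async def coroutine2(run_event):
    i = 0
    while True:
        i += 1
        if i == 3:
            run_event.clear()   # pause coroutine1
        if i == 11:
            run_event.set()     # resume coroutine1
        print("coroutine2: " + str(i))
        await asyncio.sleep(1)

async def main_coroutine():
    run_event = asyncio.Event()
    run_event.set()             # start in the "running" state
    await asyncio.gather(coroutine1(run_event), coroutine2(run_event))

asyncio.run(main_coroutine())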
I have several http requests to fire simultaneously. I am trying to use async for to do this.
import asyncio

async def ticker(delay, to):
    for i in range(to):
        yield i
        print(i)
        await asyncio.sleep(delay) # instead of aiohttp request
        print(i, ' fin')

async def main():
    async for x in ticker(1, 2):
        pass

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
The calls run one after the other instead of concurrently, and I keep getting the following output:
0
0 fin
1
1 fin
Instead I need the output as shown below:
0
1
0 fin
1 fin
Could you please advise me on how to do this?
The problem is that async for is exactly what you don't need.
async for is designed to iterate while waiting for a task to complete between each iteration; you want to iterate (starting requests) without waiting for the previous task(s) to finish.
You'll want something like
async def do_request():
    await asyncio.sleep(1)

async def main():
    await asyncio.gather(*[
        do_request() for i in range(10)
    ])
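Applied to the original ticker example, that might look like the following (a sketch using the question's delays; the request name is illustrative):

import asyncio

async def request(i, delay):
    print(i)
    await asyncio.sleep(delay)  # instead of aiohttp request
    print(i, ' fin')

async def main():
    # start both "requests" at once instead of iterating over them sequentially
    await asyncio.gather(*[request(i, 1) for i in range(2)])

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())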
Comment with a follow-up if that doesn't answer your question.
I've been using asyncio for a bit but I'm still fairly unfamiliar with it. My current issue is that while trying to wait for a response from a function with asyncio, the waiting (a while loop) blocks the rest of the function from running. Here is the code that sums up the problem:
import asyncio

response = 0

async def handle(x):
    await asyncio.sleep(0.1)
    return x

async def run():
    global response
    for number in range(1, 21):
        response = await handle(number)
        print(response)
        if response == 10:
            await wait_for_next(response)

async def wait_for_next(x):
    while response == x:
        print('waiting', response, x)
        await asyncio.sleep(0.5)
    print('done')

tasks = [run()]
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait(tasks))
wait_for_next is supposed to wait for the next response, but the while loop blocks the run() function. How could I stop this happening? Should I be using loop.run_in_executor, and if so, how?
(There were a couple of other examples of this I could find, but they were very specific and I didn't understand if our problems/solutions would be the same.)
As already noted, the loop gets stuck because await wait_for_next(response) blocks the execution flow until that coroutine has finished.
If you want a coroutine to be started without blocking the execution flow, you can run it as an asyncio.Task using the ensure_future function:
import asyncio

response = 0

async def handle(x):
    await asyncio.sleep(0.1)
    return x

async def run():
    global response
    for number in range(1, 21):
        response = await handle(number)
        print(response)
        if response == 10:
            # run wait_for_next "in background" instead of blocking flow:
            asyncio.ensure_future(wait_for_next(response))

async def wait_for_next(x):
    while response == x:
        print('waiting', response, x)
        await asyncio.sleep(0.5)
    print('done')

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run())
Output:
1
2
3
4
5
6
7
8
9
10
waiting 10 10
11
12
13
14
done
15
16
17
18
19
20
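On Python 3.7 and newer, asyncio.create_task is the more idiomatic way to schedule such a background task when a loop is already running. A minimal self-contained sketch (illustrative names, not the answer's code):

import asyncio

async def background(x):
    await asyncio.sleep(0.5)
    print('background done', x)

async def run():
    # schedule background() without awaiting it, then keep going
    asyncio.create_task(background(10))
    for number in range(5):
        print(number)
        await asyncio.sleep(0.2)

asyncio.run(run())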
I'm quite new to this Python asyncio topic. I have a simple question:
I have a task containing two coroutines to be run concurrently. The first coroutine (my_coroutine) would just print something continuously until seconds_to_sleep is reached. The second coroutine (seq_coroutine) would call 4 other coroutines sequentially, one after the other. My goal is to stop the loop at the end of seq_coroutine, whenever it is completely finished. To be exact, I want my_coroutine to be alive until seq_coroutine is finished. Can someone help me with that?
My code is like this:
import asyncio

async def my_coroutine(task, seconds_to_sleep=3):
    print("{task_name} started\n".format(task_name=task))
    for i in range(1, seconds_to_sleep):
        await asyncio.sleep(1)
        print("\n{task_name}: second {seconds}\n".format(task_name=task, seconds=i))

async def coroutine1():
    print("coroutine 1 started")
    await asyncio.sleep(1)
    print("coroutine 1 finished\n")

async def coroutine2():
    print("coroutine 2 started")
    await asyncio.sleep(1)
    print("coroutine 2 finished\n")

async def coroutine3():
    print("coroutine 3 started")
    await asyncio.sleep(1)
    print("coroutine 3 finished\n")

async def coroutine4():
    print("coroutine 4 started")
    await asyncio.sleep(1)
    print("coroutine 4 finished\n")

async def seq_coroutine():
    await coroutine1()
    await coroutine2()
    await coroutine3()
    await coroutine4()

def main():
    main_loop = asyncio.get_event_loop()
    task = [asyncio.ensure_future(my_coroutine("task1", 11)),
            asyncio.ensure_future(seq_coroutine())]
    try:
        print('loop is started\n')
        main_loop.run_until_complete(asyncio.gather(*task))
    finally:
        print('loop is closed')
        main_loop.close()

if __name__ == "__main__":
    main()
This is the output of this program:
loop is started
task1 started
coroutine 1 started
task1: second 1
coroutine 1 finished
coroutine 2 started
task1: second 2
coroutine 2 finished
coroutine 3 started
task1: second 3
coroutine 3 finished
coroutine 4 started
task1: second 4
coroutine 4 finished
task1: second 5
task1: second 6
task1: second 7
task1: second 8
task1: second 9
task1: second 10
loop is closed
I only want to have something like this:
loop is started
task1 started
coroutine 1 started
task1: second 1
coroutine 1 finished
coroutine 2 started
task1: second 2
coroutine 2 finished
coroutine 3 started
task1: second 3
coroutine 3 finished
coroutine 4 started
task1: second 4
coroutine 4 finished
loop is closed
I just found a suitable solution for my problem.
I won't remove my post and I'll post my solution so that it may help others who face the same question.
I used asyncio.wait(task, return_when=asyncio.FIRST_COMPLETED) and it will return the result whenever the first task is finished.
This is the solution:
import asyncio
from asyncio.tasks import FIRST_COMPLETED
from concurrent.futures import CancelledError

async def my_coroutine(task, seconds_to_sleep=3):
    print("{task_name} started\n".format(task_name=task))
    for i in range(1, seconds_to_sleep):
        await asyncio.sleep(1)
        print("\n{task_name}: second {seconds}\n".format(task_name=task, seconds=i))

async def coroutine1():
    print("coroutine 1 started")
    await asyncio.sleep(1)
    print("coroutine 1 finished\n")

async def coroutine2():
    print("coroutine 2 started")
    await asyncio.sleep(1)
    print("coroutine 2 finished\n")

async def coroutine3():
    print("coroutine 3 started")
    await asyncio.sleep(1)
    print("coroutine 3 finished\n")

async def coroutine4():
    print("coroutine 4 started")
    await asyncio.sleep(1)
    print("coroutine 4 finished\n")

async def seq_coroutine(loop):
    await coroutine1()
    await coroutine2()
    await coroutine3()
    await coroutine4()

def main():
    main_loop = asyncio.get_event_loop()
    task = [asyncio.ensure_future(my_coroutine("task1", 11)),
            asyncio.ensure_future(seq_coroutine(main_loop))]
    try:
        print('loop is started\n')
        done, pending = main_loop.run_until_complete(asyncio.wait(task, return_when=asyncio.FIRST_COMPLETED))
        print("Completed tasks: {completed}\nPending tasks: {pending}".format(completed=done, pending=pending))
        #canceling the tasks
        for task in pending:
            print("Cancelling {task}: {task_cancel}".format(task=task, task_cancel=task.cancel()))
    except CancelledError as e:
        print("Error happened while canceling the task: {e}".format(e=e))
    finally:
        print('loop is closed')

if __name__ == "__main__":
    main()
You can use a variable to signal to another coroutine. asyncio.Event is usually used:
import asyncio
import random

async def clock(name, event):
    print("* {} started".format(name))
    i = 0
    while not event.is_set():
        await asyncio.sleep(0.1)
        i += 1
        print("* {}: {}".format(name, i))
    print("* {} done".format(name))
    return i

async def coro(x):
    print("coro() started", x)
    await asyncio.sleep(random.uniform(0.2, 0.5))
    print("coro() finished", x)

async def seq_coroutine(name):
    event = asyncio.Event()
    clock_task = asyncio.ensure_future(clock(name, event))
    # await asyncio.sleep(0) # if you want to give a chance to clock() to start
    await coro(1)
    await coro(2)
    await coro(3)
    await coro(4)
    event.set()
    i = await clock_task
    print("Got:", i)

def main():
    main_loop = asyncio.get_event_loop()
    main_loop.run_until_complete(seq_coroutine("foo"))
    main_loop.close()

if __name__ == "__main__":
    main()
You can also use await event.wait() to block a piece of code until the event is set:
async def xxx(event):
    print("xxx started")
    await event.wait()
    print("xxx ended")
Here's another way to do the same thing, which I think is cleaner in representing the dependence between jobs:
import asyncio

async def poll():
    i = 0
    while True:
        print("First", i)
        i += 1
        await asyncio.sleep(20)
        print("Second", i)
        i += 1
        await asyncio.sleep(20)

async def stop():
    poller = asyncio.ensure_future(poll())
    await asyncio.sleep(5)
    poller.cancel()

main_loop = asyncio.get_event_loop()
main_loop.run_until_complete(stop())
main_loop.close()
Basically, instead of breaking the entire event loop on a single job ending and then cancelling the job there, we just cancel the dependent job directly when the parent job finishes.
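If the cancelled job needs to release resources, it can catch the cancellation before letting it propagate; a minimal sketch of poll() with cleanup (an assumption about your needs, not part of the original answer):

async def poll():
    try:
        i = 0
        while True:
            print("First", i)
            i += 1
            await asyncio.sleep(20)
    except asyncio.CancelledError:
        print("poll() cancelled, cleaning up")
        raise  # re-raise so the task is marked as cancelled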