I have a function that I want to call every minute, for instance, and my code looks like this:
import asyncio

async def fun1():
    print('fun1')
    await asyncio.sleep(30)

async def fun2():
    print('fun2')
    await asyncio.sleep(10)

async def fun3():
    print('fun3')

async def main():
    global loop
    loop.create_task(fun1())
    loop.create_task(fun2())
    while True:
        await fun3()
        await asyncio.sleep(1)

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
but it does not print anything. I would like my functions to be called every 10 seconds, for instance. It looks like fun2 is waiting for fun1 to finish instead of the two triggering every 10 and 30 seconds respectively...
Any idea why, please?
Currently, fun1 and fun2 each print only once, since neither contains a loop. Add a loop to make them print every 30 and 10 seconds respectively:
import asyncio

async def fun1():
    while True:
        print('fun1')
        await asyncio.sleep(30)

async def fun2():
    while True:
        print('fun2')
        await asyncio.sleep(10)

async def fun3():
    print('fun3')

async def main():
    global loop
    loop.create_task(fun1())
    loop.create_task(fun2())
    while True:
        await fun3()
        await asyncio.sleep(1)

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
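As a side note, not part of the original answer: newer Python versions deprecate calling asyncio.get_event_loop() when no loop is running. A minimal equivalent sketch using asyncio.run() and asyncio.create_task(), assuming the same fun1/fun2/fun3 definitions as above:

async def main():
    # create_task() schedules the coroutines on the loop started by asyncio.run()
    asyncio.create_task(fun1())
    asyncio.create_task(fun2())
    while True:
        await fun3()
        await asyncio.sleep(1)

if __name__ == "__main__":
    asyncio.run(main())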
I have two coroutines, one of which uses aioschedule. This is my code:
import aioschedule as schedule
import asyncio

async def foo():
    while True:
        print('foooooo')
        await asyncio.sleep(5)

async def bar():
    while True:
        print('bar')
        await asyncio.sleep(1)

schedule.every(2).seconds.do(bar)

loop = asyncio.get_event_loop()
loop.create_task(schedule.run_pending())
loop.create_task(foo())
try:
    loop.run_forever()
except KeyboardInterrupt:
    loop.stop()
What I want is for it to print bar every n seconds while the other task is running, but the output is only foooooo. Am I missing something?
try this:
import aioschedule as schedule
import asyncio

async def foo():
    while True:
        print('foooooo')
        await asyncio.sleep(5)

async def bar():
    while True:
        print('bar')
        await asyncio.sleep(1)

# schedule.every(2).seconds.do(bar)  <--- removed line

loop = asyncio.get_event_loop()
loop.create_task(schedule.run_pending())
loop.create_task(foo())
loop.create_task(bar())  # <--- added line
try:
    loop.run_forever()
except KeyboardInterrupt:
    loop.stop()
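If you want to keep aioschedule instead, the catch is that schedule.run_pending() runs only the jobs that are due at that moment and then returns, so it has to be polled in a loop for the schedule to ever fire. A minimal sketch, assuming aioschedule's polling API, and noting that a scheduled job should run once per invocation rather than loop forever:

import aioschedule as schedule
import asyncio

async def bar():
    print('bar')  # one run per scheduled invocation; no while True here

async def scheduler():
    # run_pending() executes the currently due jobs once, so poll it repeatedly
    while True:
        await schedule.run_pending()
        await asyncio.sleep(0.1)

schedule.every(2).seconds.do(bar)
loop = asyncio.get_event_loop()
loop.create_task(scheduler())
loop.run_forever()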
I have the following function to call s(c) every 24 hours.
def schedule_next_sync():
    t = datetime.datetime.now()
    t = t.replace(hour=0) + datetime.timedelta(hours=24)

    def wrapper():
        s(c)
        schedule_next_sync()

    tornado.ioloop.IOLoop.current().add_timeout(datetime.datetime.timestamp(t), wrapper)
However, s() will be changed to an async function.
async def s(c):
How should schedule_next_sync be updated for an async function? Should it run s() synchronously, or should schedule_next_sync() itself become an async function?
Once s is async, you could use asyncio.sleep() instead of the lower-level add_timeout():
async def schedule_next_sync():
    async def call_forever():
        while True:
            await asyncio.sleep(1)
            await s(c)

    # IOLoop has no create_task; spawn_callback runs the coroutine on the loop
    tornado.ioloop.IOLoop.current().spawn_callback(call_forever)
If you really want to do it with timeouts, something like this should work:
def schedule_next_sync():
    t = datetime.datetime.now() + datetime.timedelta(seconds=1)

    def wrapper():
        loop = asyncio.get_running_loop()
        task = loop.create_task(s(c))
        task.add_done_callback(lambda _: schedule_next_sync())

    loop = tornado.ioloop.IOLoop.current()
    loop.add_timeout(datetime.datetime.timestamp(t), wrapper)
I am trying to use an async generator as a wrapper for a shared connection
async def mygen():
    await init()
    connection = await open_connection()
    while True:
        data = yield
        await connection.send(data)

shared_gen = None

async def send_data(data):
    global shared_gen
    if not shared_gen:
        shared_gen = mygen()
        await shared_gen.asend(None)
    await shared_gen.asend(data)
Is the above code safe from race conditions? Is it possible for two asends to execute concurrently, or will the second one block implicitly until the generator is ready at the yield step? Assume connection.send is not concurrency-safe.
Update:
Wrote a wrapper to help use safely.
import asyncio
import contextlib

class Locked:
    def __init__(self, resource):
        self._resource = resource
        self._lock = asyncio.Lock()

    @contextlib.asynccontextmanager
    async def lock(self):
        async with self._lock:
            yield self._resource

async def send_data(locked_gen, data):
    async with locked_gen.lock() as gen:
        await gen.asend(data)

async def main():
    gen = mygen()
    await gen.asend(None)
    locked_gen = Locked(gen)
    ...
Is it possible for two asends to execute concurrently, or will the second one block implicitly until the generator is ready at the yield step?
It is not possible for asend to be called concurrently, but trying to do so doesn't result in blocking. Instead, the second one will raise a RuntimeError, as demonstrated by the following example:
import asyncio

async def gen():
    while True:
        yield
        await asyncio.sleep(1)

async def main():
    ait = gen()
    await ait.asend(None)  # start the generator

    async def send():
        print('sending')
        await ait.asend(42)

    await asyncio.gather(send(), send())

asyncio.run(main())
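Running this, the second send() should fail with a RuntimeError (the message is along the lines of "asynchronous generator is already running") while the first asend is still suspended inside the generator.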
To make the send block until the previous one finishes, you need an explicit lock around the await of asend:
async def main():
    ait = gen()
    await ait.asend(None)
    lock = asyncio.Lock()

    async def send():
        async with lock:
            print('sending')
            await ait.asend(42)

    await asyncio.gather(send(), send())
The following example shows that we can run phase1 and then phase2. But what we want from coroutines is to do two things concurrently instead of one after another. I know that asyncio.get_event_loop().create_task can achieve what I want, but then why use await? I see no difference between using await and just calling the plain function.
import asyncio

async def outer():
    print('in outer')
    print('waiting for result1')
    result1 = await phase1()
    print('waiting for result2')
    result2 = await phase2(result1)
    return (result1, result2)

async def phase1():
    print('in phase1')
    return 'result1'

async def phase2(arg):
    print('in phase2')
    return 'result2 derived from {}'.format(arg)

event_loop = asyncio.get_event_loop()
try:
    return_value = event_loop.run_until_complete(outer())
    print('return value: {!r}'.format(return_value))
finally:
    event_loop.close()
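For contrast, here is a minimal sketch (not from the original post) of the create_task approach the question mentions, assuming the two phases are independent: tasks start running as soon as they are created, so await then means "wait for the already-running task" rather than "call the function".

import asyncio

async def phase1():
    print('in phase1')
    await asyncio.sleep(1)
    return 'result1'

async def phase2():
    print('in phase2')
    await asyncio.sleep(1)
    return 'result2'

async def outer():
    # Both tasks are scheduled immediately and run concurrently;
    # gather() just waits for both to finish.
    task1 = asyncio.ensure_future(phase1())
    task2 = asyncio.ensure_future(phase2())
    return await asyncio.gather(task1, task2)

event_loop = asyncio.get_event_loop()
try:
    print(event_loop.run_until_complete(outer()))
finally:
    event_loop.close()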
I'm quite new to this Python asyncio topic. I have a simple question:
I have a task containing two coroutines to be run concurrently. The first coroutine (my_coroutine) just prints something continuously until seconds_to_sleep is reached. The second coroutine (seq_coroutine) calls 4 other coroutines sequentially, one after the other. My goal is to stop the loop at the end of seq_coroutine, whenever it is completely finished. To be exact, I want my_coroutine to stay alive until seq_coroutine is finished. Can someone help me with that?
My code is like this:
import asyncio

async def my_coroutine(task, seconds_to_sleep=3):
    print("{task_name} started\n".format(task_name=task))
    for i in range(1, seconds_to_sleep):
        await asyncio.sleep(1)
        print("\n{task_name}: second {seconds}\n".format(task_name=task, seconds=i))

async def coroutine1():
    print("coroutine 1 started")
    await asyncio.sleep(1)
    print("coroutine 1 finished\n")

async def coroutine2():
    print("coroutine 2 started")
    await asyncio.sleep(1)
    print("coroutine 2 finished\n")

async def coroutine3():
    print("coroutine 3 started")
    await asyncio.sleep(1)
    print("coroutine 3 finished\n")

async def coroutine4():
    print("coroutine 4 started")
    await asyncio.sleep(1)
    print("coroutine 4 finished\n")

async def seq_coroutine():
    await coroutine1()
    await coroutine2()
    await coroutine3()
    await coroutine4()

def main():
    main_loop = asyncio.get_event_loop()
    task = [asyncio.ensure_future(my_coroutine("task1", 11)),
            asyncio.ensure_future(seq_coroutine())]
    try:
        print('loop is started\n')
        main_loop.run_until_complete(asyncio.gather(*task))
    finally:
        print('loop is closed')
        main_loop.close()

if __name__ == "__main__":
    main()
This is the output of this program:
loop is started
task1 started
coroutine 1 started
task1: second 1
coroutine 1 finished
coroutine 2 started
task1: second 2
coroutine 2 finished
coroutine 3 started
task1: second 3
coroutine 3 finished
coroutine 4 started
task1: second 4
coroutine 4 finished
task1: second 5
task1: second 6
task1: second 7
task1: second 8
task1: second 9
task1: second 10
loop is closed
I only want to have something like this:
loop is started
task1 started
coroutine 1 started
task1: second 1
coroutine 1 finished
coroutine 2 started
task1: second 2
coroutine 2 finished
coroutine 3 started
task1: second 3
coroutine 3 finished
coroutine 4 started
task1: second 4
coroutine 4 finished
loop is closed
I just found a suitable solution for my problem.
I won't remove my post; I'll post my solution so that it may help others who face the same question.
I used asyncio.wait(task, return_when=asyncio.FIRST_COMPLETED), which returns as soon as the first task finishes.
This is the solution:
import asyncio
from asyncio.tasks import FIRST_COMPLETED
from concurrent.futures import CancelledError

async def my_coroutine(task, seconds_to_sleep=3):
    print("{task_name} started\n".format(task_name=task))
    for i in range(1, seconds_to_sleep):
        await asyncio.sleep(1)
        print("\n{task_name}: second {seconds}\n".format(task_name=task, seconds=i))

async def coroutine1():
    print("coroutine 1 started")
    await asyncio.sleep(1)
    print("coroutine 1 finished\n")

async def coroutine2():
    print("coroutine 2 started")
    await asyncio.sleep(1)
    print("coroutine 2 finished\n")

async def coroutine3():
    print("coroutine 3 started")
    await asyncio.sleep(1)
    print("coroutine 3 finished\n")

async def coroutine4():
    print("coroutine 4 started")
    await asyncio.sleep(1)
    print("coroutine 4 finished\n")

async def seq_coroutine(loop):
    await coroutine1()
    await coroutine2()
    await coroutine3()
    await coroutine4()

def main():
    main_loop = asyncio.get_event_loop()
    task = [asyncio.ensure_future(my_coroutine("task1", 11)),
            asyncio.ensure_future(seq_coroutine(main_loop))]
    try:
        print('loop is started\n')
        done, pending = main_loop.run_until_complete(
            asyncio.wait(task, return_when=asyncio.FIRST_COMPLETED))
        print("Completed tasks: {completed}\nPending tasks: {pending}".format(
            completed=done, pending=pending))
        # canceling the tasks
        for task in pending:
            print("Cancelling {task}: {task_cancel}".format(task=task, task_cancel=task.cancel()))
    except CancelledError as e:
        print("Error happened while canceling the task: {e}".format(e=e))
    finally:
        print('loop is closed')

if __name__ == "__main__":
    main()
You can use a variable to signal to another coroutine. asyncio.Event is usually used:
import asyncio
import random

async def clock(name, event):
    print("* {} started".format(name))
    i = 0
    while not event.is_set():
        await asyncio.sleep(0.1)
        i += 1
        print("* {}: {}".format(name, i))
    print("* {} done".format(name))
    return i

async def coro(x):
    print("coro() started", x)
    await asyncio.sleep(random.uniform(0.2, 0.5))
    print("coro() finished", x)

async def seq_coroutine(name):
    event = asyncio.Event()
    clock_task = asyncio.ensure_future(clock(name, event))
    # await asyncio.sleep(0)  # if you want to give a chance to clock() to start
    await coro(1)
    await coro(2)
    await coro(3)
    await coro(4)
    event.set()
    i = await clock_task
    print("Got:", i)

def main():
    main_loop = asyncio.get_event_loop()
    main_loop.run_until_complete(seq_coroutine("foo"))
    main_loop.close()

if __name__ == "__main__":
    main()
You can also use await event.wait() to block a piece of code until the event is set:
async def xxx(event):
    print("xxx started")
    await event.wait()
    print("xxx ended")
Here's another way to do the same thing, which I think is cleaner in representing the dependence between jobs:
import asyncio

async def poll():
    i = 0
    while True:
        print("First", i)
        i += 1
        await asyncio.sleep(20)
        print("Second", i)
        i += 1
        await asyncio.sleep(20)

async def stop():
    poller = asyncio.ensure_future(poll())
    await asyncio.sleep(5)
    poller.cancel()

main_loop = asyncio.get_event_loop()
main_loop.run_until_complete(stop())
main_loop.close()
Basically, instead of breaking the entire event loop when a single job ends and then cancelling that job from outside, we just cancel the dependent job directly when the parent job finishes.
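One detail worth noting, not covered above: poller.cancel() delivers an asyncio.CancelledError inside poll() at its current await point, so the job can catch it to clean up before exiting. A minimal sketch:

async def poll():
    i = 0
    try:
        while True:
            print("First", i)
            i += 1
            await asyncio.sleep(20)
    except asyncio.CancelledError:
        print("poll() cancelled, cleaning up")
        raise  # re-raise so the task is actually marked as cancelled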