retrieve result of async function inside of another async function - python

I'm facing this problem with async functions:
my_class.py
class cl:
    async def foo():
        return
    async def bar(foo):
        # some code here
        return result
main.py
from my_class import cl
import asyncio
c = cl()
r = asyncio.run(c.foo)
x = cl.bar(r)
s = asyncio.run(x)
How can I pass the return value of foo() to the function bar()? Right now I get this error:
ValueError: The future belongs to a different loop than the one specified as the loop argument
THANKS!!

A Future in Python means the result will only be available at some point in the future, because the asynchronous function has not yet finished executing. I assume you want bar to wait for foo to finish before using its value. This should do it:
import asyncio

class cl:
    async def foo(self):
        return

    async def bar(self):
        value = await self.foo()  # waits for foo, then gets its value
        # some code here
        return value

c = cl()
x = c.bar()
s = asyncio.run(x)
Note: I changed some minor syntax so this code snippet can execute as-is
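If you really need to keep foo() and bar() as separate steps, note that each asyncio.run() call creates and closes its own event loop, so objects tied to one loop cannot be reused under another; that is what the "different loop" error complains about. A minimal sketch of driving both calls from a single loop (assuming foo and bar are defined with self and bar takes the value as a parameter):
import asyncio
from my_class import cl  # the class from the question

async def main():
    c = cl()
    r = await c.foo()    # get foo's result inside the running loop
    s = await c.bar(r)   # pass it to bar() in the same loop
    return s

s = asyncio.run(main())  # a single event loop drives the whole chain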

Related

Python: Copy context (contextvars.Context) to a separate thread

So far, I've found a lot of examples of how the contextvars module behaves with asyncio, but none of how it behaves with threads (asyncio.get_event_loop().run_in_executor, threading.Thread, and so on).
My question is: how can I pass a context to a separate thread? Below is a code snippet that does not work (Python 3.9.8).
import typing
import asyncio
import functools
import contextvars
import concurrent.futures

class CustomThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor):
    def submit(
        self,
        function: typing.Callable,
        *args,
        **kwargs
    ) -> concurrent.futures.Future:
        context = contextvars.copy_context()
        return super().submit(
            context.run,
            functools.partial(function, *args, **kwargs)
        )

def function():
    print(var.get())

async def main():
    await asyncio.get_event_loop().run_in_executor(None, function)

if __name__ == '__main__':
    var = contextvars.ContextVar('variable')
    var.set('Message.')
    asyncio.get_event_loop().set_default_executor(CustomThreadPoolExecutor)
    asyncio.run(main())
You can use a wrapper function that takes the items from copy_context(), sets them, and then calls your function. functools.partial will help you create the wrapped function to pass to run_in_executor. Here is a working test for my decorators:
def test_run_in_thread_pool_executor():
    def init(func, ctx_vars, *args, **kwargs):
        for var, value in ctx_vars:
            var.set(value)
        return func(*args, **kwargs)

    @async_add_headers('streaming')
    async def wrapper(f):
        loop = asyncio.get_event_loop()
        ctx = contextvars.copy_context()
        executor = futures.ThreadPoolExecutor(max_workers=5)
        return await loop.run_in_executor(executor, functools.partial(init, f, ctx.items()))

    @add_headers('client')
    def foo():
        assert caller_context_var.get() == 'streaming'

    async def main_test():
        await wrapper(foo)

    asyncio.run(main_test())
Here add_headers and async_add_headers change some contextvars in the order the functions are called; caller_context_var.get() would be equal to 'client' without the init function.
Unfortunately, this works only for ThreadPoolExecutor and not for ProcessPoolExecutor, because Context objects are not picklable. See the relevant section of PEP 567; it also contains an example with an executor:
executor = ThreadPoolExecutor()
current_context = contextvars.copy_context()
executor.submit(current_context.run, some_function)
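Putting the pieces together, here is a minimal, self-contained sketch of the same approach (copy the current context, then run the target function inside it with Context.run); the names function and var mirror the question's snippet:
import asyncio
import contextvars
import functools

var = contextvars.ContextVar('variable')

def function():
    # Runs in the worker thread and sees the copied context values.
    print(var.get())

async def main():
    loop = asyncio.get_running_loop()
    ctx = contextvars.copy_context()
    # ctx.run(function) executes function with the copied context active.
    await loop.run_in_executor(None, functools.partial(ctx.run, function))

if __name__ == '__main__':
    var.set('Message.')
    asyncio.run(main())  # prints: Message.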

The execution order inside a coroutine?

Yield from coroutine vs yield from task
In this link, there is an example given by @dano:
import asyncio

@asyncio.coroutine
def test1():
    print("in test1")

@asyncio.coroutine
def dummy():
    yield from asyncio.sleep(1)
    print("dummy ran")

@asyncio.coroutine
def main():
    test1()
    yield from dummy()

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
The output is only
dummy ran
I can't add a comment to that directly, so I have to ask a new question here:
(1) Why isn't test1() executed in order in such a coroutine function?
Can a coroutine only be used in the following two ways?
yield from cor()
asyncio.async(cor())
Where did test1() go?
(2) I also have trouble understanding the difference between the following two ways of using a coroutine function. Are they the same?
yield from asyncio.async(cor())
asyncio.async(cor())
I use the following code to explain:
import random
import datetime
global a, b, c
import asyncio

a = [random.randint(0, 1 << 256) for i in range(500000)]
b = list(a)
c = list(a)

@asyncio.coroutine
def test1():
    global b
    b.sort(reverse=True)
    print("in test1")

@asyncio.coroutine
def test2():
    global c
    c.sort(reverse=True)
    print("in test2")

@asyncio.coroutine
def dummy():
    yield from asyncio.sleep(1)
    print("dummy ran")

@asyncio.coroutine
def test_cor():
    for i in asyncio.sleep(1):
        yield i

@asyncio.coroutine
def main():
    test1()
    print("hhhh_______________________")
    asyncio.async(test1())
    asyncio.async(test2())
    print("hhhh_______________________")
    print("hhh")
    asyncio.async(dummy())
    yield from test_cor()

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
print("hhhhhhh")
However the output is
hhhh_______________________
hhhh_______________________
hhh
in test1
in test2
hhhhhhh
It didn't even execute the dummy() function!
And if I use
@asyncio.coroutine
def test2():
    # global c
    # c.sort(reverse=True)
    print("in test2")
(3) without the sorting, then I think test2 should run faster, so "in test1" should be printed after "in test2". However, the output didn't change. I don't know why.
(4) I also tried removing the sorting from both test1() and test2(); then, amazingly, dummy() runs and the output is the following. Why?
hhhh_______________________
hhhh_______________________
hhh
in test1
in test2
dummy ran
hhhhhhh
I don't know how these things happen... I am really bewildered.
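A minimal sketch (in modern async/await syntax, which behaves the same way here) of the mechanic these questions revolve around: calling a coroutine function without awaiting it only creates a coroutine object and never runs its body, while asyncio.async() (named asyncio.ensure_future() in current Python) schedules it as a Task that runs only once the current coroutine yields control back to the event loop:
import asyncio

async def test1():
    print("in test1")

async def dummy():
    await asyncio.sleep(1)
    print("dummy ran")

async def main():
    test1()                         # creates a coroutine object; its body never runs (Python warns it was never awaited)
    asyncio.ensure_future(test1())  # schedules a Task; it runs only when main() yields control
    await dummy()                   # main() suspends here, so the scheduled Task gets to run

asyncio.run(main())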

Python - decorators based execution

I am trying to execute all the functions that share a common decorator without calling each function explicitly. For example:
@run
@v1
def test1():
    pass  # do something

@run
@v1
@v2
def test2():
    pass  # do something

@run
@v2
def test3():
    pass  # do something

@run
def test4():
    pass  # do something
I want to execute the test functions based on the decorators: @run executes all 4 tests, @v1 executes only the first two. How can I do that? Any guidance will be helpful.
You could probably use the decorator to "register" your functions in a list:
_to_run = []  # list of functions to run

def run(func):
    _to_run.append(func)  # add the decorated function to the list
    return func

@run
def test1():
    print('test1')
    return 1

@run
def test2():
    print('test2')

def test3():
    print('test3')

if __name__ == '__main__':
    for test in _to_run:  # iterate over registered functions
        x = test()
        print('Returned:', x)
On the other hand, you could just as well create this list explicitly, without decorators.
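To handle multiple tags like @v1 and @v2, the same registration idea generalises to one list per tag. A sketch under that assumption; the names tag, _registry, and run_tagged are introduced here purely for illustration:
from collections import defaultdict

_registry = defaultdict(list)  # tag name -> functions registered under it

def tag(name):
    """Return a decorator that registers the decorated function under `name`."""
    def decorator(func):
        _registry[name].append(func)
        return func
    return decorator

run = tag('run')
v1 = tag('v1')
v2 = tag('v2')

@run
@v1
def test1():
    print('test1')

@run
@v2
def test2():
    print('test2')

def run_tagged(name):
    for func in _registry[name]:
        func()

if __name__ == '__main__':
    run_tagged('v1')   # runs only test1
    run_tagged('run')  # runs test1 and test2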

Lazy iterators (generators) with asyncio

I have blocking, non-async code like this:
def f():
    def inner():
        while True:
            yield read()
    return inner()
With this code the caller can choose when to stop the function from generating data. How can I change this to async? This solution doesn't work:
async def f():
    async def inner():
        while True:
            yield await coroutine_read()
    return inner()
...because yield can't be used in async def functions. If I remove the async from the inner() signature, I can't use await anymore.
Update:
Starting with Python 3.6 we have asynchronous generators and are able to use yield directly inside coroutines.
As noted above, you can't use yield inside async functions (prior to Python 3.6). If you want to create a coroutine-generator, you have to do it manually, using the __aiter__ and __anext__ magic methods:
import asyncio

# `coroutine_read()` generates some data:
i = 0

async def coroutine_read():
    global i
    i += 1
    await asyncio.sleep(i)
    return i

# `f()` is an asynchronous iterator.
# Since we never raise `StopAsyncIteration`,
# it works "like" `while True`, until we manually break.
class f:
    def __aiter__(self):  # a plain (non-async) method on Python 3.5.2+
        return self

    async def __anext__(self):
        return await coroutine_read()

# Use f() as an asynchronous iterator with `async for`:
async def main():
    async for i in f():
        print(i)
        if i >= 3:
            break

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
Output:
1
2
3
[Finished in 6.2s]
You may also want to see this other post, where StopAsyncIteration is used.
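Since Python 3.6, the same thing can be written as a native async generator, which is what the update above refers to. A minimal sketch, with coroutine_read as a stand-in for the real asynchronous read:
import asyncio

async def coroutine_read():
    # Stand-in for a real asynchronous read.
    await asyncio.sleep(0.1)
    return 1

async def f():
    # An async generator: `yield` is allowed directly inside `async def`.
    while True:
        yield await coroutine_read()

async def main():
    count = 0
    async for value in f():
        print(value)
        count += 1
        if count >= 3:
            break

asyncio.run(main())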

Method chaining with asyncio coroutines

I want to implement method chaining, but not for ordinary functions - for asyncio coroutines.
import asyncio

class Browser:
    @asyncio.coroutine
    def go(self):
        # some actions
        return self

    @asyncio.coroutine
    def click(self):
        # some actions
        return self
"Intuitive" way to call chain wouldn't work, because single method returns coroutine (generator), not self:
@asyncio.coroutine
def main():
    br = yield from Browser().go().click()  # this will fail

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
The correct way to call the chain is:
br = yield from (yield from Browser().go()).click()
But it looks ugly and becomes unreadable as the chain grows.
Is there any way to do this better? Any ideas are welcome.
I created a solution that does a job close to what's needed. The idea is to use a wrapper around Browser() which uses __getattr__ and __call__ to collect each action (attribute access or call) and return self to catch the next one. After all actions are collected, we "catch" the yield from on the wrapper using __iter__ and process all the collected actions.
import asyncio

def chain(obj):
    """
    Enables coroutine chaining for obj.
    Usage: text = yield from chain(obj).go().click().attr
    Note: returns not a coroutine, but an object that can be used with yield from.
    """
    class Chain:
        _obj = obj
        _queue = []

        # Collect each getattr or call into the queue:
        def __getattr__(self, name):
            Chain._queue.append({'type': 'getattr', 'name': name})
            return self

        def __call__(self, *args, **kwargs):
            Chain._queue.append({'type': 'call', 'params': [args, kwargs]})
            return self

        # On iteration, process the queue:
        def __iter__(self):
            res = Chain._obj
            while Chain._queue:
                action = Chain._queue.pop(0)
                if action['type'] == 'getattr':
                    res = getattr(res, action['name'])
                elif action['type'] == 'call':
                    args, kwargs = action['params']
                    res = res(*args, **kwargs)
                if asyncio.iscoroutine(res):
                    res = yield from res
            return res

    return Chain()
Usage:
class Browser:
    @asyncio.coroutine
    def go(self):
        print('go')
        return self

    @asyncio.coroutine
    def click(self):
        print('click')
        return self

    def text(self):
        print('text')
        return 5

@asyncio.coroutine
def main():
    text = yield from chain(Browser()).go().click().go().text()
    print(text)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
Output:
go
click
go
text
5
Note that chain() doesn't return a real coroutine, but an object that can be used like a coroutine with yield from. We should wrap the result of chain() to get a normal coroutine, which can be passed to any asyncio function that requires a coroutine:
@asyncio.coroutine
def chain_to_coro(chain):
    return (yield from chain)

@asyncio.coroutine
def main():
    ch = chain(Browser()).go().click().go().text()
    coro = chain_to_coro(ch)
    results = yield from asyncio.gather(*[coro], return_exceptions=True)
    print(results)
Output:
go
click
go
text
[5]
It's still not particularly pretty, but you could implement a chain function that scales a little bit better:
import asyncio

@asyncio.coroutine
def chain(obj, *funcs):
    for f, *args in funcs:
        meth = getattr(obj, f)  # Look up the method on the object
        obj = yield from meth(*args)
    return obj

class Browser:
    @asyncio.coroutine
    def go(self, x, y):
        return self

    @asyncio.coroutine
    def click(self):
        return self

@asyncio.coroutine
def main():
    # br = yield from (yield from Browser().go(3, 4)).click()
    br = yield from chain(Browser(),
                          ("go", 3, 4),
                          ("click",))

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
The idea is to pass tuples in a (method_name, arg1, arg2, argX) format to the chain function, rather than actually chaining the method calls themselves. You can just pass the method names directly if you don't need to support passing arguments to any of the methods in the chain.
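On modern Python the same idea can be expressed with async/await instead of the legacy @asyncio.coroutine decorator (removed in recent Python versions); the tuple format stays the same. A sketch:
import asyncio

async def chain(obj, *calls):
    # Each element of `calls` is a tuple: (method_name, arg1, arg2, ...).
    for name, *args in calls:
        method = getattr(obj, name)  # look up the method on the current object
        obj = await method(*args)    # await it and keep chaining on the result
    return obj

class Browser:
    async def go(self, x, y):
        print('go', x, y)
        return self

    async def click(self):
        print('click')
        return self

async def main():
    br = await chain(Browser(), ("go", 3, 4), ("click",))
    print(br)

asyncio.run(main())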
