Interprocess communication using asyncio? - python

I have a set of CPU-intensive processes that once in a while depend on each other to proceed. So something like
def run():
    while True:
        do stuff
        wake up some other process
        wait for some other process to wake me up
        do stuff
Within each process I'd like to use async, so that I can always have an instance of run running while others are waiting to be woken up. Looking at the asyncio docs, the only IPC option in the "High-level APIs" section that I see uses sockets. I'd much rather use a pipe, which it looks like I can perhaps do with the low-level API, but that documentation is chock full of warnings that if you're just writing an application then it's a mistake to be using it. Can someone weigh in on the idiomatic thing to do here? (And also, speed is an important factor, so if there's some less-idiomatic-but-more-performant thing I'd like to know about that option as well.)
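For concreteness, the socket-based high-level option I'm referring to would look roughly like this (a rough sketch using asyncio's stream API over a Unix domain socket, Unix-only; the path and the wake-up protocol are just illustrative):

import asyncio

SOCK_PATH = "/tmp/wakeup.sock"  # illustrative path

async def serve():
    # one process listens and is "woken up" whenever a peer connects and sends a token
    async def on_wake(reader, writer):
        await reader.readline()      # wait for the wake-up token
        # ... do stuff ...
        writer.write(b"done\n")      # wake the peer back up
        await writer.drain()
        writer.close()
    server = await asyncio.start_unix_server(on_wake, path=SOCK_PATH)
    async with server:
        await server.serve_forever()

async def wake_peer():
    # the other process connects, sends a token, and waits to be woken back up
    reader, writer = await asyncio.open_unix_connection(SOCK_PATH)
    writer.write(b"wake\n")
    await writer.drain()
    await reader.readline()
    writer.close()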

I would like to mention the aioprocessing library, as I successfully used it in one of my projects. It provides an async interface to the multiprocessing primitives, including the IPC ones such as Process, Pipe, Lock and Queue. It does this with a thread pool:
...
@staticmethod
def coro_maker(func):
    def coro_func(self, *args, loop=None, **kwargs):
        return self.run_in_executor(
            getattr(self, func), *args, loop=loop, **kwargs
        )
    return coro_func
But to be honest, a lot depends on the problem being solved and on what tasks are being performed concurrently, since intensive IPC inside the async approach is less efficient than the synchronous approach due to the overhead of the event loop, the thread pool, and so on. Sometimes it is better to make all IPC operations synchronous and put them in a separate thread. Again, it all depends on the problem and the environment. Below is a benchmark that is far from comprehensive, but it can give an approximate picture of the workload it models (an intensive exchange of buffers).
note: I wrote about the difference between a Queue and SimpleQueue here
Sync SimpleQueue: 1.4309470653533936
AioSimpleQueue: 12.32670259475708
AioQueue: 14.342737436294556
AioPipe: 11.747064590454102
subprocess pipe stream: 7.344956159591675
socket stream: 4.360717058181763
# main.py
import sys
import time
import asyncio
import aioprocessing as ap
import multiprocessing as mp

import proc

count = 5*10**4
data = b'*'*100

async def sync_simple_queue_func():
    out_ = mp.SimpleQueue()
    in_ = mp.SimpleQueue()
    p = ap.AioProcess(target=proc.start_sync_queue_func, args=(out_, in_))
    p.start()

    begin_ts = time.time()
    for i in range(count):
        out_.put(data)
        res = in_.get()
    print('Sync SimpleQueue: ', time.time() - begin_ts)
    out_.put(None)

async def simple_queue_func():
    out_ = ap.AioSimpleQueue()
    in_ = ap.AioSimpleQueue()
    p = ap.AioProcess(target=proc.start_queue_func, args=(out_, in_))
    p.start()

    begin_ts = time.time()
    for i in range(count):
        await out_.coro_put(data)
        res = await in_.coro_get()
    print('AioSimpleQueue: ', time.time() - begin_ts)
    await out_.coro_put(None)

async def queue_func():
    out_ = ap.AioQueue()
    in_ = ap.AioQueue()
    p = ap.AioProcess(target=proc.start_queue_func, args=(out_, in_))
    p.start()

    begin_ts = time.time()
    for i in range(count):
        await out_.coro_put(data)
        res = await in_.coro_get()
    print('AioQueue: ', time.time() - begin_ts)
    await out_.coro_put(None)

async def pipe_func():
    main_, child_ = ap.AioPipe()
    p = ap.AioProcess(target=proc.start_pipe_func, args=(child_,))
    p.start()

    begin_ts = time.time()
    for i in range(count):
        await main_.coro_send(data)
        res = await main_.coro_recv()
    print('AioPipe: ', time.time() - begin_ts)
    await main_.coro_send(None)
    await p.coro_join()

server = None

async def handle_child(reader, writer):
    begin_ts = time.time()
    for i in range(count):
        writer.write(data)
        res = await reader.read(len(data))
    print('socket stream: ', time.time() - begin_ts)
    writer.close()

async def socket_func():
    global server
    addr = ('127.0.0.1', 8888)
    server = await asyncio.start_server(handle_child, *addr)
    p = ap.AioProcess(target=proc.start_socket_func, args=(addr,))
    p.start()
    async with server:
        await server.serve_forever()

async def subprocess_func():
    prog = await asyncio.create_subprocess_shell(
        'python proc.py',
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE)

    begin_ts = time.time()
    for i in range(count):
        prog.stdin.write(data)
        res = await prog.stdout.read(len(data))
    print('subprocess pipe stream: ', time.time() - begin_ts)
    prog.stdin.close()

async def main():
    await sync_simple_queue_func()
    await simple_queue_func()
    await queue_func()
    await pipe_func()
    await subprocess_func()
    await socket_func()

asyncio.run(main())
# proc.py
import asyncio
import sys
import aioprocessing as ap

async def sync_queue_func(in_, out_):
    while True:
        n = in_.get()
        if n is None:
            return
        out_.put(n)

async def queue_func(in_, out_):
    while True:
        n = await in_.coro_get()
        if n is None:
            return
        await out_.coro_put(n)

async def pipe_func(child):
    while True:
        n = await child.coro_recv()
        if n is None:
            return
        await child.coro_send(n)

data = b'*' * 100

async def socket_func(addr):
    reader, writer = await asyncio.open_connection(*addr)
    while True:
        n = await reader.read(len(data))
        if not n:
            break
        writer.write(n)

def start_sync_queue_func(in_, out_):
    asyncio.run(sync_queue_func(in_, out_))

def start_queue_func(in_, out_):
    asyncio.run(queue_func(in_, out_))

def start_pipe_func(child):
    asyncio.run(pipe_func(child))

def start_socket_func(addr):
    asyncio.run(socket_func(addr))

async def connect_stdin_stdout():
    loop = asyncio.get_event_loop()
    reader = asyncio.StreamReader()
    protocol = asyncio.StreamReaderProtocol(reader)
    dummy = asyncio.Protocol()
    await loop.connect_read_pipe(lambda: protocol, sys.stdin)  # sets read_transport
    w_transport, _ = await loop.connect_write_pipe(lambda: dummy, sys.stdout)
    writer = asyncio.StreamWriter(w_transport, protocol, reader, loop)
    return reader, writer

async def main():
    reader, writer = await connect_stdin_stdout()
    while True:
        res = await reader.read(len(data))
        if not res:
            break
        writer.write(res)

if __name__ == "__main__":
    asyncio.run(main())
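To illustrate the "make the IPC synchronous and put it in a separate thread" option mentioned above, here is a minimal sketch of one way to bridge a blocking multiprocessing queue into asyncio (the names and the run_coroutine_threadsafe bridging are my own illustration, not part of aioprocessing):

import asyncio
import threading
import multiprocessing as mp

def ipc_reader_thread(mp_queue, loop, async_queue):
    # blocking multiprocessing get() calls happen here, off the event loop
    while True:
        item = mp_queue.get()
        if item is None:
            break
        # hand the item to the asyncio side thread-safely
        asyncio.run_coroutine_threadsafe(async_queue.put(item), loop)

async def consume(async_queue):
    while True:
        item = await async_queue.get()
        # ... do something with item ...
        print('got', len(item), 'bytes')

async def amain(mp_queue):
    loop = asyncio.get_running_loop()
    async_queue = asyncio.Queue()
    threading.Thread(target=ipc_reader_thread,
                     args=(mp_queue, loop, async_queue), daemon=True).start()
    await consume(async_queue)

The point is simply that the blocking get() never touches the event loop; only the already-received items are handed over.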

Related

Binance Multithread Sockets - functions not called concurrently

I have code that receives current-price data from Binance:
import asyncio
from binance import AsyncClient, BinanceSocketManager
import time
from datetime import datetime

def analyze(res):
    kline = res['k']
    if kline['x']:  # candle is completed
        print('{} start_sleeping {} {}'.format(
            datetime.now(),
            kline['s'],
            datetime.fromtimestamp(kline['t'] / 1000),
        ))
        time.sleep(5)
        print('{} finish_sleeping {}'.format(datetime.now(), kline['s']))

async def open_binance_stream(symbol):
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.kline_socket(symbol)
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            analyze(res)
    await client.close_connection()

async def main():
    t1 = asyncio.create_task(open_binance_stream('ETHBTC'))
    t2 = asyncio.create_task(open_binance_stream('XRPBTC'))
    await asyncio.gather(*[t1, t2])

if __name__ == "__main__":
    asyncio.run(main())
How can I make the analyze function be called concurrently? Binance sends data on both streams (ETHBTC and XRPBTC) at the same time, but analyze is only called once the previous analyze (with its sleep) has completed. I would like analyze to be called immediately and independently for each stream.
Have you tried putting analyze in a thread? I think it will achieve what you want.
import asyncio
from binance import AsyncClient, BinanceSocketManager
import time
from datetime import datetime
from threading import Thread

def analyze(res):
    kline = res['k']
    if kline['x']:  # candle is completed
        print('{} start_sleeping {} {}'.format(
            datetime.now(),
            kline['s'],
            datetime.fromtimestamp(kline['t'] / 1000),
        ))
        time.sleep(5)
        print('{} finish_sleeping {}'.format(datetime.now(), kline['s']))

async def open_binance_stream(symbol):
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.kline_socket(symbol)
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            Thread(target=analyze, args=(res,)).start()
    await client.close_connection()

async def main():
    t1 = asyncio.create_task(open_binance_stream('ETHBTC'))
    t2 = asyncio.create_task(open_binance_stream('XRPBTC'))
    await asyncio.gather(*[t1, t2])

if __name__ == "__main__":
    asyncio.run(main())
This should work as expected.
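If you prefer to keep the blocking call under the event loop's control rather than spawning unmanaged threads, a minimal sketch of the same idea using asyncio's built-in thread-pool helper (asyncio.to_thread, Python 3.9+; this is an alternative I'm adding, not part of the answer above, and the rest of the code stays unchanged):

async def open_binance_stream(symbol):
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    ts = bm.kline_socket(symbol)
    async with ts as tscm:
        while True:
            res = await tscm.recv()
            # run the blocking analyze() in a worker thread; wrapping it in a
            # task lets the receive loop continue immediately
            asyncio.create_task(asyncio.to_thread(analyze, res))
    await client.close_connection()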

Python: Correct way of using proxybroker lib with asyncio

I want to use the ProxyBroker lib in my python program to generate a list/queue of 10 working proxies.
Unfortunately I was not able to find anything similar in the example page of the lib.
This is what I have right now, but it feels like I'm using asyncio the wrong way to accomplish my task, especially the gather call I'm using in combination with collect(proxies).
def get_proxies(self, limit=10):
    async def collect(proxies):
        p = []
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            p.append(proxy)
        return p

    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=10),
        collect(proxies))
    loop = asyncio.get_event_loop()
    proxy_list = loop.run_until_complete(tasks)
    loop.close()
    return proxy_list
What would be the preferred/correct way of generating the proxy list?
You can do it like this:
"""Find and show 10 working HTTP(S) proxies."""
import asyncio
from proxybroker import Broker
async def show(proxies):
while True:
proxy = await proxies.get()
if proxy is None: break
print('Found proxy: %s' % proxy)
proxies = asyncio.Queue()
broker = Broker(proxies)
tasks = asyncio.gather(
broker.find(types=['HTTP', 'HTTPS'], limit=10),
show(proxies))
loop = asyncio.get_event_loop()
loop.run_until_complete(tasks)
or if you want to generate a file:
import asyncio
from proxybroker import Broker

async def save(proxies, filename):
    """Save proxies to a file."""
    with open(filename, 'w') as f:
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            proto = 'https' if 'HTTPS' in proxy.types else 'http'
            row = '%s://%s:%d\n' % (proto, proxy.host, proxy.port)
            f.write(row)

def main():
    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(broker.find(types=['HTTP', 'HTTPS'], limit=10),
                           save(proxies, filename='proxies.txt'))
    loop = asyncio.get_event_loop()
    loop.run_until_complete(tasks)

if __name__ == '__main__':
    main()
That's all!
Good luck!
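If, as in the question, you want the proxies returned as a Python list rather than printed or written to a file, a minimal sketch along the same lines (the collect coroutine mirrors the one from the question; since gather returns results in order, the second element is the collected list):

import asyncio
from proxybroker import Broker

def get_proxies(limit=10):
    async def collect(proxies):
        found = []
        while True:
            proxy = await proxies.get()
            if proxy is None:  # Broker signals completion with None
                break
            found.append(proxy)
        return found

    proxies = asyncio.Queue()
    broker = Broker(proxies)
    loop = asyncio.get_event_loop()
    _, proxy_list = loop.run_until_complete(asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=limit),
        collect(proxies)))
    return proxy_list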

Reading from pipe with pexpect on Linux

I am posting a new question related to an older one, about a problem with getting from the queue. This is the code (thanks to Martijn Pieters):
import asyncio
import sys
import json
import os
import websockets

async def socket_consumer(socket, outgoing):
    # take messages from the web socket and push them into the queue
    async for message in socket:
        await outgoing.put(message)
        file = open(r"/home/host/Desktop/FromSocket.txt", "a")
        file.write("From socket: " + ascii(message) + "\n")
        file.close()

async def socket_producer(socket, incoming):
    # take messages from the queue and send them to the socket
    while True:
        message = await incoming.get()
        file = open(r"/home/host/Desktop/ToSocket.txt", "a")
        file.write("To socket: " + ascii(message) + "\n")
        file.close()
        await socket.send(message)

async def connect_socket(incoming, outgoing, loop=None):
    header = {"Authorization": r"Basic XXX="}
    uri = 'XXXXXX'
    async with websockets.connect(uri, extra_headers=header) as web_socket:
        # create tasks for the consumer and producer. The asyncio loop will
        # manage these independently
        consumer_task = asyncio.ensure_future(
            socket_consumer(web_socket, outgoing), loop=loop)
        producer_task = asyncio.ensure_future(
            socket_producer(web_socket, incoming), loop=loop)
        # start both tasks, but have the loop return to us when one of them
        # has ended. We can then cancel the remainder
        done, pending = await asyncio.wait(
            [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
        for task in pending:
            task.cancel()

# pipe support
async def stdio(loop=None):
    if loop is None:
        loop = asyncio.get_event_loop()
    reader = asyncio.StreamReader()
    await loop.connect_read_pipe(
        lambda: asyncio.StreamReaderProtocol(reader), sys.stdin)
    writer_transport, writer_protocol = await loop.connect_write_pipe(
        asyncio.streams.FlowControlMixin, os.fdopen(sys.stdout.fileno(), 'wb'))
    writer = asyncio.streams.StreamWriter(
        writer_transport, writer_protocol, None, loop)
    return reader, writer

async def pipe_consumer(pipe_reader, outgoing):
    # take messages from the pipe and push them into the queue
    while True:
        message = await pipe_reader.readline()
        if not message:
            break
        file = open(r"/home/host/Desktop/FromPipe.txt", "a")
        file.write("From pipe: " + ascii(message.decode('utf8')) + "\n")
        file.close()
        await outgoing.put(message.decode('utf8'))

async def pipe_producer(pipe_writer, incoming):
    # take messages from the queue and send them to the pipe
    while True:
        json_message = await incoming.get()
        file = open(r"/home/host/Desktop/ToPipe.txt", "a")
        file.write("Send to pipe message: " + ascii(json_message) + "\n")
        file.close()
        try:
            message = json.loads(json_message)
            message_type = int(message.get('header', {}).get('messageID', -1))
        except (ValueError, TypeError, AttributeError):
            # failed to decode the message, or the message was not
            # a dictionary, or the messageID was not convertible to an integer
            message_type = None
            file = open(r"/home/host/Desktop/Error.txt", "a")
            file.write(" Error \n")
            file.close()
        # 1 is DENM message, 2 is CAM message
        file = open(r"/home/host/Desktop/ToPipe.txt", "a")
        file.write("Send to pipe type: " + str(message_type) + "\n")
        if message_type in {1, 2}:
            file.write("Send to pipe: " + json_message + "\n")
            pipe_writer.write(json_message.encode('utf8') + b'\n')
            await pipe_writer.drain()
        file.close()
async def connect_pipe(incoming, outgoing, loop=None):
    reader, writer = await stdio()
    # create tasks for the consumer and producer. The asyncio loop will
    # manage these independently
    consumer_task = asyncio.ensure_future(
        pipe_consumer(reader, outgoing), loop=loop)
    producer_task = asyncio.ensure_future(
        pipe_producer(writer, incoming), loop=loop)
    # start both tasks, but have the loop return to us when one of them
    # has ended. We can then cancel the remainder
    done, pending = await asyncio.wait(
        [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
    for task in pending:
        task.cancel()
    # force a result check; if there was an exception it'll be re-raised
    for task in done:
        task.result()

def main():
    loop = asyncio.get_event_loop()
    pipe_to_socket = asyncio.Queue(loop=loop)
    socket_to_pipe = asyncio.Queue(loop=loop)
    socket_coro = connect_socket(pipe_to_socket, socket_to_pipe, loop=loop)
    pipe_coro = connect_pipe(socket_to_pipe, pipe_to_socket, loop=loop)
    loop.run_until_complete(asyncio.gather(socket_coro, pipe_coro))

main()
To send to the Pipe I use this code:
import pexpect
test = r"/home/host/PycharmProjects/Tim/Tim.py"
process = pexpect.spawn("python3 " + test)
message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400},"cam":{"generationDeltaTime":1,"camParameters":{"basicContainer":{"stationType":5}}';
process.write(message + "\n")
process.wait()
but how can I create a script to read instead of write?
I tried with
test = r"/home/host/PycharmProjects/Tim/Tim.py"
p = pexpect.spawn("python3 " + test, timeout=None)
while True:
m = p.read()
file = open(r"/home/host/Desktop/OpeListening.txt", "a")
file.write(str(m))
file.close()
p.wait()
But the read goes immediately to the next step without any message. What is my error?
At the moment I am using Popen successfully:
import subprocess

process = subprocess.Popen(['python3', test], shell=False, stdout=subprocess.PIPE,
                           stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
while True:
    result = process.stdout.readline()
    result = result.decode("utf-8")
    print(result)
process.wait()
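For the record, a roughly equivalent line-by-line read with pexpect itself could look like the sketch below (readline() is a real pexpect.spawn method; this assumes, as the Popen version does, that the child writes and flushes one complete line per message, and note that a bare p.read() with no size argument reads until EOF):

import pexpect

test = r"/home/host/PycharmProjects/Tim/Tim.py"
p = pexpect.spawn("python3 " + test, timeout=None)

while True:
    line = p.readline()          # returns b'' once the child reaches EOF
    if not line:
        break
    with open(r"/home/host/Desktop/OpeListening.txt", "a") as f:
        f.write(line.decode("utf-8", errors="replace"))

p.wait()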

python asynchronous http request

I have some equipment with an HTTP interface that frequently generates an infinite HTTP page with values I want to parse and save to the database.
I started with requests:
import asyncio
import asyncpg
import requests

class node_http_mtr():
    def __init__(self, ip, nsrc, ndst):
        self.ip = ip
        self.nsrc = nsrc
        self.ndst = ndst
        try:
            self.data = requests.get('http://' + self.ip + '/nph-cgi_mtr?duration=-1&interval=0', stream=True, timeout=10)
        except:
            return

    def __iter__(self):
        return self

    def __next__(self):
        mtr = list()
        try:
            for chunk in self.data.iter_content(32 * (self.nsrc + self.ndst), '\n'):
                # DEBUG log chunk
                for line in chunk.split('\n'):
                    # DEBUG log line
                    if line.startswith('MTR'):
                        try:
                            _, io, num, val = line.split(' ')
                            l, r = val.split(':')[1], val.split(':')[2]
                            mtr.append((self.ip, io + num, l, r))
                        except:
                            # ERROR log line
                            pass
                    if len(mtr) == self.nsrc + self.ndst:
                        break
                if len(mtr) == self.nsrc + self.ndst:
                    yield mtr
                else:
                    continue
        except:
            # ERROR connection lost
            return
async def save_to_db(data_to_save):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''INSERT INTO mtr (ip, io, l, r) VALUES %s''' % ','.join(str(row) for row in data_to_save))
    finally:
        await pool.release(conn)

async def remove_from_db(ip):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''DELETE FROM httpmtr WHERE ip = $1''', ip)
    finally:
        await pool.release(conn)

async def http_mtr_worker():
    global workers_list
    global loop
    while True:
        await asyncio.sleep(0)
        for ip in list(workers_list):
            data_to_save = next(workers_list[ip])
            if data_to_save:
                asyncio.ensure_future(save_to_db(next(data_to_save)))
            await asyncio.sleep(0)

async def check_for_workers():
    global workers_list
    global pool
    while True:
        await asyncio.sleep(0)
        try:
            async with pool.acquire() as conn:
                workers = await conn.fetch('''SELECT ip FROM httpmtr''')
        finally:
            await pool.release(conn)
        for worker in workers:
            if worker['ip'] not in list(workers_list):
                workers_list[worker['ip']] = node_http_mtr(worker['ip'], 8, 8)
                await asyncio.sleep(0)
                print('Add worker', worker['ip'])
            await asyncio.sleep(0)
        ips_to_delete = set(workers_list.keys()) - set([i[0] for i in workers])
        if len(ips_to_delete) != 0:
            for ip in ips_to_delete:
                print('Delete worker ', ip)
                workers_list.pop(ip)
        await asyncio.sleep(0)

async def make_db_connection():
    pool = await asyncpg.create_pool(user='postgres', password='data', database='test', host='localhost', max_queries=50000, command_timeout=60)
    return pool

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
pool = loop.run_until_complete(make_db_connection())
workers_list = {}

try:
    asyncio.ensure_future(check_for_workers())
    asyncio.ensure_future(http_mtr_worker())
    loop.run_forever()
except Exception as e:
    print(e)
    pass
finally:
    print("Closing Loop")
    loop.close()
I have a trigger procedure in the DB that deletes all data older than 1 second; the final result with one worker in PostgreSQL is:
test=# select count(*) from mtr;
 count
-------
   384
(1 row)
That means 384 results per second. There are 16 different kinds of data from each device, so I have 384/16 = 24 values per second, which is an acceptable result. But the more workers I add, the worse the performance gets: with 10 workers I get 2-3 times fewer values. The goal is to have hundreds of workers at 24-25 values/sec each.
The next thing I tried was aiohttp; I expected to get a much better result. I hastily wrote some test code:
import asyncio
from aiohttp import ClientSession
import asyncpg

async def parse(line):
    if line.startswith('MTR'):
        _, io, num, val = line.split(' ')
        l, r = val.split(':')[1], val.split(':')[2]
        return ('ip.will.be.here', io + num, l, r)

async def run():
    url = "http://10.150.20.130/nph-cgi_mtr?duration=-1&interval=0"
    async with ClientSession() as session:
        while True:
            async with session.get(url) as response:
                buffer = b''
                start = False
                async for line in response.content.iter_any():
                    if line.startswith(b'\n'):
                        start = True
                        buffer += line
                    elif start and line.endswith(b'\n'):
                        buffer += line
                        mtr = [await parse(line) for line in buffer.decode().split('\n')[1:-1]]
                        await save_to_db(mtr)
                        break
                    elif start:
                        buffer += line

async def make_db_connection():
    pool = await asyncpg.create_pool(user='postgres', password='data', database='test', host='localhost', max_queries=50000, command_timeout=60)
    return pool

async def save_to_db(data_to_save):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''INSERT INTO mtr (ip, io, l, r) VALUES %s''' % ','.join(str(row) for row in data_to_save))
    finally:
        await pool.release(conn)

loop = asyncio.get_event_loop()
pool = loop.run_until_complete(make_db_connection())
future = asyncio.ensure_future(run())
loop.run_until_complete(future)
And I got this:
test=# select count(*) from mtr;
 count
-------
    80
(1 row)
i.e. I got 5 times worse performance with asynchronous requests. I'm stuck; I don't understand how to solve it.
UPDATE. Profiling didn't make the situation any clearer. (Profiler screenshots for the requests and aiohttp versions omitted.) With requests the situation is more or less clear, but I don't understand at all what the problem is with the async aiohttp version.
UPDATE 16/05/18. In the end I went back to multithreading and got what I need: constant performance with a large number of workers. Asynchronous calls are indeed not a panacea.
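For reference, a minimal sketch of what the threaded variant might look like (one blocking reader thread per device handing complete batches to a consumer through a queue; this is an illustration based on the parsing code above, not the actual final code, and the database writer is left out):

import threading
import queue
import requests

results = queue.Queue()

def http_mtr_thread(ip, nsrc=8, ndst=8):
    # one blocking reader thread per device; a slow device stalls only its own thread
    resp = requests.get('http://' + ip + '/nph-cgi_mtr?duration=-1&interval=0',
                        stream=True, timeout=10)
    mtr = []
    for line in resp.iter_lines(decode_unicode=True):
        if line and line.startswith('MTR'):
            _, io, num, val = line.split(' ')
            l, r = val.split(':')[1], val.split(':')[2]
            mtr.append((ip, io + num, l, r))
            if len(mtr) == nsrc + ndst:
                results.put(mtr)      # hand a complete batch to the DB writer
                mtr = []

def start_workers(ips):
    for ip in ips:
        threading.Thread(target=http_mtr_thread, args=(ip,), daemon=True).start()

A separate consumer thread (or the main thread) can then drain `results` and insert batches into PostgreSQL.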

Making 1 million requests with aiohttp/asyncio - literally

I followed this tutorial: https://pawelmhm.github.io/asyncio/python/aiohttp/2016/04/22/asyncio-aiohttp.html and everything works fine when I am doing around 50,000 requests. But I need to do 1 million API calls, and then I have a problem with this code:
url = "http://some_url.com/?id={}"
tasks = set()
sem = asyncio.Semaphore(MAX_SIM_CONNS)
for i in range(1, LAST_ID + 1):
task = asyncio.ensure_future(bound_fetch(sem, url.format(i)))
tasks.add(task)
responses = asyncio.gather(*tasks)
return await responses
Because Python needs to create 1 million tasks, it basically just lags and then prints a Killed message in the terminal. Is there any way to use a generator instead of a pre-made set (or list) of URLs? Thanks.
Schedule all 1 million tasks at once
This is the code you are talking about. It takes up to 3 GB of RAM, so it is quite possible that it will be terminated by the operating system if you are low on free memory.
import asyncio
from aiohttp import ClientSession

MAX_SIM_CONNS = 50
LAST_ID = 10**6

async def fetch(url, session):
    async with session.get(url) as response:
        return await response.read()

async def bound_fetch(sem, url, session):
    async with sem:
        await fetch(url, session)

async def fetch_all():
    url = "http://localhost:8080/?id={}"
    tasks = set()
    async with ClientSession() as session:
        sem = asyncio.Semaphore(MAX_SIM_CONNS)
        for i in range(1, LAST_ID + 1):
            task = asyncio.create_task(bound_fetch(sem, url.format(i), session))
            tasks.add(task)
        return await asyncio.gather(*tasks)

if __name__ == '__main__':
    asyncio.run(fetch_all())
Use a queue to streamline the work
This is my suggestion for how to use asyncio.Queue to pass URLs to worker tasks. The queue is filled as needed; there is no pre-made list of URLs.
It takes only 30 MB RAM :)
import asyncio
from aiohttp import ClientSession

MAX_SIM_CONNS = 50
LAST_ID = 10**6

async def fetch(url, session):
    async with session.get(url) as response:
        return await response.read()

async def fetch_worker(url_queue):
    async with ClientSession() as session:
        while True:
            url = await url_queue.get()
            try:
                if url is None:
                    # all work is done
                    return
                response = await fetch(url, session)
                # ...do something with the response
            finally:
                url_queue.task_done()
                # calling task_done() is necessary for the url_queue.join() to work correctly

async def fetch_all():
    url = "http://localhost:8080/?id={}"
    url_queue = asyncio.Queue(maxsize=100)
    worker_tasks = []
    for i in range(MAX_SIM_CONNS):
        wt = asyncio.create_task(fetch_worker(url_queue))
        worker_tasks.append(wt)
    for i in range(1, LAST_ID + 1):
        await url_queue.put(url.format(i))
    for i in range(MAX_SIM_CONNS):
        # tell the workers that the work is done
        await url_queue.put(None)
    await url_queue.join()
    await asyncio.gather(*worker_tasks)

if __name__ == '__main__':
    asyncio.run(fetch_all())
asyncio is memory-bound (like any other program). You cannot spawn more tasks than memory can hold. My guess is that you hit a memory limit. Check dmesg for more information.
1 million RPS doesn't mean there are 1M tasks. A task can do several requests in the same second.
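To address the "generator instead of a pre-made list" part of the question directly, one more possibility (a minimal sketch of my own, not part of either answer above) is to feed a URL generator in fixed-size batches so that only a bounded number of tasks exists at any time:

import asyncio
from itertools import islice
from aiohttp import ClientSession

BATCH_SIZE = 1000
LAST_ID = 10**6

async def fetch(url, session):
    async with session.get(url) as response:
        return await response.read()

async def fetch_batched():
    urls = ("http://localhost:8080/?id={}".format(i) for i in range(1, LAST_ID + 1))
    async with ClientSession() as session:
        while True:
            # take the next slice of the generator; at most BATCH_SIZE tasks are alive
            batch = list(islice(urls, BATCH_SIZE))
            if not batch:
                break
            await asyncio.gather(*(fetch(url, session) for url in batch))

if __name__ == '__main__':
    asyncio.run(fetch_batched())

The trade-off is that each batch waits for its slowest request, so the queue-based worker approach above generally gives smoother throughput.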
