I am making a socket client with Python asyncio.
Once connected to the server, it repeatedly receives the size of the incoming data first, and then reads that many bytes.
However, sometimes the first read returns the data itself rather than its size.
So I am trying to rewrite this using asyncio.Queue,
but I can't figure out how to implement it and put the data into the queue.
Any help would be greatly appreciated.
Send part
while True:
    # type(data) - np.ndarray
    str_data = base64.b64encode(data)
    length = str(len(str_data))
    writer.write(length.encode('utf-8').ljust(64))
    await writer.drain()
    writer.write(str_data)
    await writer.drain()
Receive part
async def socket_handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while True:
        try:
            data: bytes = await reader.read(64)
            try:
                mes = int(data.decode())
            except ValueError:
                print('invalid data skip')
                await reader.read(-1)
                continue
            data: bytes = await reader.read(mes)
            self.data = deepcopy(data)
Use StreamReader.readexactly to control exactly how many bytes are read.
I suggest closing the socket when an error occurs, because it is unlikely you can recover from a misaligned data stream.
async def socket_handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while True:
        data: bytes = await reader.readexactly(64)
        try:
            mes = int(data.decode())
        except ValueError:
            writer.close()
            return
            # print('invalid data skip')
            # await reader.read(-1)
            # continue
        try:
            data: bytes = await reader.readexactly(mes)
        except asyncio.IncompleteReadError:
            writer.close()
            return
        self.data = deepcopy(data)
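On Python 3.7 and newer it is also worth awaiting writer.wait_closed() after close(), so the coroutine does not return before the transport is actually gone:

writer.close()
await writer.wait_closed()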
Moreover:
The struct module can pack the length into fixed-size bytes.
A numpy.ndarray can be sent directly using a memoryview.
async def sender():
    while True:
        # type(data) - np.ndarray
        mv = memoryview(data).cast('B')  # flat byte view; requires a C-contiguous array
        length = len(mv)  # with the flat view this is the size in bytes
        writer.write(struct.pack('!Q', length))
        await writer.drain()
        writer.write(mv)
        await writer.drain()
async def socket_handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while True:
        try:
            length, = struct.unpack('!Q', await reader.readexactly(8))
            data = await reader.readexactly(length)
        except asyncio.IncompleteReadError:
            writer.close()
            return
        # change dtype to the actual one
        # reshape is probably needed
        array = np.frombuffer(data, dtype=np.float64)
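If the dtype and shape vary, one option is to put them in the header too. A minimal sketch, assuming a C-contiguous array and a small JSON header (send_array/recv_array are illustrative helpers, not part of the original code):

import json
import struct
import numpy as np

async def send_array(writer, data: np.ndarray):
    # hypothetical helper: JSON header with dtype/shape, then the raw bytes
    header = json.dumps({'dtype': str(data.dtype), 'shape': data.shape}).encode()
    writer.write(struct.pack('!Q', len(header)))
    writer.write(header)
    writer.write(struct.pack('!Q', data.nbytes))
    writer.write(memoryview(data).cast('B'))
    await writer.drain()

async def recv_array(reader) -> np.ndarray:
    header_len, = struct.unpack('!Q', await reader.readexactly(8))
    header = json.loads(await reader.readexactly(header_len))
    length, = struct.unpack('!Q', await reader.readexactly(8))
    payload = await reader.readexactly(length)
    return np.frombuffer(payload, dtype=header['dtype']).reshape(header['shape'])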
So I have been trying to experiment with Streams in Python and wrote the following code.
ServiceSubscription.py
class ServiceSubscription():
    def __init__(self) -> None:
        self.subscriber_connections = []
        self.service_connections = []
        self.server_listener = None
        # Dictionary of service readers where key is the name of the service and the value is the reader for the service
        self.service_readers = {}

    """
    Create the listening server on port 7777
    """
    async def initiate_server(self):
        server = await asyncio.start_server(self.handle_incoming, '127.0.0.1', 7777)
        addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
        print(f'Serving on {addrs}')
        async with server:
            await server.serve_forever()

    """
    Handle the incoming connection based on whether the connection is from a service or suscriber
    The first message sent should include either 'service:SERVICE_NAME' or 'suscriber: [SERVICE1, SERVICE2, ...]'
    """
    async def handle_incoming(self, reader: StreamReader, writer: StreamWriter):
        data = await reader.read(100)
        message = data.decode()
        addr = writer.get_extra_info('peername')
        print(f"Received {message!r} from {addr!r}")
        if ("Service:" in f"{message!r}"):
            message = message[0:7]
            self.service_connections.append(Connections(reader, writer, message))
            service_reader = ServiceReader(reader=reader, writer=writer)
            self.service_readers[message] = (service_reader)
            await service_reader.broadcast()
        elif ("Suscriber:" in f"{message!r}"):
            message = message[0:9]
            self.subscriber_connections.append(Connections(reader, writer, message))
            self.service_readers[message].add_suscribers(writer)
        else:
            pass

class ServiceReader():
    def __init__(self, reader: StreamReader, writer: StreamWriter):
        self.reader = reader
        self.writer = writer
        self.suscribers: Writer = []
        self.loop = asyncio.get_event_loop()

    def stop(self):
        self._stop.set()

    """
    Add new subscriber's StreamWriter here
    """
    def add_suscribers(self, writer: StreamWriter):
        # Not sure if this will work
        self.suscribers.append(writer)

    """
    Read data and broadcast it to subscribed clients
    """
    async def broadcast(self):
        while not self.reader.at_eof():
            data = await self.reader.readline()
            if b'\n' in data:
                print(True)
            data = data.decode()
            print(data)
WriterTest.py
import asyncio
from os import linesep

async def tcp_echo_client(message):
    reader, writer = await asyncio.open_connection(
        '127.0.0.1', 7777)
    print(f'Send: {message!r}\n')
    writer.write(message.encode())
    await writer.drain()
    while not writer.is_closing():
        data = input("Type a message\n")
        data = (data + "\n").encode()
        writer.write(data)
        await writer.drain()
    writer.close()

asyncio.run(tcp_echo_client('Service: TEST'))
I ran both python ServiceSubscription.py and python WriterTest.py at the same time to simulate a client and server.
Upon running ServiceSubscription.py, it prints "Serving on ('127.0.0.1', 7777)". When WriterTest.py is executed, ServiceSubscription.py prints "Received 'Service: TEST' from ('127.0.0.1', 39923)". However, anything typed beyond that is not printed until WriterTest.py's connection is closed. When the connection is closed, ServiceSubscription.py prints out the remaining bytes in the buffer and confirms that there are newlines in the data read, but readline does not pick them up: it doesn't return after encountering a newline.
The problem is here, in your WriterTest:
data = input("Type a message\n")
The input function is blocking, so in an asyncio program it blocks the event loop. All tasks are stopped until you enter something. With asyncio streams, the actual transmission of the bytes occurs in another task. Your call to the input function blocks that task, which prevents the transmission. Your server doesn't respond because nothing is actually sent.
Since this is just a test program, you have a couple of quick solutions. You could put this line:
await asyncio.sleep(1.0)
after the line await writer.drain(). This will keep the other tasks running for one second, plenty of time for the data to get transmitted.
You could, of course, replace the call to input with some hard-coded string.
Better solutions can be found at the following link:
Listen to keypress with asyncio
As a general rule, input and asyncio do not play well together.
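If you do want interactive input in an asyncio program, one common workaround is to run the blocking call in a worker thread; a one-line sketch using asyncio.to_thread (Python 3.9+):

data = await asyncio.to_thread(input, "Type a message\n")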
The data is encoded along with a line break, and the two variables are unpacked as reader and writer objects. Meanwhile, is_closing() only becomes true once a closing process has started, i.e. after close() is called, so you could instead add a condition that breaks out of the loop when the message has no characters.
import asyncio
import sys
from os import linesep

async def tcp_echo_client(message):
    reader, writer = await asyncio.open_connection(
        '127.0.0.1', 7777)
    print(f'Send: {message!r}\n')
    writer.write(message.encode())
    await writer.drain()
    while not writer.is_closing():
        await asyncio.get_event_loop().run_in_executor(None, lambda s="Type a message\n": sys.stdout.write(s+' '))
        data = await asyncio.get_event_loop().run_in_executor(None, sys.stdin.readline)
        if len(data) == 0:  # empty data
            break
        data = data.encode()  # sys.stdin.readline already includes the trailing '\n'
        writer.write(data)
        await writer.drain()
    print('Close the connection')
    writer.close()
    await writer.wait_closed()
Here I changed the while conditional to instead check whether the data is empty, and used a different coroutine from asyncio:
async def broadcast(self):
    while True:
        try:
            data = await asyncio.wait_for(self.reader.readline(), timeout=10.0)
        except asyncio.TimeoutError:
            print("Expected message, received None")
            break
        if len(data) == 0:  # connection closed
            break
        data = data.decode().rstrip().upper()
        print(data)
Here is server.py
import asyncio
import aiofile

async def handler(reader, writer):
    data = await reader.read(n=-1)
    addr = writer.get_extra_info('peername')
    print('Addr', addr)
    # photo_120721_215652.jpg
    name = data[:23].decode()
    async with aiofile.async_open(name, 'wb') as afp:
        await afp.write(data[23:])
    print("handler end")

async def main():
    server = await asyncio.start_server(handler, '0.0.0.0', 8888)
    addr = server.sockets[0].getsockname()
    print(f'Serving on {addr}')
    async with server:
        await server.serve_forever()

asyncio.run(main())
Here is client.py
async def send_photos_to_server(filelist):
    retry = 0
    for file in filelist:
        while True:
            try:
                reader, writer = await asyncio.open_connection('192.168.1.100', 8888)
            except OSError:
                while retry != 5:
                    await asyncio.sleep(3)
                    retry += 1
            else:
                break  # exit the loop
        if retry == 5:
            print(f"No connection after {retry} retries")
            break
        name = file.split('/')[-1].encode()
        writer.write(name)
        await writer.drain()
        async with aiofile.async_open(file, 'rb') as afp:
            afp_reader = await afp.read(length=-1)
        writer.write(afp_reader)
        await writer.drain()
        writer.close()
        await writer.wait_closed()

filelist = ['./photos/photo_120721_215652.jpg',
            './photos/photo_120721_215654.jpg',
            './photos/photo_120721_215656.jpg']

asyncio.run(send_photos_to_server(filelist))
So this code works, but I'm not sure it's the proper way to do it. Every time we send a new file, we create a new connection. Sure, I could pack all three files into an archive, but I'm not sure that is handy. Also, it's inconvenient to put the filename into the message, because then on the server side I have to find it somehow. So can someone explain how to modify this code for better practice? I've just started to learn asyncio and am not good at it.
Servers use protocols - that means rules which describe what one side has to send and how the other side should read it.
For example, HTTP first sends headers, then an empty line, and then the data (body). One of the headers says how long the body is.
The other side first has to read byte after byte until it finds the empty line ('\r\n\r\n'), then it has the headers, can find out how long the body is, and can read the correct number of bytes.
The same way you can create your own protocol.
I assume you want to send all files over one connection.
You can send the number of files + '\n', and then use a for-loop to send filename + '\n', file size + '\n', and the file data.
The other side should do the same: first read up to '\n' to get the number of files, then in a for-loop read up to '\n' to get the filename, read up to '\n' to get the size, and use the size to read the file data.
This also shows that it may be simpler to compress the data with zip and send that - the other side will have all the information (filename, size, data) inside the zip file.
Of course you can reduce this and still send every file over a separate connection.
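As an aside, the zip variant mentioned above needs almost no protocol at all; a minimal sketch (helper names are illustrative):

import io
import zipfile

# client side: pack files into one in-memory zip and send it as a single blob
def pack_files(filelist):
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as zf:
        for filename in filelist:
            zf.write(filename, arcname=filename.split('/')[-1])
    return buf.getvalue()

# server side: read everything, then unpack - filenames and sizes come from the zip itself
def unpack_files(data, target_dir='.'):
    with zipfile.ZipFile(io.BytesIO(data)) as zf:
        zf.extractall(target_dir)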
Example code
server.py
import asyncio
import aiofile

async def handler(reader, writer):
    # --- before for-loop ---
    data = await reader.read(n=-1)
    addr = writer.get_extra_info('peername')
    print('Addr', addr)

    # get number of images + '\n'
    start = 0
    end = data.find(b'\n', start)
    item = data[start:end]
    print('[DEBUG] start,end:', start, end, item)
    number = int(item.decode())
    print('number:', number)
    print('--------')

    # --- for-loop ---
    for _ in range(number):
        # get filename + '\n'
        start = end+1
        end = data.find(b'\n', start)
        item = data[start:end]
        print('[DEBUG] start,end:', start, end, item)
        name = item.decode()
        print('name:', name)

        # get size + '\n'
        start = end+1
        end = data.find(b'\n', start)
        item = data[start:end]
        print('[DEBUG] start,end:', start, end, item)
        size = int(item.decode())
        print('size:', size)

        # get data
        start = end+1
        end = start+size-1
        item = data[start:end+1]  # end points at the last data byte, so include it
        print('[DEBUG] start,end:', start, end, item[:10])
        async with aiofile.async_open(name, 'wb') as afp:
            await afp.write(item)
        print('--------')

    # --- after for-loop ---
    print("handler end")

async def main():
    server = await asyncio.start_server(handler, '0.0.0.0', 8888)
    addr = server.sockets[0].getsockname()
    print(f'Serving on {addr}')
    async with server:
        await server.serve_forever()

asyncio.run(main())
client.py
import asyncio
import aiofile

async def send_photos_to_server(filelist):
    # --- before for-loop ---
    retry = 0
    while True:
        try:
            reader, writer = await asyncio.open_connection('0.0.0.0', 8888)
        except OSError:
            while retry != 5:
                await asyncio.sleep(3)
                retry += 1
        else:
            break  # exit the loop
    if retry == 5:
        print(f"No connection after {retry} retries")
        return

    # send number of images + '\n'
    text = str(len(filelist)) + '\n'
    writer.write(text.encode())
    await writer.drain()

    # --- for-loop ---
    for filename in filelist:
        # send filename + '\n'
        name = filename.split('/')[-1]
        text = name + '\n'
        writer.write(text.encode())
        await writer.drain()

        async with aiofile.async_open(filename, 'rb') as afp:
            afp_reader = await afp.read(length=-1)

        # send size + '\n'
        size = str(len(afp_reader))
        text = size + '\n'
        writer.write(text.encode())
        await writer.drain()

        # send data
        writer.write(afp_reader)
        await writer.drain()

    # --- after for-loop ---
    writer.close()
    await writer.wait_closed()

filelist = [
    '/home/furas/test/image1.png',
    '/home/furas/test/image2.png',
    '/home/furas/test/image3.png',
]

asyncio.run(send_photos_to_server(filelist))
So I made a webservice (based on starlette), with an endpoint that accepts a binary body. I want to feed this binary body to fastavro.
The Starlette docs say I can access the raw data as an async stream with request.stream():
async for chunk in request.stream():
    # do something with chunk...
Now, I want to feed the stream to fastavro. The thing is, the fastavro reader needs a file-like input stream:
with open('some-file.avro', 'rb') as fo:
    avro_reader = reader(fo)
My question is, is there a clean way to transform this async stream into a file-like one?
I guess I could implement an object with a read() method that awaits and returns the data returned by request.stream(). But if the caller passes a size, I need to keep a memory buffer, don't I? Could something based on BufferedRWPair work?
Or is the only way to store the whole stream to disk or memory first, before feeding it to fastavro?
Thanks in advance!
I ended up using a SpooledTemporaryFile:
data_file = SpooledTemporaryFile(mode='w+b',
                                 max_size=MAX_RECEIVED_DATA_MEMORY_SIZE)
async for chunk in request.stream():
    data_file.write(chunk)
data_file.seek(0)
avro_reader = reader(data_file)
It's not the ideal solution I envisioned (somehow transmitting the data directly between the input and output), but it's still good enough...
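For context on this design choice: tempfile.SpooledTemporaryFile keeps the data in memory until max_size is exceeded, and only then rolls over to a real temporary file on disk, so small requests never touch the filesystem while large ones don't exhaust memory.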
I encountered the same problem and wrote a compact class, StreamingBody. It does exactly what I need.
from typing import AsyncIterator
import asyncio

class AsyncGen:
    def __init__(self, block_count, block_size) -> None:
        self.bc = block_count
        self.bs = block_size

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self.bc == 0:
            raise StopAsyncIteration()
        self.bc -= 1
        return b"A" * self.bs

class StreamingBody:
    _chunks: AsyncIterator[bytes]
    _backlog: bytes

    def __init__(self, chunks: AsyncIterator[bytes]):
        self._chunks = chunks
        self._backlog = b""

    async def _read_until_end(self):
        content = self._backlog
        self._backlog = b""
        while True:
            try:
                content += await self._chunks.__anext__()
            except StopAsyncIteration:
                break
        return content

    async def _read_chunk(self, size: int):
        content = self._backlog
        bytes_read = len(self._backlog)
        while bytes_read < size:
            try:
                chunk = await self._chunks.__anext__()
            except StopAsyncIteration:
                break
            content += chunk
            bytes_read += len(chunk)
        self._backlog = content[size:]
        content = content[:size]
        return content

    async def read(self, size: int = -1):
        if size > 0:
            return await self._read_chunk(size)
        elif size == -1:
            return await self._read_until_end()
        else:
            return b""

async def main():
    async_gen = AsyncGen(11, 3)
    body = StreamingBody(async_gen)

    res = await body.read(11)
    print(f"[{len(res)}]: {res}")

    res = await body.read()
    print(f"[{len(res)}]: {res}")

    res = await body.read()
    print(f"[{len(res)}]: {res}")

loop = asyncio.get_event_loop()
loop.run_until_complete(main())
I'm making a GET request to a DICOM server, which returns a Multipart/Related response with Type=image/jpeg. I tried using the aiohttp library's multipart feature to parse it, but it didn't work: the saved file is corrupted.
Here is my code.
import asyncio
import aiohttp

'''
async def fetch(url, session, header):
    async with session.get(url, headers=header) as response:
        await response

async def multiHit(urls, header):
    tasks = []
    async with aiohttp.ClientSession() as session:
        for i, url in enumerate(urls):
            tasks.append(fetch(url, session, header))
        result = await asyncio.gather(*tasks)
    return result

loop = asyncio.get_event_loop()
res = loop.run_until_complete(multiHit(["FRAME URL"], {"Accept": "multipart/related;type=image/jpeg"}))
print(res)
'''

async def xyz(loop):
    async with aiohttp.ClientSession(loop=loop).get(url="FRAME URL", headers={"Accept": "multipart/related;type=image/jpeg"}) as response:
        reader = aiohttp.MultipartReader.from_response(response)
        while True:
            part = await reader.next()
            if part is None:
                break
            filedata = await part.read(decode=False)
            import base64
            with open('m.jpeg', 'wb') as outFile:
                outFile.write(part.decode(filedata))
    return 1

loop = asyncio.get_event_loop()
res = loop.run_until_complete(xyz(loop))
How do I parse the Multipart/Related response and save the images?
I figured out that I was parsing the multipart response properly, but I had to use another library (library name: imagecodecs, method name: jpegsof3_decode) to decompress each individual part into an image. This gives a numpy array of the image. Here is the updated code:
reader = aiohttp.MultipartReader.from_response(response)
while True:
    part = await reader.next()
    if part is None:
        break
    data = await part.read()
    imageDecompressed = jpegsof3_decode(data)
Further, the numpy array can be converted into an image using the cv2 library:
success, encoded_image = cv2.imencode('.png',imageDecompressed)
A byte version of the converted image can be obtained this way:
imageInBytes = encoded_image.tobytes()
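Putting the pieces together, a hedged end-to-end sketch of the loop (assuming jpegsof3_decode from imagecodecs and lossless-JPEG DICOM frames; the filename pattern is illustrative):

import aiohttp
import cv2
from imagecodecs import jpegsof3_decode

async def save_frames(response):
    reader = aiohttp.MultipartReader.from_response(response)
    index = 0
    while True:
        part = await reader.next()
        if part is None:
            break
        data = await part.read()
        image = jpegsof3_decode(data)              # numpy array
        cv2.imwrite(f'frame_{index}.png', image)   # or cv2.imencode to keep it in memory
        index += 1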
I have some equipment with an HTTP interface that generates an infinite HTTP page with values I want to parse and save to the database.
I started with requests:
import asyncio
import asyncpg
import requests

class node_http_mtr():
    def __init__(self, ip, nsrc, ndst):
        self.ip = ip
        self.nsrc = nsrc
        self.ndst = ndst
        try:
            self.data = requests.get('http://' + self.ip + '/nph-cgi_mtr?duration=-1&interval=0', stream=True, timeout=10)
        except:
            return

    def __iter__(self):
        return self

    def __next__(self):
        mtr = list()
        try:
            for chunk in self.data.iter_content(32 * (self.nsrc + self.ndst), '\n'):
                # DEBUG log chunk
                for line in chunk.split('\n'):
                    # DEBUG log line
                    if line.startswith('MTR'):
                        try:
                            _, io, num, val = line.split(' ')
                            l, r = val.split(':')[1], val.split(':')[2]
                            mtr.append((self.ip, io+num, l, r))
                        except:
                            # ERROR log line
                            pass
                    if len(mtr) == self.nsrc + self.ndst:
                        break
                if len(mtr) == self.nsrc + self.ndst:
                    yield mtr
                else:
                    continue
        except:
            # ERROR connection lost
            return

async def save_to_db(data_to_save):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''INSERT INTO mtr (ip, io, l, r) VALUES %s''' % ','.join(str(row) for row in data_to_save))
    finally:
        await pool.release(conn)

async def remove_from_db(ip):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''DELETE FROM httpmtr WHERE ip = $1''', ip)
    finally:
        await pool.release(conn)

async def http_mtr_worker():
    global workers_list
    global loop
    while True:
        await asyncio.sleep(0)
        for ip in list(workers_list):
            data_to_save = next(workers_list[ip])
            if data_to_save:
                asyncio.ensure_future(save_to_db(next(data_to_save)))
                await asyncio.sleep(0)

async def check_for_workers():
    global workers_list
    global pool
    while True:
        await asyncio.sleep(0)
        try:
            async with pool.acquire() as conn:
                workers = await conn.fetch('''SELECT ip FROM httpmtr''')
        finally:
            await pool.release(conn)
        for worker in workers:
            if worker['ip'] not in list(workers_list):
                workers_list[worker['ip']] = node_http_mtr(worker['ip'], 8, 8)
                await asyncio.sleep(0)
                print('Add worker', worker['ip'])
        await asyncio.sleep(0)
        ips_to_delete = set(workers_list.keys()) - set([i[0] for i in workers])
        if len(ips_to_delete) != 0:
            for ip in ips_to_delete:
                print('Delete worker ', ip)
                workers_list.pop(ip)
        await asyncio.sleep(0)

async def make_db_connection():
    pool = await asyncpg.create_pool(user='postgres', password='data', database='test', host='localhost', max_queries=50000, command_timeout=60)
    return pool

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
pool = loop.run_until_complete(make_db_connection())
workers_list = {}
try:
    asyncio.ensure_future(check_for_workers())
    asyncio.ensure_future(http_mtr_worker())
    loop.run_forever()
except Exception as e:
    print(e)
    pass
finally:
    print("Closing Loop")
    loop.close()
I have a trigger procedure in the DB which deletes all data older than 1 second. The final result with one worker in PostgreSQL is:
test=# select count(*) from mtr;
 count
-------
   384
(1 row)
That means 384 results per second. There are 16 different kinds of data on each device, so I have 384/16 = 24 values per second, which is the expected result. But the more workers I add, the worse the performance gets: with 10 workers I get 2-3 times fewer values. The goal is to have hundreds of workers at 24-25 values/sec.
Next, I tried aiohttp. I expected to get a much better result. Hastily, I wrote this test code:
import asyncio
from aiohttp import ClientSession
import asyncpg

async def parse(line):
    if line.startswith('MTR'):
        _, io, num, val = line.split(' ')
        l, r = val.split(':')[1], val.split(':')[2]
        return ('ip.will.be.here', io + num, l, r)

async def run():
    url = "http://10.150.20.130/nph-cgi_mtr?duration=-1&interval=0"
    async with ClientSession() as session:
        while True:
            async with session.get(url) as response:
                buffer = b''
                start = False
                async for line in response.content.iter_any():
                    if line.startswith(b'\n'):
                        start = True
                        buffer += line
                    elif start and line.endswith(b'\n'):
                        buffer += line
                        mtr = [await parse(line) for line in buffer.decode().split('\n')[1:-1]]
                        await save_to_db(mtr)
                        break
                    elif start:
                        buffer += line

async def make_db_connection():
    pool = await asyncpg.create_pool(user='postgres', password='data', database='test', host='localhost', max_queries=50000, command_timeout=60)
    return pool

async def save_to_db(data_to_save):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''INSERT INTO mtr (ip, io, l, r) VALUES %s''' % ','.join(str(row) for row in data_to_save))
    finally:
        await pool.release(conn)

loop = asyncio.get_event_loop()
pool = loop.run_until_complete(make_db_connection())
future = asyncio.ensure_future(run())
loop.run_until_complete(future)
And I've got this:
test=# select count(*) from mtr;
 count
-------
    80
(1 row)
i.e. I got 5 times worse performance with asynchronous requests. I'm stuck. I don't understand how to solve it.
UPDATE. Profiling didn't make the situation any clearer.
(profiling screenshots for requests and aiohttp omitted)
With requests the situation is more or less clear, but I don't understand at all what the problem is with async aiohttp.
UPDATE 16/05/18. Finally, I came back to multithreading and got what I need: constant performance with a large number of workers. Asynchronous calls are indeed not a panacea.
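For reference, a minimal sketch of the multithreaded shape that worked (names and the queue hand-off are illustrative, not the exact production code): each worker thread holds its own blocking streaming request and pushes parsed rows into a queue, and a single writer drains the queue into PostgreSQL:

import queue
import threading

import psycopg2  # assumption: any blocking PostgreSQL driver works here
import requests

rows = queue.Queue()

def worker(ip):
    # one blocking streaming connection per device
    resp = requests.get(f'http://{ip}/nph-cgi_mtr?duration=-1&interval=0', stream=True, timeout=10)
    for line in resp.iter_lines(decode_unicode=True):
        if line.startswith('MTR'):
            _, io, num, val = line.split(' ')
            l, r = val.split(':')[1], val.split(':')[2]
            rows.put((ip, io + num, l, r))

def writer():
    conn = psycopg2.connect(user='postgres', password='data', dbname='test', host='localhost')
    cur = conn.cursor()
    while True:
        row = rows.get()
        cur.execute('INSERT INTO mtr (ip, io, l, r) VALUES (%s, %s, %s, %s)', row)
        conn.commit()

for ip in ['10.150.20.130']:  # hundreds of workers scale the same way
    threading.Thread(target=worker, args=(ip,), daemon=True).start()

writer()  # run the DB writer in the main thread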