Here is server.py
import asyncio
import aiofile

async def handler(reader, writer):
    data = await reader.read(n=-1)
    addr = writer.get_extra_info('peername')
    print('Addr', addr)
    # photo_120721_215652.jpg
    name = data[:23].decode()
    async with aiofile.async_open(name, 'wb') as afp:
        await afp.write(data[23:])
    print("handler end")

async def main():
    server = await asyncio.start_server(handler, '0.0.0.0', 8888)
    addr = server.sockets[0].getsockname()
    print(f'Serving on {addr}')
    async with server:
        await server.serve_forever()

asyncio.run(main())
Here is client.py
import asyncio
import aiofile

async def send_photos_to_server(filelist):
    retry = 0
    for file in filelist:
        while True:
            try:
                reader, writer = await asyncio.open_connection('192.168.1.100', 8888)
            except OSError:
                while retry != 5:
                    await asyncio.sleep(3)
                    retry += 1
            else:
                break  # exit the loop
        if retry == 5:
            print(f"No connection after {retry} retries")
            break
        name = file.split('/')[-1].encode()
        writer.write(name)
        await writer.drain()
        async with aiofile.async_open(file, 'rb') as afp:
            afp_reader = await afp.read(length=-1)
        writer.write(afp_reader)
        await writer.drain()
        writer.close()
        await writer.wait_closed()

filelist = ['./photos/photo_120721_215652.jpg',
            './photos/photo_120721_215654.jpg',
            './photos/photo_120721_215656.jpg']

asyncio.run(send_photos_to_server(filelist))
So this code works, but I'm not sure it's the proper way to do it. Every time we send a new file, we create a new connection. Sure, I could pack all three files into an archive, but I'm not sure that's handy. Also, it's inconvenient to prepend the filename to the message, because then on the server side I somehow have to find it. Can someone explain how to modify this code to follow better practice? I've just started to learn asyncio and I'm not good at it yet.
Servers use protocols - that is, rules which describe what one side has to send and how the other side has to read it.
For example, HTTP first sends headers, then an empty line, and then the data (body). One of the headers says how long the body is.
The other side first has to read byte after byte until it finds the empty line ('\n\n'); then it has the headers, it can find out how long the body is, and it can read the correct number of bytes.
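In asyncio-stream terms, that header-style framing looks roughly like this - a sketch with a hypothetical helper that only handles a Content-Length header:

import asyncio

async def read_http_like(reader: asyncio.StreamReader) -> bytes:
    headers = {}
    while True:
        line = await reader.readline()      # reads up to and including '\n'
        if line in (b'\r\n', b'\n', b''):   # empty line ends the headers
            break
        name, _, value = line.decode().partition(':')
        headers[name.strip().lower()] = value.strip()
    size = int(headers.get('content-length', 0))
    return await reader.readexactly(size)   # body is exactly `size` bytes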
You can create your own protocol the same way.
I assumed you want to send all files over one connection.
You can send the number of files + '\n', and then use a for-loop to send filename + '\n', file size + '\n', file data.
The other side should do something similar - first read up to '\n' to get the number of files, then in a for-loop read up to '\n' to get the filename, read up to '\n' to get the size, and use the size to read the file data.
This also suggests that it may be simpler to compress the data with zip and send that - the other side will then have all the information - filename, size, data - inside the zip file.
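For example, a sketch of the zip route (an in-memory archive built by a hypothetical pack helper, not part of the example below):

import io
import zipfile

def pack(filelist):
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as zf:
        for path in filelist:
            zf.write(path, arcname=path.split('/')[-1])
    return buf.getvalue()   # one bytes blob, ready for writer.write()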
Of course you can simplify this and still send every file over a separate connection.
Example code
server.py
import asyncio
import aiofile

async def handler(reader, writer):
    # --- before for-loop ---
    data = await reader.read(n=-1)
    addr = writer.get_extra_info('peername')
    print('Addr', addr)

    # get number of images + '\n'
    start = 0
    end = data.find(b'\n', start)
    item = data[start:end]
    print('[DEBUG] start,end:', start, end, item)
    number = int(item.decode())
    print('number:', number)
    print('--------')

    # --- for-loop ---
    for _ in range(number):
        # get filename + '\n'
        start = end+1
        end = data.find(b'\n', start)
        item = data[start:end]
        print('[DEBUG] start,end:', start, end, item)
        name = item.decode()
        print('name:', name)

        # get size + '\n'
        start = end+1
        end = data.find(b'\n', start)
        item = data[start:end]
        print('[DEBUG] start,end:', start, end, item)
        size = int(item.decode())
        print('size:', size)

        # get data
        start = end+1
        end = start+size-1
        item = data[start:end+1]  # end is inclusive here, so slice one past it
        print('[DEBUG] start,end:', start, end, item[:10])

        async with aiofile.async_open(name, 'wb') as afp:
            await afp.write(item)

        print('--------')

    # --- after for-loop ---
    print("handler end")

async def main():
    server = await asyncio.start_server(handler, '0.0.0.0', 8888)
    addr = server.sockets[0].getsockname()
    print(f'Serving on {addr}')
    async with server:
        await server.serve_forever()

asyncio.run(main())
client.py
import asyncio
import aiofile

async def send_photos_to_server(filelist):
    # --- before for-loop ---
    retry = 0
    while True:
        try:
            reader, writer = await asyncio.open_connection('0.0.0.0', 8888)
        except OSError:
            while retry != 5:
                await asyncio.sleep(3)
                retry += 1
        else:
            break  # exit the loop
    if retry == 5:
        print(f"No connection after {retry} retries")
        return

    # send number of images + '\n'
    text = str(len(filelist)) + '\n'
    writer.write(text.encode())
    await writer.drain()

    # --- for-loop ---
    for filename in filelist:
        # send filename + '\n'
        name = filename.split('/')[-1]
        text = name + '\n'
        writer.write(text.encode())
        await writer.drain()

        async with aiofile.async_open(filename, 'rb') as afp:
            afp_reader = await afp.read(length=-1)

        # send size + '\n'
        size = str(len(afp_reader))
        text = size + '\n'
        writer.write(text.encode())
        await writer.drain()

        # send data
        writer.write(afp_reader)
        await writer.drain()

    # --- after for-loop ---
    writer.close()
    await writer.wait_closed()

filelist = [
    '/home/furas/test/image1.png',
    '/home/furas/test/image2.png',
    '/home/furas/test/image3.png',
]

asyncio.run(send_photos_to_server(filelist))
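A note on the server side: instead of read(n=-1)-ing the whole payload into memory and slicing it, the same protocol can be parsed as it arrives with readline() and readexactly(). A sketch of that variant:

import asyncio
import aiofile

async def handler(reader, writer):
    number = int((await reader.readline()).decode())   # number of files
    for _ in range(number):
        name = (await reader.readline()).decode().strip()
        size = int((await reader.readline()).decode())
        payload = await reader.readexactly(size)       # exactly `size` bytes
        async with aiofile.async_open(name, 'wb') as afp:
            await afp.write(payload)
    writer.close()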
So I have been trying to experiment with Streams in Python and wrote the following code.
ServiceSubscription.py
import asyncio
from asyncio import StreamReader, StreamWriter

class ServiceSubscription():
    def __init__(self) -> None:
        self.subscriber_connections = []
        self.service_connections = []
        self.server_listener = None
        # Dictionary of service readers where key is the name of the service
        # and the value is the reader for the service
        self.service_readers = {}

    """
    Create the listening server on port 7777
    """
    async def initiate_server(self):
        server = await asyncio.start_server(self.handle_incoming, '127.0.0.1', 7777)
        addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
        print(f'Serving on {addrs}')
        async with server:
            await server.serve_forever()

    """
    Handle the incoming connection based on whether the connection is from a
    service or subscriber. The first message sent should include either
    'service:SERVICE_NAME' or 'suscriber: [SERVICE1, SERVICE2, ...]'
    """
    async def handle_incoming(self, reader: StreamReader, writer: StreamWriter):
        data = await reader.read(100)
        message = data.decode()
        addr = writer.get_extra_info('peername')
        print(f"Received {message!r} from {addr!r}")
        if ("Service:" in f"{message!r}"):
            message = message[0:7]
            self.service_connections.append(Connections(reader, writer, message))
            service_reader = ServiceReader(reader=reader, writer=writer)
            self.service_readers[message] = (service_reader)
            await service_reader.broadcast()
        elif ("Suscriber:" in f"{message!r}"):
            message = message[0:9]
            self.subscriber_connections.append(Connections(reader, writer, message))
            self.service_readers[message].add_suscribers(writer)
        else:
            pass

class ServiceReader():
    def __init__(self, reader: StreamReader, writer: StreamWriter):
        self.reader = reader
        self.writer = writer
        self.suscribers: Writer = []
        self.loop = asyncio.get_event_loop()

    def stop(self):
        self._stop.set()

    """
    Add new subscriber's StreamWriter here
    """
    def add_suscribers(self, writer: StreamWriter):
        # Not sure if this will work
        self.suscribers.append(writer)

    """
    Read data and broadcast it to subscribed clients
    """
    async def broadcast(self):
        while not self.reader.at_eof():
            data = await self.reader.readline()
            if b'\n' in data:
                print(True)
            data = data.decode()
            print(data)
WriterTest.py
import asyncio
from os import linesep

async def tcp_echo_client(message):
    reader, writer = await asyncio.open_connection(
        '127.0.0.1', 7777)

    print(f'Send: {message!r}\n')
    writer.write(message.encode())
    await writer.drain()

    while not writer.is_closing():
        data = input("Type a message\n")
        data = (data + "\n").encode()
        writer.write(data)
        await writer.drain()
    writer.close()

asyncio.run(tcp_echo_client('Service: TEST'))
I ran both python ServiceSubscription.py and python WriterTest.py at the same time to simulate a client and server.
Upon running ServiceSubscription.py, it will print "Serving on ('127.0.0.1', 7777)". When WriterTest.py is executed, ServiceSubscription.py will print "Received 'Service: TEST' from ('127.0.0.1', 39923)". However, anything typed beyond that is not printed until WriterTest.py's connection is closed. When the connection is closed, ServiceSubscription.py prints out the remaining bytes in the buffer and also confirms that there are newlines in the data it read, but readline doesn't pick them up, as it doesn't return after encountering a newline.
The problem is here, in your WriterTest:
data = input("Type a message\n")
The input function is blocking, so in an asyncio program it blocks the event loop. All tasks are stopped until you enter something. With asyncio streams, the actual transmission of the bytes occurs in another task. Your call to the input function blocks that task, which prevents the transmission. Your server doesn't respond because nothing is actually sent.
Since this is just a test program, you have a couple of quick solutions. You could put this line:
await asyncio.sleep(1.0)
after the line await writer.drain(). This will keep the other tasks running for one second, plenty of time for the data to get transmitted.
You could, of course, replace the call to input with some hard-coded string.
Better solutions can be found at the following link:
Listen to keypress with asyncio
As a general rule, input and asyncio do not play well together.
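The usual workaround - shown in full in the next answer - is to push the blocking call into a worker thread, e.g. via a hypothetical helper like this sketch:

import asyncio

async def ask(prompt: str) -> str:
    # run the blocking input() in a thread so the event loop stays alive
    loop = asyncio.get_event_loop()
    return await loop.run_in_executor(None, input, prompt)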
The data is encoded together with a line break, and a couple of variables are unpacked as the reader and writer objects; meanwhile is_closing() waits for a closing process, which should be close(). You could try adding a condition for when the message has no characters:
import asyncio
import sys
from os import linesep

async def tcp_echo_client(message):
    reader, writer = await asyncio.open_connection(
        '127.0.0.1', 7777)

    print(f'Send: {message!r}\n')
    writer.write(message.encode())
    await writer.drain()

    while not writer.is_closing():
        await asyncio.get_event_loop().run_in_executor(
            None, lambda s="Type a message\n": sys.stdout.write(s + ' '))
        data = await asyncio.get_event_loop().run_in_executor(None, sys.stdin.readline)
        if len(data) == 0:  # empty data, stdin reached EOF
            break
        data = data.encode()  # readline() already includes the trailing '\n'
        writer.write(data)
        await writer.drain()

    print('Close the connection')
    writer.close()
    await writer.wait_closed()
Here I change the while conditional to instead check whether data is empty, and use a different coroutine from asyncio:
async def broadcast(self):
    while True:
        # wait_for must be awaited; note that on timeout it raises
        # asyncio.TimeoutError instead of returning None
        data = await asyncio.wait_for(self.reader.readline(), timeout=10.0)
        if data is None or len(data.decode()) == 0:
            print("Expected message, received None")
            break
        data = data.decode().rstrip().upper()
        print(data)
I am making a socket client with Python asyncio.
When connected to the server, it initially keeps receiving the data size, then receives that much data.
However, sometimes the client receives the original data rather than its size, even though the size should be the first thing received.
So I am trying to rewrite this using asyncio.Queue,
but I can't figure out how to implement it and put data into the queue.
Any help would be greatly appreciated.
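For context, the Queue wiring being asked about might look roughly like this (a sketch, not code from the project - one task reads complete frames, consumers take them from the queue):

import asyncio

async def reader_task(reader: asyncio.StreamReader, queue: asyncio.Queue):
    while True:
        header = await reader.readexactly(64)     # fixed-width size field
        size = int(header.decode())
        frame = await reader.readexactly(size)    # the frame itself
        await queue.put(frame)

async def consumer(queue: asyncio.Queue):
    while True:
        frame = await queue.get()
        print('got frame of', len(frame), 'bytes')
        queue.task_done()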
Send part
while True:
    # type(data) - np.ndarray
    str_data = base64.b64encode(data)
    length = str(len(str_data))
    writer.write(length.encode('utf-8').ljust(64))
    await writer.drain()
    writer.write(str_data)
    await writer.drain()
Receive part
async def socket_handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while True:
        try:
            data: bytes = await reader.read(64)
            try:
                mes = int(data.decode())
            except ValueError:
                print('invalid data skip')
                await reader.read(-1)
                continue
            data: bytes = await reader.read(mes)
            self.data = deepcopy(data)
Use StreamReader.readexactly to guarantee exactly how many bytes are read.
I suggest closing the socket when an error occurs, because it is unlikely to recover from a misaligned data stream.
async def socket_handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while True:
        data: bytes = await reader.readexactly(64)
        try:
            mes = int(data.decode())
        except ValueError:
            writer.close()
            return
            # print('invalid data skip')
            # await reader.read(-1)
            # continue
        try:
            data: bytes = await reader.readexactly(mes)
        except asyncio.IncompleteReadError:
            writer.close()
            return
        self.data = deepcopy(data)
Moreover,
the struct module can pack the length into fixed-size bytes, and
a numpy.array can be sent directly using a memoryview:
async def sender():
    while True:
        # type(data) - np.ndarray
        mv = memoryview(data)
        length = mv.nbytes  # total byte count (len(mv) would count elements)
        writer.write(struct.pack('!Q', length))
        await writer.drain()
        writer.write(mv)
        await writer.drain()

async def socket_handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while True:
        try:
            length, = struct.unpack('!Q', await reader.readexactly(8))
            data = await reader.readexactly(length)
        except asyncio.IncompleteReadError:
            writer.close()
            return
        # change dtype to the actual one
        # reshape is probably needed
        array = np.frombuffer(data, dtype=np.float64)
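If the dtype or shape is not fixed in advance, they can travel in the frame too, e.g. this sketch assuming 2-D float64 arrays:

import struct
import numpy as np

def encode(arr: np.ndarray) -> bytes:
    rows, cols = arr.shape
    return struct.pack('!QQQ', arr.nbytes, rows, cols) + arr.tobytes()

def decode(frame: bytes) -> np.ndarray:
    nbytes, rows, cols = struct.unpack('!QQQ', frame[:24])
    return np.frombuffer(frame[24:24 + nbytes], dtype=np.float64).reshape(rows, cols)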
Just started working with asyncio. Not sure how close I am. Each time I run this function, I don't want to disturb the previous file-loop send if it is still sending, so that ultimately I can have an unlimited number of files sending at the same time using the same function.
async def fileread(self):
    loop = asyncio.get_event_loop()
    executor = ThreadPoolExecutor(8)
    tasks = [loop.run_in_executor(executor, self.fileread())]
    # tasks.append(self.fileread())
    global flsize
    progress = tqdm.tqdm(range(flsize), f"Sending {fp}", unit="B", unit_scale=True,
                         unit_divisor=1024)
    with open(fp, "rb") as f:
        bytes_read = f.read(flsize)  # NEED TO SEND SMALL BYTS
        bytes_read_small = f.read(4096)
        while not flsize == 0:
            if not bytes_read:
                # file transmitting is done
                break
            global file_sent
            file_sent = 1
            self.s.sendall(bytes_read)  # loop.sendall
            progress.update(len(bytes_read))
            file_sent = 1
        self.s.shutdown()
        self.s.close()
    await asyncio.wait(tasks)

def connect_to_server(self, *args, **kwargs):
    try:
        loop = asyncio.new_event_loop()
        loop.run_until_complete(self.fileread())
        # asyncio.run(self.fileread())
        # loop.close()  # not sure
    except:
        print("no op")
I post a new question related to the old one, for a problem with the get from the queue. This is the code (thanks to Martijn Pieters):
import asyncio
import sys
import json
import os
import websockets

async def socket_consumer(socket, outgoing):
    # take messages from the web socket and push them into the queue
    async for message in socket:
        await outgoing.put(message)
        file = open(r"/home/host/Desktop/FromSocket.txt", "a")
        file.write("From socket: " + ascii(message) + "\n")
        file.close()

async def socket_producer(socket, incoming):
    # take messages from the queue and send them to the socket
    while True:
        message = await incoming.get()
        file = open(r"/home/host/Desktop/ToSocket.txt", "a")
        file.write("To socket: " + ascii(message) + "\n")
        file.close()
        await socket.send(message)

async def connect_socket(incoming, outgoing, loop=None):
    header = {"Authorization": r"Basic XXX="}
    uri = 'XXXXXX'
    async with websockets.connect(uri, extra_headers=header) as web_socket:
        # create tasks for the consumer and producer. The asyncio loop will
        # manage these independently
        consumer_task = asyncio.ensure_future(
            socket_consumer(web_socket, outgoing), loop=loop)
        producer_task = asyncio.ensure_future(
            socket_producer(web_socket, incoming), loop=loop)
        # start both tasks, but have the loop return to us when one of them
        # has ended. We can then cancel the remainder
        done, pending = await asyncio.wait(
            [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
        for task in pending:
            task.cancel()

# pipe support
async def stdio(loop=None):
    if loop is None:
        loop = asyncio.get_event_loop()
    reader = asyncio.StreamReader()
    await loop.connect_read_pipe(
        lambda: asyncio.StreamReaderProtocol(reader), sys.stdin)
    writer_transport, writer_protocol = await loop.connect_write_pipe(
        asyncio.streams.FlowControlMixin, os.fdopen(sys.stdout.fileno(), 'wb'))
    writer = asyncio.streams.StreamWriter(
        writer_transport, writer_protocol, None, loop)
    return reader, writer

async def pipe_consumer(pipe_reader, outgoing):
    # take messages from the pipe and push them into the queue
    while True:
        message = await pipe_reader.readline()
        if not message:
            break
        file = open(r"/home/host/Desktop/FromPipe.txt", "a")
        file.write("From pipe: " + ascii(message.decode('utf8')) + "\n")
        file.close()
        await outgoing.put(message.decode('utf8'))

async def pipe_producer(pipe_writer, incoming):
    # take messages from the queue and send them to the pipe
    while True:
        json_message = await incoming.get()
        file = open(r"/home/host/Desktop/ToPipe.txt", "a")
        file.write("Send to pipe message: " + ascii(json_message) + "\n")
        file.close()
        try:
            message = json.loads(json_message)
            message_type = int(message.get('header', {}).get('messageID', -1))
        except (ValueError, TypeError, AttributeError):
            # failed to decode the message, or the message was not
            # a dictionary, or the messageID was not convertible to an integer
            message_type = None
            file = open(r"/home/host/Desktop/Error.txt", "a")
            file.write(" Error \n")
            file.close()
        # 1 is DENM message, 2 is CAM message
        file = open(r"/home/host/Desktop/ToPipe.txt", "a")
        file.write("Send to pipe type: " + str(message_type) + "\n")
        if message_type in {1, 2}:
            file.write("Send to pipe: " + json_message + "\n")
            pipe_writer.write(json_message.encode('utf8') + b'\n')
            await pipe_writer.drain()
        file.close()

async def connect_pipe(incoming, outgoing, loop=None):
    reader, writer = await stdio()
    # create tasks for the consumer and producer. The asyncio loop will
    # manage these independently
    consumer_task = asyncio.ensure_future(
        pipe_consumer(reader, outgoing), loop=loop)
    producer_task = asyncio.ensure_future(
        pipe_producer(writer, incoming), loop=loop)
    # start both tasks, but have the loop return to us when one of them
    # has ended. We can then cancel the remainder
    done, pending = await asyncio.wait(
        [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
    for task in pending:
        task.cancel()
    # force a result check; if there was an exception it'll be re-raised
    for task in done:
        task.result()

def main():
    loop = asyncio.get_event_loop()
    pipe_to_socket = asyncio.Queue(loop=loop)
    socket_to_pipe = asyncio.Queue(loop=loop)
    socket_coro = connect_socket(pipe_to_socket, socket_to_pipe, loop=loop)
    pipe_coro = connect_pipe(socket_to_pipe, pipe_to_socket, loop=loop)
    loop.run_until_complete(asyncio.gather(socket_coro, pipe_coro))

main()
To send to the Pipe I use this code:
import pexpect
test = r"/home/host/PycharmProjects/Tim/Tim.py"
process = pexpect.spawn("python3 " + test)
message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400},"cam":{"generationDeltaTime":1,"camParameters":{"basicContainer":{"stationType":5}}';
process.write(message + "\n")
process.wait()
but how can I create a script to read instead of write?
I tried with
test = r"/home/host/PycharmProjects/Tim/Tim.py"
p = pexpect.spawn("python3 " + test, timeout=None)
while True:
m = p.read()
file = open(r"/home/host/Desktop/OpeListening.txt", "a")
file.write(str(m))
file.close()
p.wait()
But the read goes immediately to the next step without any message. What is my error?
At the moment I use Popen with success:

process = subprocess.Popen(['python3', test], shell=False, stdout=subprocess.PIPE,
                           stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
while True:
    result = process.stdout.readline()
    result = result.decode("utf-8")
    print(result)
process.wait()
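As for pexpect itself, note that read() with no size argument tries to read until EOF, which never comes on an endless stream; a line-oriented loop is the usual pattern. A sketch:

import pexpect

test = r"/home/host/PycharmProjects/Tim/Tim.py"
p = pexpect.spawn("python3 " + test, timeout=None)
while True:
    line = p.readline()        # one line at a time (bytes); b'' at EOF
    if not line:
        break
    print(line.decode("utf-8"), end="")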
I have some equipment with an http interface that frequently generates an infinite http page with values I want to parse and save to the database.
I started with requests:
import asyncio
import asyncpg
import requests

class node_http_mtr():
    def __init__(self, ip, nsrc, ndst):
        self.ip = ip
        self.nsrc = nsrc
        self.ndst = ndst
        try:
            self.data = requests.get('http://' + self.ip + '/nph-cgi_mtr?duration=-1&interval=0',
                                     stream=True, timeout=10)
        except:
            return

    def __iter__(self):
        return self

    def __next__(self):
        mtr = list()
        try:
            for chunk in self.data.iter_content(32 * (self.nsrc + self.ndst), '\n'):
                # DEBUG log chunk
                for line in chunk.split('\n'):
                    # DEBUG log line
                    if line.startswith('MTR'):
                        try:
                            _, io, num, val = line.split(' ')
                            l, r = val.split(':')[1], val.split(':')[2]
                            mtr.append((self.ip, io+num, l, r))
                        except:
                            # ERROR log line
                            pass
                    if len(mtr) == self.nsrc + self.ndst:
                        break
                if len(mtr) == self.nsrc + self.ndst:
                    yield mtr
                else:
                    continue
        except:
            # ERROR connection lost
            return

async def save_to_db(data_to_save):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''INSERT INTO mtr (ip, io, l, r) VALUES %s''' % ','.join(str(row) for row in data_to_save))
    finally:
        await pool.release(conn)

async def remove_from_db(ip):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''DELETE FROM httpmtr WHERE ip = $1''', ip)
    finally:
        await pool.release(conn)

async def http_mtr_worker():
    global workers_list
    global loop
    while True:
        await asyncio.sleep(0)
        for ip in list(workers_list):
            data_to_save = next(workers_list[ip])
            if data_to_save:
                asyncio.ensure_future(save_to_db(next(data_to_save)))
            await asyncio.sleep(0)

async def check_for_workers():
    global workers_list
    global pool
    while True:
        await asyncio.sleep(0)
        try:
            async with pool.acquire() as conn:
                workers = await conn.fetch('''SELECT ip FROM httpmtr''')
        finally:
            await pool.release(conn)
        for worker in workers:
            if worker['ip'] not in list(workers_list):
                workers_list[worker['ip']] = node_http_mtr(worker['ip'], 8, 8)
                await asyncio.sleep(0)
                print('Add worker', worker['ip'])
            await asyncio.sleep(0)
        ips_to_delete = set(workers_list.keys()) - set([i[0] for i in workers])
        if len(ips_to_delete) != 0:
            for ip in ips_to_delete:
                print('Delete worker ', ip)
                workers_list.pop(ip)
        await asyncio.sleep(0)

async def make_db_connection():
    pool = await asyncpg.create_pool(user='postgres', password='data', database='test',
                                     host='localhost', max_queries=50000, command_timeout=60)
    return pool

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
pool = loop.run_until_complete(make_db_connection())
workers_list = {}

try:
    asyncio.ensure_future(check_for_workers())
    asyncio.ensure_future(http_mtr_worker())
    loop.run_forever()
except Exception as e:
    print(e)
    pass
finally:
    print("Closing Loop")
    loop.close()
I have a triggered procedure in the DB which deletes all data older than 1 second. The final result with one worker in PostgreSQL is:

test=# select count(*) from mtr;
 count
-------
   384
(1 row)

That means 384 results per second. There are 16 different kinds of data in each device, so I have 384/16 = 24 values per second. It's an acceptable result. But the more workers I add, the worse the performance gets: with 10 workers I have 2-3 times fewer values. The goal is to have hundreds of workers and 24-25 values/sec each.
The next thing I tried was aiohttp. I expected a much better result. I hastily wrote this test code:
import asyncio
from aiohttp import ClientSession
import asyncpg

async def parse(line):
    if line.startswith('MTR'):
        _, io, num, val = line.split(' ')
        l, r = val.split(':')[1], val.split(':')[2]
        return ('ip.will.be.here', io + num, l, r)

async def run():
    url = "http://10.150.20.130/nph-cgi_mtr?duration=-1&interval=0"
    async with ClientSession() as session:
        while True:
            async with session.get(url) as response:
                buffer = b''
                start = False
                async for line in response.content.iter_any():
                    if line.startswith(b'\n'):
                        start = True
                        buffer += line
                    elif start and line.endswith(b'\n'):
                        buffer += line
                        mtr = [await parse(line) for line in buffer.decode().split('\n')[1:-1]]
                        await save_to_db(mtr)
                        break
                    elif start:
                        buffer += line

async def make_db_connection():
    pool = await asyncpg.create_pool(user='postgres', password='data', database='test',
                                     host='localhost', max_queries=50000, command_timeout=60)
    return pool

async def save_to_db(data_to_save):
    global pool
    try:
        async with pool.acquire() as conn:
            await conn.execute('''INSERT INTO mtr (ip, io, l, r) VALUES %s''' % ','.join(str(row) for row in data_to_save))
    finally:
        await pool.release(conn)

loop = asyncio.get_event_loop()
pool = loop.run_until_complete(make_db_connection())
future = asyncio.ensure_future(run())
loop.run_until_complete(future)
And I got this:

test=# select count(*) from mtr;
 count
-------
    80
(1 row)

I.e., I got 5 times worse performance with asynchronous requests. I'm stuck and don't understand how to solve it.
UPDATE. Profiling didn't make the situation any clearer.
requests: [profiler output omitted]
aiohttp: [profiler output omitted]
With requests the situation is more or less clear, but I don't understand at all what the problem with async aiohttp is.
UPDATE 16/05/18. Finally I came back to multithreading, and I got what I need - constant performance with a big number of workers. Asynchronous calls are not a panacea, indeed.
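For reference, the thread-per-device layout that update describes might be sketched like this (parsing and the DB hand-off are elided; the URL matches the one above):

import threading
import requests

def worker(ip):
    # one blocking, streaming request per device; a slow device only
    # stalls its own thread
    resp = requests.get('http://' + ip + '/nph-cgi_mtr?duration=-1&interval=0',
                        stream=True, timeout=10)
    for line in resp.iter_lines(decode_unicode=True):
        if line and line.startswith('MTR'):
            pass  # parse the line and hand it to the database writer here

for ip in ['10.150.20.130']:   # one thread per device
    threading.Thread(target=worker, args=(ip,), daemon=True).start()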