Implement Asyncio to send multiple files through socket simultaneously - python

Just started working with asyncio, so I'm not sure how close I am. Each time I run this function I don't want to disturb the previous file-send loop if it is still sending, so that ultimately I can have an unlimited number of files sending at the same time using the same function.
async def fileread(self):
    loop = asyncio.get_event_loop()
    executor = ThreadPoolExecutor(8)
    tasks = [loop.run_in_executor(executor, self.fileread())]
    #tasks.append(self.fileread())
    global flsize
    progress = tqdm.tqdm(range(flsize), f"Sending {fp}", unit="B", unit_scale=True,
                         unit_divisor=1024)
    with open(fp, "rb") as f:
        bytes_read = f.read(flsize)  # NEED TO SEND SMALL BYTS
        bytes_read_small = f.read(4096)
        while not flsize == 0:
            if not bytes_read:
                # file transmitting is done
                break
            global file_sent
            file_sent = 1
            self.s.sendall(bytes_read)  # loop.sendall
            progress.update(len(bytes_read))
        file_sent = 1
        self.s.shutdown()
        self.s.close()
    await asyncio.wait(tasks)

def connect_to_server(self, *args, **kwargs):
    try:
        loop = asyncio.new_event_loop()
        loop.run_until_complete(self.fileread())
        #asyncio.run(self.fileread())
        # loop.close() #not sure
    except:
        print("no op")

Related

How to pass serial_asyncio between async tasks

I need to pass the reader and writer from serial_asyncio between 2 asynchronous tasks: the first task reads user keyboard input and, when the user presses x, sends information to the Arduino; the second task reads the response from the Arduino and does something for specific responses.
Code:
import warnings
import serial
import serial.tools.list_ports
import requests
import json
import asyncio
import keyboard
import time
from serial_asyncio import open_serial_connection

with open("config.json") as config:
    cfg = json.load(config)
    config.close()

headers = {'Accept': 'application/json'}
url = f"""https://{cfg["web"]}/login?username={cfg["user"]}&password={cfg["password"]}"""
get_token = requests.post(url, headers=headers)
token = get_token.json()
my_token = token["token"]

arduino_ports = [
    p.device
    for p in serial.tools.list_ports.comports()
    if p.manufacturer and 'Arduino' in p.manufacturer
]
if not arduino_ports:
    raise IOError("No Arduino found")
if len(arduino_ports) > 1:
    warnings.warn("Multiple arduinos, select first")

async def arduino_listen(cfg, headers, reader):
    print('arduino_listen task created')
    while True:
        await asyncio.sleep(0.1)
        info = await reader.readline()
        print('arduino respond:', info)
        if "good" in str(info):
            print('good from arduino')
        if "bad" in str(info):
            print('bad from arduino')

async def arduino_writer(cfg, headers, writer):
    print('arduino_writer task created')
    keypress = False
    key = 'x'
    while True:
        if keypress and not keyboard.is_pressed(key):
            print('x pressed on keyboard, send 5')
            writer.write(b'5')
            await asyncio.sleep(0.3)
            keypress = False
        elif keyboard.is_pressed(key) and not keypress:
            keypress = True

async def serial():
    print(f'serial task created, arduino port {arduino_ports[0]} selected')
    reader, writer = await open_serial_connection(url=arduino_ports[0], baudrate=115200)
    #reader, writer = await open_serial_connection(url=arduino_ports[0], baudrate=115200)

loop = asyncio.get_event_loop()
loop.create_task(serial())
loop.create_task(arduino_listen(cfg, headers, reader))
loop.create_task(arduino_writer(cfg, headers, writer))
loop.run_forever()
My question is: how do I pass the connection from the serial function task to the arduino_listen and arduino_writer tasks? I have tried to add reader, writer = await open_serial_connection(url=arduino_ports[0], baudrate=115200) before loop.create_task, but an "unexpected indent" error appears.
Avoid giving functions names that conflict with your module names; in your case that's serial. Let's call it, say, start_serial.
Since start_serial is the initial function that starts the serial connection, arduino_listen and arduino_writer should be run in parallel right after it.
Change the final part of your code to the following:
async def start_serial():
    print(f'serial task created, arduino port {arduino_ports[0]} selected')
    reader, writer = await open_serial_connection(url=arduino_ports[0], baudrate=115200)
    return reader, writer

async def main():
    reader, writer = await start_serial()
    await asyncio.gather(
        arduino_listen(cfg, headers, reader),
        arduino_writer(cfg, headers, writer))

asyncio.run(main())

StreamReader.readline() does not return upon reading a newline

So I have been trying to experiment with Streams in Python and wrote the following code.
ServiceSubscription.py
import asyncio
from asyncio import StreamReader, StreamWriter

class ServiceSubscription():
    def __init__(self) -> None:
        self.subscriber_connections = []
        self.service_connections = []
        self.server_listener = None
        # Dictionary of service readers where key is the name of the service and the value is the reader for the service
        self.service_readers = {}

    """
    Create the listening server on port 7777
    """
    async def initiate_server(self):
        server = await asyncio.start_server(self.handle_incoming, '127.0.0.1', 7777)
        addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
        print(f'Serving on {addrs}')
        async with server:
            await server.serve_forever()

    """
    Handle the incoming connection based on whether the connection is from a service or suscriber
    The first message sent should include either 'service:SERVICE_NAME' or 'suscriber: [SERVICE1, SERVICE2, ...]'
    """
    async def handle_incoming(self, reader: StreamReader, writer: StreamWriter):
        data = await reader.read(100)
        message = data.decode()
        addr = writer.get_extra_info('peername')
        print(f"Received {message!r} from {addr!r}")
        if ("Service:" in f"{message!r}"):
            message = message[0:7]
            self.service_connections.append(Connections(reader, writer, message))
            service_reader = ServiceReader(reader=reader, writer=writer)
            self.service_readers[message] = (service_reader)
            await service_reader.broadcast()
        elif ("Suscriber:" in f"{message!r}"):
            message = message[0:9]
            self.subscriber_connections.append(Connections(reader, writer, message))
            self.service_readers[message].add_suscribers(writer)
        else:
            pass

class ServiceReader():
    def __init__(self, reader: StreamReader, writer: StreamWriter):
        self.reader = reader
        self.writer = writer
        self.suscribers: Writer = []
        self.loop = asyncio.get_event_loop()

    def stop(self):
        self._stop.set()

    """
    Add new subscriber's StreamWriter here
    """
    def add_suscribers(self, writer: StreamWriter):
        # Not sure if this will work
        self.suscribers.append(writer)

    """
    Read data and broadcast it to subscribed clients
    """
    async def broadcast(self):
        while not self.reader.at_eof():
            data = await self.reader.readline()
            if b'\n' in data:
                print(True)
            data = data.decode()
            print(data)
WriterTest.py
import asyncio
from os import linesep

async def tcp_echo_client(message):
    reader, writer = await asyncio.open_connection(
        '127.0.0.1', 7777)
    print(f'Send: {message!r}\n')
    writer.write(message.encode())
    await writer.drain()
    while not writer.is_closing():
        data = input("Type a message\n")
        data = (data + "\n").encode()
        writer.write(data)
        await writer.drain()
    writer.close()

asyncio.run(tcp_echo_client('Service: TEST'))
I ran both python ServiceSubscription.py and python WriterTest.py at the same time to simulate a client and server.
Upon running ServiceSubscription.py, it prints "Serving on ('127.0.0.1', 7777)". When WriterTest.py is executed, ServiceSubscription.py prints "Received 'Service: TEST' from ('127.0.0.1', 39923)". However, anything typed beyond that is not printed until WriterTest.py's connection is closed. When the connection is closed, ServiceSubscription.py prints out the remaining bytes in the buffer and confirms that there are newlines in the data it read, but readline does not pick them up: it doesn't return after encountering a newline.
The problem is here, in your WriterTest:
data = input("Type a message\n")
The input function is blocking, so in an asyncio program it blocks the event loop. All tasks are stopped until you enter something. With asyncio streams, the actual transmission of the bytes occurs in another task. Your call to the input function blocks that task, which prevents the transmission. Your server doesn't respond because nothing is actually sent.
Since this is just a test program, you have a couple of quick solutions. You could put this line:
await asyncio.sleep(1.0)
after the line await writer.drain(). This will keep the other tasks running for one second, plenty of time for the data to get transmitted.
You could, of course, replace the call to input with some hard-coded string.
Better solutions can be found at the following link:
Listen to keypress with asyncio
As a general rule, input and asyncio do not play well together.
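For completeness, the idea behind that link boils down to moving the blocking call into a worker thread. A minimal sketch of that pattern inside the client coroutine (using the standard run_in_executor, not a specific API from the linked answer):
# sketch: run the blocking input() in a thread so the event loop keeps serving other tasks
loop = asyncio.get_running_loop()
data = await loop.run_in_executor(None, input, "Type a message\n")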
The data is encoded together with a line break, and a couple of variables are unpacked as the reader and writer objects; meanwhile is_closing() waits for a closing process that should come from close(). You could try adding a condition for when the message has no characters.
import asyncio
import sys
from os import linesep

async def tcp_echo_client(message):
    reader, writer = await asyncio.open_connection(
        '127.0.0.1', 7777)
    print(f'Send: {message!r}\n')
    writer.write(message.encode())
    await writer.drain()
    while not writer.is_closing():
        await asyncio.get_event_loop().run_in_executor(None, lambda s="Type a message\n": sys.stdout.write(s+' '))
        data = await asyncio.get_event_loop().run_in_executor(None, sys.stdin.readline)
        if len(data) == 0:  # empty data
            break
        data = (data + "\n").encode()
        writer.write(data)
        await writer.drain()
    print('Close the connection')
    writer.close()
    await writer.wait_closed()
Here I change the while condition to instead check whether the data is empty, and use a different coroutine from asyncio:
async def broadcast(self):
    while True:
        # wait_for must be awaited to get the actual bytes back
        data = await asyncio.wait_for(self.reader.readline(), timeout=10.0)
        if data is None or len(data.decode()) == 0:
            print("Expected message, received None")
            break
        data = data.decode().rstrip().upper()
        print(data)

Send multiple files with asyncio server

Here is server.py
import asyncio
import aiofile

async def handler(reader, writer):
    data = await reader.read(n=-1)
    addr = writer.get_extra_info('peername')
    print('Addr', addr)
    # photo_120721_215652.jpg
    name = data[:23].decode()
    async with aiofile.async_open(name, 'wb') as afp:
        await afp.write(data[23:])
    print("handler end")

async def main():
    server = await asyncio.start_server(handler, '0.0.0.0', 8888)
    addr = server.sockets[0].getsockname()
    print(f'Serving on {addr}')
    async with server:
        await server.serve_forever()

asyncio.run(main())
Here is client.py
import asyncio
import aiofile

async def send_photos_to_server(filelist):
    retry = 0
    for file in filelist:
        while True:
            try:
                reader, writer = await asyncio.open_connection('192.168.1.100', 8888)
            except OSError:
                while retry != 5:
                    await asyncio.sleep(3)
                    retry += 1
            else:
                break  # exit the loop
        if retry == 5:
            print("No connection after {} retries")
            break
        name = file.split('/')[-1].encode()
        writer.write(name)
        await writer.drain()
        async with aiofile.async_open(file, 'rb') as afp:
            afp_reader = await afp.read(length=-1)
        writer.write(afp_reader)
        await writer.drain()
        writer.close()
        await writer.wait_closed()

filelist = ['./photos/photo_120721_215652.jpg',
            './photos/photo_120721_215654.jpg',
            './photos/photo_120721_215656.jpg']

asyncio.run(send_photos_to_server(filelist))
So this code is working, but I'm not sure it's the proper way to do it. Every time we send a new file, we create a new connection. Sure, I could pack all three files into an archive, but I'm not sure that's handy. It's also inconvenient to prepend the filename to the message, because then on the server side I have to somehow find it. So can someone explain how to modify this code for better practice? I've just started to learn asyncio and I'm not good at it.
Servers use protocols - that means rules which describe what one side has to send and how the other side has to read it.
For example, HTTP first sends headers, then an empty line, and then the data (body). One of the headers says how long the body is.
The other side first has to read data byte after byte until it finds the empty line ('\n\n'); then it has the headers, can work out how long the body is, and can read the correct number of bytes.
In the same way you can create your own protocol.
I assume you want to send all files over one connection.
You can send the number of files + \n, and then use a for-loop to send filename + \n, file size + \n, and the file data.
The other side should do something similar: first read up to \n to get the number of files, then use a for-loop to again read up to \n to get the filename, read up to \n to get the size, and use the size to read the file data.
This also shows that it may be simpler to compress the data with zip and send that - the other side will then have all the information - filename, size, data - inside the zip file.
Of course you can simplify it and still send every file over a separate connection.
Example code
server.py
import asyncio
import aiofile

async def handler(reader, writer):
    # --- before for-loop ---

    data = await reader.read(n=-1)

    addr = writer.get_extra_info('peername')
    print('Addr', addr)

    # get number of images + '\n'
    start = 0
    end = data.find(b'\n', start)
    item = data[start:end]
    print('[DEBUG] start,end:', start, end, item)

    number = int(item.decode())
    print('number:', number)
    print('--------')

    # --- for-loop ---

    for _ in range(number):
        # get filename + '\n'
        start = end + 1
        end = data.find(b'\n', start)
        item = data[start:end]
        print('[DEBUG] start,end:', start, end, item)

        name = item.decode()
        print('name:', name)

        # get size + '\n'
        start = end + 1
        end = data.find(b'\n', start)
        item = data[start:end]
        print('[DEBUG] start,end:', start, end, item)

        size = int(item.decode())
        print('size:', size)

        # get data
        start = end + 1
        end = start + size - 1
        item = data[start:end + 1]  # slice end is exclusive, so use end + 1 to keep all `size` bytes
        print('[DEBUG] start,end:', start, end, item[:10])

        async with aiofile.async_open(name, 'wb') as afp:
            await afp.write(item)

        print('--------')

    # --- after for-loop ---

    print("handler end")

async def main():
    server = await asyncio.start_server(handler, '0.0.0.0', 8888)
    addr = server.sockets[0].getsockname()
    print(f'Serving on {addr}')
    async with server:
        await server.serve_forever()

asyncio.run(main())
client.py
import asyncio
import aiofile

async def send_photos_to_server(filelist):
    # --- before for-loop ---

    retry = 0

    while True:
        try:
            reader, writer = await asyncio.open_connection('0.0.0.0', 8888)
        except OSError:
            while retry != 5:
                await asyncio.sleep(3)
                retry += 1
        else:
            break  # exit the loop

    if retry == 5:
        print("No connection after {} retries")
        return

    # send number of images + '\n'
    text = str(len(filelist)) + '\n'
    writer.write(text.encode())
    await writer.drain()

    # --- for-loop ---

    for filename in filelist:
        # send filename + '\n'
        name = filename.split('/')[-1]
        text = name + '\n'
        writer.write(text.encode())
        await writer.drain()

        async with aiofile.async_open(filename, 'rb') as afp:
            afp_reader = await afp.read(length=-1)

        # send size + '\n'
        size = str(len(afp_reader))
        text = size + '\n'
        writer.write(text.encode())
        await writer.drain()

        # send data
        writer.write(afp_reader)
        await writer.drain()

    # --- after for-loop ---

    writer.close()
    await writer.wait_closed()

filelist = [
    '/home/furas/test/image1.png',
    '/home/furas/test/image2.png',
    '/home/furas/test/image3.png',
]

asyncio.run(send_photos_to_server(filelist))
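The same framing can also be parsed as it arrives instead of buffering the whole payload with read(-1). A minimal server-side sketch of that variant, assuming the client above, using readline() for the text fields and readexactly() for the file bytes:
import asyncio
import aiofile

async def handler(reader, writer):
    # first line: number of files
    number = int((await reader.readline()).decode().strip())
    for _ in range(number):
        name = (await reader.readline()).decode().strip()       # filename line
        size = int((await reader.readline()).decode().strip())  # size line
        payload = await reader.readexactly(size)                 # exactly `size` bytes of file data
        async with aiofile.async_open(name, 'wb') as afp:
            await afp.write(payload)
    print("handler end")

async def main():
    server = await asyncio.start_server(handler, '0.0.0.0', 8888)
    async with server:
        await server.serve_forever()

# asyncio.run(main())
readexactly(size) is what makes the length prefix pay off: the server knows exactly where one file's data ends and the next filename begins.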

Python: Correct way of using proxybroker lib with asyncio

I want to use the ProxyBroker lib in my python program to generate a list/queue of 10 working proxies.
Unfortunately I was not able to find anything similar in the example page of the lib.
This is what I got right now, but it feels like I'm using asyncio the wrong way to complete my task. Especially the gather function I'm using in combination with the collect(proxies) call.
def get_proxies(self, limit=10):
    async def collect(proxies):
        p = []
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            p.append(proxy)
        return p

    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=10),
        collect(proxies))
    loop = asyncio.get_event_loop()
    proxy_list = loop.run_until_complete(tasks)
    loop.close()
    return proxy_list
What would be the preferred/correct way of generating the proxy list?
You can do it like this:
"""Find and show 10 working HTTP(S) proxies."""
import asyncio
from proxybroker import Broker
async def show(proxies):
while True:
proxy = await proxies.get()
if proxy is None: break
print('Found proxy: %s' % proxy)
proxies = asyncio.Queue()
broker = Broker(proxies)
tasks = asyncio.gather(
broker.find(types=['HTTP', 'HTTPS'], limit=10),
show(proxies))
loop = asyncio.get_event_loop()
loop.run_until_complete(tasks)
or if you want to generate a file:
import asyncio
from proxybroker import Broker

async def save(proxies, filename):
    """Save proxies to a file."""
    with open(filename, 'w') as f:
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            proto = 'https' if 'HTTPS' in proxy.types else 'http'
            row = '%s://%s:%d\n' % (proto, proxy.host, proxy.port)
            f.write(row)

def main():
    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(broker.find(types=['HTTP', 'HTTPS'], limit=10),
                           save(proxies, filename='proxies.txt'))
    loop = asyncio.get_event_loop()
    loop.run_until_complete(tasks)

if __name__ == '__main__':
    main()
That's all!
Good luck!
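If the goal is to end up with the proxies in a Python list (as in the original get_proxies), the same gather pattern works with a collecting coroutine. A minimal sketch built from the question's own collect() idea:
import asyncio
from proxybroker import Broker

async def collect(proxies):
    """Drain the queue into a list until the Broker signals the end with None."""
    found = []
    while True:
        proxy = await proxies.get()
        if proxy is None:
            break
        found.append(proxy)
    return found

def get_proxies(limit=10):
    proxies = asyncio.Queue()
    broker = Broker(proxies)
    loop = asyncio.get_event_loop()
    # gather returns results in order: broker.find's (None) first, then the collected list
    _, proxy_list = loop.run_until_complete(asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=limit),
        collect(proxies)))
    return proxy_list

# print(get_proxies())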

Reading from pipe with pexpect on Linux

I'm posting a new question related to the old one, about a problem with the get from the queue. This is the code (thanks to Martijn Pieters):
import asyncio
import sys
import json
import os
import websockets

async def socket_consumer(socket, outgoing):
    # take messages from the web socket and push them into the queue
    async for message in socket:
        await outgoing.put(message)
        file = open(r"/home/host/Desktop/FromSocket.txt", "a")
        file.write("From socket: " + ascii(message) + "\n")
        file.close()

async def socket_producer(socket, incoming):
    # take messages from the queue and send them to the socket
    while True:
        message = await incoming.get()
        file = open(r"/home/host/Desktop/ToSocket.txt", "a")
        file.write("To socket: " + ascii(message) + "\n")
        file.close()
        await socket.send(message)

async def connect_socket(incoming, outgoing, loop=None):
    header = {"Authorization": r"Basic XXX="}
    uri = 'XXXXXX'
    async with websockets.connect(uri, extra_headers=header) as web_socket:
        # create tasks for the consumer and producer. The asyncio loop will
        # manage these independently
        consumer_task = asyncio.ensure_future(
            socket_consumer(web_socket, outgoing), loop=loop)
        producer_task = asyncio.ensure_future(
            socket_producer(web_socket, incoming), loop=loop)
        # start both tasks, but have the loop return to us when one of them
        # has ended. We can then cancel the remainder
        done, pending = await asyncio.wait(
            [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
        for task in pending:
            task.cancel()

# pipe support
async def stdio(loop=None):
    if loop is None:
        loop = asyncio.get_event_loop()
    reader = asyncio.StreamReader()
    await loop.connect_read_pipe(
        lambda: asyncio.StreamReaderProtocol(reader), sys.stdin)
    writer_transport, writer_protocol = await loop.connect_write_pipe(
        asyncio.streams.FlowControlMixin, os.fdopen(sys.stdout.fileno(), 'wb'))
    writer = asyncio.streams.StreamWriter(
        writer_transport, writer_protocol, None, loop)
    return reader, writer

async def pipe_consumer(pipe_reader, outgoing):
    # take messages from the pipe and push them into the queue
    while True:
        message = await pipe_reader.readline()
        if not message:
            break
        file = open(r"/home/host/Desktop/FromPipe.txt", "a")
        file.write("From pipe: " + ascii(message.decode('utf8')) + "\n")
        file.close()
        await outgoing.put(message.decode('utf8'))

async def pipe_producer(pipe_writer, incoming):
    # take messages from the queue and send them to the pipe
    while True:
        json_message = await incoming.get()
        file = open(r"/home/host/Desktop/ToPipe.txt", "a")
        file.write("Send to pipe message: " + ascii(json_message) + "\n")
        file.close()
        try:
            message = json.loads(json_message)
            message_type = int(message.get('header', {}).get('messageID', -1))
        except (ValueError, TypeError, AttributeError):
            # failed to decode the message, or the message was not
            # a dictionary, or the messageID was convertable to an integer
            message_type = None
            file = open(r"/home/host/Desktop/Error.txt", "a")
            file.write(" Error \n")
            file.close()
        # 1 is DENM message, 2 is CAM message
        file.write("Send to pipe type: " + type)
        if message_type in {1, 2}:
            file.write("Send to pipe: " + json_message)
            pipe_writer.write(json_message.encode('utf8') + b'\n')
            await pipe_writer.drain()

async def connect_pipe(incoming, outgoing, loop=None):
    reader, writer = await stdio()
    # create tasks for the consumer and producer. The asyncio loop will
    # manage these independently
    consumer_task = asyncio.ensure_future(
        pipe_consumer(reader, outgoing), loop=loop)
    producer_task = asyncio.ensure_future(
        pipe_producer(writer, incoming), loop=loop)
    # start both tasks, but have the loop return to us when one of them
    # has ended. We can then cancel the remainder
    done, pending = await asyncio.wait(
        [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
    for task in pending:
        task.cancel()
    # force a result check; if there was an exception it'll be re-raised
    for task in done:
        task.result()

def main():
    loop = asyncio.get_event_loop()
    pipe_to_socket = asyncio.Queue(loop=loop)
    socket_to_pipe = asyncio.Queue(loop=loop)
    socket_coro = connect_socket(pipe_to_socket, socket_to_pipe, loop=loop)
    pipe_coro = connect_pipe(socket_to_pipe, pipe_to_socket, loop=loop)
    loop.run_until_complete(asyncio.gather(socket_coro, pipe_coro))

main()
To send to the Pipe I use this code:
import pexpect
test = r"/home/host/PycharmProjects/Tim/Tim.py"
process = pexpect.spawn("python3 " + test)
message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400},"cam":{"generationDeltaTime":1,"camParameters":{"basicContainer":{"stationType":5}}';
process.write(message + "\n")
process.wait()
but how can I create a script to read instead of write?
I tried with
test = r"/home/host/PycharmProjects/Tim/Tim.py"
p = pexpect.spawn("python3 " + test, timeout=None)
while True:
m = p.read()
file = open(r"/home/host/Desktop/OpeListening.txt", "a")
file.write(str(m))
file.close()
p.wait()
But the read goes immediately to the next step without any message. What is my error?
At the moment I am successfully using Popen:
import subprocess

process = subprocess.Popen(['python3', test], shell=False, stdout=subprocess.PIPE,
                           stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
while True:
    result = process.stdout.readline()
    result = result.decode("utf-8")
    print(result)
process.wait()
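As a rough comparison: p.read() with no size argument reads until the child closes its output, so it either blocks or, if the child has already exited, returns at once with whatever was buffered. A pexpect loop closer to the Popen version above might look like this (a sketch, assuming Tim.py writes newline-terminated output):
import pexpect

test = r"/home/host/PycharmProjects/Tim/Tim.py"
p = pexpect.spawn("python3 " + test, timeout=None, encoding="utf-8")
while True:
    line = p.readline()  # returns '' once the child reaches EOF
    if not line:
        break
    with open(r"/home/host/Desktop/OpeListening.txt", "a") as file:
        file.write(line)
p.wait()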
