I am trying to build a basic HTTP server with the following code.
async def handle_client(client, address):
print('connection start')
data = await loop.sock_recv(client, 1024)
resp = b'HTTP/1.1 404 NOT FOUND\r\n\r\n<h1>404 NOT FOUND</h1>'
await loop.sock_sendall(client, resp)
client.close()
async def run_server():
while True:
client, address = await loop.sock_accept(server)
print('start')
loop.create_task(handle_client(client,address))
print(client)
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost', 3006))
server.listen(8)
print(1)
loop = asyncio.get_event_loop()
loop.run_until_complete(run_server())
The output I expect to get is
1
start
connection start
But the actual result of running is
1
start
start
start
It seems that the coroutine passed to loop.create_task() is never run, which has me confused. What is the correct way to use loop.create_task()?
You need to await the task that is created via loop.create_task(); otherwise run_server() just schedules the task and moves on before the result has been returned.
Try changing run_server() to the following:
async def run_server():
while True:
client, address = await loop.sock_accept(server)
print('start')
await loop.create_task(handle_client(client,address))
print(client)
I'm trying to understand how to use asyncio streams for multiple connections that keep sending messages until a predefined condition or a socket timeout. The Python docs provide the following example of a TCP server based on asyncio streams:
import asyncio
async def handle_echo(reader, writer):
data = await reader.read(100)
message = data.decode()
addr = writer.get_extra_info('peername')
print(f"Received {message!r} from {addr!r}")
print(f"Send: {message!r}")
writer.write(data)
await writer.drain()
print("Close the connection")
writer.close()
async def main():
server = await asyncio.start_server(
handle_echo, '127.0.0.1', 8888)
addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
print(f'Serving on {addrs}')
async with server:
await server.serve_forever()
asyncio.run(main())
What I'm trying to do is more complex, and it looks more like this (a lot of it is pseudocode, written in capital letters or with the implementation omitted):
import asyncio
async def io_control(queue):
while true:
...
# do I/O control in this function ...
async def data_processing(queue):
while true:
...
# perform data handling
async def handle_data(reader, writer):
data = await reader.read()
message = data.decode()
addr = writer.get_extra_info('peername')
print(f"Received {message!r} from {addr!r}")
#do stuff with a queue - pass messages to other two async functions as needed
#keep open until something happens
if(ERROR or SOCKET_TIMEOUT):
writer.close()
async def server(queue):
server = await asyncio.start_server(
handle_data, '127.0.0.1', 8888)
addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
print(f'Serving on {addrs}')
async with server:
await server.serve_forever()
async def main():
queue_io = asyncio.Queue()
queue_data = asyncio.Queue()
asyncio.run(server(queue_data))
asyncio.run(data_handling(queue_data))
asyncio.run(io_control(queue_io))
asyncio.run(main())
Does this look feasible? I'm not used to working with co-routines (I'm coming from more of a multi-threading paradigm), so I'm not sure if what I'm doing is right or if I have to explicitly include yields or do any extra stuff.
If I understand correctly, you just need the TCP server to be able to handle multiple concurrent connections. The start_server function should already give you everything you need.
The first parameter client_connected_cb is a coroutine function called whenever a client establishes a connection. If you introduce a loop into that function (in your example code handle_data), you can keep the connection open until some criterion is met. What conditions exactly should lead to closing the connection is up to you, and the implementation details will obviously depend on that. The simplest approach I can imagine is something like this:
import asyncio
import logging
log = logging.getLogger(__name__)
async def handle_data(reader, writer):
while True:
data = (await reader.readline()).decode().strip()
if not data:
log.debug("client disconnected")
break
response = await your_data_processing_function(data)
writer.write(response.encode())
await writer.drain()
...
async def main():
server = await asyncio.start_server(handle_data, '127.0.0.1', 8888)
async with server:
await server.serve_forever()
if __name__ == '__main__':
asyncio.run(main())
There is in theory no limit to the number of concurrent connections.
If your client_connected_cb is a coroutine function, each new connection will schedule a new task for the event loop. That is where the concurrency comes from. The magic then happens at the point of awaiting new data from the client; that is where the event loop can switch execution to another coroutine. All this happens behind the scenes, so to speak.
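To see that in action you could, for example, open several client connections at once; each one is served by its own task on the same loop. A quick test sketch of my own (assuming the server above listens on 127.0.0.1:8888 and that your_data_processing_function returns a newline-terminated string):
import asyncio

async def one_client(i):
    reader, writer = await asyncio.open_connection('127.0.0.1', 8888)
    writer.write(f"hello from client {i}\n".encode())
    await writer.drain()
    reply = await reader.readline()
    print(i, reply.decode().strip())
    writer.close()
    await writer.wait_closed()

async def main():
    # all three clients are handled concurrently by the single event loop
    await asyncio.gather(*(one_client(i) for i in range(3)))

asyncio.run(main())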
If you want to introduce a timeout, you could for example wrap the readline coroutine in asyncio.wait_for and catch the resulting TimeoutError to exit the loop, as sketched below.
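A sketch under the assumption that 30 seconds of client silence should end the connection (only the read changes compared to the handler above):
import asyncio

async def handle_data(reader, writer):
    while True:
        try:
            # give the client at most 30 seconds to send the next line
            line = await asyncio.wait_for(reader.readline(), timeout=30.0)
        except asyncio.TimeoutError:
            log.debug("client timed out")
            break
        data = line.decode().strip()
        if not data:
            log.debug("client disconnected")
            break
        response = await your_data_processing_function(data)
        writer.write(response.encode())
        await writer.drain()
    writer.close()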
Hope this helps.
I've tried to set up a Python server that allows clients to connect and then wait for commands from the server, run those commands, and return a response. I'm also trying to allow multiple clients to connect to one server. This is the code so far:
server.py
import asyncio
import threading
import datetime
import time
async def client_connected(reader, writer):
data = await reader.read(100)
message = (data.decode())
addr = writer.get_extra_info('peername')
print(f"Received {message!r} from {addr!r}")
print(f"Send: {message!r}")
writer.write(message.encode())
await writer.drain()
async def main():
server = await asyncio.start_server(
client_connected, '127.0.0.1', 8888)
addrs = ', '.join(str(sock.getsockname()) for sock in server.sockets)
print(f'Serving on {addrs}')
async with server:
await server.serve_forever()
def msg(message):
for user in list_of_users:
print('send message - ', message)
user.write(message.encode())
asyncio.run(main())
client.py
import asyncio
import subprocess
async def tcp_echo_client(message):
reader, writer = await asyncio.open_connection(
'127.0.0.1', 8888)
print(f'Send: {message!r}')
writer.write(message.encode())
data = await reader.read(100)
print(f'Received: {data.decode()!r}')
while True:
print("[-] Awaiting commands...")
command = await reader.read(1024)
command = command.decode()
op = subprocess.Popen(command, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
output = op.stdout.read()
output_error = op.stderr.read()
print("[-] Sending response...")
output_msg = output + output_error
writer.write(output_msg)
asyncio.run(tcp_echo_client('Hello World!'))
What I don't understand is:
If I'm handling multiple client connections correctly
How to allow for commands to be input on the server and then sent out properly to each client
If I need a separate "main" client that sends out the commands or if I can run a terminal window directly using the server itself.
Any advice would be much appreciated!
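One possible shape for the broadcast part, sketched only to illustrate the idea (connected_writers and broadcast are names introduced here, and the command input loop is omitted): register each writer when client_connected runs, and iterate over that registry whenever the server wants to push a command.
import asyncio

connected_writers = set()          # hypothetical registry of client writers

async def client_connected(reader, writer):
    connected_writers.add(writer)
    try:
        while True:
            data = await reader.read(100)
            if not data:               # client disconnected
                break
            print(f"Received {data.decode()!r}")
    finally:
        connected_writers.discard(writer)
        writer.close()

async def broadcast(message):
    # push a command to every currently connected client
    for user in connected_writers:
        user.write(message.encode())
        await user.drain()
Something still has to call broadcast from inside the running event loop, whether the commands come from a separate "main" client or from a coroutine reading the server's own terminal.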
I would like to re-implement my code using asyncio coroutines instead of multi-threading.
server.py
def handle_client(client):
request = None
while request != 'quit':
request = client.recv(255).decode('utf8')
response = cmd.run(request)
client.send(response.encode('utf8'))
client.close()
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost', 15555))
server.listen(8)
try:
while True:
client, _ = server.accept()
threading.Thread(target=handle_client, args=(client,)).start()
except KeyboardInterrupt:
server.close()
client.py
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.connect(('localhost', 15555))
request = None
try:
while request != 'quit':
request = input('>> ')
if request:
server.send(request.encode('utf8'))
response = server.recv(255).decode('utf8')
print(response)
except KeyboardInterrupt:
server.close()
I know there are appropriate asynchronous network libraries for this. But I want to use only the core asyncio library in this case, in order to get a better understanding of it.
It would have been so nice to just add the async keyword before the handle_client definition... Here is a piece of code which seems to work, but I'm still confused about the implementation.
asyncio_server.py
def handle_client(client):
request = None
while request != 'quit':
request = client.recv(255).decode('utf8')
response = cmd.run(request)
client.send(response.encode('utf8'))
client.close()
def run_server(server):
client, _ = server.accept()
handle_client(client)
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost', 15555))
server.listen(8)
loop = asyncio.get_event_loop()
asyncio.async(run_server(server))
try:
loop.run_forever()
except KeyboardInterrupt:
server.close()
How do I adapt this in the best way, using the async/await keywords?
The closest literal translation of the threading code would create the socket as before, make it non-blocking, and use asyncio low-level socket operations to implement the server. Here is an example, sticking to the more relevant server part (the client is single-threaded and likely fine as-is):
import asyncio, socket
async def handle_client(client):
loop = asyncio.get_event_loop()
request = None
while request != 'quit':
request = (await loop.sock_recv(client, 255)).decode('utf8')
response = str(eval(request)) + '\n'
await loop.sock_sendall(client, response.encode('utf8'))
client.close()
async def run_server():
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost', 15555))
server.listen(8)
server.setblocking(False)
loop = asyncio.get_event_loop()
while True:
client, _ = await loop.sock_accept(server)
loop.create_task(handle_client(client))
asyncio.run(run_server())
The above works, but is not the intended way to use asyncio. It is very low-level and therefore error-prone, requiring you to remember to set the appropriate flags on the socket. Also, there is no buffering, so something as simple as reading a line from the client becomes a tiresome chore. This API level is really only intended for implementors of alternative event loops, which would provide their implementation of sock_recv, sock_sendall, etc.
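To illustrate the buffering point: even reading a single line has to be hand-rolled on top of sock_recv, for example with a helper along these lines (sock_readline is a hypothetical name, and reading one byte at a time is deliberately naive):
import asyncio

async def sock_readline(loop, sock):
    # accumulate bytes until a newline arrives or the peer closes the connection
    buf = bytearray()
    while not buf.endswith(b'\n'):
        chunk = await loop.sock_recv(sock, 1)
        if not chunk:
            break
        buf.extend(chunk)
    return bytes(buf)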
Asyncio's public API provides two abstraction layers intended for consumption: the older transport/protocol layer modeled after Twisted, and the newer streams layer. In new code, you almost certainly want to use the streams API, i.e. call asyncio.start_server and avoid raw sockets. That significantly reduces the line count:
import asyncio, socket
async def handle_client(reader, writer):
request = None
while request != 'quit':
request = (await reader.read(255)).decode('utf8')
response = str(eval(request)) + '\n'
writer.write(response.encode('utf8'))
await writer.drain()
writer.close()
async def run_server():
server = await asyncio.start_server(handle_client, 'localhost', 15555)
async with server:
await server.serve_forever()
asyncio.run(run_server())
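For symmetry, the client from the question could be ported to the same streams API (a straightforward sketch of my own; the blocking input() call technically stalls the event loop, which is tolerable for a single-connection command-line client):
import asyncio

async def run_client():
    reader, writer = await asyncio.open_connection('localhost', 15555)
    request = None
    while request != 'quit':
        request = input('>> ')
        if request:
            writer.write(request.encode('utf8'))
            await writer.drain()
            response = (await reader.read(255)).decode('utf8')
            print(response, end='')
    writer.close()
    await writer.wait_closed()

asyncio.run(run_client())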
I have read the answers and comments above, trying to figure out how to use the asyncio lib for sockets.
As often happens with Python, the official documentation, along with its examples, is the best source of useful information.
I got an understanding of Transports and Protocols (the low-level API) and Streams (the high-level API) from the examples presented at the end of the documentation article.
For example, TCP Echo Server:
import asyncio
class EchoServerProtocol(asyncio.Protocol):
def connection_made(self, transport):
peername = transport.get_extra_info('peername')
print('Connection from {}'.format(peername))
self.transport = transport
def data_received(self, data):
message = data.decode()
print('Data received: {!r}'.format(message))
print('Send: {!r}'.format(message))
self.transport.write(data)
print('Close the client socket')
self.transport.close()
async def main():
# Get a reference to the event loop as we plan to use
# low-level APIs.
loop = asyncio.get_running_loop()
server = await loop.create_server(
lambda: EchoServerProtocol(),
'127.0.0.1', 8888)
async with server:
await server.serve_forever()
asyncio.run(main())
and TCP Echo Client:
import asyncio
class EchoClientProtocol(asyncio.Protocol):
def __init__(self, message, on_con_lost):
self.message = message
self.on_con_lost = on_con_lost
def connection_made(self, transport):
transport.write(self.message.encode())
print('Data sent: {!r}'.format(self.message))
def data_received(self, data):
print('Data received: {!r}'.format(data.decode()))
def connection_lost(self, exc):
print('The server closed the connection')
self.on_con_lost.set_result(True)
async def main():
# Get a reference to the event loop as we plan to use
# low-level APIs.
loop = asyncio.get_running_loop()
on_con_lost = loop.create_future()
message = 'Hello World!'
transport, protocol = await loop.create_connection(
lambda: EchoClientProtocol(message, on_con_lost),
'127.0.0.1', 8888)
# Wait until the protocol signals that the connection
# is lost and close the transport.
try:
await on_con_lost
finally:
transport.close()
asyncio.run(main())
Hope it helps someone searching for a simple explanation of asyncio.
I am trying to add two coroutines to the asyncio loop and am getting an error:
RuntimeError: This event loop is already running
My objective is to communicate with a server (that I have no control over). This server expects an initial connection from the client. The server then provides a port to the client on this connection. The client has to use this port to create a second connection, which the server uses to send unsolicited messages to the client. The first connection remains up throughout for other two-way communications.
To recreate this scenario, I have some code that reproduces the error:
class Connection():
def __init__(self, ip, port, ioloop):
self.ip = ip
self.port = port
self.ioloop = ioloop
self.reader, self.writer = None, None
self.protocol = None
self.fileno = None
async def __aenter__(self):
# Applicable when doing 'with Connection(...'
log.info("Entering and Creating Connection")
self.reader, self.writer = (
await asyncio.open_connection(self.ip, self.port, loop=self.ioloop)
)
self.protocol = self.writer.transport.get_protocol()
self.fileno = self.writer.transport.get_extra_info('socket').fileno()
log.info(f"Created connection {self}")
return self
async def __aexit__(self, *args):
# Applicable when doing 'with Connection(...'
log.info(f"Exiting and Destroying Connection {self}")
if self.writer:
self.writer.close()
def __await__(self):
# Applicable when doing 'await Connection(...'
return self.__aenter__().__await__()
def __repr__(self):
return f"[Connection {self.ip}:{self.port}, {self.protocol}, fd={self.fileno}]"
async def send_recv_message(self, message):
log.debug(f"send: '{message}'")
self.writer.write(message.encode())
await self.writer.drain()
log.debug("awaiting data...")
data = await self.reader.read(9999)
data = data.decode()
log.debug(f"recv: '{data}'")
return data
class ServerConnection(Connection):
async def setup_connection(self):
event_port = 8889 # Assume this came from the server
print("In setup connection")
event_connection = await EventConnection('127.0.0.1', event_port, self.ioloop)
self.ioloop.run_until_complete(event_connection.recv_message())
class EventConnection(Connection):
async def recv_message(self):
log.debug("awaiting recv-only data...")
data = await self.reader.read(9999)
data = data.decode()
log.debug(f"recv only: '{data}'")
return data
async def main(loop):
client1 = await ServerConnection('127.0.0.1', 8888, loop)
await client1.setup_connection()
await client1.send_recv_message("Hello1")
await client1.send_recv_message("Hello2")
await asyncio.sleep(5)
if __name__ == '__main__':
#logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger()
ioloop = asyncio.get_event_loop()
print('starting loop')
ioloop.run_until_complete(main(ioloop))
print('completed loop')
ioloop.close()
The error occurs in the ServerConnection.setup_connection() method, where run_until_complete is called.
I am probably doing something wrong due to a lack of understanding of asyncio. Basically, how do I set up a secondary connection that will receive unsolicited event notifications while the first connection is being set up?
Thanks.
Followup
Since the code is very similar (with a few changes to add more functionality), I hope it's not bad etiquette to follow up on the original post, as the resulting error is still the same.
The new issue is that when the unsolicited message arrives (it is received by EventConnection), recv_message calls the process_data method. I would like process_data to be scheduled as a future so that recv_message completes (and the ioloop stops). ensure_future would then pick it up and run again, using ServerConnection to do a request/response to the server. Before that happens, though, control has to pass to some user code (represented by external_command(), from which I would prefer to hide the async machinery), which makes things synchronous again. Once that code has done what it needs to, it should call execute_command on ServerConnection, which kicks off the loop again.
The problem is that my expectation for ensure_future didn't pan out: the loop never seems to stop running. Hence, when execution reaches execute_command, which calls run_until_complete, an exception with the error "This event loop is already running" occurs.
I have two questions:
How can I make it so that the ioloop can stop after process_data is placed into ensure_future, and subsequently be able to run it again in execute_command?
Once recv_message has received something, how can we make it so that it can receive more unsolicited data? Is it enough/safe to just use ensure_future to call itself again?
Here's the example code that simulates this issue.
client1 = None
class ServerConnection(Connection):
connection_type = 'Server Connection'
async def setup_connection(self):
event_port = 8889 # Assume this came from the server
print("In setup connection")
event_connection = await EventConnection('127.0.0.1', event_port, self.ioloop)
asyncio.ensure_future(event_connection.recv_message())
async def _execute_command(self, data):
return await self.send_recv_message(data)
def execute_command(self, data):
response_str = self.ioloop.run_until_complete(self._execute_command(data))
print(f"exec cmd response_str: {response_str}")
def external_command(self, data):
self.execute_command(data)
class EventConnection(Connection):
connection_type = 'Event Connection'
async def recv_message(self):
global client1
log.debug("awaiting recv-only data...")
data = await self.reader.read(9999)
data = data.decode()
log.debug(f"recv-only: '{data}'")
asyncio.ensure_future(self.process_data(data))
asyncio.ensure_future(self.recv_message())
async def process_data(self, data):
global client1
await client1.external_command(data)
async def main(ioloop):
global client1
client1 = await ServerConnection('127.0.0.1', 8888, ioloop)
await client1.setup_connection()
print(f"after connection setup loop running is {ioloop.is_running()}")
await client1.send_recv_message("Hello1")
print(f"after Hello1 loop running is {ioloop.is_running()}")
await client1.send_recv_message("Hello2")
print(f"after Hello2 loop running is {ioloop.is_running()}")
while True:
print(f"inside while loop running is {ioloop.is_running()}")
t = 10
print(f"asyncio sleep {t} sec")
await asyncio.sleep(t)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger()
ioloop = asyncio.get_event_loop()
print('starting loop')
ioloop.run_until_complete(main(ioloop))
print('completed loop')
ioloop.close()
Try replacing:
self.ioloop.run_until_complete(...)
with:
await ...
Inside a coroutine the event loop is already running, so calling run_until_complete on it again raises "This event loop is already running"; awaiting the coroutine lets the running loop drive it instead.
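Applied to the followup code, that means execute_command (and whatever calls it, such as external_command) becomes a coroutine and awaits instead of re-entering the loop. A sketch of just the affected methods, under that assumption:
class ServerConnection(Connection):
    connection_type = 'Server Connection'

    async def setup_connection(self):
        event_port = 8889  # Assume this came from the server
        event_connection = await EventConnection('127.0.0.1', event_port, self.ioloop)
        # schedule the listener; the loop is already running, so never re-enter it
        asyncio.ensure_future(event_connection.recv_message())

    async def execute_command(self, data):
        # await the coroutine directly instead of self.ioloop.run_until_complete(...)
        response_str = await self.send_recv_message(data)
        print(f"exec cmd response_str: {response_str}")

    async def external_command(self, data):
        await self.execute_command(data)
With external_command now a coroutine, the existing await client1.external_command(data) in process_data works as written; the trade-off is that the async nature is no longer hidden from the user code.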