I have a bog-standard synchronous python program that needs to be able to read data from websockets and update the GUI with the data. However, asyncio creep is constantly tripping me up.
How do I make a module that:
accepts multiple subscriptions to multiple sources
sends an update to the requester whenever there's data
opens exactly one websocket connection per URL
resets the websocket if it closes
Here's what I have already, but it's failing at many points:
run_forever() means the loop gets stuck before the subscription completes, so handle() falls straight through its while loop because the condition is still falsey
it does not restart sockets when they're down, because a websockets connection object has no connected property (the websocket package, without the s, does have one, but I'm not clear on the differences between the two libraries and can't find info online either)
I'm absolutely not sure if my approach is remotely correct.
Been fighting with this for weeks. Would appreciate some pointers.
import asyncio
import json

import websockets

class WSClient():
subscriptions = set()
connections = {}
started = False
def __init__(self):
self.loop = asyncio.get_event_loop()
def start(self):
self.started = True
self.loop.run_until_complete(self.handle())
        self.loop.run_forever()  # problematic, because it does not allow new subscribe() events
async def handle(self):
while len(self.connections) > 0:
# listen to every websocket
futures = [self.listen(self.connections[url]) for url in self.connections]
done, pending = await asyncio.wait(futures)
# the following is apparently necessary to avoid warnings
# about non-retrieved exceptions etc
try:
data, ws = done.pop().result()
except Exception as e:
print("OTHER EXCEPTION", e)
for task in pending:
task.cancel()
async def listen(self, ws):
try:
async for data in ws:
data = json.loads(data)
# call the subscriber (listener) back when there's data
[s.listener._handle_result(data) for s in self.subscriptions if s.ws == ws]
except Exception as e:
print('ERROR LISTENING; RESTARTING SOCKET', e)
await asyncio.sleep(2)
self.restart_socket(ws)
def subscribe(self, subscription):
task = self.loop.create_task(self._subscribe(subscription))
asyncio.gather(task)
if not self.started:
self.start()
async def _subscribe(self, subscription):
try:
ws = self.connections.get(subscription.url, await websockets.connect(subscription.url))
await ws.send(json.dumps(subscription.sub_msg))
subscription.ws = ws
self.connections[subscription.url] = ws
self.subscriptions.add(subscription)
except Exception as e:
print("ERROR SUBSCRIBING; RETRYING", e)
await asyncio.sleep(2)
self.subscribe(subscription)
def restart_socket(self, ws):
for s in self.subscriptions:
if s.ws == ws and not s.ws.connected:
print(s)
del self.connections[s.url]
self.subscribe(s)
Since you mention a GUI, this is probably not a "bog-standard synchronous python program". A GUI program usually has a non-blocking, event-driven main thread that handles concurrent user actions via callbacks, which is very similar to how asyncio works. A common way to combine asyncio with a GUI is to replace asyncio's default event loop with a GUI-specific one, so that your coroutines simply run inside the GUI event loop and you avoid a run_forever() call that blocks everything.
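For illustration, here is a minimal standard-library sketch of that idea with tkinter; the 50 ms refresh interval and the fetch_data coroutine are placeholders for your own websocket consumer, not part of your code. The tkinter event queue is pumped from inside the asyncio loop, so coroutines and GUI callbacks share one thread.
import asyncio
import tkinter as tk

async def run_tk(root, interval=0.05):
    # pump pending tkinter events from inside the asyncio loop
    try:
        while True:
            root.update()
            await asyncio.sleep(interval)
    except tk.TclError:
        pass  # window was closed

async def fetch_data(label):
    # placeholder for a websocket consumer that updates the GUI
    n = 0
    while True:
        n += 1
        label.config(text=f"updates received: {n}")
        await asyncio.sleep(1)

root = tk.Tk()
label = tk.Label(root, text="waiting...")
label.pack()

loop = asyncio.get_event_loop()
loop.create_task(fetch_data(label))
loop.run_until_complete(run_tk(root))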
An alternative is to run the asyncio event loop in a separate thread, so that your program can wait for websocket data and for user clicks at the same time. I've rewritten your code along those lines:
import asyncio
import threading
import websockets
import json
class WSClient(threading.Thread):
def __init__(self):
super().__init__()
self._loop = None
self._tasks = {}
self._stop_event = None
def run(self):
        self._loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._loop)
        # note: the loop= argument to asyncio primitives was removed in
        # Python 3.10; setting this thread's loop explicitly is enough
        self._stop_event = asyncio.Event()
try:
self._loop.run_until_complete(self._stop_event.wait())
self._loop.run_until_complete(self._clean())
finally:
self._loop.close()
def stop(self):
self._loop.call_soon_threadsafe(self._stop_event.set)
def subscribe(self, url, sub_msg, callback):
def _subscribe():
if url not in self._tasks:
task = self._loop.create_task(
self._listen(url, sub_msg, callback))
self._tasks[url] = task
self._loop.call_soon_threadsafe(_subscribe)
def unsubscribe(self, url):
def _unsubscribe():
task = self._tasks.pop(url, None)
if task is not None:
task.cancel()
self._loop.call_soon_threadsafe(_unsubscribe)
async def _listen(self, url, sub_msg, callback):
try:
while not self._stop_event.is_set():
try:
                    ws = await websockets.connect(url)
await ws.send(json.dumps(sub_msg))
async for data in ws:
data = json.loads(data)
# NOTE: please make sure that `callback` won't block,
# and it is allowed to update GUI from threads.
# If not, you'll need to find a way to call it from
# main/GUI thread (similar to `call_soon_threadsafe`)
callback(data)
except Exception as e:
print('ERROR; RESTARTING SOCKET IN 2 SECONDS', e)
                    await asyncio.sleep(2)
finally:
self._tasks.pop(url, None)
async def _clean(self):
for task in self._tasks.values():
task.cancel()
        # gather the cancelled tasks so their CancelledErrors are consumed
        await asyncio.gather(*self._tasks.values(), return_exceptions=True)
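A usage sketch from the GUI side might look like the following (the URL, subscription message, and on_data callback are made-up placeholders; also note that on_data runs on the websocket thread, so marshal back to the GUI thread there if your toolkit requires it):
import time

def on_data(data):
    # called from the websocket thread whenever a message arrives
    print("received:", data)

client = WSClient()
client.start()       # starts the thread, which creates and runs its own loop
time.sleep(0.1)      # crude: give the thread a moment to create its loop

client.subscribe(
    "wss://example.invalid/stream",           # placeholder URL
    {"action": "subscribe", "channel": "x"},  # placeholder sub_msg
    on_data,
)

# ... later, on application shutdown:
client.stop()
client.join()
In a real program, a threading.Event set at the top of run() would be a cleaner way to wait for the loop than the sleep above.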
You can also try Tornado, or Autobahn on Twisted, for websockets.
I have a sync method with a connection updater that checks whether a connection is still open; it runs in a background thread, works as expected, and runs continuously.
How would the same thing be possible in asyncio? Would asyncio.ensure_future be the way to do this, or is there another Pythonic way to accomplish the same?
def main():
_check_conn = threading.Thread(target=_check_conn_bg)
_check_conn.daemon = True
_check_conn.start()
def _check_conn_bg():
while True:
#do checking code
Use asyncio.ensure_future(). In your coroutine, make sure your code won't block the loop (otherwise use loop.run_in_executor()). It's also good to catch asyncio.CancelledError.
import asyncio
class Connection:
pass
async def _check_conn_bg(conn: Connection):
try:
while True:
# do checking code
print(f'Checking connection: {conn}')
await asyncio.sleep(1)
except asyncio.CancelledError:
print('Conn check task cancelled')
def main():
asyncio.ensure_future(_check_conn_bg(Connection()))
asyncio.get_event_loop().run_forever()
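If the checking code itself blocks (a socket probe, a file read, ...), here is a rough sketch of the run_in_executor() variant mentioned above; blocking_check is a placeholder for your own blocking function:
import asyncio
import time

def blocking_check(conn):
    # placeholder for blocking I/O, e.g. a socket probe
    time.sleep(0.5)
    return True

async def check_conn_bg_blocking(conn):
    loop = asyncio.get_running_loop()
    try:
        while True:
            alive = await loop.run_in_executor(None, blocking_check, conn)
            print(f'Connection {conn} alive: {alive}')
            await asyncio.sleep(1)
    except asyncio.CancelledError:
        print('Conn check task cancelled')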
My design is probably wrong here, but I'll ask anyway. Currently I am using websockets to create a websocket server. I am performing a lot of other work in my program and would like the websocket server to run in its own thread. I create a class that subclasses threading.Thread and overrides the run method. This class's stop_ws_server method is where I close the websocket server and stop and close the event loop that I created.
import asyncio
import json
import threading
from time import sleep

import websockets

class MyWsServer(threading.Thread):
def __init__(self, address, port):
threading.Thread.__init__(self)
self.port = port
self.address = address
self.server = None
self.running = False
self.loop = None
def start_ws_server(self):
self.start()
def run(self):
if not self.loop:
self.loop = asyncio.new_event_loop()
ws_server = websockets.serve(self.ws_handler, self.address, self.port,
ping_timeout=None, ping_interval=None, loop=self.loop)
self.running = True
self.server = ws_server
self.loop.run_until_complete(self.server)
self.loop.run_forever()
def stop_ws_server(self):
self.running = False
self.server.ws_server.close()
self.loop.stop()
self.loop.close()
async def ws_handler(self, websocket, path):
while self.running:
print(self.running)
# simulate work
print("doing some work")
sleep(5)
print("Sending data")
data = json.dumps({"test": "test test"})
try:
await websocket.send(data)
result = await websocket.recv()
print(result)
result = json.loads(result)
print(f"json: {result}")
except websockets.ConnectionClosed:
print(f"Terminated")
break
print("out of ws_handler")
This class instance is accessible elsewhere in the program, and when calling the stop_ws_server() method I get the following error from self.loop.close(). Nothing further up in the stack trace is worth noting.
File "C:\Users\User\AppData\Local\Programs\Python\Python39\lib\asyncio\proactor_events.py", line 674, in close
raise RuntimeError("Cannot close a running event loop")
RuntimeError: Cannot close a running event loop
So my questions are:
Why doesn't self.loop.stop() stop the event loop?
Is there a better solution for stopping the event loop and, ultimately, stopping the thread?
Also just to note, I don't think setting self.running to False in stop_ws_server() does anything since this thread should be stuck on result = await websocket.recv() the majority of the time. I don't seem to have a clean way to exit the websocket handler. The last three lines in stop_ws_server() seem rather bruteforce-ish to me.
I've also seen some posts on Stack Overflow about asyncio objects not being thread-safe, as well as plenty of posts recommending some use of asyncio's loop.call_soon_threadsafe(). I'm not sure how I can use that with my current setup, though.
Well, I greatly appreciate any help. Thanks. Please let me know if anything needs more clarification.
Well, it seems quite hackish to me, but I took guidance from another solution here on Stack Overflow: https://stackoverflow.com/a/67767248/5879710. The new code is below. I also removed some lines from the previous code, such as now-unused variables and print statements.
import asyncio
import json
from multiprocessing import Process
from time import sleep

import websockets

class MyWsServer(Process):
def __init__(self, address, port):
super().__init__()
self.port = port
self.address = address
def run(self):
loop = asyncio.new_event_loop()
ws_server = websockets.serve(self.ws_handler, self.address, self.port,
ping_timeout=None, ping_interval=None, loop=loop)
loop.run_until_complete(ws_server)
loop.run_forever()
async def ws_handler(self, websocket, path):
while True:
# simulate work
print("doing some work")
sleep(5)
print("Sending data")
data = json.dumps({"test": "test test"})
try:
await websocket.send(data)
result = await websocket.recv()
print(result)
result = json.loads(result)
print(f"json: {result}")
except websockets.ConnectionClosed:
print(f"Terminated")
break
To start the process, I call start() on the MyWsServer instance that I create elsewhere in the code, and I call terminate() to kill the process. I would have liked a more graceful way to shut down the event loop and the other resources created in the process, but I haven't found a better way yet.
Edit: I forgot to mention that I am using multiprocessing now.
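For what it's worth, the RuntimeError in the earlier attempt comes from calling loop.close() from another thread while the loop is still running; loop.stop() has to be scheduled with call_soon_threadsafe(), and the cleanup has to happen after run_forever() returns. A rough sketch of that shutdown order (class name and structure are illustrative, not a prescribed pattern):
import asyncio
import threading
import websockets

class GracefulWsServer(threading.Thread):
    def __init__(self, handler, address, port):
        super().__init__()
        self.handler = handler
        self.address = address
        self.port = port
        self.loop = None
        self.server = None

    def run(self):
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
        self.server = self.loop.run_until_complete(
            websockets.serve(self.handler, self.address, self.port))
        self.loop.run_forever()            # returns once stop() is scheduled
        # no longer inside run_forever(), so cleanup is legal here
        self.server.close()
        self.loop.run_until_complete(self.server.wait_closed())
        self.loop.close()

    def stop(self):
        # schedule the loop to exit from another thread; the actual
        # cleanup happens in run() after run_forever() returns
        self.loop.call_soon_threadsafe(self.loop.stop)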
I'm currently struggling with something "simple".
I'd like to have a Python WebSocket server that can be shut down by outside events (e.g. Ctrl+C from the command line).
Here is my code so far:
import asyncio
import functools
import threading
import time

import websockets

PORT = 8765
class Server(object):
def __init__(self):
self.online_players = dict()
self.online_players_lock = asyncio.Lock()
self.websocket_server = None
async def add_online_player(self, id, player):
async with self.online_players_lock:
self.online_players[id] = player
async def remove_online_player(self, id):
async with self.online_players_lock:
if id in self.online_players.keys():
del self.online_players[id]
def start(self):
end = False
loop = asyncio.new_event_loop()
thread = threading.Thread(target=listen, args=(loop, self))
thread.start()
while not end:
try:
time.sleep(500)
except KeyboardInterrupt:
end = True
loop.call_soon_threadsafe(stop_listening, loop, server)
async def on_connect(websocket, path, server):
print("New user...")
id = await websocket.recv()
player = WebSocketPlayer(id, websocket, server)
await server.add_online_player(id, player)
# from this point on WebSocketPlayer class handles communication
await player.listen()
def listen(loop, server:Server):
asyncio.set_event_loop(loop)
bound_handler = functools.partial(on_connect, server=server)
start_server_task = websockets.serve(bound_handler, "localhost", PORT, ping_timeout=None, loop=loop)
start_server = loop.run_until_complete(start_server_task)
server.websocket_server = start_server
print("Server running ...")
loop.run_forever()
async def stop_listening(loop, server:Server):
    await server.websocket_server.wait_closed()
loop.stop()
loop.close()
if __name__ == "__main__":
server = Server()
server.start()
Signal handlers from asyncio like loop.add_signal_handler(signum, callback, *args) are not an option for me, because they only work on Unix.
The error I currently get is that the stop_listening coroutine was never awaited, which kind of makes sense to me. So I'm not that interested in fixing my code example, but more in general: how is it possible to achieve my goal, or how is this usually solved?
Thank you very much in advance
Never mind, this question is related to "Why does the asyncio's event loop suppress the KeyboardInterrupt on Windows?", which is actually a bug of asyncio on Windows.
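For reference, the "was never awaited" error happens because call_soon_threadsafe() only accepts plain callbacks, not coroutines. A coroutine can be scheduled onto a loop running in another thread with asyncio.run_coroutine_threadsafe(); a minimal sketch (the shutdown coroutine below is a placeholder patterned on the code above, not a fix for the Windows Ctrl+C issue itself):
import asyncio

async def shutdown(ws_server):
    # placeholder: close the websocket server and wait until it is down
    ws_server.close()
    await ws_server.wait_closed()

def request_shutdown(loop, ws_server):
    # called from the main thread; the loop runs in the listener thread
    future = asyncio.run_coroutine_threadsafe(shutdown(ws_server), loop)
    future.result(timeout=5)              # wait for the cleanup to finish
    loop.call_soon_threadsafe(loop.stop)  # then let run_forever() return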
Trying to use pyserial with asyncio on a windows machine.
Inspired by https://stackoverflow.com/a/27927704/1629704 my code is constantly watching a serial port for incoming data.
# This coroutine is added as a task to the event loop.
@asyncio.coroutine
def get_from_serial_port(self):
while 1:
serial_data = yield from self.get_byte_async()
<doing other stuff with serial_data>
# The method which gets executed in the executor
def get_byte(self):
data = self.s.read(1)
time.sleep(0.5)
tst = self.s.read(self.s.inWaiting())
data += tst
return data
# Runs blocking function in executor, yielding the result
@asyncio.coroutine
def get_byte_async(self):
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
res = yield from self.loop.run_in_executor(executor, self.get_byte)
return res
After serial data has been returned, the coroutine get_byte_async is called again inside the while loop, creating a new executor each time. I've always learned that creating a new thread is expensive, so I feel I should take another approach, but I'm not sure how to do that.
I've been reading this article https://hackernoon.com/threaded-asynchronous-magic-and-how-to-wield-it-bba9ed602c32#.964j4a5s7
And I guess I need to do the reading of the serial port in another thread. But how do I get the serial data back to the "main" loop?
You can either use the default executor and lock the access to get_byte with an asyncio lock:
async def get_byte_async(self):
async with self.lock:
return await self.loop.run_in_executor(None, self.get_byte)
Or simply create your own executor once:
async def get_byte_async(self):
if self.executor is None:
self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
return await self.loop.run_in_executor(self.executor, self.get_byte)
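Another way to "get the serial data back to the main loop" is a dedicated reader thread that pushes each chunk into an asyncio.Queue with call_soon_threadsafe(); a rough sketch, with read_blocking() standing in for the pyserial read logic from the question:
import asyncio
import threading
import time

def read_blocking():
    # placeholder for self.s.read(...) from the question
    time.sleep(0.5)
    return b'\x01'

def reader_thread(loop, queue):
    # runs outside the event loop; hand each chunk to the loop thread-safely
    while True:
        data = read_blocking()
        loop.call_soon_threadsafe(queue.put_nowait, data)

async def consume(queue):
    while True:
        data = await queue.get()
        print('got', data)

async def main():
    loop = asyncio.get_running_loop()
    queue = asyncio.Queue()
    threading.Thread(target=reader_thread, args=(loop, queue), daemon=True).start()
    await consume(queue)

asyncio.run(main())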
I'm currently playing with aiohttp to see how it will perform as a server application for mobile app with websocket connection.
Here is simple "Hello world" example (as gist here):
import asyncio
import aiohttp
from aiohttp import web
class WebsocketEchoHandler:
    @asyncio.coroutine
def __call__(self, request):
ws = web.WebSocketResponse()
ws.start(request)
print('Connection opened')
try:
while True:
msg = yield from ws.receive()
ws.send_str(msg.data + '/answer')
except:
pass
finally:
print('Connection closed')
return ws
if __name__ == "__main__":
app = aiohttp.web.Application()
app.router.add_route('GET', '/ws', WebsocketEchoHandler())
loop = asyncio.get_event_loop()
handler = app.make_handler()
f = loop.create_server(
handler,
'127.0.0.1',
8080,
)
srv = loop.run_until_complete(f)
print("Server started at {sock[0]}:{sock[1]}".format(
sock=srv.sockets[0].getsockname()
))
try:
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
loop.run_until_complete(handler.finish_connections(1.0))
srv.close()
loop.run_until_complete(srv.wait_closed())
loop.run_until_complete(app.finish())
loop.close()
The problem
Now I would like to use the structure described below (node server = python aiohttp). To be more specific, I want to use the Redis Pub/Sub mechanism with asyncio-redis to read and write both to the websocket connection and to Redis in my WebsocketEchoHandler.
WebsocketEchoHandler is a dead simple loop, so I'm not sure how this should be done. Using Tornado and brükva I would just use callbacks.
Extra (perhaps off-topic) question
Since I'm using Redis already, which of the two approaches should I take:
Like in a "classic" web app, have a controller/view for everything and use Redis just for messaging, etc.
Make the web app just a layer between the client and Redis, with Redis also used as a task queue (e.g. the simple Python RQ), and delegate every request to workers.
EDIT
Image from http://goldfirestudios.com/blog/136/Horizontally-Scaling-Node.js-and-WebSockets-with-Redis
EDIT 2
It seems that I need to clarify.
The websocket-only handler is shown above.
A Redis Pub/Sub handler might look like this:
class WebsocketEchoHandler:
    @asyncio.coroutine
def __call__(self, request):
ws = web.WebSocketResponse()
ws.start(request)
connection = yield from asyncio_redis.Connection.create(host='127.0.0.1', port=6379)
subscriber = yield from connection.start_subscribe()
yield from subscriber.subscribe(['ch1', 'ch2'])
print('Connection opened')
try:
while True:
msg = yield from subscriber.next_published()
ws.send_str(msg.value + '/answer')
except:
pass
finally:
print('Connection closed')
return ws
This handler just subscribes to the Redis channels ch1 and ch2 and sends every message received on those channels to the websocket.
I want to have this handler:
class WebsocketEchoHandler:
    @asyncio.coroutine
def __call__(self, request):
ws = web.WebSocketResponse()
ws.start(request)
connection = yield from asyncio_redis.Connection.create(host='127.0.0.1', port=6379)
subscriber = yield from connection.start_subscribe()
yield from subscriber.subscribe(['ch1', 'ch2'])
print('Connection opened')
try:
while True:
                # if a message is received from Redis OR from the websocket
msg_ws = yield from ws.receive()
msg_redis = yield from subscriber.next_published()
if msg_ws:
# push to redis / do something else
self.on_msg_from_ws(msg_ws)
if msg_redis:
self.on_msg_from_redis(msg_redis)
except:
pass
finally:
print('Connection closed')
return ws
But the following code always runs sequentially, so reading from the websocket blocks reading from Redis:
msg_ws = yield from ws.receive()
msg_redis = yield from subscriber.next_published()
I want reading to be event-driven, where the event is a message arriving from either of the two sources.
You should use two while loops - one that handles messages from the websocket, and one that handles messages from redis. Your main handler can just kick off two coroutines, one handling each loop, and then wait on both of them:
class WebsocketEchoHandler:
    @asyncio.coroutine
def __call__(self, request):
ws = web.WebSocketResponse()
ws.start(request)
connection = yield from asyncio_redis.Connection.create(host='127.0.0.1', port=6379)
subscriber = yield from connection.start_subscribe()
yield from subscriber.subscribe(['ch1', 'ch2'])
print('Connection opened')
try:
# Kick off both coroutines in parallel, and then block
# until both are completed.
yield from asyncio.gather(self.handle_ws(ws), self.handle_redis(subscriber))
except Exception as e: # Don't do except: pass
import traceback
traceback.print_exc()
finally:
print('Connection closed')
return ws
    @asyncio.coroutine
def handle_ws(self, ws):
while True:
msg_ws = yield from ws.receive()
if msg_ws:
self.on_msg_from_ws(msg_ws)
    @asyncio.coroutine
def handle_redis(self, subscriber):
while True:
msg_redis = yield from subscriber.next_published()
if msg_redis:
self.on_msg_from_redis(msg_redis)
This way you can read from any of the two potential sources without having to care about the other.
More recently, we can use async/await in Python 3.5 and above:
async def task1(ws):
async for msg in ws:
if msg.type == WSMsgType.TEXT:
data = msg.data
print(data)
if data:
await ws.send_str('pong')
## ch is a redis channel (e.g. an aioredis pub/sub channel)
async def task2(ch):
    async for msg in ch.iter(encoding="utf-8", decoder=json.loads):
        print("receiving", msg)
        user_token = msg['token']
        # r_cons: dict of user tokens -> open websockets, defined elsewhere
        if user_token in r_cons.keys():
_ws = r_cons[user_token]
await _ws.send_json(msg)
coroutines = list()
coroutines.append(task1(ws))
coroutines.append(task2(ch1))
await asyncio.gather(*coroutines)
This is what I do when the websocket needs to wait for messages from multiple sources. The main point here is using asyncio.gather to run the two coroutines together, as @dano mentioned.