global variables won't update from within asyncio loop inside thread - python

Given the following code:
from collections import deque
import websockets
import threading
import asyncio
# workaround: restore the default SIGINT handler so CTRL+C exits during run_forever()
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
clients = set()
queue = deque()
async def handle_client(client, path):
try:
clients.add(client)
async for buffer in client:
queue.append(buffer)
await client.send(buffer)
finally:
clients.remove(client)
def start_loop(port):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(websockets.serve(handle_client,
host='localhost',
port=port))
loop.run_forever()
def start(port=1234):
    threading.Thread(target=start_loop, args=(port,)).start()
By calling start() and interacting with the server, I expected to be able to pull data out of the queue and clients global variables, but they don't seem to be modified from within handle_client(). Why is that?
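For what it's worth, module-level globals are shared across threads, and deque.append()/popleft() are thread-safe, so a minimal sketch (an illustration only, assuming start() launches start_loop as above and runs from the same module) can poll the shared state from the main thread:

import time

start()
while True:
    print('connected clients:', len(clients))
    while queue:                      # deque operations are thread-safe
        print('received:', queue.popleft())
    time.sleep(1)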

Related

Python aiohttp + asyncio: How to execute code after loop.run_forever()

Code:
#!/usr/bin/env python
import asyncio
import os
import socket
import time
import traceback
from aiohttp import web
from concurrent.futures import ProcessPoolExecutor
from multiprocessing import cpu_count
CPU_COUNT = cpu_count()
print("CPU Count:", CPU_COUNT)
def mk_socket(host="127.0.0.1", port=9090, reuseport=False):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if reuseport:
SO_REUSEPORT = 15
sock.setsockopt(socket.SOL_SOCKET, SO_REUSEPORT, 1)
sock.bind((host, port))
return sock
async def index(request):
icecast_index_path = os.path.abspath("../test/icecast/icecast_index.html")
print(icecast_index_path)
try:
content = open(icecast_index_path, encoding="utf8").read()
return web.Response(content_type="text/html", text=content)
except Exception as e:
return web.Response(content_type="text/html", text="<!doctype html><body><h1>Error: "+str(e)+"</h1></body></html>")
async def start_server():
try:
host = "127.0.0.1"
port=8080
reuseport = True
app = web.Application()
app.add_routes([web.get('/', index)])
runner = web.AppRunner(app)
await runner.setup()
sock = mk_socket(host, port, reuseport=reuseport)
srv = web.SockSite(runner, sock)
await srv.start()
print('Server started at http://127.0.0.1:8080')
return srv, app, runner
except Exception:
traceback.print_exc()
raise
async def finalize(srv, app, runner):
sock = srv.sockets[0]
app.loop.remove_reader(sock.fileno())
sock.close()
#await handler.finish_connections(1.0)
await runner.cleanup()
srv.close()
await srv.wait_closed()
await app.finish()
def init():
loop = asyncio.get_event_loop()
srv, app, runner = loop.run_until_complete(start_server())
try:
loop.run_forever()
except KeyboardInterrupt:
loop.run_until_complete((finalize(srv, app, runner)))
if __name__ == '__main__':
with ProcessPoolExecutor() as executor:
for i in range(0, int(CPU_COUNT/2)):
executor.submit(init)
    # after the aiohttp servers start I want to execute more code,
    # for example:
    print("Hello world.")
    # in the actual program the ProcessPoolExecutor is called
    # inside a PyQt5 app, so I don't want the PyQt5 app to freeze
The problem is that with this code I can't execute anything after the ProcessPoolExecutor calls.
How can I fix that?
I tried to remove this part:
try:
loop.run_forever()
except KeyboardInterrupt:
loop.run_until_complete((finalize(srv, app, runner)))
from the init() method, but then the aiohttp server closes instantly.
Edit: If I use a thread instead of ProcessPoolExecutor, then aiohttp raises errors that say:
RuntimeError: set_wakeup_fd only works in main thread
RuntimeError: There is no current event loop in thread
These are aiohttp + asyncio related errors. Maybe I could add a @ decorator above the def declarations (I suppose).
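For reference, the second RuntimeError simply means that only the main thread gets an event loop automatically; a worker thread has to create and set its own loop, the same pattern used in one of the answers further down. A minimal sketch:

import asyncio

loop = asyncio.new_event_loop()  # create a fresh loop for this thread
asyncio.set_event_loop(loop)     # make it the current loop of this thread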
Using a with statement with an Executor will cause your process to block until the jobs in the executor are complete; since they're running infinite event loops, they will never complete and your Executor will never unblock.
Instead, just use the executor to kick off the jobs, and run your stuff afterwards. When you’re finally done, call .shutdown() to wait for processes to exit:
executor = ProcessPoolExecutor()
for i in range(0, int(CPU_COUNT/2)):
executor.submit(init)
# other code…
executor.shutdown()
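Note that shutdown() defaults to wait=True, so it blocks until the worker processes actually exit; because each init runs its loop forever, you still need some way to stop those loops (for example a KeyboardInterrupt delivered to the workers) before shutdown() returns.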

Starting a new process from an asyncio loop

I want to start a new Process (Pricefeed) from my Executor class and then have the Executor class keep running in its own event loop (the shoot method). In my current attempt, the asyncio loop gets blocked on the line p.join(). However, without that line, my code just exits. How do I do this properly?
Note: fh.run() blocks as well.
import asyncio
from multiprocessing import Process, Queue
from cryptofeed import FeedHandler
from cryptofeed.defines import L2_BOOK
from cryptofeed.exchanges.ftx import FTX
class Pricefeed(Process):
def __init__(self, queue: Queue):
super().__init__()
self.coin_symbol = 'SOL-USD'
self.fut_symbol = 'SOL-USD-PERP'
self.queue = queue
async def _book_update(self, feed, symbol, book, timestamp, receipt_timestamp):
self.queue.put(book)
def run(self):
fh = FeedHandler()
fh.add_feed(FTX(symbols=[self.fut_symbol, self.coin_symbol], channels=[L2_BOOK],
callbacks={L2_BOOK: self._book_update}))
fh.run()
class Executor:
def __init__(self):
self.q = Queue()
async def shoot(self):
print('in shoot')
for i in range(5):
msg = self.q.get()
print(msg)
await asyncio.sleep(1) # do some stuff
async def run(self):
asyncio.create_task(self.shoot())
p = Pricefeed(self.q)
p.start()
p.join()
async def main():
g = Executor()
await g.run()
if __name__ == '__main__':
asyncio.run(main())
Since you're using a queue to communicate, this is a somewhat tricky problem. To answer your first question as to why removing join makes the program work: join blocks until the process finishes, and in asyncio you can't do anything blocking in a function marked async or it will freeze the event loop. To do this properly you'll need to run your process with the asyncio event loop's run_in_executor method, which will run things in a process pool and return an awaitable that is compatible with the asyncio event loop.
Secondly, to properly share your queue you'll need a multiprocessing Manager, which creates shared state that can be used by multiple processes and directly supports creating a shared queue. Using these two bits of knowledge you can adapt your code to something like the following, which works:
import asyncio
import functools
import time
from multiprocessing import Manager
from concurrent.futures import ProcessPoolExecutor
def run_pricefeed(queue):
i = 0
while True: #simulate putting an item on the queue every 250ms
queue.put(f'test-{i}')
i += 1
time.sleep(.25)
class Executor:
async def shoot(self, queue):
print('in shoot')
for i in range(5):
while not queue.empty():
msg = queue.get(block=False)
print(msg)
await asyncio.sleep(1) # do some stuff
async def run(self):
with ProcessPoolExecutor() as pool:
with Manager() as manager:
queue = manager.Queue()
asyncio.create_task(self.shoot(queue))
await asyncio.get_running_loop().run_in_executor(pool, functools.partial(run_pricefeed, queue))
async def main():
g = Executor()
await g.run()
if __name__ == '__main__':
asyncio.run(main())
This code has a drawback in that you need to empty the queue in a non-blocking fashion from your asyncio process, and then wait a while for new items to come in before emptying it again, effectively implementing a polling mechanism. If you don't wait after emptying, you'll wind up with blocking code and you will freeze the event loop again. This isn't as good as just blocking until the queue has an item in it, but may suit your needs. If possible, I would avoid asyncio here and use multiprocessing entirely, for example by implementing queue processing as a separate process.
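As a variation not in the original answer, one way to avoid the polling entirely is to push the blocking queue.get() itself into a thread with run_in_executor, so the coroutine simply awaits the next item; a sketch of shoot rewritten under that assumption:

    async def shoot(self, queue):
        print('in shoot')
        loop = asyncio.get_running_loop()
        for i in range(5):
            # run the blocking Manager-queue get() in the default
            # thread pool so the event loop stays responsive
            msg = await loop.run_in_executor(None, queue.get)
            print(msg)
            await asyncio.sleep(1)  # do some stuff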

Send message using websockets with another thread without RuntimeWarning: coroutine 'WebSocketCommonProtocol.send' was never awaited

I am trying to send motion sensor data as a JSON string using a websocket client in another thread, to avoid blocking the rest of the code with the infinite loop in the MotionSensors class. But apparently ws.send() needs the await keyword, and if I add it throughout I get an error:
RuntimeWarning: coroutine 'MotionSensors.run' was never awaited
self.run()
RuntimeWarning: Enable tracemalloc to get the object allocation traceback
and it doesn't send anything to the server
# motionSensor.py
import threading
import time
from client.ClientRequest import Request
class MotionSensors(threading.Thread):
def __init__(self, ws):
threading.Thread.__init__(self)
self.ws = ws
self.open = True
async def run(self):
await self.SendData()
async def SendData(self):
while self.open:
print("Sending motion state....")
state = 1 # Motion state demo value
request = Request()
request.push("mcu/sensors/motion")
request.addBody({
"state_type": "single",
"devices": {"state": state, "device_no": "DVC-876435"}
})
await self.ws.send(request.getAsJsonString())
print("sleeping now for 2 seconds....")
time.sleep(2)
here is my main code
client.py
# client.py
import settings
import asyncio
import websockets
from client.ClientHandler import Devices
from client.Rounte import Route
from ClientRequest import Request
from client.dbHandler import mcuConfig
from client.devices.motionSensor import MotionSensors
def ResponseMSG(request):
print(request)
route = Route()
route.addRoute("/response", ResponseMSG)
def onMessage(request):
route.fireRequest(request)
async def WsClient():
uri = settings.WS_URL
async with websockets.connect(uri) as websocket:
#####################################
###INITIALIZE DEVICES
motion = MotionSensors(websocket)
motion.start()
while True:
print("waiting to recieve......")
message = await websocket.recv()
onMessage(message)
loop = asyncio.get_event_loop()
loop.run_until_complete(WsClient())
loop.run_forever()
I need your help to send data from another thread with a while loop, without blocking execution of the code below and without errors. Thank you so much in advance.
Actually, I changed the code of motionSensor.py: I created a new event loop and set it on the new thread, and it works, even for those using Python 3.7 and below. All thanks to @user4815162342.
# motionSensor.py
import threading
import time
import asyncio
from client.ClientRequest import Request
class MotionSensor(threading.Thread):
def __init__(self, ws):
threading.Thread.__init__(self)
self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
self.ws = ws
self.open = True
def run(self):
self.loop.run_until_complete(self.SendData())
    # @asyncio.coroutine
async def SendData(self):
while True:
print("Sending motion state....")
state = 0
request = Request()
request.push("mcu/sensors/motion")
request.addBody({
"state_type": "single",
"devices": {"state": state, "device_no": "DVC-876435"}
})
await self.ws.send(request.getAsJsonString())
print("sleeping now for 5 seconds....")
            await asyncio.sleep(5)  # non-blocking sleep; time.sleep() would stall the coroutine
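A caveat not in the original answer: self.ws was created on the event loop running WsClient(), and awaiting its send() from a second loop in another thread is not generally safe. A more defensive sketch, assuming the owning loop is passed in from client.py, submits each send back to that loop with asyncio.run_coroutine_threadsafe:

# motionSensor.py (variant without a second event loop)
import threading
import time
import asyncio
from client.ClientRequest import Request

class MotionSensor(threading.Thread):
    def __init__(self, ws, loop):
        threading.Thread.__init__(self)
        self.ws = ws
        self.loop = loop  # the loop that runs WsClient()

    def run(self):
        while True:
            request = Request()
            request.push("mcu/sensors/motion")
            request.addBody({"state_type": "single",
                             "devices": {"state": 0, "device_no": "DVC-876435"}})
            # schedule the send on the loop that owns the websocket
            future = asyncio.run_coroutine_threadsafe(
                self.ws.send(request.getAsJsonString()), self.loop)
            future.result()   # wait until the send has completed
            time.sleep(5)     # a blocking sleep is fine in a plain thread

In client.py the thread would then be created as MotionSensor(websocket, asyncio.get_event_loop()).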

Simple Python multi threaded webserver with Asyncio and events called in main function

I have a simple Python program that I want to do three things:
Serve an HTTP document
Serve Websockets
Interact with the Websocket data
I am trying to use / grok asyncio. The issue is that I can't figure out how to access data acquired from a function in the main event loop.
For example in my code below I have two threads.
One thread is the HTTP server thread, one thread is the Websocket server thread and there is the main thread.
What I want to do is to print data captured in the websocket receiving thread in the main thread.
The only way I know how to do this is to use Queues to pass data between threads, at which point I do not even know what the advantage of using asyncio is.
Similarly, it feels weird to pass the event loop to the serve_websocket function.
Can anyone please explain how to architect this to get data from the Websocket function into the main function?
Ideally I want a way to do this without using the threading library at all, which seems possible. In an async project I would want to react to websocket events in a different function than the one where they are called.
NOTE: I know there are other libraries for websockets and HTTP serving with asyncio, but this is an example to help me understand how to structure projects using this paradigm.
Thanks
#!/usr/bin/env python
import json
import socketserver
import threading
import http.server
import asyncio
import time
import websockets
SERVER_ADDRESS = '127.0.0.1'
HTTP_PORT = 8087
WEBSOCKET_PORT = 5678
def serve_http():
http_handler = http.server.SimpleHTTPRequestHandler
with socketserver.TCPServer(("", HTTP_PORT), http_handler) as httpd:
print(f'HTTP server listening on port {HTTP_PORT}')
httpd.serve_forever()
def serve_websocket(server, event_loop):
print(f'Websocket server listening on port {WEBSOCKET_PORT}')
event_loop.run_until_complete(server)
event_loop.run_forever()
async def ws_callback(websocket, path):
while True:
data = await websocket.recv()
# How do I access parsed_data in the main function below
parsed_data = json.loads(data)
await websocket.send(data)
def main():
event_loop = asyncio.get_event_loop()
ws_server = websockets.serve(ws_callback, SERVER_ADDRESS, WEBSOCKET_PORT)
threading.Thread(target=serve_http, daemon=True).start()
threading.Thread(target=serve_websocket, args=(ws_server, event_loop), daemon=True).start()
try:
while True:
# Keep alive - this is where I want to access the data from ws_callback
# i.e.
# print(data.values)
time.sleep(.01)
except KeyboardInterrupt:
print('Exit called')
if __name__ == '__main__':
main()
I believe you should not mix asyncio and multithreading without a special need; in your case, asyncio tools alone are enough. Then sharing data between coroutines is no problem, because they all run on the same thread using cooperative multitasking.
Your code can be rewritten as:
#!/usr/bin/env python
import json
import asyncio
import websockets
SERVER_ADDRESS = '127.0.0.1'
HTTP_PORT = 8087
WEBSOCKET_PORT = 5678
parsed_data = {}
async def handle_http(reader, writer):
    # minimal placeholder handler: consume the request,
    # then send a tiny valid HTTP response
    await reader.read(100)
    writer.write(b'HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nok\n')
    await writer.drain()
    writer.close()
async def ws_callback(websocket, path):
global parsed_data
while True:
data = await websocket.recv()
# How do I access parsed_data in the main function below
parsed_data = json.loads(data)
await websocket.send(data)
async def main():
ws_server = await websockets.serve(ws_callback, SERVER_ADDRESS, WEBSOCKET_PORT)
print(f'Websocket server listening on port {WEBSOCKET_PORT}')
http_server = await asyncio.start_server(
handle_http, SERVER_ADDRESS, HTTP_PORT)
print(f'HTTP server listening on port {HTTP_PORT}')
try:
while True:
if parsed_data:
print(parsed_data.values())
await asyncio.sleep(0.1)
except KeyboardInterrupt:
print('Exit called')
if __name__ == '__main__':
asyncio.run(main())

async queue hangs when used with background thread

It seems an asyncio.Queue can only be pushed to from the same thread that reads it? For instance:
import asyncio
from threading import Thread
import time
q = asyncio.Queue()
def produce():
for i in range(100):
q.put_nowait(i)
time.sleep(0.1)
async def consume():
while True:
i = await q.get()
print('consumed', i)
Thread(target=produce).start()
asyncio.get_event_loop().run_until_complete(consume())
only prints
consumed 0
and then hangs. What am I missing?
You can't call asyncio methods from another thread directly.
Either use loop.call_soon_threadsafe:
loop.call_soon_threadsafe(q.put_nowait, i)
Or asyncio.run_coroutine_threadsafe:
future = asyncio.run_coroutine_threadsafe(q.put(i), loop)
where loop is the loop returned by asyncio.get_event_loop() in your main thread.
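Putting the first variant together, a minimal runnable sketch that keeps the question's structure and captures the loop in the main thread before the producer starts:

import asyncio
import time
from threading import Thread

q = asyncio.Queue()
loop = asyncio.get_event_loop()

def produce():
    for i in range(100):
        # hand each item over to the loop's thread instead of
        # touching the asyncio.Queue directly from this thread
        loop.call_soon_threadsafe(q.put_nowait, i)
        time.sleep(0.1)

async def consume():
    for _ in range(100):
        i = await q.get()
        print('consumed', i)

Thread(target=produce, daemon=True).start()
loop.run_until_complete(consume())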
