Python Quart: unable to shut down background task

I am working on a Python app that I am moving from Flask to Quart. The application needs a background task that runs constantly while the application is running.
When I try to stop the process with Ctrl+C, the thread doesn't close cleanly and the shutdown routine sits in this while loop:
while not self._master_thread_class.shutdown_completed:
    if not pro:
        print('[DEBUG] Thread is not complete')
        pro = True
I have followed this Stack Overflow question, but I can't figure out how to cleanly shut down the background thread, so I would love an explanation, as the Quart documentation seems to be a bit lacking here.
MasterThread class:
import asyncio

class MasterThread:
    def __init__(self, shutdown_requested_event):
        self._shutdown_completed = False
        self._shutdown_requested_event = shutdown_requested_event
        self._shutdown_requested = False

    def __del__(self):
        print('Thread was deleted')

    def run(self, loop) -> None:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(self._async_entrypoint())

    @asyncio.coroutine
    def _async_entrypoint(self) -> None:
        while not self._shutdown_requested and \
                not self._shutdown_requested_event.isSet():
            #print('_main_loop()')
            pass

        if self._shutdown_requested_event.wait(0.1):
            self._shutdown_requested = True

        print('[DEBUG] thread has completed....')
        self._shutdown_completed = True

    def _main_loop(self) -> None:
        print('_main_loop()')
Main application module:
import asyncio
import threading

from quart import Quart
from workthr import MasterThread

app = Quart(__name__)

class Service:
    def __init__(self):
        self._shutdown_thread_event = threading.Event()
        self._master_thread = MasterThread(self._shutdown_thread_event)
        self._thread = None

    def __del__(self):
        self.stop()

    def start(self):
        loop = asyncio.get_event_loop()
        self._thread = threading.Thread(target=self._master_thread.run, args=(loop,))
        self._thread.start()
        return True

    def stop(self) -> None:
        print('[DEBUG] Stop signal caught...')
        self._shutdown_thread_event.set()
        while not self._master_thread.shutdown_completed:
            print('[DEBUG] Thread is not complete')
        print('[DEBUG] Thread has completed')
        self._shutdown()

    def _shutdown(self):
        print('Shutting down...')

service = Service()
service.start()

Quart has startup and shutdown hooks (before_serving and after_serving) that allow something to be started before the server starts serving and stopped when the server finishes serving. If your background task is mostly IO bound, I'd recommend just using a coroutine function rather than a thread:
async def background_task():
    while True:
        ...

@app.before_serving
async def startup():
    app.background_task = asyncio.ensure_future(background_task())

@app.after_serving
async def shutdown():
    app.background_task.cancel()  # or use a variable in the while loop
Or you can do the same with your Service:
@app.before_serving
async def startup():
    service.start()

@app.after_serving
async def shutdown():
    service.stop()
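For completeness, here is a minimal, self-contained sketch of the coroutine approach that also cleans up when the task is cancelled; the route, the sleep interval, and the cleanup hook are illustrative placeholders rather than part of the original answer:
import asyncio
from quart import Quart

app = Quart(__name__)

async def background_task():
    try:
        while True:
            # do the periodic work here
            await asyncio.sleep(1)
    except asyncio.CancelledError:
        # run any cleanup before the task exits
        raise

@app.before_serving
async def startup():
    app.background_task = asyncio.ensure_future(background_task())

@app.after_serving
async def shutdown():
    app.background_task.cancel()
    try:
        await app.background_task
    except asyncio.CancelledError:
        pass

@app.route('/')
async def index():
    return 'Hello'

if __name__ == '__main__':
    app.run()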

Related

How to call Python Tornado Websocket Server inside another Python

I would like to run a Python Tornado WebSocket server inside another Python (main) script and trigger sending messages when needed. The main script creates two threads: one for the Tornado server and the other for my loop that will trigger messages.
When I start the server directly it works fine, but because it runs an endless loop the rest of the main script never executes. So I start the server inside a thread, but this time I receive "RuntimeError: There is no current event loop in thread 'Thread-1 (start_server)'".
Main.py
import tornadoserver
import time
from threading import Lock, Thread

class Signal:
    def __init__(self):
        #self.socket = tornadoserver.initiate_server()
        print("start")

    def start_server(self):
        print("start Server")
        self.socket = tornadoserver.initiate_server()

    def brd(self):
        print("start Broad")
        i = 0
        while True:
            time.sleep(3)
            self.socket.send(i)
            i = i + 1

    def job(self):
        # --------Main--------
        threads = []
        for func in [self.start_server, self.brd, ]:
            threads.append(Thread(target=func))
            threads[-1].start()
        for thread in threads:
            thread.join()

Signal().job()
tornadoserver.py
import tornado.web
import tornado.httpserver
import tornado.ioloop
import tornado.websocket as ws
from tornado.options import define, options
import time

define('port', default=4041, help='port to listen on')
ws_clients = []

class web_socket_handler(ws.WebSocketHandler):
    @classmethod
    def route_urls(cls):
        return [(r'/', cls, {}), ]

    def simple_init(self):
        self.last = time.time()
        self.stop = False

    def open(self):
        self.simple_init()
        if self not in ws_clients:
            ws_clients.append(self)
            print("New client connected")
            self.write_message("You are connected")

    def on_message(self, message):
        if self in ws_clients:
            print("received message {}".format(message))
            self.write_message("You said {}".format(message))
            self.last = time.time()

    def on_close(self):
        if self in ws_clients:
            ws_clients.remove(self)
            print("connection is closed")
            self.loop.stop()

    def check_origin(self, origin):
        return True

    def send_message(self, message):
        self.write_message("You said {}".format(message))

def send(message):
    for c in ws_clients:
        c.write_message(message)

def initiate_server():
    # create a tornado application and provide the urls
    app = tornado.web.Application(web_socket_handler.route_urls())
    # setup the server
    server = tornado.httpserver.HTTPServer(app)
    server.listen(options.port)
    # start io/event loop
    tornado.ioloop.IOLoop.instance().start()
Using Google I found the Tornado issue
Starting server in separate thread gives... RuntimeError: There is no current event loop in thread 'Thread-4' · Issue #2308 · tornadoweb/tornado
which shows that you have to use
asyncio.set_event_loop(asyncio.new_event_loop())
to run an event loop in a new thread.
Something like this:
import asyncio

# ...

def initiate_server():
    asyncio.set_event_loop(asyncio.new_event_loop())  # <---
    # create a tornado application and provide the urls
    app = tornado.web.Application(web_socket_handler.route_urls())
    # setup the server
    server = tornado.httpserver.HTTPServer(app)
    server.listen(options.port)
    # start io/event loop
    tornado.ioloop.IOLoop.instance().start()
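If you also need to stop that server thread cleanly later (the recurring theme on this page), one option is to keep a reference to the thread's IOLoop and ask it to stop from the main thread. This is a hedged sketch building on the tornadoserver.py module above; the io_loop_holder name is made up for illustration, and IOLoop.add_callback is used because it is the documented thread-safe way to schedule work on a running IOLoop:
import asyncio

import tornado.ioloop

io_loop_holder = {}  # hypothetical holder so the main thread can reach the server's loop

def initiate_server():
    asyncio.set_event_loop(asyncio.new_event_loop())
    app = tornado.web.Application(web_socket_handler.route_urls())
    server = tornado.httpserver.HTTPServer(app)
    server.listen(options.port)
    io_loop_holder['loop'] = tornado.ioloop.IOLoop.current()
    io_loop_holder['loop'].start()  # blocks until stop() runs on the loop

def stop_server():
    # called from the main thread; IOLoop.stop itself is not thread safe,
    # so schedule it on the loop via add_callback
    loop = io_loop_holder.get('loop')
    if loop is not None:
        loop.add_callback(loop.stop)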

Interrupt python websocket server

I'm currently struggling with something "simple".
I'd like to have a Python WebSocket server that can be shut down by outside events (e.g. a Ctrl+C from the command line).
Here is my code so far:
import asyncio
import functools
import threading
import time

import websockets

PORT = 8765

class Server(object):
    def __init__(self):
        self.online_players = dict()
        self.online_players_lock = asyncio.Lock()
        self.websocket_server = None

    async def add_online_player(self, id, player):
        async with self.online_players_lock:
            self.online_players[id] = player

    async def remove_online_player(self, id):
        async with self.online_players_lock:
            if id in self.online_players.keys():
                del self.online_players[id]

    def start(self):
        end = False
        loop = asyncio.new_event_loop()
        thread = threading.Thread(target=listen, args=(loop, self))
        thread.start()
        while not end:
            try:
                time.sleep(500)
            except KeyboardInterrupt:
                end = True
                loop.call_soon_threadsafe(stop_listening, loop, server)

async def on_connect(websocket, path, server):
    print("New user...")
    id = await websocket.recv()
    player = WebSocketPlayer(id, websocket, server)
    await server.add_online_player(id, player)
    # from this point on WebSocketPlayer class handles communication
    await player.listen()

def listen(loop, server: Server):
    asyncio.set_event_loop(loop)
    bound_handler = functools.partial(on_connect, server=server)
    start_server_task = websockets.serve(bound_handler, "localhost", PORT, ping_timeout=None, loop=loop)
    start_server = loop.run_until_complete(start_server_task)
    server.websocket_server = start_server
    print("Server running ...")
    loop.run_forever()

async def stop_listening(loop, server: Server):
    await server.websocket_server.wait_close()
    loop.stop()
    loop.close()

if __name__ == "__main__":
    server = Server()
    server.start()
Signal handlers from asyncio like loop.add_signal_handler(signum, callback, *args) are not an option for me, because they only work on Unix.
The error I currently get is that the stop_listening coroutine was never awaited, which kind of makes sense to me. So I am not that interested in fixing this exact code example, but rather in how this goal can be achieved in general, or how it is usually solved.
Thank you very much in advance.
Never mind, this question is related to: Why does the asyncio event loop suppress the KeyboardInterrupt on Windows?, which is actually a bug of asyncio on Windows.
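As a side note on the "never awaited" error: loop.call_soon_threadsafe expects a plain callback, so passing the stop_listening coroutine function never actually runs it. The documented way to schedule a coroutine on a loop that runs in another thread is asyncio.run_coroutine_threadsafe. A minimal, self-contained sketch of that pattern (an illustration of the general approach, not the poster's final fix; the cleanup in shutdown() is a placeholder):
import asyncio
import threading
import time

def run_loop(loop):
    # the background thread owns this loop and runs it until stop() is called
    asyncio.set_event_loop(loop)
    loop.run_forever()
    loop.close()  # close only after run_forever() has returned

async def shutdown():
    # do any async cleanup here (e.g. close the websocket server and await it),
    # then ask the loop to stop once this coroutine finishes
    asyncio.get_running_loop().stop()

def main():
    loop = asyncio.new_event_loop()
    thread = threading.Thread(target=run_loop, args=(loop,))
    thread.start()
    try:
        while True:
            time.sleep(0.5)  # the main thread keeps receiving KeyboardInterrupt
    except KeyboardInterrupt:
        # schedule the coroutine on the loop owned by the other thread
        asyncio.run_coroutine_threadsafe(shutdown(), loop)
        thread.join()

if __name__ == "__main__":
    main()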

CherryPy waits for extra thread to end that is stopped later

I am building an application that uses CherryPy to serve a REST API, and another thread that does background work (in fact, it reads data from a serial port).
import cherrypy
import threading

class main:
    @cherrypy.expose
    def index(self):
        return "Hello World."

def run():
    while running == True:
        # read data from serial port and store in a variable
        pass

running = True
t = threading.Thread(target=run)
t.start()

if __name__ == '__main__':
    cherrypy.quickstart(main())
    running = False
Both api.pc_main() and run work fine. The trouble is that I use a running boolean to stop my thread, but the line that sets it to False is never reached, because CherryPy waits for that thread to finish when I press Ctrl+C. I actually have to use kill -9 to stop the process.
I fixed it by making my thread a CherryPy plugin. I used the code found here: Why is CTRL-C not captured and signal_handler called?
import threading
import time

from cherrypy.process.plugins import SimplePlugin

class myplugin(SimplePlugin):
    running = False
    thread = None

    def __init__(self, bus):
        SimplePlugin.__init__(self, bus)

    def start(self):
        print("Starting thread.")
        self.running = True
        if not self.thread:
            self.thread = threading.Thread(target=self.run)
            self.thread.start()

    def stop(self):
        print("Stopping thread.")
        self.running = False
        if self.thread:
            self.thread.join()
            self.thread = None

    def run(self):
        while self.running == True:
            print("Thread runs.")
            time.sleep(1)
then in the main script:
if __name__ == '__main__':
    myplugin(cherrypy.engine).subscribe()
    cherrypy.quickstart(main())

interrupt thread with start_consuming method of pika

I have a thread which listens for new messages from RabbitMQ using pika. After configuring the connection using BlockingConnection, I start consuming messages through start_consuming. How can I interrupt the start_consuming call to, for example, stop the thread gracefully?
You can use the consume generator instead of start_consuming:
import threading

import pika

class WorkerThread(threading.Thread):
    def __init__(self):
        super(WorkerThread, self).__init__()
        self._is_interrupted = False

    def stop(self):
        self._is_interrupted = True

    def run(self):
        connection = pika.BlockingConnection(pika.ConnectionParameters())
        channel = connection.channel()
        channel.queue_declare("queue")
        for message in channel.consume("queue", inactivity_timeout=1):
            if self._is_interrupted:
                break
            if not message:
                continue
            method, properties, body = message
            print(body)

def main():
    thread = WorkerThread()
    thread.start()
    # some main thread activity ...
    thread.stop()
    thread.join()

if __name__ == "__main__":
    main()
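If you also want to tear the consumer down cleanly once the loop exits, pika's BlockingChannel provides cancel() for the consumer that consume() created, after which the connection can be closed. A hedged sketch of that cleanup as a standalone function (the queue name and the stop_flag callable are illustrative, not from the original answer):
import pika

def consume_until(stop_flag):
    """Consume from "queue" until stop_flag() returns True, then clean up."""
    connection = pika.BlockingConnection(pika.ConnectionParameters())
    channel = connection.channel()
    channel.queue_declare("queue")
    try:
        for message in channel.consume("queue", inactivity_timeout=1):
            if stop_flag():
                break
            if not message:
                continue
            method, properties, body = message
            print(body)
    finally:
        # cancel() tears down the consumer created by consume() and
        # returns the number of messages requeued to the broker
        requeued = channel.cancel()
        print("Requeued {} messages".format(requeued))
        connection.close()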

Python queue linking object running asyncio coroutines with main thread input

I have a script running where the main thread takes input from stdin and then passes it to a child thread using a queue. In the child thread I'm using asyncio coroutines to spin up a listener on a socket and wait for connections. Once a connection is made I can now send data through the listener from the main thread.
It all seems to work well enough, but since asyncio.BaseEventLoop is not thread safe, am I going to run into problems?
This is my attempt to solve the problem of using a blocking library like python's cmd module with asyncio.
My code is below.
import sys
import asyncio
from time import sleep
from threading import Thread
from queue import Queue

stdin_q = Queue()
clients = {}  # task -> (reader, writer)

def client_connected_handler(client_reader, client_writer):
    # Start a new asyncio.Task to handle this specific client connection
    task = asyncio.Task(handle_client(client_reader, client_writer))
    clients[task] = (client_reader, client_writer)

    def client_done(task):
        # When the task that handles the specific client connection is done
        del clients[task]

    # Add the client_done callback to be run when the future becomes done
    task.add_done_callback(client_done)

@asyncio.coroutine
def handle_client(client_reader, client_writer):
    # Handle the requests for a specific client with a line oriented protocol
    while True:
        cmd = yield from get_input()
        client_writer.write(cmd.encode())
        data = yield from client_reader.read(1024)
        print(data.decode(), end="", flush=True)

@asyncio.coroutine
def get_input():
    while True:
        try:
            return stdin_q.get()
        except:
            pass

class Control:
    def start(self):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        self.loop = asyncio.get_event_loop()
        server = self.loop.run_until_complete(asyncio.start_server(client_connected_handler, '0.0.0.0', 2222))
        self.loop.run_forever()
        self.stop()

    def stop(self):
        self.loop.stop()
        self.loop.close()

def fire_control():
    con = Control()
    con.start()

if __name__ == "__main__":
    stdin_q.put("\n")
    t = Thread(target=fire_control)
    t.start()
    sleep(2)
    _cmd = ""
    while _cmd.lower() != "exit":
        _cmd = input("")
        if _cmd == "":
            _cmd = "\r\n"
        stdin_q.put(_cmd)
This isn't going to work quite right, because the call to stdin_q.get() is going to block your event loop. This means that if your server has multiple clients, all of them will be completely blocked by whichever one happens to get to stdin_q.get() first, until you send data into the queue. The simplest way to get around this is to use BaseEventLoop.run_in_executor to run the stdin_q.get in a background ThreadPoolExecutor, which allows you to wait for it without blocking the event loop:
@asyncio.coroutine
def get_input():
    loop = asyncio.get_event_loop()
    return (yield from loop.run_in_executor(None, stdin_q.get))  # None == use default executor.
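If you'd rather control which thread services those blocking get() calls, you can pass an explicit executor instead of None. A small sketch; the single-worker pool here is my assumption, not something from the original answer:
from concurrent.futures import ThreadPoolExecutor

stdin_executor = ThreadPoolExecutor(max_workers=1)  # dedicated thread for stdin reads

@asyncio.coroutine
def get_input():
    loop = asyncio.get_event_loop()
    return (yield from loop.run_in_executor(stdin_executor, stdin_q.get))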
Edit (1/27/16):
There is a library called janus, which provides an asyncio-friendly, thread-safe queue implementation.
Using that library, your code would look like this (I left out unchanged parts):
...
import janus

loop = asyncio.new_event_loop()
stdin_q = janus.Queue(loop=loop)
...

@asyncio.coroutine
def get_input():
    loop = asyncio.get_event_loop()
    return (yield from stdin_q.async_q.get())

class Control:
    def start(self):
        asyncio.set_event_loop(loop)
        self.loop = asyncio.get_event_loop()
        server = self.loop.run_until_complete(asyncio.start_server(client_connected_handler, '0.0.0.0', 2222))
        self.loop.run_forever()
        self.stop()

    def stop(self):
        self.loop.stop()
        self.loop.close()
...

if __name__ == "__main__":
    stdin_q.sync_q.put("\n")
    t = Thread(target=runner)
    t.start()
    sleep(2)
    _cmd = ""
    while _cmd.lower() != "exit":
        _cmd = input("")
        if _cmd == "":
            _cmd = "\r\n"
        stdin_q.sync_q.put(_cmd)
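One caveat if you try this with a current janus release (an observation of mine, not part of the original answer): newer versions dropped the loop= argument, and the queue must be created while an event loop is already running, roughly like this:
import asyncio
import janus

async def main():
    stdin_q = janus.Queue()  # created inside the running loop in recent janus versions
    # hand stdin_q.sync_q to the input thread, await stdin_q.async_q.get() in coroutines
    ...

asyncio.run(main())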
