Python: consume RabbitMQ and run a SocketIO server

Setup
I have a Python application that should consume messages from RabbitMQ and act as a Socket.IO server for a Vue2 app. When it receives a message from RabbitMQ, it should send out a message over Socket.IO to the Vue2 app. I therefore wrote two classes, RabbitMQHandler and SocketIOHandler, and I start the RabbitMQHandler in a separate thread so that the RabbitMQ consumer and the WSGI server can run in parallel.
Code
import random
import threading
import socketio
import eventlet
import sys
import os
import uuid
import pika
from dotenv import load_dotenv
import logging
class RabbitMQHandler():
    def __init__(self, RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP):
        self.queue_name = 'myqueue'
        self.exchange_name = 'myqueue'
        credentials = pika.PlainCredentials(RABBITMQ_USER, RABBITMQ_PW)
        self.connection = pika.BlockingConnection(pika.ConnectionParameters(RABBITMQ_IP, 5672, '/', credentials))
        self.channel = self.connection.channel()
        self.channel.queue_declare(queue=self.queue_name)
        self.channel.exchange_declare(exchange=self.exchange_name, exchange_type='fanout')
        self.channel.queue_bind(exchange=self.exchange_name, queue=self.queue_name)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.close()

    def run(self, callback):
        logging.info('start consuming messages...')
        self.channel.basic_consume(queue=self.queue_name, auto_ack=True, on_message_callback=callback)
        self.channel.start_consuming()
class SocketIOHandler():
    def __init__(self):
        self.id = str(uuid.uuid4())
        # create a Socket.IO server
        self.sio = socketio.Server(async_mode='eventlet', cors_allowed_origins='*')
        # wrap with a WSGI application
        self.app = socketio.WSGIApp(self.sio)
        self.sio.on('connect_to_backend', self.handle_connect)
        self.sio.on('random_number', self.handle_random_number)

    def handle_connect(self, sid, msg):
        logging.info('new socket io message')
        self.emit('connect_success', {
            'success': True,
        })

    def handle_random_number(self, sid, msg):
        logging.info('handle_random_number')
        self.emit('response_random_number', {'number': random.randint(0, 10)})

    def emit(self, event, msg):
        logging.info('socket server: {}'.format(self.id))
        logging.info('sending event: "{}"'.format(event))
        self.sio.emit(event, msg)
        logging.info('sent event: "{}"'.format(event))

    def run(self):
        logging.info('start web socket on port 8765...')
        eventlet.wsgi.server(eventlet.listen(('', 8765)), self.app)
def start_rabbitmq_handler(socketio_handler, RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP):
    def callback(ch, method, properties, body):
        logging.info('rabbitmq handler')
        socketio_handler.emit('response_random_number', {'number': random.randint(0, 10)})

    with RabbitMQHandler(RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP) as rabbitmq_handler:
        rabbitmq_handler.run(callback=callback)
threads = []

def main():
    global threads
    load_dotenv()
    RABBITMQ_USER = os.getenv('RABBITMQ_USER')
    RABBITMQ_PW = os.getenv('RABBITMQ_PW')
    RABBITMQ_IP = os.getenv('RABBITMQ_IP')

    socketio_handler = SocketIOHandler()
    rabbitmq_thread = threading.Thread(target=start_rabbitmq_handler, args=(socketio_handler, RABBITMQ_USER, RABBITMQ_PW, RABBITMQ_IP))
    threads.append(rabbitmq_thread)
    rabbitmq_thread.start()
    socketio_handler.run()

if __name__ == '__main__':
    try:
        logging.basicConfig(level=logging.INFO)
        logging.getLogger("pika").propagate = False
        main()
    except KeyboardInterrupt:
        try:
            for t in threads:
                t.exit()
            sys.exit(0)
        except SystemExit:
            for t in threads:
                t.exit()
            os._exit(0)
Problem
The problem is that when the RabbitMQHandler receives a message, the response_random_number event does not get through to the Vue2 app, even though it is emitted in the callback function. When I send the random_number event from the Vue2 app to the Python application, I do get the response_random_number event back in the Vue2 app.
So all connections work on their own, but not together. My guess would be that there is some sort of threading communication error. I added the id to the SocketIOHandler class to make sure the same instantiated object is used, and the printed ids are the same.
The logs 'socket server: ...', 'sending event: ...' and 'sent event: ...' tell me that the function is being called correctly.
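For what it's worth, a frequent cause of exactly this symptom with async_mode='eventlet' is that sio.emit is called from a plain OS thread, outside eventlet's event loop, so the event never reaches the clients. A minimal sketch of the commonly suggested workaround is to monkey-patch the standard library before anything else is imported, so the RabbitMQ thread becomes an eventlet green thread (this assumes the rest of the program stays exactly as above; an alternative along the same lines is spawning the consumer with sio.start_background_task instead of threading.Thread):

import eventlet
# must be the very first thing the program does, before threading, pika,
# etc. are imported, so that blocking primitives are replaced by their
# green equivalents (assumption: the remaining code is unchanged)
eventlet.monkey_patch()

import threading  # now eventlet-aware
# ... remaining imports and the classes above ...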

Related

How to call Python Tornado Websocket Server inside another Python

I would like to run a Python Tornado websocket server inside another Python (main) program and trigger sending messages when needed. The main program creates two threads: one for the Tornado server and the other for my loop that triggers the messages.
When I start the server directly, it works fine, but because it runs an endless loop the rest of the main file never runs. So I start the server inside a thread, but this time I receive "RuntimeError: There is no current event loop in thread 'Thread-1 (start_server)'".
Main.py
import tornadoserver
import time
from threading import Lock, Thread

class Signal:
    def __init__(self):
        # self.socket = tornadoserver.initiate_server()
        print("start")

    def start_server(self):
        print("start Server")
        self.socket = tornadoserver.initiate_server()

    def brd(self):
        print("start Broad")
        i = 0
        while True:
            time.sleep(3)
            self.socket.send(i)
            i = i + 1

    def job(self):
        # --------Main--------
        threads = []
        for func in [self.start_server, self.brd, ]:
            threads.append(Thread(target=func))
            threads[-1].start()
        for thread in threads:
            thread.join()

Signal().job()
tornadoserver.py
import tornado.web
import tornado.httpserver
import tornado.ioloop
import tornado.websocket as ws
from tornado.options import define, options
import time

define('port', default=4041, help='port to listen on')
ws_clients = []

class web_socket_handler(ws.WebSocketHandler):
    @classmethod
    def route_urls(cls):
        return [(r'/', cls, {}), ]

    def simple_init(self):
        self.last = time.time()
        self.stop = False

    def open(self):
        self.simple_init()
        if self not in ws_clients:
            ws_clients.append(self)
            print("New client connected")
            self.write_message("You are connected")

    def on_message(self, message):
        if self in ws_clients:
            print("received message {}".format(message))
            self.write_message("You said {}".format(message))
            self.last = time.time()

    def on_close(self):
        if self in ws_clients:
            ws_clients.remove(self)
            print("connection is closed")
            self.loop.stop()

    def check_origin(self, origin):
        return True

    def send_message(self, message):
        self.write_message("You said {}".format(message))

def send(message):
    for c in ws_clients:
        c.write_message(message)

def initiate_server():
    # create a tornado application and provide the urls
    app = tornado.web.Application(web_socket_handler.route_urls())
    # setup the server
    server = tornado.httpserver.HTTPServer(app)
    server.listen(options.port)
    # start io/event loop
    tornado.ioloop.IOLoop.instance().start()
Using Google I found this Tornado issue:
Starting server in separate thread gives... RuntimeError: There is no current event loop in thread 'Thread-4' · Issue #2308 · tornadoweb/tornado
It shows that you have to use
asyncio.set_event_loop(asyncio.new_event_loop())
to run an event loop in a new thread.
Something like this
import asyncio

# ...

def initiate_server():
    asyncio.set_event_loop(asyncio.new_event_loop())  # <---
    # create a tornado application and provide the urls
    app = tornado.web.Application(web_socket_handler.route_urls())
    # setup the server
    server = tornado.httpserver.HTTPServer(app)
    server.listen(options.port)
    # start io/event loop
    tornado.ioloop.IOLoop.instance().start()
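Stripped of the Tornado specifics, the pattern the linked issue prescribes is that any thread which wants to run an event loop must create and install its own; only the main thread gets one automatically. A minimal, framework-free sketch of that pattern (assuming Python 3.5+):

import asyncio
import threading

def loop_worker():
    # each non-main thread must create and install its own event loop
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.run_forever()

t = threading.Thread(target=loop_worker, daemon=True)
t.start()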

Consume RabbitMq using Pika and push using Socket.io

I am building a service that receives messages from RabbitMQ using pika and pushes them to clients using Socket.IO.
The Socket.IO server and the pika consumer both block the main thread.
The same situation arises with Celery under Flask or Django.
What is the proper approach to solving this and running them both in the same process?
You can use the Pub/Sub model: start the consuming process in another thread, register each user that wants to receive from the queue, and send data to the subscribed users.
import json
import time
import threading

import pika
import gevent
from flask import Flask
from flask_sockets import Sockets

connection_url = 'localhost'
channel_queue = 'test'

class PubSubListener(threading.Thread):
    def __init__(self, queue_name):
        threading.Thread.__init__(self)
        self.clients = []
        self.queue_name = queue_name
        connection = pika.BlockingConnection(pika.ConnectionParameters(connection_url))
        self.channel = connection.channel()
        self.channel.queue_declare(queue=self.queue_name)
        # register the consumer here; start_consuming() in run() does the blocking work
        self.channel.basic_consume(queue=self.queue_name,
                                   auto_ack=True,
                                   on_message_callback=self._callback)

    def run(self):
        self.channel.start_consuming()

    def publish(self, body):
        self.channel.basic_publish(exchange='',
                                   routing_key=self.queue_name,
                                   body=body)

    def subscribe(self, client):
        self.clients.append(client)

    def _callback(self, channel, method, properties, body):
        time.sleep(0.001)
        message = json.loads(body)
        print(message)
        self.send(message)

    def send(self, data):
        for client in self.clients:
            try:
                client.send(data)
            except Exception:
                self.clients.remove(client)

pslistener = PubSubListener(channel_queue)
app = Flask(__name__)
sockets = Sockets(app)

@sockets.route('/echo')
def echo_socket(ws):
    pslistener.subscribe(ws)
    while not ws.closed:
        gevent.sleep(0.1)

@app.route('/')
def hello():
    return 'Hello World!'

if __name__ == "__main__":
    from gevent import pywsgi
    from geventwebsocket.handler import WebSocketHandler

    pslistener.start()
    print("Started")
    server = pywsgi.WSGIServer(('', 5000), app, handler_class=WebSocketHandler)
    server.serve_forever()
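To exercise the flow end to end, something has to put a message on the queue. A minimal sketch of a pika producer, assuming the same local broker and 'test' queue used above:

import json
import pika

# publish one JSON message to the queue PubSubListener consumes from
connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()
channel.queue_declare(queue='test')  # idempotent; matches the listener's declare
channel.basic_publish(exchange='',
                      routing_key='test',
                      body=json.dumps({'hello': 'world'}))
connection.close()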

Integrate Autobahn|Python with aiohttp

I'm trying to integrate an aiohttp web server into a Crossbar+Autobahn system architecture.
In more detail, when the aiohttp server receives a certain API call, it has to publish a message to a Crossbar router.
I've seen this example on the official repos, but I have no clue how to integrate it into my application.
Ideally, I would like to be able to do this:
class SampleTaskController(object):
    async def handle_get_request(self, request: web.Request) -> web.Response:
        self.publisher.publish('com.myapp.topic1', 'Hello World!')
        return web.HTTPOk()
where self is an instance of SampleTaskController, which defines all the route handlers of the web server.
def main(argv):
    cfg_path = "./task_cfg.json"
    if len(argv) > 1:
        cfg_path = argv[0]
    logging.basicConfig(level=logging.DEBUG,
                        format=LOG_FORMAT)
    loop = zmq.asyncio.ZMQEventLoop()
    asyncio.set_event_loop(loop)
    app = web.Application(loop=loop)

    with open(cfg_path, 'r') as f:
        task_cfg = json.load(f)
    task_cfg['__cfg_path'] = cfg_path

    controller = SampleTaskController(task_cfg)
    controller.restore()
    app['controller'] = controller
    controller.setup_routes(app)
    app.on_startup.append(controller.on_startup)
    app.on_cleanup.append(controller.on_cleanup)

    web.run_app(app,
                host=task_cfg['webserver_address'],
                port=task_cfg['webserver_port'])
Notice that I'm using a zmq.asyncio.ZMQEventLoop because the server is also listening on a zmq socket, which is configured inside the controller.on_startup method.
Instead of using autobahn, I've also tried to publish the message to Crossbar using wampy, and it works, but the autobahn subscribers couldn't correctly parse the message.
# autobahn subscriber
class ClientSession(ApplicationSession):
    async def onJoin(self, details):
        self.log.info("Client session joined {details}", details=details)
        self.log.info("Connected: {details}", details=details)
        self._ident = details.authid
        self._type = u'Python'
        self.log.info("Component ID is {ident}", ident=self._ident)
        self.log.info("Component type is {type}", type=self._type)

        # SUBSCRIBE
        def gen_on_something(thing):
            def on_something(counter, id, type):
                print('----------------------------')
                self.log.info("'on_{something}' event, counter value: {message}", something=thing, message=counter)
                self.log.info("from component {id} ({type})", id=id, type=type)
            return on_something

        await self.subscribe(gen_on_something('landscape'), 'landscape')
        await self.subscribe(gen_on_something('nature'), 'nature')
-
# wampy publisher
async def publish():
    router = Crossbar(config_path='./crossbar.json')
    logging.getLogger().debug(router.realm)
    logging.getLogger().debug(router.url)
    logging.getLogger().debug(router.port)

    client = Client(router=router)
    client.start()
    result = client.publish(topic="nature", message=0)
    logging.getLogger().debug(result)
With this configuration the subscriber receives the published message, but it raises an exception while parsing it:
TypeError: on_something() got an unexpected keyword argument 'message'
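The traceback points directly at the mismatch: the subscriber's handler is declared as on_something(counter, id, type), while the wampy call publishes a keyword argument named message, which WAMP delivers as a kwarg the handler does not accept. Two possible ways to make the ends agree, sketched against the handler signature above (the argument values here are made up for illustration, and this assumes wampy forwards keyword arguments the same way it forwarded message):

# option 1: publish kwargs whose names match the handler's signature
result = client.publish(topic="nature", counter=0, id="wampy-pub", type="Python")

# option 2: make the handler tolerant of whatever arrives
def on_something(*args, **kwargs):
    print(args, kwargs)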
Recently I tried to use aiohttp and autobahn simultaneously. I reworked the example from the crossbar documentation (originally using twisted) and got the following code:
import asyncio
import logging

from aiohttp import web
from aiohttp.web_exceptions import HTTPOk, HTTPInternalServerError
from autobahn.asyncio.component import Component

# Setup logging
logger = logging.getLogger(__name__)

class WebApplication(object):
    """
    A simple Web application that publishes an event every time the
    url "/" is visited.
    """
    count = 0

    def __init__(self, app, wamp_comp):
        self._app = app
        self._wamp = wamp_comp
        self._session = None  # "None" while we're disconnected from WAMP router

        # associate ourselves with WAMP session lifecycle
        self._wamp.on('join', self._initialize)
        self._wamp.on('leave', self._uninitialize)

        self._app.router.add_get('/', self._render_slash)

    def _initialize(self, session, details):
        logger.info("Connected to WAMP router (session: %s, details: %s)", session, details)
        self._session = session

    def _uninitialize(self, session, reason):
        logger.warning("Lost WAMP connection (session: %s, reason: %s)", session, reason)
        self._session = None

    async def _render_slash(self, request):
        if self._session is None:
            return HTTPInternalServerError(reason="No WAMP session")
        self.count += 1
        self._session.publish(u"com.myapp.request_served", self.count, count=self.count)
        return HTTPOk(text="Published to 'com.myapp.request_served'")

def main():
    REALM = "crossbardemo"
    BROKER_URI = "ws://wamp_broker:9091/ws"
    BIND_ADDR = "0.0.0.0"
    BIND_PORT = 8080

    logging.basicConfig(
        level='DEBUG',
        format='[%(asctime)s %(levelname)s %(name)s:%(lineno)d]: %(message)s')
    logger.info("Starting aiohttp backend at %s:%s...", BIND_ADDR, BIND_PORT)

    loop = asyncio.get_event_loop()
    component = Component(
        transports=BROKER_URI,
        realm=REALM,
    )
    component.start(loop=loop)

    # When not using run() we also must start logging ourselves.
    import txaio
    txaio.start_logging(level='info')

    app = web.Application(
        loop=loop)
    _ = WebApplication(app, component)
    web.run_app(app, host=BIND_ADDR, port=BIND_PORT)

if __name__ == '__main__':
    main()

Twisted SSE server subscribed to Redis via pubsub

I'm trying to build a server in Twisted that lets clients connect using Server-Sent Events. I would also like this server to listen to Redis and, when a message arrives, push it to the connected SSE clients.
I have the SSE server working, and I know how to subscribe to Redis. I can't figure out how to have both pieces running without blocking each other.
I'm aware of https://github.com/leporo/tornado-redis and https://github.com/fiorix/txredisapi, which were recommended in related questions, but I don't see how they help.
How do I solve this? I'd appreciate both conceptual tips and code snippets.
My Twisted SSE server code:
# coding: utf-8
from twisted.web import server, resource
from twisted.internet import reactor

class Subscribe(resource.Resource):
    isLeaf = True
    sse_conns = set()

    def render_GET(self, request):
        request.setHeader('Content-Type', 'text/event-stream; charset=utf-8')
        request.write("")
        self.add_conn(request)
        return server.NOT_DONE_YET

    def add_conn(self, conn):
        self.sse_conns.add(conn)
        finished = conn.notifyFinish()
        finished.addBoth(self.rm_conn)

    def rm_conn(self, conn):
        self.sse_conns.remove(conn)

    def broadcast(self, event):
        for conn in self.sse_conns:
            event_line = "data: {}'\r\n'".format(event)
            conn.write(event_line + '\r\n')

if __name__ == "__main__":
    sub = Subscribe()
    reactor.listenTCP(9000, server.Site(sub))
    reactor.run()
My Redis subscribe code:
import redis

redis = redis.StrictRedis.from_url('redis://localhost:6379')

class RedisSub(object):
    def __init__(self):
        self.pubsub = redis.pubsub()
        self.pubsub.subscribe('foobar-channel')

    def listen(self):
        for item in self.pubsub.listen():
            print(str(item))
This is what works for me.
I ended up using the txredis lib with a slight change to the RedisClient (I added minimal subscribe capabilities).
# coding: utf-8
import os
import sys
import weakref

from txredis.client import RedisClient
from twisted.web import server, resource
from twisted.internet import reactor, protocol, defer
from twisted.python import log

from utils import cors, redis_conf_from_url

log.startLogging(sys.stdout)

PORT = int(os.environ.get('PORT', 9000))
REDIS_CONF = redis_conf_from_url(os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379'))
REDIS_SUB_CHANNEL = 'votes'

class RedisBroadcaster(RedisClient):
    def subscribe(self, *channels):
        self._send('SUBSCRIBE', *channels)

    def handleCompleteMultiBulkData(self, reply):
        if reply[0] == u"message":
            message = reply[1:][1]
            self.sse_connector.broadcast(message)
        else:
            super(RedisClient, self).handleCompleteMultiBulkData(reply)

@defer.inlineCallbacks
def redis_sub():
    clientCreator = protocol.ClientCreator(reactor, RedisBroadcaster, password=REDIS_CONF.get('password'))
    redis = yield clientCreator.connectTCP(REDIS_CONF['host'], REDIS_CONF['port'])
    redis.subscribe(REDIS_SUB_CHANNEL)

class Subscribe(resource.Resource):
    isLeaf = True
    sse_conns = weakref.WeakSet()

    @cors
    def render_GET(self, request):
        request.setHeader('Content-Type', 'text/event-stream; charset=utf-8')
        request.write("")
        self.sse_conns.add(request)
        return server.NOT_DONE_YET

    def broadcast(self, event):
        for conn in self.sse_conns:
            event_line = "data: {}\r\n".format(event)
            conn.write(event_line + '\r\n')

if __name__ == "__main__":
    sub = Subscribe()
    reactor.listenTCP(PORT, server.Site(sub))
    RedisBroadcaster.sse_connector = sub
    reactor.callLater(0, redis_sub)
    reactor.run()
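To drive the broadcast, anything that publishes on the subscribed Redis channel will do; for example, with redis-py (a sketch, assuming a local Redis and the same 'votes' channel as above):

import redis

# every SSE client connected to the Twisted server should receive this
r = redis.StrictRedis.from_url('redis://localhost:6379')
r.publish('votes', 'hello from redis')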

How can I use Tornado and Redis asynchronously?

I'm trying to find out how I can use Redis and Tornado asynchronously. I found tornado-redis, but I need more than just adding a yield to the code.
I have the following code:
import redis
import tornado.web
import tornado.ioloop

class WaiterHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    def get(self):
        client = redis.StrictRedis(port=6279)
        pubsub = client.pubsub()
        pubsub.subscribe('test_channel')
        for item in pubsub.listen():
            if item['type'] == 'message':
                print(item['channel'])
                print(item['data'])
                self.write(item['data'])
        self.finish()

class GetHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello world")

application = tornado.web.Application([
    (r"/", GetHandler),
    (r"/wait", WaiterHandler),
])

if __name__ == '__main__':
    application.listen(8888)
    print('running')
    tornado.ioloop.IOLoop.instance().start()
I need to be able to access the / url and get "Hello world" back while there is a request pending on /wait.
How can I do that?
You should not use Redis pub/sub in the main Tornado thread, as it will block the IO loop. You can handle the long polling from web clients in the main thread, but you should create a separate thread for listening to Redis. You can then use ioloop.add_callback() and/or a threading.Queue to communicate with the main thread when you receive messages.
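A minimal sketch of that layout, using the blocking redis-py client from the question and relying on the fact that IOLoop.add_callback is documented as safe to call from other threads (the redis_listener and deliver names are just for illustration):

import threading
import redis
import tornado.ioloop

def redis_listener(io_loop, on_message):
    # blocking pub/sub loop, kept off the IOLoop in its own thread
    client = redis.StrictRedis(port=6279)
    pubsub = client.pubsub()
    pubsub.subscribe('test_channel')
    for item in pubsub.listen():
        if item['type'] == 'message':
            # hand the payload over to the IOLoop thread
            io_loop.add_callback(on_message, item['data'])

def deliver(data):
    # runs on the IOLoop thread; finish pending /wait requests here
    print(data)

io_loop = tornado.ioloop.IOLoop.instance()
threading.Thread(target=redis_listener, args=(io_loop, deliver), daemon=True).start()
io_loop.start()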
You need to use a Tornado IOLoop compatible Redis client.
There are a few of them available: toredis, brukva, etc.
Here's a pubsub example in toredis: https://github.com/mrjoes/toredis/blob/master/tests/test_handler.py
For Python >= 3.3, I would advise you to use aioredis.
I did not test the code below, but it should be something like this:
import aioredis
import tornado.web
import tornado.httpserver
import tornado.ioloop
from tornado.ioloop import IOLoop

class WaiterHandler(tornado.web.RequestHandler):
    async def get(self):
        # 'localhost' stands in for the Redis host from the question
        client = await aioredis.create_redis(
            ('localhost', 6279), encoding="utf-8",
            loop=IOLoop.instance().asyncio_loop)
        # subscribe() populates client.channels with a Channel object
        await client.subscribe('test_channel')
        ch = client.channels['test_channel']
        result = None
        while await ch.wait_message():
            # ch.get() returns the payload that was published on the channel
            result = await ch.get()
            print(result)
        self.write(result)
        self.finish()

class GetHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello world")

application = tornado.web.Application([
    (r"/", GetHandler),
    (r"/wait", WaiterHandler),
])

if __name__ == '__main__':
    print('running')
    tornado.ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop')
    server = tornado.httpserver.HTTPServer(application)
    server.bind(8888)
    # zero means creating as many processes as there are cores.
    server.start(0)
    tornado.ioloop.IOLoop.instance().start()
Okay, so here's my example of how I would do it with GET requests.
I added two main components.
The first is a simple threaded pubsub listener which appends new messages to a local list object.
I also added list accessors to the class, so you can read from the listener thread as if you were reading from a regular list. As far as your web request is concerned, it is just reading data from a local list object. This returns immediately and doesn't block the current request from completing or future requests from being accepted and processed.
class OpenChannel(threading.Thread):
    def __init__(self, channel, host=None, port=None):
        threading.Thread.__init__(self)
        self.lock = threading.Lock()
        self.redis = redis.StrictRedis(host=host or 'localhost', port=port or 6379)
        self.pubsub = self.redis.pubsub()
        self.pubsub.subscribe(channel)
        self.output = []

    # lets implement basic getter methods on self.output, so you can access it like a regular list
    def __getitem__(self, item):
        with self.lock:
            return self.output[item]

    def __getslice__(self, start, stop=None, step=None):
        with self.lock:
            return self.output[start:stop:step]

    def __str__(self):
        with self.lock:
            return self.output.__str__()

    # thread loop
    def run(self):
        for message in self.pubsub.listen():
            with self.lock:
                self.output.append(message['data'])

    def stop(self):
        self._Thread__stop()
The second is the ApplicationMixin class. This is a secondary class that your web request handler inherits from in order to add functionality and attributes. In this case it checks whether a channel listener already exists for the requested channel, creates one if none is found, and returns the listener handle to the request handler.
# add a method to the application that will return existing channels
# or create non-existing ones and then return them
class ApplicationMixin(object):
    def GetChannel(self, channel, host=None, port=None):
        if channel not in self.application.channels:
            self.application.channels[channel] = OpenChannel(channel, host, port)
            self.application.channels[channel].start()
        return self.application.channels[channel]
The WebRequest class now treats the listener as if it were a static list (bearing in mind that you need to give self.write a string)
class ReadChannel(tornado.web.RequestHandler, ApplicationMixin):
    @tornado.web.asynchronous
    def get(self, channel):
        # get the channel
        channel = self.GetChannel(channel)
        # write out its entire contents as a list
        self.write('{}'.format(channel[:]))
        self.finish()  # not necessary?
Finally, after the application is created, I add an empty dictionary as an attribute:
# add a dictionary containing channels to your application
application.channels = {}
As well as some cleanup of the running threads once you exit the application:
# clean up the subscribed channels
for channel in application.channels:
    application.channels[channel].stop()
    application.channels[channel].join()
The complete code:
import threading
import redis
import tornado.web
class OpenChannel(threading.Thread):
def __init__(self, channel, host = None, port = None):
threading.Thread.__init__(self)
self.lock = threading.Lock()
self.redis = redis.StrictRedis(host = host or 'localhost', port = port or 6379)
self.pubsub = self.redis.pubsub()
self.pubsub.subscribe(channel)
self.output = []
# lets implement basic getter methods on self.output, so you can access it like a regular list
def __getitem__(self, item):
with self.lock:
return self.output[item]
def __getslice__(self, start, stop = None, step = None):
with self.lock:
return self.output[start:stop:step]
def __str__(self):
with self.lock:
return self.output.__str__()
# thread loop
def run(self):
for message in self.pubsub.listen():
with self.lock:
self.output.append(message['data'])
def stop(self):
self._Thread__stop()
# add a method to the application that will return existing channels
# or create non-existing ones and then return them
class ApplicationMixin(object):
def GetChannel(self, channel, host = None, port = None):
if channel not in self.application.channels:
self.application.channels[channel] = OpenChannel(channel, host, port)
self.application.channels[channel].start()
return self.application.channels[channel]
class ReadChannel(tornado.web.RequestHandler, ApplicationMixin):
#tornado.web.asynchronous
def get(self, channel):
# get the channel
channel = self.GetChannel(channel)
# write out its entire contents as a list
self.write('{}'.format(channel[:]))
self.finish() # not necessary?
class GetHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello world")
application = tornado.web.Application([
(r"/", GetHandler),
(r"/channel/(?P<channel>\S+)", ReadChannel),
])
# add a dictionary containing channels to your application
application.channels = {}
if __name__ == '__main__':
application.listen(8888)
print 'running'
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
pass
# clean up the subscribed channels
for channel in application.channels:
application.channels[channel].stop()
application.channels[channel].join()
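To see data show up at /channel/test_channel, publish something on that Redis channel, e.g. with redis-py (a sketch, assuming a local Redis on the default port):

import redis

# the OpenChannel thread subscribed to 'test_channel' appends the payload
# to its list, and the next GET to /channel/test_channel will include it
redis.StrictRedis(host='localhost', port=6379).publish('test_channel', 'hello')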
