Sharing a variable between different classes in Tornado - python

I am trying to write a Tornado TCP + HTTP server application.
My use case: the application accepts data from a TCP client and passes it along to be displayed on a webpage hosted by the HTTP server.
Here is my tornado server code:
#!/usr/bin/env python
import os.path
import tornado.httpserver
import tornado.web
import logging
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer
from tornado.options import options, define
define("port", default=6642, help="TCP port to listen on")
logger = logging.getLogger(__name__)
test = {}
class IndexHandler(tornado.web.RequestHandler):
    def get(self):
        global test
        self.render('index.html', test=test)

class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        global test
        while True:
            try:
                test = yield stream.read_until("\n")
                logger.info("Received bytes: %s", test)
            except StreamClosedError:
                logger.warning("Lost client at host %s", address[0])
                break
            except Exception as e:
                print(e)

if __name__ == "__main__":
    options.parse_command_line()
    app = tornado.web.Application(
        handlers=[(r'/', IndexHandler)],
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        template_path=os.path.join(os.path.dirname(__file__), "templates"))
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(options.port)
    server = EchoServer()
    server.listen(6641)
    logger.info("Listening on TCP port %d", 6641)
    IOLoop.current().start()
Here is the Python client code:
# echo_client.py
import socket
import time
counter = 0
host = '192.168.43.59'
port = 6641 # The same port as used by the server
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
while True:
    s.sendall("s\n")
    counter = counter + 1
    time.sleep(5)
I want to pass the data received from the TCP client into the variable "test" and render it in the index.html template, but no data is displayed.
I am using a global variable, but without success: the updated "test" value never reaches the index.html page.
If anyone could throw light on sharing variables across different classes or handlers, it would help me.
The JavaScript file I am using is this:
/* test.js */
var test = "";

function set_test(val)
{
    test = val;
}

function show_test()
{
    alert(test);
}
The HTML template used is this :
<!DOCTYPE html>
<html>
  <meta http-equiv="refresh" content="30" />
  <head>
    <title>Test</title>
    <script src="{{ static_url('scripts/test.js') }}"
            type="application/javascript"></script>
  </head>
  <body>
    <input type="button" onclick="show_test()" value="alert" />
    <script type="application/javascript">
      set_test("{{test}}");
    </script>
  </body>
</html>
Hi xyres,
Thank you for your quick reply. I went through the link you provided, and I understand now that q.get() and q.put() can be used to store and retrieve data, as you said. But after modifying the Tornado server code in the following manner I can no longer receive data from the TCP client, whereas before I could at least get data from it. Can you tell me what mistake I have made in the queue implementation?
Here is my modified Tornado server code:
#!/usr/bin/env python
import os.path
import tornado.httpserver
import tornado.web
import logging
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer
from tornado.options import options, define
define("port", default=6642, help="TCP port to listen on")
logger = logging.getLogger(__name__)
#test = {}
q = Queue(maxsize=2)

class IndexHandler(tornado.web.RequestHandler):
    def get(self):
        #global test
        test = yield q.get
        self.render('index.html', test=test)

class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        #global test
        yield q.put(test)
        yield q.join()
        while True:
            try:
                test = yield stream.read_until("\n")
                logger.info("Received bytes: %s", test)
            except StreamClosedError:
                logger.warning("Lost client at host %s", address[0])
                break
            except Exception as e:
                print(e)

if __name__ == "__main__":
    options.parse_command_line()
    app = tornado.web.Application(
        handlers=[(r'/', IndexHandler)],
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        template_path=os.path.join(os.path.dirname(__file__), "templates"))
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(options.port)
    server = EchoServer()
    server.listen(6641)
    logger.info("Listening on TCP port %d", 6641)
    IOLoop.current().start()
As per the Tornado documentation it seems that queues can be used with coroutines, and here I am trying to apply the same idea across two different classes. Is that a mistake? I am new to Tornado, so please bear with my silly questions.

You have multiple options:
If you want a long-running connection, for example, if a client sends a request to IndexHandler and you want the client to wait until a message is in the queue, you can convert your handler to a coroutine.
If you want to return the response immediately, regardless of whether data is available in the queue, you can use the queue's get_nowait() method.
Example for case #1:
from tornado.queues import Queue

q = Queue()

class IndexHandler(tornado.web.RequestHandler):
    @gen.coroutine
    def get(self):
        self.data_future = q.get()
        data = yield self.data_future
        self.render('index.html', data=data)

    def on_connection_close(self):
        # set an empty result on the future if the connection
        # is closed, so that messages don't get removed from
        # the queue unnecessarily for closed connections
        self.data_future.set_result(None)
Example for case #2:
from tornado.queues import Queue, QueueEmpty

q = Queue()

class IndexHandler(tornado.web.RequestHandler):
    def get(self):
        try:
            data = q.get_nowait()
        except QueueEmpty:
            data = None
        self.render(...)
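Whichever option you pick, something still has to put the incoming TCP data on the queue. A minimal sketch of the producer side, reusing the EchoServer and logger names from the question (and assuming Python 3 byte strings for read_until):
class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        while True:
            try:
                data = yield stream.read_until(b"\n")
                yield q.put(data)  # hand each received line to a waiting GET handler
            except StreamClosedError:
                logger.warning("Lost client at host %s", address[0])
                break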

Related

Web server in python in plainText

I am looking for a way to expose a text file with a Python web server.
I found some Python code to run a web server:
import http.server
import socketserver
port = 9500
address = ("", port)
handler = http.server.SimpleHTTPRequestHandler
httpd = socketserver.TCPServer(address, handler)
print(f"Serveur démarré sur le PORT {port}")
httpd.serve_forever()
It works fine, but I would like to:
Run a web server exposing text/plain content (and not HTML content).
Set the working path and the name of the index file manually (default: index.html).
Keep the Python server code simple and light.
I found some help on the web :
handler.extensions_map['Content-type'] = 'text/plain'
or
handler.send_header('Content-Type','text/plain')
But neither of these suggestions works.
Could you help me build a simple Python script to do this?
Thanks a lot,
Script for Python 2 using only built-in modules; just place the absolute path of the file you want to serve at <INSERT_FILE>:
#!/usr/bin/python
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
from io import StringIO
import sys
import os
class MyHandler(SimpleHTTPRequestHandler):
    def send_head(self):
        # Place here the absolute path of the file
        with open("<INSERT_FILE>", "r") as f:
            body = unicode("".join(f.readlines()))
        self.send_response(200)
        # use "text/plain; charset=UTF-8" here to serve the file as plain text
        self.send_header("Content-type", "text/html; charset=UTF-8")
        self.send_header("Content-Length", str(len(body)))
        #self.send_header("Server", "SimpleHTTP/1.1 Python/2.7.5")
        self.end_headers()
        # text I/O binary, and raw I/O binary
        # initial value must be unicode or None
        return StringIO(body)

if __name__ == "__main__":
    HandlerClass = MyHandler
    ServerClass = BaseHTTPServer.HTTPServer
    Protocol = "HTTP/1.1"
    server_address = ('', 5555)
    HandlerClass.protocol_version = Protocol
    httpd = ServerClass(server_address, HandlerClass)
    print("serving on port 5555")
    httpd.serve_forever()
For Python 3 (the SimpleHTTPServer module has been merged into http.server), again place the absolute path at <INSERT_FILE>:
from http.server import HTTPServer, BaseHTTPRequestHandler
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        # place absolute path here
        f_served = open('<INSERT_FILE>', 'rb')
        f_content = f_served.read()
        f_served.close()
        self.wfile.write(f_content)

if __name__ == "__main__":
    httpd = HTTPServer(('localhost', 5555), SimpleHTTPRequestHandler)
    httpd.serve_forever()
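If you also want the response to be declared as plain text (which is what the question asks for), a small variation of the handler above, shown here only as a sketch (PlainTextHandler is just an illustrative name), sets the Content-Type header before end_headers():
class PlainTextHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain; charset=utf-8')  # force plain text
        self.end_headers()
        # place absolute path here
        with open('<INSERT_FILE>', 'rb') as f_served:
            self.wfile.write(f_served.read())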
I recommend using aiohttp with its low-level server, which is described here:
You can either return plain text, or change the content type of your web.Response to text/html to send data that will be interpreted as HTML.
You can simply replace the "OK" in text="OK" with whatever plain text you wish, or replace it with the contents of your *.html file and change the content_type.
import asyncio
from aiohttp import web
async def handler(request):
    return web.Response(text="OK")

async def main():
    server = web.Server(handler)
    runner = web.ServerRunner(server)
    await runner.setup()
    site = web.TCPSite(runner, 'localhost', 8080)
    await site.start()
    print("======= Serving on http://127.0.0.1:8080/ ======")
    # pause here for very long time by serving HTTP requests and
    # waiting for keyboard interruption
    await asyncio.sleep(100*3600)

loop = asyncio.get_event_loop()
try:
    loop.run_until_complete(main())
except KeyboardInterrupt:
    pass
loop.close()
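For example, the same handler pattern with an explicit content type (hypothetical handler names, same web.Response API):
async def text_handler(request):
    # served as plain text
    return web.Response(text="some plain text", content_type="text/plain")

async def html_handler(request):
    # same call, but the browser will render it as HTML
    return web.Response(text="<h1>Hello</h1>", content_type="text/html")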

How to send message from server to client using Flask-Socket IO

I'm trying to create a Python app that can send a message from the server to the client. Currently I'm using the sample code from here. It's a chat app and it works fine. I tried to modify the app and add a new function on the server-side Python code that prints a message "Dummy" to the client, but it seems it didn't work.
Here's my html code:
index.html
<body>
  <ul id="messages"></ul>
  <ul id="output"></ul>
  <form action="">
    <input id="m" autocomplete="off" /><button>Send</button>
  </form>
  <script src="{{url_for('static', filename='assets/vendor/socket.io.min.js')}}"></script>
  <script src="{{url_for('static', filename='assets/vendor/jquery.js')}}"></script>
  <script>
    var socket = io.connect('http://127.0.0.1:5000/chat');
    $('form').submit(function(){
      socket.emit('chat message', $('#m').val());
      $('#m').val('');
      return false;
    });
    socket.on('chat message', function(msg){
      $('#messages').html($('<li>').text(msg));
    });
    socket.on('output', function(msg){
      alert(msg);
      $('#messages').html($('<li>').text(msg));
    });
  </script>
</body>
Here's my backend code:
web_app.py
from flask import Flask
from flask import render_template
from flask_socketio import SocketIO
from flask_socketio import emit
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)
connected = False

def socket_onload(json):
    socketio.emit('output', str(json), namespace='/chat')
    print('received message: ' + str(json))

@socketio.on('chat message', namespace='/chat')
def handle_chat_message(json):
    print('received message: ' + str(json))
    emit('chat message', str(json), broadcast=True)

@socketio.on('connect')  # global namespace
def handle_connect():
    global connected
    connected = True
    print('Client connected')

@socketio.on('connect', namespace='/chat')
def handle_chat_connect():
    print('Client connected to chat namespace')
    emit('chat message', 'welcome!')

@socketio.on('disconnect', namespace='/chat')
def test_disconnect():
    print('Client disconnected')

@app.route('/')
def index():
    return render_template('index.html')

@app.route('/blah/')
def blah():
    return render_template('blah.html')
main.py
import web_app
import threading
import time
def main():
    import web_app
    webapp_thread = threading.Thread(target=run_web_app)
    webapp_thread.start()
    # webapp_thread = threading.Thread(target=run_web_app, args=(i,))
    while web_app.connected == False:
        print "waiting for client to connect"
        time.sleep(1)
        pass
    print "Connected..."
    time.sleep(3)
    print "Trying to print dummy message..."
    web_app.socket_onload("Dummy")

def run_web_app():
    web_app.socketio.run(web_app.app)

if __name__ == '__main__':
    main()
I can see "received message: Dummy" in the terminal but nothing's change on the web browser.
You have two mistakes which prevent this from working:
First, you are trying to emit an event with socket.io outside of the socket context.
When a function is wrapped with the @socketio.on decorator, it becomes an event handler.
When an event is fired on the server side, the server searches for the right handler to handle the event and initializes the context for the specific client that emitted the event.
Without this context initialization, your socketio.emit('output', str(json), namespace='/chat') will do nothing, because the server doesn't know to whom it should emit the response.
Anyway, there is a little trick for emitting events manually to a specific client (even if you are not in its context). Each time a socket is opened, the server assigns it to a "private" room with the same name as the socket id (sid). So in order to send a message to a client outside of that client's context, you can keep a list of connected client ids and call the emit function with the room=<id> argument.
For example:
web_app.py:
...
from flask import Flask, request

clients = []

@socketio.on('connect')
def handle_connect():
    print('Client connected')
    clients.append(request.sid)

@socketio.on('disconnect')
def handle_disconnect():
    print('Client disconnected')
    clients.remove(request.sid)

def send_message(client_id, data):
    socketio.emit('output', data, room=client_id)
    print('sending message "{}" to client "{}".'.format(data, client_id))
...
Then you would probably use it as follows:
main.py:
import web_app
import threading
import time
def main():
    webapp_thread = threading.Thread(target=run_web_app)
    webapp_thread.start()
    while not web_app.clients:
        print "waiting for client to connect"
        time.sleep(1)
    print "Connected..."
    time.sleep(3)
    print "Trying to print dummy message..."
    web_app.send_message(web_app.clients[0], "Dummy")
...
But even if you try this, it will not work (which brings us to the second mistake).
Second, you are mixing eventlet with regular Python threads, and that is not a good idea. The green threads that eventlet uses do not work well with regular threads; instead, you should use green threads for all your threading needs.
One option, which I found on the internet, is to monkey-patch the Python standard library so that threading, sockets, etc. are replaced with eventlet-friendly versions. You can do this at the very top of your main.py script:
import eventlet
eventlet.monkey_patch()
After that it should work fine (I tried it on my own machine). Let me know if you have any other problems.
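For clarity, the very top of main.py would then start roughly like this (a sketch; the patch has to run before anything else imports threading or sockets):
# main.py
import eventlet
eventlet.monkey_patch()  # replace threading/socket with eventlet-friendly versions

import web_app
import threading
import time
...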
These decorators simply load the pages index.html and blah.html; they don't pass any variables or write anything to the template, they just render it. Meanwhile, everything you are doing is printed to the command line; if you want to print or pass anything to the page, it has to happen in these functions:
@app.route('/')
def index():
    return render_template('index.html')

@app.route('/blah/')
def blah():
    return render_template('blah.html')

Serve proxy app through sockets Tornado 4.4

I have a little question.
I'm new to the whole socket theory and the Tornado framework. Lately I've written a proxy server using the Tornado framework. My app stands in the middle of a connection client (browser) <===> remote address, so the connection looks like this:
client (my web browser) <=== PROXY (my app) ===> remote address (stackoverflow.com)
Everything works fine if I use the standard "main" function, but I want to go a little more low-level: I want to create the sockets myself and serve my proxy app through them.
My proxy app urls:
# coding: utf-8
"""URL's for proxy app."""
from settings import settings
from tornado.web import (
    StaticFileHandler,
    url,
)
from handlers import (
    mainHandlers,
    myProxy,
)

urls = [
    url(r"/admin/$", mainHandlers.MainHandler),
    url(r"/admin/delete_filter/", mainHandlers.DataDeleteHandler),
    url(r"/admin/filters/$", mainHandlers.DataGetter),
    url(r"/admin/new_filter/$", mainHandlers.FormHandler),
    url(r"/admin/stats/$", mainHandlers.StatsTableHandler),
    url(r"/admin/stats/query/$", mainHandlers.AjaxStatsGetHandler),
    url(r"/static/", StaticFileHandler, dict(path=settings['static_path'])),
    url(r'.*', myProxy.ProxyHandler),
]
My ProxyHandler:
class ProxyHandler(tornado.web.RequestHandler):
    SUPPORTED_METHODS = ['GET', 'POST']

    def data_received(self, chunk):
        pass

    def compute_etag(self):
        return None  # disable tornado Etag

    def handle_response(self, response):
        if response.error and not isinstance(response.error, tornado.httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = tornado.httputil.HTTPHeaders()  # clear tornado default header
            for header, v in response.headers.get_all():
                if header not in ('Content-Length', 'Transfer-Encoding', 'Content-Encoding', 'Connection'):
                    self.add_header(header, v)  # some headers appear multiple times, eg 'Set-Cookie'
            secured_page = False
            for page in secure_pages:
                if page in self.request.uri:
                    secured_page = True
                    self.set_header('Content-Length', len(response.body))
                    self.write(response.body)
                    break
            if response.body and not secured_page:
                c.execute('SELECT filter_name FROM filters WHERE filter_type=1')
                tags = c.fetchall()
                soup = BeautifulSoup(response.body, 'html.parser')
                for row in tags:
                    catched_tags = soup.find_all(str(row[0]))
                    if catched_tags:
                        print 'catched: %s of <%s> tags' % (len(catched_tags), str(row[0]))
                        for tag in catched_tags:
                            tag.extract()
                new_body = str(soup)
                self.set_header('Content-Length', len(new_body))
                self.write(new_body)
        self.finish()

    @tornado.web.asynchronous
    def get(self):
        logger.debug('Handle %s request to %s', self.request.method, self.request.uri)
        body = self.request.body
        if not body:
            body = None
        try:
            if 'Proxy-Connection' in self.request.headers:
                del self.request.headers['Proxy-Connection']
            c.execute('SELECT filter_name FROM filters WHERE filter_type=2')
            urls = c.fetchall()
            for url in urls:
                if url[0] in self.request.path:
                    self.set_status(403)
                    self.finish()
                    return
            fetch_request(self.request.uri, self.handle_response,
                          method=self.request.method, body=body, headers=self.request.headers,
                          follow_redirects=False, allow_nonstandard_methods=True)
        except tornado.httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

    @tornado.web.asynchronous
    def post(self):
        return self.get()
The easy main function:
# coding: utf-8
import sys
import tornado.web
from tornado.options import options
from configuration.application import MyApplication
from proxy.urls import proxy_urls
def make_app():
    """Create my application with my settings and urls."""
    return MyApplication(proxy_urls)

if __name__ == "__main__":
    u"""Main loop."""
    app = make_app()
    port = options.port
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    app.listen(port)
    print 'tornado working on port %s' % port
    tornado.ioloop.IOLoop.current().start()
So I want to change the easy way to the low-level way based on the docs:
import errno
import functools
import tornado.ioloop
import socket
def connection_ready(sock, fd, events):
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        connection.setblocking(0)
        handle_connection(connection, address)

if __name__ == '__main__':
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(0)
    sock.bind(("", port))
    sock.listen(128)
    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    io_loop.start()
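(Note: the snippet above assumes a handle_connection(connection, address) function defined elsewhere. One hypothetical way to fill it in, wrapping the accepted non-blocking socket in a tornado.iostream.IOStream and echoing a line back in the Tornado 4.x callback style, would be:)
from tornado.iostream import IOStream

def handle_connection(connection, address):
    # wrap the already-accepted, non-blocking socket in an IOStream
    stream = IOStream(connection)

    def echo_line(line):
        # write the received line back, then close the stream
        stream.write(line, stream.close)

    stream.read_until(b"\n", echo_line)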
To accomplish this I read a little about network programming with sockets (https://www.tutorialspoint.com/python/python_networking.htm).
The example in the tutorial works well, so I tried to combine the tutorial with the example from the Tornado docs:
# coding: utf-8
import errno
import functools
import socket
import sys
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.process
import tornado.web
from tornado.options import options
from configuration.application import MyApplication
def make_app():
    u"""Create my application with my settings and urls."""
    return MyApplication()

def connection_ready(sock, fd, events):
    u"""Function to handle an incoming connection."""
    proxy_app = make_app()
    server = tornado.httpserver.HTTPServer(proxy_app)
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        print 'Got connection from', address
        # connection.setblocking(False)
        connection.send(server)
        connection.close()

if __name__ == "__main__":
    u"""Main loop."""
    port = options.port
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(False)
    sock.bind(('', port))
    sock.listen(5)
    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    print 'Tornado Proxy working on port: %s' % port
    io_loop.start()
But when I try to connect to my proxy (for example, to add filters at http://127.0.0.1:8000/admin/filters/ - I have a handler written for this URL),
I get this specific error:
ERROR:tornado.application:Exception in callback (3, )
Traceback (most recent call last):
  File "/home/dave/.virtualenvs/teleV1/local/lib/python2.7/site-packages/tornado/ioloop.py", line 887, in start
    handler_func(fd_obj, events)
  File "/home/dave/.virtualenvs/teleV1/local/lib/python2.7/site-packages/tornado/stack_context.py", line 275, in null_wrapper
    return fn(*args, **kwargs)
  File "manage.py", line 35, in connection_ready
    connection.send(server)
TypeError: send() argument 1 must be convertible to a buffer, not HTTPServer
I understand that I cannot send an HTTPServer through the connection (from one end to the other); it must be a buffer.
My first idea was to send the buffer from the handler that handles a URL (for example class ProxyHandler(tornado.web.RequestHandler)),
but how can I handle different URLs and different handlers?
After a few approaches I rewrote my proxy app from a Tornado app to pure Python code that handles responses from remote addresses and does the filtering. I think this is the best and fastest thing I could do.
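For completeness, Tornado itself can serve the same Application on sockets you create yourself, without writing the accept() loop by hand. A sketch using tornado.netutil.bind_sockets and HTTPServer.add_sockets (Tornado 4.x APIs), reusing MyApplication and proxy_urls from above:
import tornado.httpserver
import tornado.ioloop
import tornado.netutil

from configuration.application import MyApplication
from proxy.urls import proxy_urls

if __name__ == "__main__":
    sockets = tornado.netutil.bind_sockets(8000)  # create and bind the listening sockets
    server = tornado.httpserver.HTTPServer(MyApplication(proxy_urls))
    server.add_sockets(sockets)  # let HTTPServer run the accept loop on them
    tornado.ioloop.IOLoop.current().start()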

Either the websocket server or the Tornado server goes down every time.

I am new to asynchronous programming. I have been using Python 3.5 asyncio for a few days. I wanted to make a server capable of receiving data from a websocket machine client (GPS) as well as rendering an HTML page as the browser client for the websocket server. I have used websockets for the connection between my machine client and the server on port 8765. For rendering the webpage I have used Tornado on port 8888 (the HTML file is at ./views/index.html). The code works fine with only the websocket server; when I added the Tornado server, the code behaved strangely and I don't know why. It must be something with the asyncio usage. If I place
app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
just before
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
, the websocket server doesn't connect. If I do the reverse, the Tornado server doesn't run.
Please help me out, as I am new to asynchronous programming. The server.py, index.html and client.py (machine client) are given below.
server.py
#!/usr/bin/env python
import tornado.ioloop
import tornado.web
import asyncio
import websockets
class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.render("./views/index.html", title="GPS")

def make_app():
    return tornado.web.Application([
        (r"/", MainHandler),
    ])

clients = []

async def hello(websocket, path):
    clients.append(websocket)
    while True:
        name = await websocket.recv()
        print("< {}".format(name))
        print(clients)
        greeting = "Hello {}!".format(name)
        for each in clients:
            await each.send(greeting)
            print("> {}".format(greeting))

start_server = websockets.serve(hello, 'localhost', 8765)
print("Listening on *8765")

app = make_app()
app.listen(8888)
print("APP is listening on *8888")
tornado.ioloop.IOLoop.current().start()

asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
client.py
#!/usr/bin/env python
import serial
import time
import asyncio
import websockets
ser = serial.Serial("/dev/tty.usbmodem1421", 9600, timeout=1)

async def hello():
    async with websockets.connect('ws://localhost:8765') as websocket:
        while True:
            data = await retrieve()
            await websocket.send(data)
            print("> {}".format(data))
            greeting = await websocket.recv()
            print("< {}".format(data))

async def retrieve():
    data = ser.readline()
    return data  # return the location from your example

asyncio.get_event_loop().run_until_complete(hello())
asyncio.get_event_loop().run_forever()
./views/index.html
<html>
  <head>
    <title>{{ title }}</title>
  </head>
  <body>
    <script>
      var ws = new WebSocket("ws://localhost:8765/"),
          messages = document.createElement('ul');
      ws.onopen = function(){
        ws.send("Hello From Browser");
      };
      ws.onmessage = function (event) {
        var messages = document.getElementsByTagName('ul')[0],
            message = document.createElement('li'),
            content = document.createTextNode(event.data);
        message.appendChild(content);
        messages.appendChild(message);
      };
      document.body.appendChild(messages);
    </script>
  </body>
</html>
You can only run one event loop at a time (unless you give each one its own thread, but that's significantly more complicated). Fortunately, there's a bridge between Tornado and asyncio to let them share the same IOLoop.
Early in your program (before any tornado-related code like app = make_app()), do this:
import tornado.platform.asyncio
tornado.platform.asyncio.AsyncIOMainLoop().install()
and do not call IOLoop.current().start(). This will redirect all Tornado-using components to use the asyncio event loop instead.
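Putting that together, the end of server.py from the question would look roughly like this (a sketch, keeping the rest of the file unchanged):
import tornado.platform.asyncio
tornado.platform.asyncio.AsyncIOMainLoop().install()  # must run before make_app()/app.listen()

app = make_app()
app.listen(8888)
print("APP is listening on *8888")

start_server = websockets.serve(hello, 'localhost', 8765)
print("Listening on *8765")

# one shared asyncio event loop drives both Tornado and websockets;
# do not call tornado.ioloop.IOLoop.current().start()
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()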

Twisted SSE server subscribed to Redis via pubsub

I'm trying to build a server in Twisted which would let clients connect using Server Sent Events. I would like this server also to listen to Redis and if a message comes then push it to the connected SSE clients.
I have the SSE server working. I know how to subscribe to Redis. I can't figure out how to have both pieces running without blocking each other.
I'm aware of https://github.com/leporo/tornado-redis and https://github.com/fiorix/txredisapi, which were recommended in related questions. No idea how this helps :/
How to solve this? Could you help with both: conceptual tips and code snippets?
My Twisted SSE server code:
# coding: utf-8
from twisted.web import server, resource
from twisted.internet import reactor
class Subscribe(resource.Resource):
    isLeaf = True
    sse_conns = set()

    def render_GET(self, request):
        request.setHeader('Content-Type', 'text/event-stream; charset=utf-8')
        request.write("")
        self.add_conn(request)
        return server.NOT_DONE_YET

    def add_conn(self, conn):
        self.sse_conns.add(conn)
        finished = conn.notifyFinish()
        finished.addBoth(self.rm_conn)

    def rm_conn(self, conn):
        self.sse_conns.remove(conn)

    def broadcast(self, event):
        for conn in self.sse_conns:
            event_line = "data: {}'\r\n'".format(event)
            conn.write(event_line + '\r\n')

if __name__ == "__main__":
    sub = Subscribe()
    reactor.listenTCP(9000, server.Site(sub))
    reactor.run()
My Redis subscribe code:
import redis
redis = redis.StrictRedis.from_url('redis://localhost:6379')
class RedisSub(object):
    def __init__(self):
        self.pubsub = redis.pubsub()
        self.pubsub.subscribe('foobar-channel')

    def listen(self):
        for item in self.pubsub.listen():
            print str(item)
This is what works for me.
I've ended up using txredis lib with a slight change to the RedisClient (added minimal subscribe capabilities).
# coding: utf-8
import os
import sys
import weakref
from txredis.client import RedisClient
from twisted.web import server, resource
from twisted.internet import reactor, protocol, defer
from twisted.python import log
from utils import cors, redis_conf_from_url
log.startLogging(sys.stdout)
PORT = int(os.environ.get('PORT', 9000))
REDIS_CONF = redis_conf_from_url(os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379'))
REDIS_SUB_CHANNEL = 'votes'
class RedisBroadcaster(RedisClient):
    def subscribe(self, *channels):
        self._send('SUBSCRIBE', *channels)

    def handleCompleteMultiBulkData(self, reply):
        if reply[0] == u"message":
            message = reply[1:][1]
            self.sse_connector.broadcast(message)
        else:
            super(RedisClient, self).handleCompleteMultiBulkData(reply)

@defer.inlineCallbacks
def redis_sub():
    clientCreator = protocol.ClientCreator(reactor, RedisBroadcaster, password=REDIS_CONF.get('password'))
    redis = yield clientCreator.connectTCP(REDIS_CONF['host'], REDIS_CONF['port'])
    redis.subscribe(REDIS_SUB_CHANNEL)

class Subscribe(resource.Resource):
    isLeaf = True
    sse_conns = weakref.WeakSet()

    @cors
    def render_GET(self, request):
        request.setHeader('Content-Type', 'text/event-stream; charset=utf-8')
        request.write("")
        self.sse_conns.add(request)
        return server.NOT_DONE_YET

    def broadcast(self, event):
        for conn in self.sse_conns:
            event_line = "data: {}\r\n".format(event)
            conn.write(event_line + '\r\n')

if __name__ == "__main__":
    sub = Subscribe()
    reactor.listenTCP(PORT, server.Site(sub))
    RedisBroadcaster.sse_connector = sub
    reactor.callLater(0, redis_sub)
    reactor.run()
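To check the whole pipeline, you can publish to the same channel from a separate Python shell using redis-py (assuming a local Redis and the 'votes' channel used above); every connected SSE client should then receive the message:
import redis

r = redis.StrictRedis.from_url('redis://localhost:6379')
r.publish('votes', 'hello SSE clients')  # triggers RedisBroadcaster -> Subscribe.broadcast()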
