I got an error ([Errno 24] Too many open files) while testing tornado.websocket on a local machine.
server.py
import logging

import tornado.ioloop
import tornado.web
import tornado.websocket
import tornado.options


class ChatSocketHandler(tornado.websocket.WebSocketHandler):
    waiters = set()

    def open(self):
        ChatSocketHandler.waiters.add(self)
        print "Clients: ", len(ChatSocketHandler.waiters)

    def on_close(self):
        ChatSocketHandler.waiters.remove(self)

    @classmethod
    def send_updates(cls, chat):
        for waiter in cls.waiters:
            try:
                waiter.write_message(chat)
            except Exception:
                logging.error("Error sending message", exc_info=True)

    def on_message(self, message):
        ChatSocketHandler.send_updates(message)


app = tornado.web.Application([
    (r"/ws", ChatSocketHandler)
])


def main():
    tornado.options.parse_command_line()
    app.listen(8888)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
clients.py (using websocket-client)
from multiprocessing import Process

from websocket import create_connection


def go():
    ws = create_connection("ws://127.0.0.1:8888/ws")
    while True:
        try:
            ws.send("Message ...")
            result = ws.recv()
            print "Received '%s'" % result
        except KeyboardInterrupt:
            break
    ws.close()


if __name__ == "__main__":
    for i in range(1000):
        Process(target=go).start()
The server dies after ~800 connections ;/
Additional question: is it OK to set up an Nginx proxy in front of a Tornado server instance? Do I get any benefits?
Your process is likely running out of file descriptors. Here is a recipe for network tuning on Linux, including how to increase the maximum number of FDs. (It is written for Crossbar.io, but it will work for Tornado as well.)
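If you want to check the limit your server process is actually running under, here is a quick sketch using Python's resource module (how far you can raise the limit depends on your system configuration):

import resource

# Current soft/hard limits on open file descriptors for this process.
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
print "soft limit: %d, hard limit: %d" % (soft, hard)

# Raise the soft limit up to the hard limit; raising the hard limit itself
# requires root and/or sysctl changes (e.g. fs.file-max, ulimit -n).
resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))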
As to your question "does it make sense to put Nginx in front of Tornado": yes, definitely. Tornado's native TLS support is limited. Have a look at Hynek Schlawack's PyCon 2014 talk, "The Sorry State of SSL".
Note: this limitation does not apply to Twisted (or Crossbar.io, which is based on Twisted), since Twisted uses pyOpenSSL and can be made to provide high-quality TLS. So there is no need for Nginx with these (at least not for TLS reasons).
How can I create an HTTP echo server from Tornado?
#!/usr/bin/env python
import signal

from tornado.ioloop import IOLoop
from tornado.tcpserver import TCPServer
import tornado.web


def handle_signal(sig, frame):
    IOLoop.instance().add_callback(IOLoop.instance().stop)


class EchoServer(TCPServer):
    def handle_stream(self, stream, address):
        self._stream = stream
        self._read_line()

    def _read_line(self):
        self._stream.read_until('\n', self._handle_read)

    def _handle_read(self, data):
        self._stream.write(data, '\n')
        self._read_line()


if __name__ == '__main__':
    signal.signal(signal.SIGINT, handle_signal)
    signal.signal(signal.SIGTERM, handle_signal)
    server = EchoServer()
    server.bind(8001)
    server.start(25)
    IOLoop.instance().start()
    IOLoop.instance().close()
How do I make an HTTP echo server out of this?
What's wrong? I don't know much; I'm a newbie.
Thanks!
Your question would be clearer if you explained what happens when you run this code, and what you expected instead.
One TCPServer object may handle many connections, so instead of assigning to self._stream in handle_stream, you should make a new object to handle each stream.
The second argument to stream.write is a callback; it looks like you meant self._stream.write(data + '\n').
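Putting both suggestions together, a corrected version could look roughly like this (a sketch only; EchoConnection is a name made up here, and since read_until passes the callback the line including its trailing '\n', the data can be echoed back as-is):

#!/usr/bin/env python
from tornado.ioloop import IOLoop
from tornado.tcpserver import TCPServer


class EchoConnection(object):
    """Handles a single client connection, so the server can serve many at once."""

    def __init__(self, stream):
        self.stream = stream
        self.read_line()

    def read_line(self):
        self.stream.read_until('\n', self.handle_read)

    def handle_read(self, data):
        # data already ends with '\n', so write it back unchanged and
        # wait for the next line.
        self.stream.write(data)
        self.read_line()


class EchoServer(TCPServer):
    def handle_stream(self, stream, address):
        # One EchoConnection per stream instead of storing it on the server.
        EchoConnection(stream)


if __name__ == '__main__':
    server = EchoServer()
    server.listen(8001)
    IOLoop.instance().start()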
Let's take the hello world application in the Tornado home page:
import tornado.ioloop
import tornado.web


class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world")


application = tornado.web.Application([
    (r"/", MainHandler),
])

if __name__ == "__main__":
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
Is there a way, after the IOLoop has been started and without stopping it, to essentially stop the application and start another one (on the same port or on another)?
I saw that I can add new applications (listening on different ports) at runtime, but I do not know how I could stop existing ones.
The Application.listen() method actually creates an HTTPServer and calls its listen() method. HTTPServer objects have a stop() method, which is probably what you need. But in order to use it you have to create the HTTPServer object explicitly in your script.
from tornado.httpserver import HTTPServer

server = HTTPServer(application)
server.listen(8888)
tornado.ioloop.IOLoop.instance().start()

# somewhere in your code
server.stop()
Here is a gist about how to gracefully and safely shut down the Tornado IOLoop:
https://gist.github.com/nicky-zs/6304878
You can refer to this implementation to achieve your goal.
To add to @Alex Shkop's answer a few years later: as of Tornado 4.3, .listen() returns a reference to its HTTPServer!
https://www.tornadoweb.org/en/stable/web.html#tornado.web.Application.listen
server = app.listen(8888)  # listen() requires a port; 8888 matches the example above
...  # later
server.stop()
Further, if you're working in a Jupyter notebook and for some reason need a Tornado server, you can try closing the HTTPServer before you recreate it, to avoid OSError: [Errno 98] Address already in use when re-running the cell:
# some Jupyter cell
import tornado.web

try:
    server.stop()  # NameError on the first cell run
except Exception as ex:
    print(f"server not started to stop: {repr(ex)}")
else:  # did not raise NameError: server was running
    print(f"successfully stopped server: {server}")

app = tornado.web.Application(...)
server = app.listen(9006)  # arbitrary listening port
Recently I started a small personal project. It's a realtime web system based on asyncio and autobahn-python. However, I would also like to serve some static files via HTTP, and to do it from the same process. My HTTP server is Tornado sitting on top of the asyncio event loop, and everything works perfectly fine except that I have to start the Tornado and Autobahn handlers on different ports. Here is a stripped-down version of what I currently have:
import six
import datetime
import asyncio

import tornado.web
import tornado.httpserver
import tornado.netutil
from tornado.platform.asyncio import AsyncIOMainLoop

from autobahn.wamp import router
from autobahn.asyncio import wamp, websocket


# WAMP server
class MyBackendComponent(wamp.ApplicationSession):
    def onConnect(self):
        self.join(u"realm1")

    @asyncio.coroutine
    def onJoin(self, details):
        def utcnow():
            now = datetime.datetime.utcnow()
            return six.u(now.strftime("%Y-%m-%dT%H:%M:%SZ"))
        reg = yield from self.register(utcnow, 'com.timeservice.now')


# HTTP server
class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world!")


tornado_app = tornado.web.Application(
    [
        (r"/", MainHandler),
    ],
)

if __name__ == '__main__':
    router_factory = router.RouterFactory()
    session_factory = wamp.RouterSessionFactory(router_factory)
    session_factory.add(MyBackendComponent())

    transport_factory = websocket.WampWebSocketServerFactory(session_factory,
                                                             debug=True,
                                                             debug_wamp=True)

    AsyncIOMainLoop().install()
    tornado_app.listen(80, "127.0.0.1")

    loop = asyncio.get_event_loop()
    coro = loop.create_server(transport_factory, "127.0.0.1", 8080)
    server = loop.run_until_complete(coro)

    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
    finally:
        server.close()
        loop.close()
Question: is there a Right Way to make the autobahn-wamp and Tornado handlers listen on the same port?
My initial idea was to implement some kind of socket.socket wrapper and dispatch incoming messages there, but it turned out to be awfully messy. I don't want to use any external proxies because the backend should be as portable as possible.
Also, I'm not asking anybody to implement it for me (though of course you can if you want to!), only whether somebody has already done something similar, before I dive into the autobahn/tornado code.
Thanks in advance!
PS: Sorry for my poor English - it's not my mother tongue.
I am trying to cobble together a test which allows websocket clients to connect to a Tornado server, and I want the Tornado server to send out a message to all clients every X seconds.
The reason I am doing this is that websocket connections are being silently dropped somewhere, and I am wondering whether periodic "pings" sent by the websocket server will keep the connection alive.
I'm afraid it's a pretty noobish question and the code below is rather a mess. I just don't have my head wrapped around Tornado and scope enough to make it work.
import time

import tornado.gen
import tornado.httpserver
import tornado.ioloop
import tornado.web
import tornado.websocket
from tornado import gen


class WSHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        print 'http://mailapp.crowdwave.com/girlthumb.jpg'
        self.write_message("http://www.example.com/girlthumb.jpg")

    def on_message(self, message):
        print 'Incoming message:', message
        self.write_message("http://www.example.com/girlthumb.jpg")

    def on_close(self):
        print 'Connection was closed...'


@gen.engine
def f():
    yield gen.Task(tornado.ioloop.IOLoop.instance().add_timeout, time.time() + 8)
    self.write_message("http://www.example.com/x.png")
    print 'x'


@gen.engine
def g():
    yield gen.Task(tornado.ioloop.IOLoop.instance().add_timeout, time.time() + 4)
    self.write_message("http://www.example.com/y.jpg")
    print 'y'


application = tornado.web.Application([
    (r'/ws', WSHandler),
])

if __name__ == "__main__":
    tornado.ioloop.IOLoop.instance().add_callback(f)
    tornado.ioloop.IOLoop.instance().add_callback(g)
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
Why don't you try writing a scheduler inside it? :)
def schedule_func():
    # DO SOMETHING #
    pass

# interval in milliseconds
interval_ms = 15

main_loop = tornado.ioloop.IOLoop.instance()
sched = tornado.ioloop.PeriodicCallback(schedule_func, interval_ms, io_loop=main_loop)

# start your periodic timer
sched.start()
# start your loop
main_loop.start()
Found that the accepted answer for this is almost exactly what I want:
How to run functions outside websocket loop in python (tornado)
With a slight modification, the accepted answer at the above link continually sends out ping messages. Here is the mod:
Change:

def test(self):
    self.write_message("scheduled!")

to:

def test(self):
    self.write_message("scheduled!")
    tornado.ioloop.IOLoop.instance().add_timeout(datetime.timedelta(seconds=5), self.test)
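For comparison, here is a minimal sketch of the same periodic-ping idea using tornado.ioloop.PeriodicCallback inside the handler itself (the handler name, ping text, and 5-second interval are just illustrative):

import tornado.ioloop
import tornado.web
import tornado.websocket


class PingingWSHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        # Send a "ping" message to this client every 5 seconds.
        self.pinger = tornado.ioloop.PeriodicCallback(self.send_ping, 5000)
        self.pinger.start()

    def send_ping(self):
        self.write_message("ping")

    def on_close(self):
        # Stop the timer so closed connections are not written to.
        self.pinger.stop()


if __name__ == "__main__":
    app = tornado.web.Application([(r'/ws', PingingWSHandler)])
    app.listen(8888)
    tornado.ioloop.IOLoop.instance().start()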
I'm trying to implement something like "tail -f" over HTTP with Python. Currently, I'm trying to use Tornado, but it only handles one connection at a time, even when I make asynchronous requests.
import socket
import subprocess

import tornado.gen as gen
import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.options
import tornado.web
from tornado.options import define, options

define("port", default=8888, help="run on the given port", type=int)
define(
    "inputfile",
    default="test.txt",
    help="the path to the file which we will 'tail'",
    type=str)


class MainHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    @gen.engine
    def get(self):
        print "GOT REQUEST"
        inputfile = open(options.inputfile)
        p = subprocess.Popen(
            "./nettail.py",
            stdin=inputfile,
            stdout=subprocess.PIPE)
        port_number = int(p.stdout.readline().strip())
        self.write("<pre>")
        self.write("Hello, world\n")
        self.flush()
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        stream = tornado.iostream.IOStream(s)
        yield gen.Task(stream.connect, ("127.0.0.1", port_number))
        while True:
            data = yield gen.Task(stream.read_until, "\n")
            self.write(data)
            self.flush()


def main():
    tornado.options.parse_command_line()
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
The process I am starting is a simple "tail" which outputs to a socket.
#!/usr/bin/env python
import random
import socket
import sys
import time

# Create an INET, STREAMing socket.
s = socket.socket(
    socket.AF_INET, socket.SOCK_STREAM)

# Open the connection.
try:
    for attempt_number in xrange(5):
        port_number = random.randint(9000, 65000)
        try:
            s.bind(("localhost", port_number))
        except socket.error:
            continue
        # We successfully bound!
        sys.stdout.write("{0}".format(port_number))
        sys.stdout.write("\n")
        sys.stdout.flush()
        break

    # Become a server socket.
    s.listen(5)

    # Accept a connection.
    try:
        (clientsocket, address) = s.accept()
        while True:
            line = sys.stdin.readline()
            if not line:
                time.sleep(1)
                continue
            clientsocket.sendall(line)
    finally:
        clientsocket.close()
finally:
    s.close()
./nettail.py works as I expect, but the Tornado HTTP server is only handling one request at a time.
I would like to use long-running, persistent HTTP connections to do this, as it is compatible with older browsers. I understand that Web Sockets is how it would be done in modern browsers.
Edit:
I'm running this on Linux and Solaris, not Windows. That means I could use tornado.iostream on the file, rather than through a sockets program. Still, that is not a documented feature, so I launch a sockets program for each connection.
After doing some more debugging, it turns out that this tail server was not blocking, after all.
I was trying to test concurrent connections with two windows of Firefox open, but Firefox would not start fetching the second window until the first window was manually stopped. I guess Firefox does not like to have two concurrent HTTP connections to fetch the same resource.
Opening a Firefox window and a Chromium window, I can see the "tail" output pushed to both tabs.
Thank you for all your help. @abarnert's comments were especially helpful.
Edit:
In the to-be-released 2.4.2 version of Tornado, a "Pipe" IOStream is implemented. Using this and the regular "tail" simplified the code a lot.
import subprocess

import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.options
import tornado.web
from tornado.options import define, options

define("port", default=8888, help="run on the given port", type=int)
define(
    "inputfile",
    default="test.txt",
    help="the path to the file which we will 'tail'",
    type=str)


class MainHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    def get(self):
        print "GOT REQUEST"
        self.p = subprocess.Popen(
            ["tail", "-f", options.inputfile, "-n+1"],
            stdout=subprocess.PIPE)
        self.write("<pre>")
        self.write("Hello, world\n")
        self.flush()
        self.stream = tornado.iostream.PipeIOStream(self.p.stdout.fileno())
        self.stream.read_until("\n", self.line_from_nettail)

    def on_connection_close(self, *args, **kwargs):
        """Clean up the nettail process when the connection is closed."""
        print "CONNECTION CLOSED!!!!"
        self.p.terminate()
        tornado.web.RequestHandler.on_connection_close(self, *args, **kwargs)

    def line_from_nettail(self, data):
        self.write(data)
        self.flush()
        self.stream.read_until("\n", self.line_from_nettail)


def main():
    tornado.options.parse_command_line()
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
I created this recently as an experiment. It works for me with multiple connections; is it of any use?
class TailHandler(BaseHandler):
    @asynchronous
    def get(self):
        self.file = open('data/to_read.txt', 'r')
        self.pos = self.file.tell()

        def _read_file():
            line = self.file.read()
            last_pos = self.file.tell()
            if not line:
                self.file.close()
                self.file = open('data/to_read.txt', 'r')
                self.file.seek(last_pos)
                pass
            else:
                self.write(line)
                self.flush()
            IOLoop.instance().add_timeout(time.time() + 1, _read_file)

        _read_file()
You shouldn't have blocking calls like this in the handler.
port_number = int(p.stdout.readline().strip())
You'll need to use select or a similar mechanism to avoid the blocking call.
Edit: OK, I went and checked the docs. You should use their IOStream to read from p.
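To illustrate that suggestion, here is a rough sketch that reads the port line from the subprocess's stdout through a PipeIOStream instead of a blocking readline() (the callback name is made up, and it assumes a Tornado version that has PipeIOStream plus the options defined earlier):

import subprocess

import tornado.iostream
import tornado.web
from tornado.options import options


class MainHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    def get(self):
        # Start nettail.py and wrap its stdout in a non-blocking stream.
        self.p = subprocess.Popen(
            "./nettail.py",
            stdin=open(options.inputfile),
            stdout=subprocess.PIPE)
        self.port_stream = tornado.iostream.PipeIOStream(self.p.stdout.fileno())
        # Read the port number asynchronously instead of calling readline().
        self.port_stream.read_until("\n", self.on_port_line)

    def on_port_line(self, data):
        port_number = int(data.strip())
        # ...from here, connect an IOStream to ("127.0.0.1", port_number)
        # and keep reading lines as in the original handler...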