I have this fairly simple code, taken from an example.
#!/usr/bin/python
import tornado.ioloop
import tornado.web
import tornado.gen
import time

class MainHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    @tornado.gen.engine
    def get(self):
        for i in range(1, 10):
            self.write("%d<br>" % i)
            self.flush()
            yield tornado.gen.Task(tornado.ioloop.IOLoop.instance().add_timeout, time.time() + 1)
        self.finish()

application = tornado.web.Application([
    (r"/", MainHandler),
])

if __name__ == "__main__":
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
It isn't behaving how I expect it to. If I open a browser window and point it to localhost:8888/, it shows 1 [pause 1 sec] 2 [pause 1 sec], and so on. If I open a second tab making the same request, it blocks until the first request is finished. What am I missing?
Seems I should have used a different browser or an incognito window.
@Germano is right, it is Chrome sharing the same connection for repeated requests to the same URL. You can test it with the code below.
# coding: utf-8
from tornado import ioloop
from tornado import web
from tornado import gen

class MainHandler(web.RequestHandler):
    @gen.coroutine
    def get(self):
        client_address = self.request.connection.stream.socket.getpeername()
        print repr(client_address), 'enter'
        yield gen.sleep(10)
        self.write("Hello, world")
        print repr(client_address), 'leave'
        self.finish()

if __name__ == "__main__":
    application = web.Application([
        (r"/", MainHandler),
    ])
    application.listen(8888)
    ioloop.IOLoop.current().start()
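If you want to rule out the browser entirely, a rough sketch of a test client using Tornado's AsyncHTTPClient, assuming the server above is already listening on port 8888, might look like this:
from tornado import gen, ioloop
from tornado.httpclient import AsyncHTTPClient

@gen.coroutine
def main():
    client = AsyncHTTPClient()
    # Two fetches yielded as a list run concurrently over separate
    # connections, so both responses should arrive after ~10 seconds
    # rather than ~20 if the handler really runs concurrently.
    responses = yield [client.fetch("http://localhost:8888/"),
                       client.fetch("http://localhost:8888/")]
    for response in responses:
        print(response.body)

if __name__ == "__main__":
    ioloop.IOLoop.current().run_sync(main)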
I created a simple test app to check timeouts in Tornado:
import tornado.ioloop
import tornado.web

class LoopHandler(tornado.web.RequestHandler):
    def get(self):
        while (True):
            print ("in loop")
        self.write("Loop, Handler")

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world")

def make_app():
    return tornado.web.Application([
        (r"/", MainHandler),
        (r"/loop", LoopHandler),
    ])

if __name__ == "__main__":
    app = make_app()
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
When I call http://localhost:8888/loop, the endpoint never responds because of the infinite loop, and the problem is that http://localhost:8888/ stops responding as well. The question is: why does this happen, and how can I solve it?
EDIT
Updated code that solves the problem:
import tornado.ioloop
import tornado.web
from unblock import unblock

class LoopHandler(tornado.web.RequestHandler):
    @unblock
    def get(self):
        while (True):
            print ("in loop")
            return "Loop, Handler"

class MainHandler(tornado.web.RequestHandler):
    @unblock
    def get(self):
        return "Hello, world"

def make_app():
    return tornado.web.Application([
        (r"/", MainHandler),
        (r"/loop", LoopHandler),
    ])

if __name__ == "__main__":
    app = make_app()
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
# unblock.py
import tornado.web
import tornado.ioloop
from concurrent.futures import ThreadPoolExecutor
from functools import partial, wraps

EXECUTOR = ThreadPoolExecutor(max_workers=4)

def unblock(f):
    @tornado.web.asynchronous
    @wraps(f)
    def wrapper(*args, **kwargs):
        self = args[0]

        def callback(future):
            self.write(future.result())
            self.finish()

        EXECUTOR.submit(
            partial(f, *args, **kwargs)
        ).add_done_callback(
            lambda future: tornado.ioloop.IOLoop.instance().add_callback(
                partial(callback, future)))

    return wrapper
These are the basics of async programming. To point you in the right direction, take a look at the reactor pattern and especially at the event loop:
The reactor pattern is one implementation technique of event-driven
architecture. In simple terms, it uses a single threaded event loop
blocking on resource-emitting events and dispatches them to
corresponding handlers and callbacks.
Both handlers, LoopHandler and MainHandler, are processed in the same event loop, so MainHandler gets queued but never executed, since the event loop is busy running LoopHandler.
One of the challenges (at least for me) in async programming is being careful about blocking calls: database operations with, for example, SQLAlchemy, file operations, expensive calculations, and so on. There are some interesting approaches that use thread pools to solve this, but you won't need them to get started.
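For completeness, one of those thread-pool approaches is Tornado's run_on_executor helper; a minimal sketch, where query_db is just a placeholder name for whatever blocking call you want to keep off the event loop:
from concurrent.futures import ThreadPoolExecutor
import time

import tornado.web
from tornado import gen
from tornado.concurrent import run_on_executor

class BlockingWorkHandler(tornado.web.RequestHandler):
    # run_on_executor looks for an attribute named "executor" by default
    executor = ThreadPoolExecutor(max_workers=4)

    @run_on_executor
    def query_db(self):
        # placeholder for a blocking call (SQLAlchemy query, file I/O, ...);
        # it now runs in a worker thread instead of on the event loop
        time.sleep(5)
        return "db result"

    @gen.coroutine
    def get(self):
        # yielding the returned future keeps the IOLoop free while the
        # worker thread does the blocking work
        result = yield self.query_db()
        self.write(result)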
Ah, and in case you stumble over the first sentence of the wiki article I linked, take a look here to understand the difference between parallel and concurrent. It helped me a lot.
I am using the tornado library in Python. I have a queue where data gets added. I have to keep the connection open so that when a client makes a request, I send out items from the queue. Here is my simple implementation. The problem I face is that when I add new elements to the queue, I don't see them being returned. In fact, I don't see any code executed below the IOLoop.current().start() line.
from tornado.ioloop import IOLoop
from tornado.web import RequestHandler, Application, url, asynchronous
from Queue import Queue
import json
import time

q = Queue()
q.put("one")
q.put("two")

class HelloHandler(RequestHandler):
    def get(self):
        data = q.get()
        self.write(data)

def make_app():
    return Application([
        url(r"/", HelloHandler),
    ])

def main():
    app = make_app()
    app.listen(8888)
    IOLoop.current().start()  # stops here
    time.sleep(2)
    q.put("three")
    print q

if __name__ == '__main__':
    main()
The first time I request http://localhost:8888/, it returns "one". The second time, it returns "two". The third time, it blocks.
The problem you have is that calling IOLoop.current().start() transfers control to Tornado. It loops until IOLoop.stop() is called.
If you need to do something after the IOLoop has started, then you can use one of the callbacks. For example, here is your code modified to use IOLoop.call_later(). You could also use IOLoop.add_timeout() if you are using an earlier (<4.0) version of Tornado.
from tornado.ioloop import IOLoop
from tornado.web import RequestHandler, Application, url, asynchronous
from Queue import Queue
import json

q = Queue()
q.put("one")
q.put("two")

class HelloHandler(RequestHandler):
    def get(self):
        data = q.get()
        self.write(data)

def make_app():
    return Application([
        url(r"/", HelloHandler),
    ])

def main():
    app = make_app()
    app.listen(8888)
    IOLoop.current().call_later(2, q.put, "three")
    IOLoop.current().start()

if __name__ == '__main__':
    main()
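For pre-4.0 versions without call_later, the equivalent main() with add_timeout (reusing q and make_app from above) would look roughly like this:
import time
from functools import partial

def main():
    app = make_app()
    app.listen(8888)
    # add_timeout takes an absolute deadline and a no-argument callback,
    # so partial binds the queue item to q.put
    IOLoop.current().add_timeout(time.time() + 2, partial(q.put, "three"))
    IOLoop.current().start()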
I am using Python 2.7, with futures installed using:
pip install futures
The following is my demo code:
import tornado.ioloop
import tornado.web
from tornado.gen import coroutine, Task
from tornado.concurrent import Future
import time

class MainHandler(tornado.web.RequestHandler):
    @coroutine
    def get(self):
        print "in"
        res = yield Task(self._work)
        self.write(res)

    def _work(self, callback):
        time.sleep(10)
        callback("hello world!")

if __name__ == "__main__":
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
This code should handle requests concurrently, shouldn't it? However, it just doesn't. I tested with Firefox and IE. I think I made some mistake; it would be nice if you could point out my error. Only one request is handled at a time (http://localhost:8888/):
import tornado.ioloop
import tornado.web
from tornado.gen import coroutine, Return, Task
from tornado.process import Subprocess
from tornado.concurrent import Future
from threading import Thread
import time

@coroutine
def async_sleep(timeout):
    """ Sleep without blocking the IOLoop. """
    yield Task(tornado.ioloop.IOLoop.instance().add_timeout, time.time() + timeout)

class MainHandler(tornado.web.RequestHandler):
    @coroutine
    def get(self):
        print "in"
        res = yield self._work()
        self.write(res)

    @coroutine
    def _work(self):
        yield async_sleep(5)
        raise Return("hello world!")

if __name__ == "__main__":
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    application.listen(8888)
    ioloop = tornado.ioloop.IOLoop.instance()
    Thread(target=ioloop.start).start()
Since you indicated in the comments you want to run a subprocess via tornado, here's an example illustrating how to do that. I also fixed a logic error where you were creating a Task when calling _work, which wasn't going to work the way you intended:
import tornado.ioloop
import tornado.web
from tornado.gen import coroutine, Return
from tornado.process import Subprocess
from tornado.concurrent import Future

class MainHandler(tornado.web.RequestHandler):
    @coroutine
    def get(self):
        print "in"
        res = yield self._work()
        self.write(res)

    @coroutine
    def _work(self):
        p = Subprocess(['sleep', '10'])
        f = Future()
        p.set_exit_callback(f.set_result)
        yield f
        raise Return("hello world!")

if __name__ == "__main__":
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
As you can see, I made _work a coroutine, and then used tornado's built-in Subprocess class to execute a command. I created a Future object, and instructed the Subprocess to call Future.set_result(return_code_of_subprocess) when it completed. Then I called yield on the Future instance. That makes the code wait until the subprocess completes, without blocking the I/O loop.
The time.sleep in your code is a blocking call. You need tornado.gen.sleep (a non-blocking coroutine, new in Tornado 4.1) here instead.
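Concretely, the handler from the question could be rewritten along these lines (a sketch assuming Tornado >= 4.1 for gen.sleep):
import tornado.ioloop
import tornado.web
from tornado.gen import coroutine, Return, sleep

class MainHandler(tornado.web.RequestHandler):
    @coroutine
    def get(self):
        res = yield self._work()
        self.write(res)

    @coroutine
    def _work(self):
        # gen.sleep yields control back to the IOLoop instead of blocking it,
        # so other requests keep being served during the pause
        yield sleep(10)
        raise Return("hello world!")

if __name__ == "__main__":
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()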
Can I implement a "long connection" (a persistent connection, not long polling) using tornado.web.RequestHandler? I need to keep the connection open and receive "heartbeat" messages sent by the client. If tornado.web.RequestHandler can do this, how do I do it? Is there a demo like this?
Thanks!
RequestHandler isn't right for this, use WebSocketHandler instead. Here's an example app that receives a heartbeat from each client every second:
import tornado.ioloop
import tornado.web
import tornado.websocket

class HeartBeatReceiver(tornado.websocket.WebSocketHandler):
    def open(self):
        pass

    def on_message(self, message):
        print message

    def on_close(self):
        pass

class Main(tornado.web.RequestHandler):
    def get(self):
        # This could be a template, too.
        self.write('''
            <script>
            function sendHeartBeat() {
                ws.send("heartbeat");
                setTimeout(sendHeartBeat, 1000);
            }
            ws = new WebSocket("ws://localhost:8888/websocket");
            ws.onopen = sendHeartBeat;
            </script>''')

application = tornado.web.Application([
    (r"/", Main),
    (r"/websocket", HeartBeatReceiver),
])

if __name__ == "__main__":
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
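If you want to exercise the heartbeat endpoint without a browser, a rough client sketch using Tornado's websocket_connect (assuming the server above is running and Tornado >= 4.1 for gen.sleep) could look like this:
# Hypothetical test client for the heartbeat server above.
from tornado import gen, ioloop
from tornado.websocket import websocket_connect

@gen.coroutine
def run_client():
    conn = yield websocket_connect("ws://localhost:8888/websocket")
    while True:
        conn.write_message("heartbeat")
        # wait one second between heartbeats without blocking the IOLoop
        yield gen.sleep(1)

if __name__ == "__main__":
    io_loop = ioloop.IOLoop.current()
    io_loop.spawn_callback(run_client)
    io_loop.start()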
Below is a simple hello world app. How can I have Tornado call '/' in two threads with a 1-second sleep, so the page is called twice per second? I will later need to extend this to a Redis call, but I want to start simple since I am new to this logic. I need to build a web page that is called repeatedly and asynchronously.
import tornado.ioloop
import tornado.web

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world")

application = tornado.web.Application([
    (r"/", MainHandler),
])

if __name__ == "__main__":
    application.listen(8880)
    tornado.ioloop.IOLoop.instance().start()
If you are new to Tornado, check out tornadogist. They have a lot of useful code snippets. Here is one adapted to your needs:
from time import sleep
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.web import Application, asynchronous, RequestHandler
from multiprocessing.pool import ThreadPool

# 2 threads
_workers = ThreadPool(2)

def run_background(func, callback, args=(), kwds={}):
    def _callback(result):
        IOLoop.instance().add_callback(lambda: callback(result))
    _workers.apply_async(func, args, kwds, _callback)

# blocking task like querying to MySQL
def blocking_task(n):
    sleep(n)
    return n

class MainHandler(RequestHandler):
    @asynchronous
    def get(self):
        run_background(blocking_task, self.on_complete, (1,))

    def on_complete(self, res):
        self.write("Test {0}<br/>".format(res))
        self.finish()

HTTPServer(Application([("/", MainHandler)], debug=True)).listen(8888)
IOLoop.instance().start()
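If you also want something to actually hit '/' twice per second, a sketch of a separate driver script using PeriodicCallback and AsyncHTTPClient (assuming an older Tornado where fetch still accepts a callback) could be:
# Hypothetical driver that requests "/" every 500 ms, i.e. twice per second.
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop, PeriodicCallback

def on_response(response):
    print(response.body)

def fetch_page():
    # fire-and-forget fetch; on_response runs when the reply arrives
    AsyncHTTPClient().fetch("http://localhost:8888/", callback=on_response)

if __name__ == "__main__":
    # PeriodicCallback takes the interval in milliseconds
    PeriodicCallback(fetch_page, 500).start()
    IOLoop.current().start()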