I have a small question.
I'm new to socket programming and the Tornado framework. Recently I wrote a proxy server using Tornado. My app sits in the middle of a connection between a client (browser) and a remote address, so the connection looks like this:
client (my web browser) <=== PROXY (my app) ===> remote address (stackoverflow.com)
Everything works fine when I use the standard "main" function, but I want to go a little more low-level: I want to create the sockets myself and serve my proxy app over those connections.
My proxy app urls:
# coding: utf-8
"""URLs for the proxy app."""
from settings import settings
from tornado.web import (
    StaticFileHandler,
    url,
)

from handlers import (
    mainHandlers,
    myProxy,
)

urls = [
    url(r"/admin/$", mainHandlers.MainHandler),
    url(r"/admin/delete_filter/", mainHandlers.DataDeleteHandler),
    url(r"/admin/filters/$", mainHandlers.DataGetter),
    url(r"/admin/new_filter/$", mainHandlers.FormHandler),
    url(r"/admin/stats/$", mainHandlers.StatsTableHandler),
    url(r"/admin/stats/query/$", mainHandlers.AjaxStatsGetHandler),
    url(r"/static/", StaticFileHandler, dict(path=settings['static_path'])),
    url(r'.*', myProxy.ProxyHandler),
]
My ProxyHandler:
class ProxyHandler(tornado.web.RequestHandler):
    SUPPORTED_METHODS = ['GET', 'POST']

    def data_received(self, chunk):
        pass

    def compute_etag(self):
        return None  # disable tornado Etag

    def handle_response(self, response):
        if response.error and not isinstance(response.error, tornado.httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = tornado.httputil.HTTPHeaders()  # clear tornado default headers
            for header, v in response.headers.get_all():
                if header not in ('Content-Length', 'Transfer-Encoding', 'Content-Encoding', 'Connection'):
                    self.add_header(header, v)  # some headers appear multiple times, e.g. 'Set-Cookie'
            secured_page = False
            for page in secure_pages:
                if page in self.request.uri:
                    secured_page = True
                    self.set_header('Content-Length', len(response.body))
                    self.write(response.body)
                    break
            if response.body and not secured_page:
                c.execute('SELECT filter_name FROM filters WHERE filter_type=1')
                tags = c.fetchall()
                soup = BeautifulSoup(response.body, 'html.parser')
                for row in tags:
                    catched_tags = soup.find_all(str(row[0]))
                    if catched_tags:
                        print 'caught: %s of <%s> tags' % (len(catched_tags), str(row[0]))
                        for tag in catched_tags:
                            tag.extract()
                new_body = str(soup)
                self.set_header('Content-Length', len(new_body))
                self.write(new_body)
        self.finish()

    @tornado.web.asynchronous
    def get(self):
        logger.debug('Handle %s request to %s', self.request.method, self.request.uri)
        body = self.request.body
        if not body:
            body = None
        try:
            if 'Proxy-Connection' in self.request.headers:
                del self.request.headers['Proxy-Connection']
            c.execute('SELECT filter_name FROM filters WHERE filter_type=2')
            urls = c.fetchall()
            for url in urls:
                if url[0] in self.request.path:
                    self.set_status(403)
                    self.finish()
                    return
            fetch_request(self.request.uri, self.handle_response,
                          method=self.request.method, body=body,
                          headers=self.request.headers, follow_redirects=False,
                          allow_nonstandard_methods=True)
        except tornado.httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

    @tornado.web.asynchronous
    def post(self):
        return self.get()
The easy main function:
# coding: utf-8
import sys

import tornado.ioloop
import tornado.web
from tornado.options import options

from configuration.application import MyApplication
from proxy.urls import proxy_urls


def make_app():
    """Create my application with my settings and urls."""
    return MyApplication(proxy_urls)


if __name__ == "__main__":
    u"""Main loop."""
    app = make_app()
    port = options.port
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    app.listen(port)
    print 'tornado working on port %s' % port
    tornado.ioloop.IOLoop.current().start()
So I want to change the easy way to the low-level way based on the docs:
import errno
import functools
import tornado.ioloop
import socket


def connection_ready(sock, fd, events):
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        connection.setblocking(0)
        handle_connection(connection, address)


if __name__ == '__main__':
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(0)
    sock.bind(("", port))
    sock.listen(128)

    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    io_loop.start()
To accomplish this I read a little about network programming with sockets (https://www.tutorialspoint.com/python/python_networking.htm).
The example in the tutorial works well, so I tried to combine the tutorial with the example from the Tornado docs:
# coding: utf-8
import errno
import functools
import socket
import sys

import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.process
import tornado.web
from tornado.options import options

from configuration.application import MyApplication


def make_app():
    u"""Create my application with my settings and urls."""
    return MyApplication()


def connection_ready(sock, fd, events):
    u"""Function to handle an incoming connection."""
    proxy_app = make_app()
    server = tornado.httpserver.HTTPServer(proxy_app)
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        print 'Got connection from', address
        # connection.setblocking(False)
        connection.send(server)
        connection.close()


if __name__ == "__main__":
    u"""Main loop."""
    port = options.port
    if len(sys.argv) > 1:
        port = int(sys.argv[1])

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(False)
    sock.bind(('', port))
    sock.listen(5)

    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    print 'Tornado Proxy working on port: %s' % port
    io_loop.start()
But when I try to connect to my proxy (for example to add filters: http://127.0.0.1:8000/admin/filters/ - I have a handler written for this URL),
I get this specific error:
ERROR:tornado.application:Exception in callback (3, )
Traceback (most recent call last):
  File "/home/dave/.virtualenvs/teleV1/local/lib/python2.7/site-packages/tornado/ioloop.py", line 887, in start
    handler_func(fd_obj, events)
  File "/home/dave/.virtualenvs/teleV1/local/lib/python2.7/site-packages/tornado/stack_context.py", line 275, in null_wrapper
    return fn(*args, **kwargs)
  File "manage.py", line 35, in connection_ready
    connection.send(server)
TypeError: send() argument 1 must be convertible to a buffer, not HTTPServer
I understand that I cannot send an HTTPServer through the connection (from one end to the other); it must be a buffer.
My first idea was to send the buffer from the handler that handles a URL (for example class ProxyHandler(tornado.web.RequestHandler)),
but how can I handle different URLs and different handlers that way?
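For what it's worth, here is a minimal sketch of the usual low-level setup (my own sketch, assuming make_app() builds the Application with the urls list shown above): hand the listening sockets to an HTTPServer that wraps the Application, and let the Application's routing pick ProxyHandler or the admin handlers for each request.

# Sketch: serve the existing Application on manually created listening sockets.
# Assumes make_app() builds the Application with the urls list shown above.
import tornado.httpserver
import tornado.ioloop
import tornado.netutil

if __name__ == "__main__":
    app = make_app()
    server = tornado.httpserver.HTTPServer(app)

    # bind_sockets() returns bound, non-blocking listening sockets;
    # add_sockets() makes the HTTPServer accept connections and parse HTTP
    # on them, so the normal URL routing still applies.
    sockets = tornado.netutil.bind_sockets(8000)
    server.add_sockets(sockets)

    tornado.ioloop.IOLoop.current().start()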
After a few approaches I rewrote my proxy app from a Tornado app to pure Python code that handles the responses from remote addresses and does the filtering. I think this is the best and fastest thing I could do.
Related
I have the code below, which handles the client-server communication properly.
The client:
# Client
import socket
import pickle


class Model:
    def __init__(self, host, port):
        self.port = port
        self.host = host

    def snd_query(self, query):
        received_data = []
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.connect((self.host, self.port))
            sock.sendall(pickle.dumps(query))
            while True:
                packet = sock.recv(4096)
                if not packet or packet == b'':
                    break
                received_data.append(packet)
        try:
            content = pickle.loads(b"".join(received_data))
            return content
        except EOFError:
            return None
and a server:
# Server.py
import socketserver
import pickle
import configparser

from data_manipulation import DataManipulation


class MyTCPHandler(socketserver.BaseRequestHandler):
    def handle(self):
        db = DataManipulation("data.db")
        request = pickle.loads(self.request.recv(1024))
        if request['command'] == 'GET':
            content = db.get_data()
        # elif ...:
        #     ... some other logic and database interactions
        self.request.sendall(pickle.dumps(content))


def main(HOST, PORT):
    try:
        my_server = socketserver.TCPServer((HOST, PORT), MyTCPHandler)
        my_server.serve_forever()
    except KeyboardInterrupt:
        my_server.shutdown()
        my_server.server_close()


if __name__ == "__main__":
    config = configparser.ConfigParser()
    config.read('../params.ini')
    main(config['SERVER']['host'], int(config['SERVER']['port']))
In this form, a new database connection (the db instance) is created for every handled request.
I would like to avoid that: I would like to create the db object once, constructed with an argument read from the ini file. Obviously it is enough to read that file once.
All the examples I have found show a simple echo server application, where handle() doesn't do much beyond printing some data.
I am not sure where the correct place is to create the db object so that there is only one instance,
or how to properly incorporate some more advanced logic in the handle() method.
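One common pattern, sketched below under the assumption that DataManipulation can safely be reused across requests, is to create the object once at startup and attach it to the server instance; every handler can then reach it through self.server. The db_path ini key used here is my own, hypothetical addition.

# Sketch: build the db object once and share it through the server instance.
import configparser
import pickle
import socketserver

from data_manipulation import DataManipulation


class MyTCPServer(socketserver.TCPServer):
    def __init__(self, server_address, handler_cls, db):
        super().__init__(server_address, handler_cls)
        self.db = db  # one instance, shared by every handler


class MyTCPHandler(socketserver.BaseRequestHandler):
    def handle(self):
        content = None
        request = pickle.loads(self.request.recv(1024))
        if request['command'] == 'GET':
            content = self.server.db.get_data()  # reuse the single db instance
        self.request.sendall(pickle.dumps(content))


if __name__ == "__main__":
    config = configparser.ConfigParser()
    config.read('../params.ini')
    db = DataManipulation(config['SERVER']['db_path'])  # hypothetical ini key, read once
    server = MyTCPServer((config['SERVER']['host'],
                          int(config['SERVER']['port'])), MyTCPHandler, db)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        server.shutdown()
        server.server_close()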
I am trying to write a Tornado TCP + HTTP server application.
My use case is a Tornado TCP + HTTP server that accepts data from a TCP client and passes the data on to be displayed on a web page hosted by the HTTP server.
Here is my Tornado server code:
#!/usr/bin/env python
import os.path
import tornado.httpserver
import tornado.web
import logging
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer
from tornado.options import options, define

define("port", default=6642, help="TCP port to listen on")
logger = logging.getLogger(__name__)

test = {}


class IndexHandler(tornado.web.RequestHandler):
    def get(self):
        global test
        self.render('index.html', test=test)


class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        global test
        while True:
            try:
                test = yield stream.read_until("\n")
                logger.info("Received bytes: %s", test)
            except StreamClosedError:
                logger.warning("Lost client at host %s", address[0])
                break
            except Exception as e:
                print(e)


if __name__ == "__main__":
    options.parse_command_line()
    app = tornado.web.Application(
        handlers=[(r'/', IndexHandler)],
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        template_path=os.path.join(os.path.dirname(__file__), "templates"))
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(options.port)

    server = EchoServer()
    server.listen(6641)
    logger.info("Listening on TCP port %d", 6641)
    IOLoop.current().start()
Here is the Python client code:
# echo_client.py
import socket
import time

counter = 0
host = '192.168.43.59'
port = 6641  # The same port as used by the server

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
while True:
    s.sendall("s\n")
    counter = counter + 1
    time.sleep(5)
I want to pass the data received from the TCP client into the variable "test" and hand it to the rendered template so it is displayed on the index.html web page, but no data is displayed.
I am using the global-variable approach, but with no success, since I couldn't get the updated "test" value through to the index.html page.
If anyone could shed some light on using common variables across different classes or handlers, it would help me.
The JavaScript file I am using is this:
/* test.js */
var test = "";

function set_test(val)
{
    test = val;
}

function show_test()
{
    alert(test);
}
The HTML template used is this:
<!DOCTYPE html>
<html>
  <head>
    <meta http-equiv="refresh" content="30" />
    <title>Test</title>
    <script src="{{ static_url('scripts/test.js') }}"
            type="application/javascript"></script>
  </head>
  <body>
    <input type="button" onclick="show_test()" value="alert" />
    <script type="application/javascript">
      set_test("{{test}}");
    </script>
  </body>
</html>
Hi xyres,
Thank you for your quick reply. I went through the link you provided, and I understand that q.get() and q.put() can be used to store and retrieve data, as you said. But after modifying the Tornado server code in the following manner, I can no longer receive data from the TCP client at all (before this I could at least get data from the TCP client). Can you tell me what mistake I made in the queue implementation?
Here is my Tornado server code:
#!/usr/bin/env python
import os.path
import tornado.httpserver
import tornado.web
import logging
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.iostream import StreamClosedError
from tornado.queues import Queue
from tornado.tcpserver import TCPServer
from tornado.options import options, define

define("port", default=6642, help="TCP port to listen on")
logger = logging.getLogger(__name__)

# test = {}
q = Queue(maxsize=2)


class IndexHandler(tornado.web.RequestHandler):
    def get(self):
        # global test
        test = yield q.get
        self.render('index.html', test=test)


class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        # global test
        yield q.put(test)
        yield q.join()
        while True:
            try:
                test = yield stream.read_until("\n")
                logger.info("Received bytes: %s", test)
            except StreamClosedError:
                logger.warning("Lost client at host %s", address[0])
                break
            except Exception as e:
                print(e)


if __name__ == "__main__":
    options.parse_command_line()
    app = tornado.web.Application(
        handlers=[(r'/', IndexHandler)],
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        template_path=os.path.join(os.path.dirname(__file__), "templates"))
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(options.port)

    server = EchoServer()
    server.listen(6641)
    logger.info("Listening on TCP port %d", 6641)
    IOLoop.current().start()
As per the Tornado documentation, it seems that queues are meant to be used with coroutines, and here I am trying to share one between two different classes. Is that a mistake? I am new to Tornado, so please bear with my silly questions.
You have multiple options:
1. If you want a long-running connection, for example, if a client sends a request to IndexHandler and you want the client to wait until a message is available in the queue, you can convert your handler to a coroutine.
2. If you want to return the response immediately, regardless of whether data is available in the queue, you can use the queue's get_nowait() method.
Example for case #1:
from tornado.queues import Queue

q = Queue()


class IndexHandler(tornado.web.RequestHandler):
    @gen.coroutine
    def get(self):
        self.data_future = q.get()
        data = yield self.data_future
        self.render('index.html', data=data)

    def on_connection_close(self):
        # Set an empty result on the future if the connection is
        # closed, so that messages don't get removed from the
        # queue unnecessarily for closed connections.
        self.data_future.set_result(None)
Example for case #2:
from tornado.queues import Queue, QueueEmpty

q = Queue()


def get(self):
    try:
        data = q.get_nowait()
    except QueueEmpty:
        data = None
    self.render(...)
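For completeness, here is a sketch of the producer side (my own, not part of the answer above): assuming the same module-level q, the EchoServer from the question would put each received line onto the queue roughly like this.

# Sketch of the producer side, assuming the same module-level `q = Queue()`.
from tornado import gen
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer


class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        while True:
            try:
                line = yield stream.read_until(b"\n")
                yield q.put(line)  # hand the line to any waiting web handler
            except StreamClosedError:
                break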
I'm a newbie at creating sockets manually. My OS is Ubuntu. I've got a proxy server written in Python using Tornado. Everything is fine when I use the "fast version" of starting the app, I mean this:
if __name__ == "__main__":
app = make_app()
port = options.port # default 8000
if len(sys.argv) > 1:
port = int(sys.argv[1])
app.listen(port)
print 'tornado working on port %s' % port
tornado.ioloop.IOLoop.current().start()
But when I want to change it to the 'socket version', it seems that I'm doing something wrong: I get an error saying that the address is already in use.
code:
def make_app():
    return MyApplication()


def connection_ready(sock, fd, events):
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        connection.setblocking(0)
        app = make_app()
        app.listen(8000)  # I get here an error: [Errno 98] Address already in use


if __name__ == "__main__":
    port = options.port  # default port 8000
    if len(sys.argv) > 1:
        port = int(sys.argv[1])

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(False)
    sock.bind(("", port))
    sock.listen(128)

    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    io_loop.start()
I'm trying to implement it the same way the documentation shows (http://www.tornadoweb.org/en/stable/ioloop.html), but I don't see the app being started anywhere in that example.
Could someone tell me what the proper way is to start an app using sockets? I'm trying to build an application that becomes available when the server accepts the incoming socket (so every client that connects to the listening port set up in the main function at the lines sock.bind(("", port)) and sock.listen(128) gets a new socket and has access to the application).
Edit: I'm adding my proxy class:
class ProxyHandler(tornado.web.RequestHandler):
    SUPPORTED_METHODS = ['GET', 'POST']

    def data_received(self, chunk):
        pass

    def compute_etag(self):
        return None  # disable tornado Etag

    def handle_response(self, response):
        if response.error and not isinstance(response.error, tornado.httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = tornado.httputil.HTTPHeaders()  # clear tornado default headers
            for header, v in response.headers.get_all():
                if header not in ('Content-Length', 'Transfer-Encoding', 'Content-Encoding', 'Connection'):
                    self.add_header(header, v)  # some headers appear multiple times, e.g. 'Set-Cookie'
            secured_page = False
            for page in secure_pages:
                if page in self.request.uri:
                    secured_page = True
                    self.set_header('Content-Length', len(response.body))
                    self.write(response.body)
                    break
            if response.body and not secured_page:
                c.execute('SELECT filter_name FROM filters WHERE filter_type=1')
                tags = c.fetchall()
                soup = BeautifulSoup(response.body, 'html.parser')
                for row in tags:
                    catched_tags = soup.find_all(str(row[0]))
                    if catched_tags:
                        print 'caught: %s of <%s> tags' % (len(catched_tags), str(row[0]))
                        for tag in catched_tags:
                            tag.extract()
                new_body = str(soup)
                self.set_header('Content-Length', len(new_body))
                self.write(new_body)
        self.finish()

    @tornado.web.asynchronous
    def get(self):
        logger.debug('Handle %s request to %s', self.request.method, self.request.uri)
        body = self.request.body
        if not body:
            body = None
        try:
            if 'Proxy-Connection' in self.request.headers:
                del self.request.headers['Proxy-Connection']
            c.execute('SELECT filter_name FROM filters WHERE filter_type=2')
            urls = c.fetchall()
            for url in urls:
                if url[0] in self.request.path:
                    self.set_status(403)
                    self.finish()
                    return
            fetch_request(self.request.uri, self.handle_response,
                          method=self.request.method, body=body,
                          headers=self.request.headers, follow_redirects=False,
                          allow_nonstandard_methods=True)
        except tornado.httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

    @tornado.web.asynchronous
    def post(self):
        return self.get()
And my urls for the application:
urls = [
    url(r"/admin/$", mainHandlers.MainHandler),
    url(r"/admin/delete_filter/", mainHandlers.DataDeleteHandler),
    url(r"/admin/filters/$", mainHandlers.DataGetter),
    url(r"/admin/new_filter/$", mainHandlers.FormHandler),
    url(r"/admin/stats/$", mainHandlers.StatsTableHandler),
    url(r"/admin/stats/query/$", mainHandlers.AjaxStatsGetHandler),
    url(r"/static/", StaticFileHandler, dict(path=settings['static_path'])),
    url(r'.*', myProxy.ProxyHandler),
]
It says the port is already in use because it is. You're listening on port 8000 at least twice: once in the __main__ block when you call sock.listen, and again in the connection_ready handler when you call app.listen() (which creates another socket and tries to bind it to port 8000). You need to remove the app.listen() line, but I don't understand what you're trying to do well enough to say what you should do instead.
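For reference, one possible next step (a sketch of my own, assuming the goal is simply to serve the Tornado application on the hand-made socket): drop the connection_ready/add_handler machinery, create the HTTPServer once in the __main__ block, and hand it the already-bound, listening socket with add_socket(), so Tornado accepts connections on it itself.

# Sketch: serve the Application on a manually created listening socket.
# Assumes make_app() from the question; the socket setup mirrors the
# question's __main__ block.
import socket

import tornado.httpserver
import tornado.ioloop

if __name__ == "__main__":
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(False)
    sock.bind(("", 8000))
    sock.listen(128)

    server = tornado.httpserver.HTTPServer(make_app())
    server.add_socket(sock)  # Tornado accepts connections on this socket itself
    tornado.ioloop.IOLoop.current().start()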
If you start the app on Windows, you must wait for the firewall to unblock it. On Windows it is safe to assume that if an application occupies a port, that port is blocked for use by other processes that might otherwise listen to packets not intended for them.
I've rewritten my proxy in pure Python code on sockets. I'm not using URLs now, I only handle the responses from the remote addresses, and I'm not using any framework.
I'm trying to implement something like "tail -f" over HTTP with Python. Currently I'm trying to use Tornado, but it is only handling one connection at a time, even when I make asynchronous requests.
import socket
import subprocess

import tornado.gen as gen
import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.options
import tornado.web
from tornado.options import define, options

define("port", default=8888, help="run on the given port", type=int)
define(
    "inputfile",
    default="test.txt",
    help="the path to the file which we will 'tail'",
    type=str)


class MainHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    @gen.engine
    def get(self):
        print "GOT REQUEST"
        inputfile = open(options.inputfile)
        p = subprocess.Popen(
            "./nettail.py",
            stdin=inputfile,
            stdout=subprocess.PIPE)
        port_number = int(p.stdout.readline().strip())

        self.write("<pre>")
        self.write("Hello, world\n")
        self.flush()

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        stream = tornado.iostream.IOStream(s)
        yield gen.Task(stream.connect, ("127.0.0.1", port_number))
        while True:
            data = yield gen.Task(stream.read_until, "\n")
            self.write(data)
            self.flush()


def main():
    tornado.options.parse_command_line()
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
The process I am starting is a simple "tail" which outputs to a socket.
import random
import socket
import sys
import time

# Create an INET, STREAMing socket.
s = socket.socket(
    socket.AF_INET, socket.SOCK_STREAM)

# Open the connection.
try:
    for attempt_number in xrange(5):
        port_number = random.randint(9000, 65000)
        try:
            s.bind(("localhost", port_number))
        except socket.error:
            continue
        # We successfully bound!
        sys.stdout.write("{0}".format(port_number))
        sys.stdout.write("\n")
        sys.stdout.flush()
        break

    # Become a server socket.
    s.listen(5)

    # Accept a connection.
    try:
        (clientsocket, address) = s.accept()
        while True:
            line = sys.stdin.readline()
            if not line:
                time.sleep(1)
                continue
            clientsocket.sendall(line)
    finally:
        clientsocket.close()
finally:
    s.close()
./nettail.py works as I expect, but the Tornado HTTP server is only handling one request at a time.
I would like to use long-running, persistent HTTP connections to do this, as it is compatible with older browsers. I understand that Web Sockets is how it would be done in modern browsers.
Edit:
I'm running this on Linux and Solaris, not Windows. That means I could use tornado.iostream on the file, rather than through a sockets program. Still, that is not a documented feature, so I launch a sockets program for each connection.
After doing some more debugging, it turns out that this tail server was not blocking, after all.
I was trying to test concurrent connections with two windows of Firefox open, but Firefox would not start fetching the second window until the first window was manually stopped. I guess Firefox does not like to have two concurrent HTTP connections to fetch the same resource.
Opening a Firefox window and a Chromium window, I can see the "tail" output pushed to both tabs.
Thank you for all your help. @abarnert's comments were especially helpful.
Edit:
In the to-be-released 2.4.2 version of Tornado, a "Pipe" IOStream is implemented. Using this and regular "tail" simplified the code a lot.
import subprocess

import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.options
import tornado.web
from tornado.options import define, options

define("port", default=8888, help="run on the given port", type=int)
define(
    "inputfile",
    default="test.txt",
    help="the path to the file which we will 'tail'",
    type=str)


class MainHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    def get(self):
        print "GOT REQUEST"
        self.p = subprocess.Popen(
            ["tail", "-f", options.inputfile, "-n+1"],
            stdout=subprocess.PIPE)
        self.write("<pre>")
        self.write("Hello, world\n")
        self.flush()
        self.stream = tornado.iostream.PipeIOStream(self.p.stdout.fileno())
        self.stream.read_until("\n", self.line_from_nettail)

    def on_connection_close(self, *args, **kwargs):
        """Clean up the nettail process when the connection is closed."""
        print "CONNECTION CLOSED!!!!"
        self.p.terminate()
        tornado.web.RequestHandler.on_connection_close(self, *args, **kwargs)

    def line_from_nettail(self, data):
        self.write(data)
        self.flush()
        self.stream.read_until("\n", self.line_from_nettail)


def main():
    tornado.options.parse_command_line()
    application = tornado.web.Application([
        (r"/", MainHandler),
    ])
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
I created this recently as an experiment. It works for me with multiple connections; is it of any use?
class TailHandler(BaseHandler):
    @asynchronous
    def get(self):
        self.file = open('data/to_read.txt', 'r')
        self.pos = self.file.tell()

        def _read_file():
            line = self.file.read()
            last_pos = self.file.tell()
            if not line:
                self.file.close()
                self.file = open('data/to_read.txt', 'r')
                self.file.seek(last_pos)
                pass
            else:
                self.write(line)
                self.flush()
            IOLoop.instance().add_timeout(time.time() + 1, _read_file)

        _read_file()
You shouldn't have blocking calls like this in the handler.
port_number = int(p.stdout.readline().strip())
You'll need to use select or a similar mechanism to avoid the blocking call.
Edit: OK, I went and checked the docs. You should use their iostream to read from p.
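For example, here is a standalone sketch of that idea (assuming the ./nettail.py and test.txt from the question, and a Tornado version that includes PipeIOStream): read the child's first output line, the port number, through the IOLoop instead of with a blocking readline().

# Sketch: read nettail.py's first output line (the port number) without blocking.
import subprocess

import tornado.ioloop
import tornado.iostream

p = subprocess.Popen(["./nettail.py"], stdin=open("test.txt"),
                     stdout=subprocess.PIPE)
stream = tornado.iostream.PipeIOStream(p.stdout.fileno())


def on_first_line(line):
    port_number = int(line.strip())
    print "nettail is listening on port", port_number
    tornado.ioloop.IOLoop.instance().stop()

stream.read_until("\n", on_first_line)
tornado.ioloop.IOLoop.instance().start()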
I want to set up a basic SSL-authenticated socket server to do some network communication. I'm getting the error below; it seems to come from the SSLIOStream not handshaking before reading:
File "simple_ssl_server.py", line 70, in connection_ready
node_io_stream.read_until("OK", on_ok)
File "/home/tombrown/skyhook/lib/python2.7/site-packages/tornado-2.1.1-py2.7.egg/tornado/iostream.py", line 161, in read_until
if self._read_to_buffer() == 0:
File "/home/tombrown/skyhook/lib/python2.7/site-packages/tornado-2.1.1-py2.7.egg/tornado/iostream.py", line 375, in _read_to_buffer
chunk = self._read_from_socket()
File "/home/tombrown/skyhook/lib/python2.7/site-packages/tornado-2.1.1-py2.7.egg/tornado/iostream.py", line 635, in _read_from_socket
chunk = self.socket.read(self.read_chunk_size)
File "/usr/lib/python2.7/ssl.py", line 151, in read
return self._sslobj.read(len)
SSLError: [Errno 1] _ssl.c:1354: error:1408F044:SSL routines:SSL3_GET_RECORD:internal error
Here's my server code:
import tornado.web
import tornado.httpserver
import tornado.ioloop
import select
import socket
import tornado.iostream
import random
import logging
import ssl
import errno
import functools


class SSLSocketServer(object):
    def __init__(self, io_loop=None, config_file=None, debug=False):
        if io_loop is None:
            io_loop = tornado.ioloop.IOLoop.instance()

        # Set up our node-accepting socket on port 8013.
        HOST = ''    # Symbolic name meaning all available interfaces
        PORT = 8013  # Arbitrary non-privileged port
        server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server_sock.setblocking(0)
        server_sock.bind((HOST, PORT))
        # We allow a backlog of up to 128 pending connections.
        server_sock.listen(128)

        callback = functools.partial(self.connection_ready, server_sock)
        io_loop.add_handler(server_sock.fileno(),
                            callback, io_loop.READ)

    def connection_ready(self, sock, fd, events):
        # In part from: https://github.com/saucelabs/monocle/blob/7bd978f1c6a2ad3d78dd3da0b5b73c3e215ebbf3/monocle/tornado_stack/network/__init__.py
        while True:
            # Wait for the basic socket to be available.
            try:
                node_sock, address = sock.accept()
            except socket.error, e:
                if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                    raise
                return

            # Wait for the ssl socket to be available.
            try:
                node_sock = ssl.wrap_socket(node_sock,
                                            do_handshake_on_connect=False,
                                            server_side=True,
                                            certfile="cert.pem",
                                            ssl_version=ssl.PROTOCOL_TLSv1)
            except ssl.SSLError, err:
                if err.args[0] == ssl.SSL_ERROR_EOF:
                    node_sock.close()
                    return
                else:
                    raise
            except socket.error, err:
                if err.args[0] == errno.ECONNABORTED:
                    node_sock.close()
                    return
                else:
                    raise

            node_io_stream = tornado.iostream.SSLIOStream(node_sock)

            def on_ok():
                print "received OK!"

            node_io_stream.read_until("OK", on_ok)


if __name__ == '__main__':
    # Get a handle to the instance of IOLoop.
    io_loop = tornado.ioloop.IOLoop.instance()
    worker = SSLSocketServer(io_loop)
    # Start the IOLoop.
    io_loop.start()
And here's the client code:
import sys
import logging
import socket
from tornado import iostream
from tornado import ioloop
import uuid
from tornado.options import define, options
import json
import ssl


def main():
    delim = '\r\n\r\n'

    def send_request():
        print "sending OK"
        stream.write("OK")

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    # stream = iostream.IOStream(s)
    stream = iostream.SSLIOStream(
        s,
        ssl_options=dict(
            ca_certs="fake_auth/server_certfile.pems",
            cert_reqs=ssl.CERT_NONE))
    print "about to connect"
    stream.connect(('', 8013), send_request)
    ioloop.IOLoop.instance().start()


if __name__ == '__main__':
    main()
I created the keyfile and certificate with the following command:
openssl req -new -x509 -days 365 -nodes -out cert.pem -keyout cert.pem
Tornado is able to maintain an SSL connection itself using Python 2.6+ and OpenSSL. Why are you attempting to manually build out the SSL socket connection?
Check out: http://www.tornadoweb.org/documentation/httpserver.html#http-server
Key excerpt:
HTTPServer can serve SSL traffic with Python 2.6+ and OpenSSL. To make this server serve SSL traffic, send the ssl_options dictionary argument with the arguments required for the ssl.wrap_socket method, including “certfile” and “keyfile”:
HTTPServer(application, ssl_options={
    "certfile": os.path.join(data_dir, "mydomain.crt"),
    "keyfile": os.path.join(data_dir, "mydomain.key"),
})
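Applied to this question's setup, a minimal sketch (assuming the combined cert.pem generated by the openssl command above, which holds both the private key and the certificate; the handler here is hypothetical):

# Sketch: let HTTPServer terminate SSL itself, using the combined cert.pem
# from the question (private key and certificate in one file).
import tornado.httpserver
import tornado.ioloop
import tornado.web


class PingHandler(tornado.web.RequestHandler):  # hypothetical handler
    def get(self):
        self.write("OK")


application = tornado.web.Application([(r"/", PingHandler)])
http_server = tornado.httpserver.HTTPServer(application, ssl_options={
    "certfile": "cert.pem",
    "keyfile": "cert.pem",
})
http_server.listen(8013)
tornado.ioloop.IOLoop.instance().start()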
pems? Shouldn't this be pem?
ca_certs="fake_auth/server_certfile.pems",
Did you get it to work?