I'm new to creating sockets manually. My OS is Ubuntu. I've got a proxy server written in Python using Tornado, and everything is fine when I use the "fast version" of starting the app, I mean:
if __name__ == "__main__":
    app = make_app()
    port = options.port  # default 8000
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    app.listen(port)
    print 'tornado working on port %s' % port
    tornado.ioloop.IOLoop.current().start()
But when I want to change it to use the 'socket version', it seems that I'm doing something wrong: I get an error saying that the address is already in use.
Code:
def make_app():
    return MyApplication()

def connection_ready(sock, fd, events):
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        connection.setblocking(0)
        app = make_app()
        app.listen(8000)  # I get here an error: [Errno 98] Address already in use
if __name__ == "__main__":
    port = options.port  # default port 8000
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(False)
    sock.bind(("", port))
    sock.listen(128)
    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    io_loop.start()
I'm trying to implement it the way the documentation shows (http://www.tornadoweb.org/en/stable/ioloop.html), but I don't see where the app is started in that example.
Could someone tell me the proper way to start an app using sockets? I'm trying to build an application that is available when the server accepts the incoming socket. (So every client that connects to the listening port set up in the main function at sock.bind(("", port)) and sock.listen(128) gets a new socket and has access to the application.)
Edit: I'm adding my proxy class:
class ProxyHandler(tornado.web.RequestHandler):
    SUPPORTED_METHODS = ['GET', 'POST']

    def data_received(self, chunk):
        pass

    def compute_etag(self):
        return None  # disable tornado Etag

    def handle_response(self, response):
        if response.error and not isinstance(response.error, tornado.httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = tornado.httputil.HTTPHeaders()  # clear tornado default headers
            for header, v in response.headers.get_all():
                if header not in ('Content-Length', 'Transfer-Encoding', 'Content-Encoding', 'Connection'):
                    self.add_header(header, v)  # some headers appear multiple times, e.g. 'Set-Cookie'
            secured_page = False
            for page in secure_pages:
                if page in self.request.uri:
                    secured_page = True
                    self.set_header('Content-Length', len(response.body))
                    self.write(response.body)
                    break
            if response.body and not secured_page:
                c.execute('SELECT filter_name FROM filters WHERE filter_type=1')
                tags = c.fetchall()
                soup = BeautifulSoup(response.body, 'html.parser')
                for row in tags:
                    catched_tags = soup.find_all(str(row[0]))
                    if catched_tags:
                        print 'catched: %s of <%s> tags' % (len(catched_tags), str(row[0]))
                        for tag in catched_tags:
                            tag.extract()
                new_body = str(soup)
                self.set_header('Content-Length', len(new_body))
                self.write(new_body)
        self.finish()

    @tornado.web.asynchronous
    def get(self):
        logger.debug('Handle %s request to %s', self.request.method, self.request.uri)
        body = self.request.body
        if not body:
            body = None
        try:
            if 'Proxy-Connection' in self.request.headers:
                del self.request.headers['Proxy-Connection']
            c.execute('SELECT filter_name FROM filters WHERE filter_type=2')
            urls = c.fetchall()
            for url in urls:
                if url[0] in self.request.path:
                    self.set_status(403)
                    self.finish()
                    return
            fetch_request(self.request.uri, self.handle_response,
                          method=self.request.method, body=body, headers=self.request.headers,
                          follow_redirects=False, allow_nonstandard_methods=True)
        except tornado.httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

    @tornado.web.asynchronous
    def post(self):
        return self.get()
And my urls for the application:
urls = [
    url(r"/admin/$", mainHandlers.MainHandler),
    url(r"/admin/delete_filter/", mainHandlers.DataDeleteHandler),
    url(r"/admin/filters/$", mainHandlers.DataGetter),
    url(r"/admin/new_filter/$", mainHandlers.FormHandler),
    url(r"/admin/stats/$", mainHandlers.StatsTableHandler),
    url(r"/admin/stats/query/$", mainHandlers.AjaxStatsGetHandler),
    url(r"/static/", StaticFileHandler, dict(path=settings['static_path'])),
    url(r'.*', myProxy.ProxyHandler),
]
It says the port is already in use because it is. You're listening on port 8000 at least twice: once in the __main__ block when you call sock.listen, and again in the connection_ready handler when you call app.listen() (which creates another socket and tries to bind it to port 8000). You need to remove the app.listen() line, but I don't understand what you're trying to do well enough to say what you should do instead.
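For reference, the usual pattern when you manage the listening socket yourself is to bind it once and hand it to an HTTPServer, instead of calling app.listen() (which binds a second socket). A minimal sketch, assuming the make_app() from the question:

import tornado.httpserver
import tornado.ioloop
import tornado.netutil

app = make_app()                                # the question's application factory
server = tornado.httpserver.HTTPServer(app)
sockets = tornado.netutil.bind_sockets(8000)    # bind and listen exactly once
server.add_sockets(sockets)
tornado.ioloop.IOLoop.current().start()

With this, Tornado accepts connections itself and routes each request through the Application, so there is no per-connection app.listen() at all.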
If you start the app on Windows, you must wait for the firewall to unblock it. On Windows it is safe to assume that if an application occupies a port, the port is blocked for use by other processes that might listen to packets not intended for them.
I've rewritten my proxy in pure Python on sockets; I'm not using URLs now, and I only handle the responses from the remote addresses. I'm not using any framework.
Related
I've been working on an application which contains a small, simple http server to handle post requests on occasion. The server and all functionality around it work fine, but each time the server runs, log output tells me that my code is being run multiple times: one extra time for each request the http server has handled.
class HttpApp:
    def __init__(self, host="localhost", port=25565):
        logging = Util.configure_logging(__name__)
        server_address = (host, port)
        httpd = HTTPServer(server_address, ServerObject)
        logging.info('Starting httpd...\n')
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            pass
        httpd.server_close()
        logging.info('Stopping httpd...\n')


class ServerObject(BaseHTTPRequestHandler):
    def _set_response(self):
        self.send_response(200)
        self.send_header('Content-type', 'application/json')
        self.end_headers()

    def do_GET(self):
        print("GET request,\nPath: %s\nHeaders:\n%s\n", str(self.path), str(self.headers))
        self._set_response()
        self.wfile.write("GET request for {}".format(self.path).encode('utf-8'))

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        content_type = str(self.headers['Content-Type'])
        # print(content_length)
        post_data = self.rfile.read(content_length)
        if content_type == "application/json":
            parsed_data = json.loads(post_data.decode('utf-8'))
        else:
            print("Bad request!")
            self._set_response()
            self.wfile.write(json.dumps({"Response": "Bad Request"}).encode('utf-8'))
        print("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n" %
              (str(self.path), str(self.headers), parsed_data))
        print("Parsed Params: %s" % parsed_data)

        def free_port():
            free_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            free_socket.bind(('0.0.0.0', 0))
            free_socket.listen(5)
            port = free_socket.getsockname()[1]
            free_socket.close()
            return port

        rand_port = free_port()
        SpawnSlave(category=parsed_data["category"], tag=parsed_data["tag"],
                   filename=parsed_data["filename"], port=rand_port)
        self._set_response()
        self.wfile.write(json.dumps({"port": rand_port}).encode('utf-8'))
A cli application passes information to HttpApp, which then starts based on that information. Once it receives a connection the first time, it goes through its steps normally and only prints output once. The second time, output is printed twice, and so on. Only post requests are handled by this server. I have gone over my code a few times to make sure I'm not calling it more than once, but I seem to be stumped. For more context, more of this code is available on github, but this is the only relevant piece.
It turns out that this wasn't an issue with my code, but rather an issue with the logger I was using which was adding multiple console handlers for the same logger, causing output to be repeated. I fixed this in my cli library.
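For anyone hitting the same symptom: the usual culprit is a configure function that adds a new handler on every call. A guard along these lines (a sketch, not the poster's actual Util.configure_logging) prevents handlers from stacking:

import logging

def configure_logging(name):
    logger = logging.getLogger(name)
    if not logger.handlers:  # only attach a handler on the first call
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter('%(levelname)s %(message)s'))
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)
    return logger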
This is my code for my seminar in a computer networks course at university.
Requirement: write a proxy web server. Receive the HTTP request from the browser on port 8888 and send the HTTP request to the web server on port 80. It seems I'm having trouble sending the request to the web server.
Could you show me my error in this situation?
Many thanks
import socket
import sys
import _thread
import traceback
import ssl


def CreateServer(host, port):
    Server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    Server.bind((host, port))
    Server.listen(5)
    return Server


def ReadRequest(Client):
    re = ""
    Client.settimeout(10.0)
    try:
        re = Client.recv(1024).decode()
        while (re):
            re = re + Client.recv(1024).decode()
    except socket.timeout:  # fail after 10 seconds of no activity
        if not re:
            print("Didn't receive data! [Timeout]")
    finally:
        return re


# 2. Client connects to Server + 3. Read HTTP Request
def ReadHTTPRequest(Server):
    re = ""
    while (re == ""):
        Client, address = Server.accept()
        print("Client: ", address, " has connected to the Server")
        re = ReadRequest(Client)
    return Client, address, re


def proxy_server(webserver, port, conn, data, addr):
    print("{} {} {} {}".format(webserver, port, conn, addr))
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(10.0)
        s.connect((webserver, port))
        s.sendall(data)
        while 1:
            reply = s.recv(1024)
            if len(reply) > 0:
                conn.send(reply)
                print("[*] Request sent: {} > {}".format(addr[0], webserver))
            else:
                break
        s.close()
        conn.close()
    except Exception as e:
        print(e)
        traceback.print_exc()
        s.close()
        conn.close()
        sys.exit(1)


def conn_string(Client, Request, addr):
    try:
        # print(addr)
        first_line = Request.split('\n')[0]
        url = first_line.split(" ")[1]
        http_pos = url.find("://")
        if http_pos == -1:
            temp = url
        else:
            temp = url[(http_pos + 3):]
        port_pos = temp.find(":")
        webserver_pos = temp.find("/")
        if webserver_pos == -1:
            webserver_pos = len(temp)
        webserver = ""
        port = -1
        if port_pos == -1 or webserver_pos < port_pos:
            port = 80
            webserver = temp[:webserver_pos]
        else:
            port = 80
            # port = int(temp[(port_pos + 1):][:webserver_pos - port_pos - 1])
            webserver = temp[:port_pos]
        proxy_server(webserver, port, Client, Request.encode(), addr)
    except Exception as e:
        print(e)
        traceback.print_exc()


if __name__ == "__main__":
    try:
        Server = CreateServer("", 8888)
        print("[*] Initializing socket. Done.")
        print("[*] Socket bound successfully...")
        print("[*] Server started successfully [{}]".format(8888))
    except Exception as e:
        print(e)
        sys.exit(2)
    while True:
        try:
            Client, addr, Request = ReadHTTPRequest(Server)
            print("---------HTTP request: ")
            print(Request)
            _thread.start_new_thread(conn_string, (Client, Request, addr))
        except KeyboardInterrupt:
            Server.close()
            print("\n[*] Shutting down..")
            sys.exit()
    Server.close()
You are simply forwarding the original request to the server, i.e. with the full URL inside the request:
GET http://ktdbcl.hcmus.edu.vn/ HTTP/1.1
Host: ktdbcl.hcmus.edu.vn
...
Instead only the path should be forwarded:
GET / HTTP/1.1
Host: ktdbcl.hcmus.edu.vn
...
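One way to do that rewrite inside the question's conn_string, reusing its url and Request variables (a sketch, untested against the rest of the code):

# Strip the scheme and host from the request line before forwarding.
scheme_pos = url.find("://")
rest = url if scheme_pos == -1 else url[scheme_pos + 3:]
slash_pos = rest.find("/")
path = "/" if slash_pos == -1 else rest[slash_pos:]
Request = Request.replace(url, path, 1)  # "GET http://host/ HTTP/1.1" -> "GET / HTTP/1.1"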
Apart from that, the reading of the request and response is severely broken. You don't properly parse the HTTP protocol but instead simply wait some time and treat a few seconds without data as the end of a message. Since browsers today send multiple requests and responses over the same connection, your code will severely slow down any browsing.
Instead of treating several seconds of no data as the end-of-request/response marker, you should properly parse the HTTP protocol and wait for the appropriate end-of-request marker defined by the HTTP standard - see RFC 7230.
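As a rough illustration of framing by the protocol rather than by timeouts (a sketch only: it ignores chunked transfer encoding and many other cases that RFC 7230 covers):

def read_http_message(sock):
    # Read until the blank line that terminates the header block.
    data = b""
    while b"\r\n\r\n" not in data:
        chunk = sock.recv(4096)
        if not chunk:
            return data  # peer closed the connection early
        data += chunk
    head, _, body = data.partition(b"\r\n\r\n")
    # Naive Content-Length lookup; real code must also handle chunked bodies.
    length = 0
    for line in head.split(b"\r\n")[1:]:
        name, _, value = line.partition(b":")
        if name.strip().lower() == b"content-length":
            length = int(value.decode("ascii").strip())
    while len(body) < length:
        body += sock.recv(4096)
    return head + b"\r\n\r\n" + body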
I am able to serve HTTP/2 using this tutorial: https://python-hyper.org/projects/h2/en/stable/basic-usage.html. This version works fine, but the server can only serve one client at a time because of its blocking sockets.
Then I looked for multi-client server implementations based on the select() system call. This tutorial showed me how to do it: https://realpython.com/python-sockets/. I merged the two tutorials to create a multi-client HTTP/2 server.
This is what I ended up with:
server.py
import json
import socket
import h2.connection
import h2.events
import types
from thread import start_new_thread

try:
    import selectors
except ImportError:
    import selectors2 as selectors

sel = selectors.DefaultSelector()
h2conn = h2.connection.H2Connection(client_side=False)


def send_response(hyperConn, event):
    print('sending respond')
    stream_id = event.stream_id
    response_data = json.dumps(dict(event.headers)) + 'KUKA MAKI'.encode('utf-8')
    hyperConn.send_headers(
        stream_id=stream_id,
        headers=[
            (':status', '200'),
            ('server', 'basic-h2-server/1.0'),
            ('content-length', str(len(response_data))),
            ('content-type', 'application/json'),
        ],
    )
    hyperConn.send_data(
        stream_id=stream_id,
        data=response_data,
        end_stream=True
    )


def accept_wrapper(sock):
    conn, addr = sock.accept()  # Should be ready to read
    print("accepted connection from", addr)
    conn.setblocking(False)
    data = lambda: None  # Dummy namespace
    data.addr = addr
    data.inb = b''
    data.outb = b''
    events = selectors.EVENT_READ | selectors.EVENT_WRITE
    sel.register(conn, events, data=data)
    h2InitConn = h2.connection.H2Connection(client_side=False)
    h2InitConn.initiate_connection()
    conn.sendall(h2InitConn.data_to_send())


def service_connection(key, mask):
    sock = key.fileobj
    data = key.data
    if mask & selectors.EVENT_READ:
        recv_data = sock.recv(65535)  # Should be ready to read
        if recv_data:
            # data.outb += recv_data
            print('raw data: ', recv_data)
            events = h2conn.receive_data(recv_data)
            print('events received:', events)
            for event in events:
                if isinstance(event, h2.events.RequestReceived):
                    print('request received')
                    send_response(h2conn, event)
        else:
            print("closing connection to", data.addr)
            sel.unregister(sock)
            sock.close()
    if mask & selectors.EVENT_WRITE:
        data_to_send = h2conn.data_to_send()
        if data_to_send:
            print("replying with stuff to", data.addr)
            sock.sendall(data_to_send)


lsock = socket.socket()
lsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
host, port = '0.0.0.0', 8080
lsock.bind((host, port))
lsock.listen(5)
print("listening on", (host, port))
lsock.setblocking(False)
sel.register(lsock, selectors.EVENT_READ, data=None)

try:
    while True:
        events = sel.select(timeout=None)
        for key, mask in events:
            if key.data is None:
                accept_wrapper(key.fileobj)
            else:
                service_connection(key, mask)
except KeyboardInterrupt:
    print("caught keyboard interrupt, exiting")
finally:
    sel.close()
If I run python server.py and, in another terminal, run the hyper CLI tool to send an HTTP/2 request (hyper --h2 GET http://localhost:8080/), the first request is successful: I can see the events generated, and the hyper tool shows the response and quits nicely. But when I send the same hyper command again, no events are generated by the request, and the CLI tool hangs as well. Could you help me figure out the problem?
I was able to figure it out.
I had to save the state of the HTTP/2 connection in the accept_wrapper function into a dictionary, where the key is the client socket's address. In the service_connection function I parsed the data with the appropriate saved HTTP/2 connection object, and then it produced events.
At the beginning of the file:
h2conns = {}
At the end of the accept_wrapper function:
h2conns[conn.getpeername()[1]] = h2InitConn
In the service_connection function:
h2conn = h2conns[sock.getpeername()[1]]
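Put together, the relevant pieces look roughly like this (my consolidation of the three snippets above, keyed by the peer's ephemeral port):

h2conns = {}  # peer port -> per-client H2Connection

def accept_wrapper(sock):
    conn, addr = sock.accept()
    conn.setblocking(False)
    data = lambda: None  # dummy namespace, as in the question
    data.addr = addr
    sel.register(conn, selectors.EVENT_READ | selectors.EVENT_WRITE, data=data)
    h2InitConn = h2.connection.H2Connection(client_side=False)
    h2InitConn.initiate_connection()
    conn.sendall(h2InitConn.data_to_send())
    h2conns[conn.getpeername()[1]] = h2InitConn  # remember this client's state

def service_connection(key, mask):
    sock = key.fileobj
    h2conn = h2conns[sock.getpeername()[1]]  # fetch this client's state
    # ...the rest proceeds as before, using this h2conn instead of the
    #    single module-level connection object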
I have a little question.
I'm new to the whole socket theory and the Tornado framework. Lately I've written a proxy server using the Tornado framework. My app stands in the middle of a connection client (browser) <===> remote address. So the connection looks like this:
client (my web browser) <=== PROXY (my app) ===> remote address (stackoverflow.com)
Everything works fine if I use the standard "main" function. But I want to go a little more low-level; I mean, I want to create sockets and offer my proxy app through that connection.
My proxy app urls:
# coding: utf-8
"""URL's for proxy app."""
from settings import settings
from tornado.web import (
    StaticFileHandler,
    url,
)

from handlers import (
    mainHandlers,
    myProxy,
)

urls = [
    url(r"/admin/$", mainHandlers.MainHandler),
    url(r"/admin/delete_filter/", mainHandlers.DataDeleteHandler),
    url(r"/admin/filters/$", mainHandlers.DataGetter),
    url(r"/admin/new_filter/$", mainHandlers.FormHandler),
    url(r"/admin/stats/$", mainHandlers.StatsTableHandler),
    url(r"/admin/stats/query/$", mainHandlers.AjaxStatsGetHandler),
    url(r"/static/", StaticFileHandler, dict(path=settings['static_path'])),
    url(r'.*', myProxy.ProxyHandler),
]
My ProxyHandler:
class ProxyHandler(tornado.web.RequestHandler):
    SUPPORTED_METHODS = ['GET', 'POST']

    def data_received(self, chunk):
        pass

    def compute_etag(self):
        return None  # disable tornado Etag

    def handle_response(self, response):
        if response.error and not isinstance(response.error, tornado.httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = tornado.httputil.HTTPHeaders()  # clear tornado default headers
            for header, v in response.headers.get_all():
                if header not in ('Content-Length', 'Transfer-Encoding', 'Content-Encoding', 'Connection'):
                    self.add_header(header, v)  # some headers appear multiple times, e.g. 'Set-Cookie'
            secured_page = False
            for page in secure_pages:
                if page in self.request.uri:
                    secured_page = True
                    self.set_header('Content-Length', len(response.body))
                    self.write(response.body)
                    break
            if response.body and not secured_page:
                c.execute('SELECT filter_name FROM filters WHERE filter_type=1')
                tags = c.fetchall()
                soup = BeautifulSoup(response.body, 'html.parser')
                for row in tags:
                    catched_tags = soup.find_all(str(row[0]))
                    if catched_tags:
                        print 'catched: %s of <%s> tags' % (len(catched_tags), str(row[0]))
                        for tag in catched_tags:
                            tag.extract()
                new_body = str(soup)
                self.set_header('Content-Length', len(new_body))
                self.write(new_body)
        self.finish()

    @tornado.web.asynchronous
    def get(self):
        logger.debug('Handle %s request to %s', self.request.method, self.request.uri)
        body = self.request.body
        if not body:
            body = None
        try:
            if 'Proxy-Connection' in self.request.headers:
                del self.request.headers['Proxy-Connection']
            c.execute('SELECT filter_name FROM filters WHERE filter_type=2')
            urls = c.fetchall()
            for url in urls:
                if url[0] in self.request.path:
                    self.set_status(403)
                    self.finish()
                    return
            fetch_request(self.request.uri, self.handle_response,
                          method=self.request.method, body=body, headers=self.request.headers,
                          follow_redirects=False, allow_nonstandard_methods=True)
        except tornado.httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

    @tornado.web.asynchronous
    def post(self):
        return self.get()
The easy main function:
# coding: utf-8
import sys

import tornado.web
from tornado.options import options

from configuration.application import MyApplication
from proxy.urls import proxy_urls


def make_app():
    """Create my application with my settings and urls."""
    return MyApplication(proxy_urls)


if __name__ == "__main__":
    u"""Main loop."""
    app = make_app()
    port = options.port
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    app.listen(port)
    print 'tornado working on port %s' % port
    tornado.ioloop.IOLoop.current().start()
So I want to change the easy way to the low-level way based on the docs:
import errno
import functools
import tornado.ioloop
import socket


def connection_ready(sock, fd, events):
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        connection.setblocking(0)
        handle_connection(connection, address)


if __name__ == '__main__':
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(0)
    sock.bind(("", port))
    sock.listen(128)
    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    io_loop.start()
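Note that the docs snippet leaves handle_connection undefined. Purely as an illustration (my sketch, not from the docs), it could be a callback-style echo handler built on tornado.iostream:

from tornado.iostream import IOStream

def handle_connection(connection, address):
    stream = IOStream(connection)

    def on_line(line):
        # echo the received line back, then close the stream
        stream.write(b"you said: " + line, stream.close)

    stream.read_until(b"\n", on_line)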
To accomplish this I read a little about the whole network programming using sockets (https://www.tutorialspoint.com/python/python_networking.htm).
The example in the tutorial works well so I tried to connect the tutorial with the example in the Tornado docs:
# coding: utf-8
import errno
import functools
import socket
import sys

import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.process
import tornado.web
from tornado.options import options

from configuration.application import MyApplication


def make_app():
    u"""Create my application with my settings and urls."""
    return MyApplication()


def connection_ready(sock, fd, events):
    u"""Function to handle an incoming connection."""
    proxy_app = make_app()
    server = tornado.httpserver.HTTPServer(proxy_app)
    while True:
        try:
            connection, address = sock.accept()
        except socket.error as e:
            if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
                raise
            return
        print 'Got connection from', address
        # connection.setblocking(False)
        connection.send(server)
        connection.close()


if __name__ == "__main__":
    u"""Main loop."""
    port = options.port
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setblocking(False)
    sock.bind(('', port))
    sock.listen(5)
    io_loop = tornado.ioloop.IOLoop.current()
    callback = functools.partial(connection_ready, sock)
    io_loop.add_handler(sock.fileno(), callback, io_loop.READ)
    print 'Tornado Proxy working on port: %s' % port
    io_loop.start()
But when I try to connect to my proxy (for example to add filters: http://127.0.0.1:8000/admin/filters/ - I have a handler written for this URL), I get specific errors:
ERROR:tornado.application:Exception in callback (3, )
Traceback (most recent call last):
  File "/home/dave/.virtualenvs/teleV1/local/lib/python2.7/site-packages/tornado/ioloop.py", line 887, in start
    handler_func(fd_obj, events)
  File "/home/dave/.virtualenvs/teleV1/local/lib/python2.7/site-packages/tornado/stack_context.py", line 275, in null_wrapper
    return fn(*args, **kwargs)
  File "manage.py", line 35, in connection_ready
    connection.send(server)
TypeError: send() argument 1 must be convertible to a buffer, not HTTPServer
I understand that I cannot send an HTTPServer through the connection (from one end to the other); it must be a buffer.
My first idea was to send the buffer from the handler that handles a URL (for example class ProxyHandler(tornado.web.RequestHandler)), but how can I handle different URLs and different handlers?
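For reference, the usual way to keep Tornado's URL routing while owning the socket yourself is not to accept connections in your own callback at all, but to hand the bound, listening socket to an HTTPServer (a minimal sketch, reusing sock and make_app() from the code above):

import tornado.httpserver
import tornado.ioloop

app = make_app()                             # routing comes from the urls list
server = tornado.httpserver.HTTPServer(app)
server.add_socket(sock)                      # HTTPServer accepts and parses HTTP
tornado.ioloop.IOLoop.current().start()

HTTPServer then dispatches every request to the matching handler (ProxyHandler, the admin handlers, and so on), so there is nothing to send manually.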
After a few approaches I changed my proxy app from a Tornado app to pure Python code that handles responses from remote addresses and does the filter work. I think this is the best and fastest thing I could do.
The objective is to set up n ssh tunnels between satellite servers and a centralized registry database. I have already set up public key authentication between my servers, so they log right in without password prompts. Now what? I've tried Paramiko; it seems decent but gets pretty complicated just to set up a basic tunnel, although code examples would be appreciated. I've tried autossh, and it dies 2 minutes after setting up a working tunnel - bizarre! Hopefully someone can help me with a simple code snippet that I can daemonize and monitor with supervisord or monit.
Here is a cut-down version of the script that Alex pointed you to.
It simply connects to 192.168.0.8 and forwards port 3389 from 192.168.0.6 to localhost.
import select
import SocketServer
import sys
import paramiko


class ForwardServer(SocketServer.ThreadingTCPServer):
    daemon_threads = True
    allow_reuse_address = True


class Handler(SocketServer.BaseRequestHandler):
    def handle(self):
        try:
            chan = self.ssh_transport.open_channel('direct-tcpip',
                                                   (self.chain_host, self.chain_port),
                                                   self.request.getpeername())
        except Exception, e:
            print('Incoming request to %s:%d failed: %s' % (self.chain_host, self.chain_port, repr(e)))
            return
        if chan is None:
            print('Incoming request to %s:%d was rejected by the SSH server.' % (self.chain_host, self.chain_port))
            return
        print('Connected! Tunnel open %r -> %r -> %r' % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port)))
        while True:
            r, w, x = select.select([self.request, chan], [], [])
            if self.request in r:
                data = self.request.recv(1024)
                if len(data) == 0:
                    break
                chan.send(data)
            if chan in r:
                data = chan.recv(1024)
                if len(data) == 0:
                    break
                self.request.send(data)
        chan.close()
        self.request.close()
        print('Tunnel closed from %r' % (self.request.getpeername(),))


def main():
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.WarningPolicy())
    client.connect("192.168.0.8")

    class SubHandler(Handler):
        chain_host = "192.168.0.6"
        chain_port = 3389
        ssh_transport = client.get_transport()

    try:
        ForwardServer(('', 3389), SubHandler).serve_forever()
    except KeyboardInterrupt:
        sys.exit(0)


if __name__ == '__main__':
    main()
Is there a special reason not to just do it with ssh, the usual
(ssh -L <localport>:localhost:<remoteport> <remotehost>)
minuet? Anyway, this script is an example of local port forwarding (AKA tunneling).
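If the plain-ssh minuet does the job, a supervisord stanza along these lines keeps the tunnel alive and restarts it when it dies (the program name and endpoints are illustrative; -N stops ssh from running a remote command):

[program:ssh-tunnel]
command=ssh -N -L 3389:192.168.0.6:3389 192.168.0.8
autorestart=true
startsecs=5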