asyncore Python HTTP client hangs

I'm trying to write a simple asynchronous HTTP client with asyncore (full code at the end of the question).
The code works fine, and the output arrives reasonably fast:
www.gmail.com : recv http code: 301
www.yandex.ru : recv http code: 200
www.python.org : recv http code: 200
www.google.ru : recv http code: 200
www.gravatar.com : recv http code: 302
www.com.com : recv http code: 302
www.yahoo.com : recv http code: 302
www.bom.com : recv http code: 301
But when I uncomment the line with a non-existent host:
#c = AsyncHTTP('http://www.no-such-host.ru') #!this line breaks execution!
execution breaks: the code hangs for some time, outputs only part of the data, and then hangs again without printing the remaining results:
connection error: [Errno -5] No address associated with hostname
www.gmail.com : recv http code: 301
www.yandex.ru : recv http code: 200
www.yahoo.com : recv http code: 302
www.com.com : recv http code: 302
www.bom.com : recv http code: 301
www.gravatar.com : recv http code: 302
...
Some hosts are missing from the output here, and there is a long delay at startup.
Why does this happen, and how can I fix it?
# coding=utf-8
import asyncore
import string, socket
import StringIO
import mimetools, urlparse

class AsyncHTTP(asyncore.dispatcher):
    # HTTP requestor

    def __init__(self, uri):
        asyncore.dispatcher.__init__(self)

        self.uri = uri

        # turn the uri into a valid request
        scheme, host, path, params, query, fragment = urlparse.urlparse(uri)
        assert scheme == "http", "only supports HTTP requests"
        try:
            host, port = string.split(host, ":", 1)
            port = int(port)
        except (TypeError, ValueError):
            port = 80 # default port
        if not path:
            path = "/"
        if params:
            path = path + ";" + params
        if query:
            path = path + "?" + query

        self.request = "GET %s HTTP/1.0\r\nHost: %s\r\n\r\n" % (path, host)

        self.host = host
        self.port = port

        self.status = None
        self.header = None
        self.http_code = None

        self.data = ""

        # get things going!
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        #self.connect((host, port))
        #return
        try:
            self.connect((host, port))
        except Exception,e:
            self.close()
            self.handle_connect_expt(e)

    def handle_connect(self):
        self.send(self.request)

    def handle_expt(self):
        print "handle_expt error!"
        self.close()

    def handle_error(self):
        print "handle_error error!"
        self.close()

    def handle_connect_expt(self,expt):
        print "connection error:",expt

    def handle_code(self):
        print self.host," : ","recv http code: ",self.http_code

    def handle_read(self):
        data = self.recv(2048)
        #print data
        if not self.header:
            self.data = self.data + data
            try:
                i = string.index(self.data, "\r\n\r\n")
            except ValueError:
                return # continue
            else:
                # parse header
                fp = StringIO.StringIO(self.data[:i+4])
                # status line is "HTTP/version status message"
                status = fp.readline()
                self.status = string.split(status, " ", 2)
                self.http_code = self.status[1]
                self.handle_code()
                # followed by a rfc822-style message header
                self.header = mimetools.Message(fp)
                # followed by a newline, and the payload (if any)
                data = self.data[i+4:]
                self.data = ""
                #header recived
                #self.close()

    def handle_close(self):
        self.close()

c = AsyncHTTP('http://www.python.org')
c = AsyncHTTP('http://www.yandex.ru')
c = AsyncHTTP('http://www.google.ru')
c = AsyncHTTP('http://www.gmail.com')
c = AsyncHTTP('http://www.gravatar.com')
c = AsyncHTTP('http://www.yahoo.com')
c = AsyncHTTP('http://www.com.com')
c = AsyncHTTP('http://www.bom.com')
#c = AsyncHTTP('http://www.no-such-host.ru') #!this line breaks execution!

asyncore.loop()
PS: my system is Ubuntu 11.10 with Python 2.7.2.

You invoke a blocking name resolution when you call self.connect((host, port)). Combined with your local DNS configuration, that blocking lookup is why your program has a long delay at startup.
As an alternative to asyncore (and to working out non-blocking name resolution yourself), you might consider Twisted. Twisted's TCP connection-setup API (mainly reactor.connectTCP, or one of the APIs built on top of it) does not block, so even a naive use of it remains properly asynchronous.
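For illustration, here is a minimal sketch of that approach (my own code, untested; the class names are invented). reactor.connectTCP() returns immediately, and a failed DNS lookup is reported through clientConnectionFailed() instead of stalling the loop:

from twisted.internet import reactor, protocol

class HeadlineGrabber(protocol.Protocol):
    def connectionMade(self):
        # send a minimal request once the connection is up
        self.transport.write("GET / HTTP/1.0\r\nHost: %s\r\n\r\n" % self.factory.host)

    def dataReceived(self, data):
        # print just the status line of the first chunk
        print self.factory.host, ":", data.split("\r\n", 1)[0]
        self.transport.loseConnection()

class HeadlineGrabberFactory(protocol.ClientFactory):
    protocol = HeadlineGrabber

    def __init__(self, host):
        self.host = host

    def clientConnectionFailed(self, connector, reason):
        # DNS failures and refused connections end up here, asynchronously
        print "connection error:", self.host, reason.getErrorMessage()

for host in ("www.python.org", "www.no-such-host.ru"):
    reactor.connectTCP(host, 80, HeadlineGrabberFactory(host))
reactor.callLater(10, reactor.stop)  # stop after 10 seconds for this demo
reactor.run()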

Related

Python Socket with Multiprocessing and Pickle issue

I am having a pickle issue with SSL client-to-server communication when using multiprocessing.
I have an SSL client that connects to the server:
SSLClient.py
import socket
import struct
import ssl
import copyreg
from os import path
import socket
import os
from pathlib import Path
from loguru import logger as log
from utils.misc import read_py_config
from datetime import datetime
from cryptography.fernet import Fernet
fernetkey = '1234567'
fernet = Fernet(fernetkey)
class SSLclient:
license = None
licenseencrypted = None
uuid = None
def __init__(self):
try:
path = Path(__file__).parent / "/lcl" #get unique license key
with path.open() as file:
self.licenseencrypted = file.read().rstrip()
self.license = fernet.decrypt(str.encode(self.licenseencrypted)).decode('ascii')
self.host, self.port = "127.0.0.1", 65416
except Exception as e:
log.error("Could not decode license key")
def connect(self):
self.client_crt = os.path.join(os.path.dirname(__file__), 'key/c-.crt')
self.client_key = os.path.join(os.path.dirname(__file__), 'key/ck-.key')
self.server_crt = os.path.join(os.path.dirname(__file__), 'key/s-.crt')
self.sni_hostname = "example.com"
self._context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH, cafile=self.server_crt)
self._context.load_cert_chain(certfile=self.client_crt, keyfile=self.client_key)
self._sock = None
self._ssock = None
## ---- Client Communication Setup ----
HOST = self.host # The server's hostname or IP address
PORT = self.port # The port used by the server
try:
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._ssock = self._context.wrap_socket(self._sock, server_side=False, server_hostname=self.sni_hostname)
self._ssock.connect((HOST, PORT))
log.info("Socket successfully created")
except socket.error as err:
log.error("socket creation failed with error %s" %(err))
return False
log.info('Waiting for connection')
return True
def closesockconnection(self):
self._ssock.close()
def checkvalidsite(self):
#check if site is active
jsonobj = {
"uuid": self.license,
"ipaddress" : self.external_ip,
"req": "checkvalidsite"
}
send_msg(self._ssock, json.dumps(jsonobj).encode('utf-8'))
active = False
while True:
Response = recv_msg(self._ssock)
if not Response:
return False
if Response is not None:
Response = Response.decode('utf-8')
Response = json.loads(Response)
req = Response['req']
if req == "checkvalidsite":
active = Response['active']
self.info1 = Response['info1']
self.info2 = Response['info2']
return active
# ---- To Avoid Message Boundary Problem on top of TCP protocol ----
def send_msg(sock: socket, msg): # ---- Use this to send
try:
# Prefix each message with a 4-byte length (network byte order)
msg = struct.pack('>I', len(msg)) + msg
sock.sendall(msg)
except Exception as e:
log.error("Sending message " + str(e))
def recv_msg(sock: socket): # ---- Use this to receive
try:
# Read message length and unpack it into an integer
raw_msglen = recvall(sock, 4)
if not raw_msglen:
return None
msglen = struct.unpack('>I', raw_msglen)[0]
# Read the message data
return recvall(sock, msglen)
except Exception as e:
log.error("Receiving message " + str(e))
return False
def recvall(sock: socket, n: int):
try:
# Helper function to receive n bytes or return None if EOF is hit
data = bytearray()
while len(data) < n:
packet = sock.recv(n - len(data))
if not packet:
return None
data.extend(packet)
return data
except Exception as e:
log.error("Receiving all message " + str(e))
raise Exception(e)
I then have a multithreaded server that accepts the connection and communicates with the client.
Server.py
import socket
import os
from socket import AF_INET, SOCK_STREAM, SO_REUSEADDR, SOL_SOCKET, SHUT_RDWR
import ssl
from os import path
from _thread import *
import struct # Here to convert Python data types into byte streams (in string) and back
import traceback
from threading import Thread
import json
import mysql.connector as mysql
import time
from loguru import logger as log
import threading
from cryptography.fernet import Fernet
fernetkey = '12213423423'
fernet = Fernet(fernetkey)
threadLocal = threading.local()
# ---- To Avoid Message Boundary Problem on top of TCP protocol ----
def send_msg(sock: socket, msg): # ---- Use this to send
try:
# Prefix each message with a 4-byte length (network byte order)
msg = struct.pack('>I', len(msg)) + msg
sock.sendall(msg)
except Exception as e:
log.error("Error send_msg " + str(e))
def recv_msg(sock: socket): # ---- Use this to receive
try:
# Read message length and unpack it into an integer
raw_msglen = recvall(sock, 4)
if not raw_msglen:
return None
msglen = struct.unpack('>I', raw_msglen)[0]
# Read the message data
return recvall(sock, msglen)
except Exception as e:
log.error("Receiving message " + str(e))
return False
def recvall(sock: socket, n: int):
try:
# Helper function to receive n bytes or return None if EOF is hit
data = bytearray()
while len(data) < n:
packet = sock.recv(n - len(data))
if not packet:
return None
data.extend(packet)
return data
except Exception as e:
log.error("Receiving all message " + str(e))
raise Exception(e)
# ---- Server Communication Setup
class Newclient:
def __init__(self):
self.addr = None
self.conn = None
self.uuid = None
class Server:
def __init__(self):
self.HOST = '127.0.0.1' # Standard loopback interface address (localhost)
self.PORT = 65416 # Port to listen on (non-privileged ports are > 1023)
self.ThreadCount = 0
self.threads = []
self.sock = None
def checkvalidsite(self, uuid, ipaddress, cursor, db_connection):
sql = "select * from myexample where uuid ='" + uuid + "'"
cursor.execute(sql)
results = cursor.fetchall()
active = False
for row in results:
active = row["active"]
siteid = row["info1"]
clientid = row["info2"]
return active, siteid, clientid
def Serverthreaded_client(self, newclient):
conn = newclient.conn
try:
while True:
# data = conn.recv(2048) # receive message from client
data = recv_msg(conn)
uuid = None
ipaddress = None
req = None
if not data :
return False
if data is not None:
data = json.loads(data.decode('utf-8'))
uuid = data['uuid']
req = data['req']
if uuid is not None and req is not None:
newclient.uuid = uuid
cursor, db_connection = setupDBConnection()
if req == "checkvalidsite":
ipaddress = data['ipaddress']
active, info1, info2 = self.checkvalidsite(uuid, ipaddress, cursor, db_connection)
data = {
"req": "checkvalidsite",
"uuid": uuid,
"active": active,
"info1" : info1,
"info2" : info2
}
if not data:
break
# conn.sendall(str.encode(reply))
send_msg(conn, json.dumps(data).encode('utf-8'))
log.info("Server response sent")
#conn.close()
closeDBConnection(cursor, db_connection)
else:
#send no message
a=1
except Exception as e:
log.warning(str(e))
log.warning(traceback.format_exc())
finally:
log.info("UUID Closing connection")
conn.shutdown(socket.SHUT_RDWR)
conn.close()
#conn.close()
def Serverconnect(self):
try: # create socket
self.server_cert = path.join(path.dirname(__file__), "keys/server.crt")
self.server_key = path.join(path.dirname(__file__), "keys/server.key")
self.client_cert = path.join(path.dirname(__file__), "keys/client.crt")
self._context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
self._context.verify_mode = ssl.CERT_REQUIRED
###self._context.load_cert_chain(self.server_cert, self.server_key)
self._context.load_cert_chain(certfile=self.server_cert, keyfile=self.server_key)
###self._context.load_verify_locations(self.client_cert)
self._context.load_verify_locations(cafile=self.client_cert)
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) ###<-- socket.socket() ???
log.info("Socket successfully created")
except socket.error as err:
log.warning("socket creation failed with error %s" %(err))
try: # bind socket to an address
self.sock.bind((self.HOST, self.PORT))
except socket.error as e:
log.warning(str(e))
log.info('Waiting for a Connection..')
self.sock.listen(3)
def Serverwaitforconnection(self):
while True:
Client, addr = self.sock.accept()
conn = self._context.wrap_socket(Client, server_side=True)
log.info('Connected to: ' + addr[0] + ':' + str(addr[1]))
log.info("SSL established. Peer: {}".format(conn.getpeercert()))
newclient = Newclient()
newclient.addr = addr
newclient.conn = conn
thread = Thread(target=self.Serverthreaded_client, args =(newclient, ))
thread.start()
self.threads.append(newclient)
self.ThreadCount += 1
log.info('Thread Number: ' + str(self.ThreadCount))
def startserver():
server = Server()
server.Serverconnect()
server.Serverwaitforconnection()
serverthread = Thread(target=startserver)
serverthread.daemon = False
serverthread.start()
The server accepts the connection over SSL, then waits for a message. It inspects the message's command, executes the corresponding function, and returns data from the database as the response (checkvalidsite in this example).
All good so far (as far as I can tell).
I also have a main program that creates the SSLclient and connects.
Main program
remoteclient = SSLclient()
successfulconnection = remoteclient.connect()
siteactive = remoteclient.checkvalidsite()
So far all is well. However, the main program also reads in frames from multiple cameras (20 cameras, for example). To handle the camera load I use multiprocessing: each process is assigned one or two cameras, depending on the number of cores in the machine.
(The code below has been stripped down to simplify reading.)
x = range(3, 6)
for n in x:
    processes = multiprocessing.Process(target=activateMainProgram, args=(queue1, queue2, queue3, queue4, remoteclient, ))
    processes.start()
When I try to pass remoteclient (the SSLclient) as an argument, I get the error:
cannot pickle 'SSLContext' object
I then (after reading online) added this code to the SSLClient:
def save_sslcontext(obj):
    return obj.__class__, (obj.protocol,)

copyreg.pickle(ssl.SSLContext, save_sslcontext)
but then I get the error:
cannot pickle 'SSLContext' object
There are two options I experimented with:
1) Getting the pickling to work (which would be ideal), since each process needs to communicate with the server and therefore needs to call functions from the SSLClient file. But I cannot get past the pickle issue and can't find a solution online.
2) Placing the remoteclient = SSLclient() code outside the main function, hoping it would run first and then be accessible to the processes. This worked; however, what I learnt is that when a process is spawned it re-imports the entire file (processes do not share memory). So if I have 10 processes, each with 2 cameras, I end up with 10 connections to the server (one per process) and 10 threads on the server, one per connection. It works, but it seems significantly inefficient.
Being new to and self-taught in Python, I am not sure how to resolve this, and after 3 days I figured I would reach out for assistance. If I could get help with the pickle issue in the SSLClient, I would have one connection shared by all processes and a single server thread to deal with them.
P.S. I have cobbled all of this code together myself and am new to Python, so if you see that I am going down a totally wrong, non-professional track, feel free to yell.
Much appreciated.
Update:
If I change the SSLClient code to:
def save_sslcontext(obj):
    return obj.__class__, (obj.protocol,)

copyreg.pickle(ssl.SSLContext, save_sslcontext)
Then I get the error:
[WinError 10038] An operation was attempted on something that is not a socket
I am not sure which is better...
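One pattern worth considering (my own sketch, not from the post, and untested): instead of pickling the SSLclient, keep the single SSL connection in one dedicated process and let the camera processes submit requests through a multiprocessing.Queue, so nothing SSL-related ever crosses a process boundary. The import of SSLclient below is an assumption based on the file name in the post.

from multiprocessing import Process, Queue
from SSLClient import SSLclient   # assumed import path; the post's client class

def connection_owner(requests, responses):
    # The only process that ever touches the SSL objects.
    remoteclient = SSLclient()
    remoteclient.connect()
    while True:
        req = requests.get()
        if req is None:                  # sentinel: shut down
            break
        if req == "checkvalidsite":
            responses.put(remoteclient.checkvalidsite())

def camera_worker(requests, responses):
    # Workers only exchange plain picklable values with the owner process.
    # NOTE: with several workers you would want one reply queue per worker
    # (or request IDs) so responses cannot get mixed up.
    requests.put("checkvalidsite")
    print(responses.get())

if __name__ == "__main__":
    requests, responses = Queue(), Queue()
    owner = Process(target=connection_owner, args=(requests, responses))
    owner.start()
    worker = Process(target=camera_worker, args=(requests, responses))
    worker.start()
    worker.join()
    requests.put(None)
    owner.join()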

Python GET request with sockets - 400 Bad request

I wrote this code to make a GET request manually, using only Python sockets. It worked perfectly fine back in 2016 when I wrote it, but now I need it again and I keep getting a 400 Bad Request. I tried switching Python versions but the result is the same. I have been looking through Stack Overflow questions asking more or less the same thing, but I just can't get it to work. I would appreciate any help. Here is my code; I removed all the I/O and only posted the networking part.
URL_PATTERN = re.compile("^(.*://)?([A-Za-z0-9\-\.]+)(:[0-9]+)?(.*)$")
HEADER_END = re.compile("\r\n\r\n")
URL_DATA = re.match(URL_PATTERN, INPUT_URL)
PROTOCOL = URL_DATA.groups()[0][:-3]
HOSTNAME = URL_DATA.groups()[1]
PATHNAME = URL_DATA.groups()[3] if URL_DATA.groups()[3] != "" else "/"
PORT = 80 if PROTOCOL == "http" else 443
BUFFER_SIZE = 4096
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOSTNAME, PORT))
s.send("GET " + PATHNAME + " HTTP/1.1\r\nHost: " + HOSTNAME + "\r\nConnection: close\r\n\r\n")
resp = s.recv(BUFFER_SIZE)
HEADER_INDEX = re.search(HEADER_END, resp).start()
HTTP_RESPONSE_HEADER = resp[:HEADER_INDEX]
s.close()
When I run my program on the URL https://doc.rust-lang.org/book/2018-edition/foreword.html
The variables in my program have these values:
PORT: 443
PROTOCOL: https
HOSTNAME: doc.rust-lang.org
PATHNAME: /book/2018-edition/foreword.html
And then I get the 400 bad request code back. I don't understand what I'm doing wrong and would appreciate any help I can get.
I believe it's all about SSL. For reference, you can check this question: Python socket server handle HTTPS request.
I suggest you use:
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
and create a secure socket:
s_sock = context.wrap_socket(s, server_hostname=HOSTNAME)
s_sock.connect((HOSTNAME, PORT))
Additionally you might need to encode the message.
In the end, your code could look like this:
import re
import socket
import ssl
URL_PATTERN = re.compile("^(.*://)?([A-Za-z0-9\-\.]+)(:[0-9]+)?(.*)$")
HEADER_END = re.compile("\r\n\r\n")
INPUT_URL = "https://doc.rust-lang.org/book/2018-edition/foreword.html"
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
URL_DATA = re.match(URL_PATTERN, INPUT_URL)
PROTOCOL = URL_DATA.groups()[0][:-3]
HOSTNAME = URL_DATA.groups()[1]
PATHNAME = URL_DATA.groups()[3] if URL_DATA.groups()[3] != "" else "/"
PORT = 80 if PROTOCOL == "http" else 443
BUFFER_SIZE = 4096
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s_sock = context.wrap_socket(s, server_hostname=HOSTNAME)
s_sock.connect((HOSTNAME, PORT))
message = "GET " + PATHNAME + " HTTP/1.1\r\nHost: " + HOSTNAME + "\r\nConnection: close\r\n\r\n"
s_sock.send(message.encode('utf-8'))
resp = bytearray()
while True:
    part = s_sock.recv(BUFFER_SIZE)
    if not part:
        break
    resp += part
s_sock.close()
resp_string = str(resp, 'utf-8')
HEADER_INDEX = re.search(HEADER_END, resp_string).start()
HTTP_RESPONSE_HEADER = resp_string[:HEADER_INDEX]

SocketServer rfile.read() very very slow

I am building an HTTPS proxy that can forward all SSL traffic; this is called transparent tunneling. Anyway, I have a problem with Python's SocketServer. When I call rfile.read(), it takes a very long time to return. Even though I use select to make sure the I/O is ready, it still takes a very long time, usually around 30 seconds, and the client's socket is closed because of the timeout. I cannot make the socket non-blocking, because I need to read the data first and then forward everything I just read to the remote server; the call has to return with the data first.
My code is following:
import SocketServer
import BaseHTTPServer
import socket
import threading
import httplib
import time
import os
import urllib
import ssl
import copy
from history import *
from http import *
from https import *
from logger import Logger
DEFAULT_CERT_FILE = "./cert/ncerts/proxpy.pem"
proxystate = None
class ProxyHandler(SocketServer.StreamRequestHandler):
def __init__(self, request, client_address, server):
self.peer = False
self.keepalive = False
self.target = None
self.tunnel_mode = False
self.forwardSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Just for debugging
self.counter = 0
self._host = None
self._port = 0
SocketServer.StreamRequestHandler.__init__(self, request, client_address, server)
def createConnection(self, host, port):
global proxystate
if self.target and self._host == host:
return self.target
try:
# If a SSL tunnel was established, create a HTTPS connection to the server
if self.peer:
conn = httplib.HTTPSConnection(host, port)
else:
# HTTP Connection
conn = httplib.HTTPConnection(host, port)
except HTTPException as e:
proxystate.log.debug(e.__str__())
# If we need a persistent connection, add the socket to the dictionary
if self.keepalive:
self.target = conn
self._host = host
self._port = port
return conn
def sendResponse(self, res):
self.wfile.write(res)
def finish(self):
if not self.keepalive:
if self.target:
self.target.close()
return SocketServer.StreamRequestHandler.finish(self)
# Otherwise keep-alive is True, then go on and listen on the socket
return self.handle()
def handle(self):
global proxystate
if self.keepalive:
if self.peer:
HTTPSUtil.wait_read(self.request)
else:
HTTPUtil.wait_read(self.request)
# Just debugging
if self.counter > 0:
proxystate.log.debug(str(self.client_address) + ' socket reused: ' + str(self.counter))
self.counter += 1
if self.tunnel_mode:
HTTPUtil.wait_read(self.request)
print "++++++++++++++++++++++++++"
req=self.rfile.read(4096) #This is the line take very lone time!
print "----------------------------------"
data = self.doFORWARD(req)
print "**************************************"
if len(data)!=0:
self.sendResponse(data)
return
try:
req = HTTPRequest.build(self.rfile)
except Exception as e:
proxystate.log.debug(e.__str__() + ": Error on reading request message")
return
if req is None:
return
# Delegate request to plugin
req = ProxyPlugin.delegate(ProxyPlugin.EVENT_MANGLE_REQUEST, req.clone())
# if you need a persistent connection set the flag in order to save the status
if req.isKeepAlive():
self.keepalive = True
else:
self.keepalive = False
# Target server host and port
host, port = ProxyState.getTargetHost(req)
if req.getMethod() == HTTPRequest.METHOD_GET:
res = self.doGET(host, port, req)
self.sendResponse(res)
elif req.getMethod() == HTTPRequest.METHOD_POST:
res = self.doPOST(host, port, req)
self.sendResponse(res)
elif req.getMethod() == HTTPRequest.METHOD_CONNECT:
res = self.doCONNECT(host, port, req)
def _request(self, conn, method, path, params, headers):
global proxystate
conn.putrequest(method, path, skip_host = True, skip_accept_encoding = True)
for header,v in headers.iteritems():
# auto-fix content-length
if header.lower() == 'content-length':
conn.putheader(header, str(len(params)))
else:
for i in v:
conn.putheader(header, i)
conn.endheaders()
if len(params) > 0:
conn.send(params)
def doRequest(self, conn, method, path, params, headers):
global proxystate
try:
self._request(conn, method, path, params, headers)
return True
except IOError as e:
proxystate.log.error("%s: %s:%d" % (e.__str__(), conn.host, conn.port))
return False
def doCONNECT(self, host, port, req):
#global proxystate
self.tunnel_mode = True
self.tunnel_host = host
self.tunnel_port = port
#socket_req = self.request
#certfilename = DEFAULT_CERT_FILE
#socket_ssl = ssl.wrap_socket(socket_req, server_side = True, certfile = certfilename,
#ssl_version = ssl.PROTOCOL_SSLv23, do_handshake_on_connect = False)
HTTPSRequest.sendAck(self.request)
#print "Send ack to the peer %s on port %d for establishing SSL tunnel" % (host, port)
print "into forward mode: %s : %s" % (host, port)
'''
host, port = socket_req.getpeername()
proxystate.log.debug("Send ack to the peer %s on port %d for establishing SSL tunnel" % (host, port))
while True:
try:
socket_ssl.do_handshake()
break
except (ssl.SSLError, IOError) as e:
# proxystate.log.error(e.__str__())
print e.__str__()
return
# Switch to new socket
self.peer = True
self.request = socket_ssl
'''
self.setup()
#self.handle()
def doFORWARD(self, data):
host, port = self.request.getpeername()
#print "client_host", host
#print "client_port", port
try:
print "%s:%s===>data read, now sending..."%(host,port)
self.forwardSocket.connect((self.tunnel_host,self.tunnel_port))
self.forwardSocket.sendall(data)
print data
print "%s:%s===>sent %d bytes to server"%(host,port,len(data))
select.select([self.forwardSocket], [], [])
chunk = self.forwardSocket.recv(4096)
print chunk
print "%s:%s===>receive %d bytes from server"%(host,port,len(chunk))
return chunk
except socket.error as e:
print e.__str__()
return ''
class ThreadedHTTPProxyServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
allow_reuse_address = True
class ProxyServer():
def __init__(self, init_state):
global proxystate
proxystate = init_state
self.proxyServer_port = proxystate.listenport
self.proxyServer_host = proxystate.listenaddr
def startProxyServer(self):
global proxystate
self.proxyServer = ThreadedHTTPProxyServer((self.proxyServer_host, self.proxyServer_port), ProxyHandler)
# Start a thread with the server (that thread will then spawn a worker
# thread for each request)
server_thread = threading.Thread(target = self.proxyServer.serve_forever)
# Exit the server thread when the main thread terminates
server_thread.setDaemon(True)
proxystate.log.info("Server %s listening on port %d" % (self.proxyServer_host, self.proxyServer_port))
server_thread.start()
while True:
time.sleep(0.1)
This is driving me crazy, because when I forward normal HTTP traffic, read() returns immediately with the data. But every time the client sends SSL (binary) traffic, read() takes a very long time to return! It seems that read() only returns once the remote client closes the request socket. Does anyone know how to make read() fast? I tried recv() and readline(); both are as slow as read() when handling binary traffic.
I had a similar problem on my Python server while trying to send an image to it via HTTP.
rfile.read took 1700 ms for an 800 KB image and 4500 ms for a 4.5 MB image, and I was also going through a VPN with limited network speed.
It appears that rfile.read actually downloads/uploads the file. By improving the connection/network speed between client and server, I reduced the 1700 ms read to less than 50 ms.
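A possible workaround (my own sketch, not part of the answer above): in Python 2, rfile is a buffered file object, and rfile.read(4096) keeps calling recv() until it has collected the full 4096 bytes or the peer closes the connection, which matches the stall described in the question. Reading from the underlying request socket instead returns as soon as any data is available:

import select

def read_available(sock, bufsize=4096, timeout=30):
    # Wait until the socket is readable, then return whatever has arrived.
    ready, _, _ = select.select([sock], [], [], timeout)
    if not ready:
        return ''
    return sock.recv(bufsize)

# inside ProxyHandler.handle(), instead of self.rfile.read(4096):
#     req = read_available(self.request)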

WebSocket server in Python: 'module' object has no attribute 'AF_INET' [duplicate]

This question already has answers here: Importing installed package from script with the same name raises "AttributeError: module has no attribute" or an ImportError or NameError (2 answers). Closed 7 months ago.
I am trying to run this simple Python WebSocket server, with a couple of very minor changes. I am running Python 2.4.3 because I cannot use a newer version, but I'm not sure how much that matters.
Here is the error I'm getting:
Traceback (most recent call last):
File "socket.py", line 258, in ?
server = WebSocketServer("localhost", 8000, WebSocket)
File "socket.py", line 205, in __init__
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
AttributeError: 'module' object has no attribute 'AF_INET'
And here is my code:
import time
import struct
import socket
import base64
import sys
from select import select
import re
import logging
from threading import Thread
import signal
# Simple WebSocket server implementation. Handshakes with the client then echos back everything
# that is received. Has no dependencies (doesn't require Twisted etc) and works with the RFC6455
# version of WebSockets. Tested with FireFox 16, though should work with the latest versions of
# IE, Chrome etc.
#
# rich20b#gmail.com
# Adapted from https://gist.github.com/512987 with various functions stolen from other sites, see
# below for full details.
# Constants
MAGICGUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
TEXT = 0x01
BINARY = 0x02
# WebSocket implementation
class WebSocket(object):
handshake = (
"HTTP/1.1 101 Web Socket Protocol Handshake\r\n"
"Upgrade: WebSocket\r\n"
"Connection: Upgrade\r\n"
"Sec-WebSocket-Accept: %(acceptstring)s\r\n"
"Server: TestTest\r\n"
"Access-Control-Allow-Origin: http://localhost\r\n"
"Access-Control-Allow-Credentials: true\r\n"
"\r\n"
)
# Constructor
def __init__(self, client, server):
self.client = client
self.server = server
self.handshaken = False
self.header = ""
self.data = ""
# Serve this client
def feed(self, data):
# If we haven't handshaken yet
if not self.handshaken:
logging.debug("No handshake yet")
self.header += data
if self.header.find('\r\n\r\n') != -1:
parts = self.header.split('\r\n\r\n', 1)
self.header = parts[0]
if self.dohandshake(self.header, parts[1]):
logging.info("Handshake successful")
self.handshaken = True
# We have handshaken
else:
logging.debug("Handshake is complete")
# Decode the data that we received according to section 5 of RFC6455
recv = self.decodeCharArray(data)
# Send our reply
self.sendMessage(''.join(recv).strip());
# Stolen from http://www.cs.rpi.edu/~goldsd/docs/spring2012-csci4220/websocket-py.txt
def sendMessage(self, s):
"""
Encode and send a WebSocket message
"""
# Empty message to start with
message = ""
# always send an entire message as one frame (fin)
b1 = 0x80
# in Python 2, strs are bytes and unicodes are strings
if type(s) == unicode:
b1 |= TEXT
payload = s.encode("UTF8")
elif type(s) == str:
b1 |= TEXT
payload = s
# Append 'FIN' flag to the message
message += chr(b1)
# never mask frames from the server to the client
b2 = 0
# How long is our payload?
length = len(payload)
if length < 126:
b2 |= length
message += chr(b2)
elif length < (2 ** 16) - 1:
b2 |= 126
message += chr(b2)
l = struct.pack(">H", length)
message += l
else:
l = struct.pack(">Q", length)
b2 |= 127
message += chr(b2)
message += l
# Append payload to message
message += payload
# Send to the client
self.client.send(str(message))
# Stolen from http://stackoverflow.com/questions/8125507/how-can-i-send-and-receive-websocket-messages-on-the-server-side
def decodeCharArray(self, stringStreamIn):
# Turn string values into opererable numeric byte values
byteArray = [ord(character) for character in stringStreamIn]
datalength = byteArray[1] & 127
indexFirstMask = 2
if datalength == 126:
indexFirstMask = 4
elif datalength == 127:
indexFirstMask = 10
# Extract masks
masks = [m for m in byteArray[indexFirstMask : indexFirstMask+4]]
indexFirstDataByte = indexFirstMask + 4
# List of decoded characters
decodedChars = []
i = indexFirstDataByte
j = 0
# Loop through each byte that was received
while i < len(byteArray):
# Unmask this byte and add to the decoded buffer
decodedChars.append( chr(byteArray[i] ^ masks[j % 4]) )
i += 1
j += 1
# Return the decoded string
return decodedChars
# Handshake with this client
def dohandshake(self, header, key=None):
logging.debug("Begin handshake: %s" % header)
# Get the handshake template
handshake = self.handshake
# Step through each header
for line in header.split('\r\n')[1:]:
name, value = line.split(': ', 1)
# If this is the key
if name.lower() == "sec-websocket-key":
# Append the standard GUID and get digest
combined = value + MAGICGUID
response = base64.b64encode(combined.digest())
# Replace the placeholder in the handshake response
handshake = handshake % { 'acceptstring' : response }
logging.debug("Sending handshake %s" % handshake)
self.client.send(handshake)
return True
def onmessage(self, data):
#logging.info("Got message: %s" % data)
self.send(data)
def send(self, data):
logging.info("Sent message: %s" % data)
self.client.send("\x00%s\xff" % data)
def close(self):
self.client.close()
# WebSocket server implementation
class WebSocketServer(object):
# Constructor
def __init__(self, bind, port, cls):
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind((bind, port))
self.bind = bind
self.port = port
self.cls = cls
self.connections = {}
self.listeners = [self.socket]
# Listen for requests
def listen(self, backlog=5):
self.socket.listen(backlog)
logging.info("Listening on %s" % self.port)
# Keep serving requests
self.running = True
while self.running:
# Find clients that need servicing
rList, wList, xList = select(self.listeners, [], self.listeners, 1)
for ready in rList:
if ready == self.socket:
logging.debug("New client connection")
client, address = self.socket.accept()
fileno = client.fileno()
self.listeners.append(fileno)
self.connections[fileno] = self.cls(client, self)
else:
logging.debug("Client ready for reading %s" % ready)
client = self.connections[ready].client
data = client.recv(4096)
fileno = client.fileno()
if data:
self.connections[fileno].feed(data)
else:
logging.debug("Closing client %s" % ready)
self.connections[fileno].close()
del self.connections[fileno]
self.listeners.remove(ready)
# Step though and delete broken connections
for failed in xList:
if failed == self.socket:
logging.error("Socket broke")
for fileno, conn in self.connections:
conn.close()
self.running = False
# Entry point
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s")
server = WebSocketServer("localhost", 8000, WebSocket)
server_thread = Thread(target=server.listen, args=[5])
server_thread.start()
# Add SIGINT handler for killing the threads
def signal_handler(signal, frame):
logging.info("Caught Ctrl+C, shutting down...")
server.running = False
sys.exit()
signal.signal(signal.SIGINT, signal_handler)
while True:
time.sleep(100)
It appears that you've named your own file socket.py, so when you import socket you're not getting the system library; you're just re-importing the file you're currently in, which has no AF_INET symbol. Try renaming your file to something like mysocket.py.
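A quick way to confirm the shadowing (my suggestion, not part of the original answer): print which file the imported module actually comes from; if it points at your own script rather than the standard library, that is the problem.

import socket
print socket.__file__   # should point into the standard library, not your project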
Even after changing the file name, you may get the same error when running Python from the terminal, because of the previously compiled bytecode. Remove it with:
rm -rf socket.pyc
I had the same problem and was stuck here for hours; I tried reinstalling a million times, but eventually found the solution.
1) Make sure the file name is not socket.py.
2) Change the directory; it will not work in the home directory due to permission issues.
If you have by any chance saved the file as socket.py, do not just copy the same file or rename it to something else; the problem will persist. What I advise is to open a new folder in a different directory, write a simple socket program that uses AF_INET, and try to run it. It should work.
The issue can be that you have a file or cached bytecode named socket.py or socket.pyc:
rm -rf socket.py
rm -rf socket.pyc
Hopefully this will resolve your import issue. Good luck!
Go to the current working directory and remove the files named 'socket.py' and 'socket.pyc'.

Python Proxy isn't handling POST requests

I'm creating a simple Python proxy that logs all requests sent through it. For now, everything works except POST requests (for example, I can't vote on Stack Overflow questions through it).
I haven't defined an explicit POST handler; I reuse the GET handler instead.
Could anyone tell me where the problem is?
This is my code:
#!/usr/local/bin/python2
import BaseHTTPServer, select, socket, SocketServer, urlparse
class ProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def __init__(self, request, client_address, server):
self.request = request
self.client_address = client_address
self.server = server
self.setup()
self.socket = socket
self.server_version = 'TestProxy 1.0'
self.request_buffer_size = 0
try:
self.handle()
finally:
self.finish()
def connect_to(self, location, destination_socket):
i = location.find(':')
if i >= 0:
host = location[:i]
port = int(location[i + 1:])
else:
host = location
port = 80
print 'Connecting to {0}:{1}'.format(host, port)
try:
self.socket.connect((host, port))
except socket.error, arg:
try:
msg = arg[1]
except:
msg = arg
self.send_error(404, msg)
return 0
return 1
def do_CONNECT(self):
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
if self.connect_to(self.path, socket):
self.log_request(200)
self.wfile.write(self.protocol_version + ' 200 Connection established\n')
self.wfile.write('Proxy-agent: {0}\n'.format(self.version_string()))
self.wfile.write('\n')
self.read_write(self.socket, 300)
finally:
print 'Connection closed.'
self.socket.close()
self.connection.close()
def do_GET(self):
scm, location, path, parameters, query, fragment = urlparse.urlparse(self.path, 'http')
if scm != 'http' or fragment or not location:
self.send_error(400, 'bad url {0}'.format(self.path))
return
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
if self.connect_to(location, self.socket):
self.log_request()
self.socket.send('{0} {1} {2}\n'.format(self.command, urlparse.urlunparse(('', '', path, parameters, query, '')), self.request_version))
self.headers['Connection'] = 'close'
del self.headers['Proxy-Connection']
for key, value in self.headers.items():
self.socket.send('{0}: {1}\n'.format(key, value))
self.socket.send('\n')
self.read_write(self.socket)
finally:
print 'Closing connection...'
self.socket.close()
self.connection.close()
def read_write(self, socket, max_idling = 100):
iw = [self.connection, socket]
ow = []
count = 0
while count != max_idling:
count += 1
read_list, write_list, exception_list = select.select(iw, ow, iw, 3)
if exception_list:
break
if read_list:
for item in read_list:
out = self.connection if item is socket else socket
data = item.recv(8192)
if data:
out.send(data)
count = 0
else:
pass
#print 'Idle for {0}'.format(count)
do_HEAD = do_GET
do_POST = do_GET
do_PUT = do_GET
do_DELETE = do_GET
class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer): pass
if __name__ == '__main__':
BaseHTTPServer.test(ProxyHandler, ThreadingHTTPServer)
This part handles the POST/GET requests:
    def do_GET(self):
        scm, location, path, parameters, query, fragment = urlparse.urlparse(self.path, 'http')
        if scm != 'http' or fragment or not location:
            self.send_error(400, 'bad url {0}'.format(self.path))
            return
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            if self.connect_to(location, self.socket):
                self.log_request()
                self.socket.send('{0} {1} {2}\n'.format(self.command, urlparse.urlunparse(('', '', path, parameters, query, '')), self.request_version))
                self.headers['Connection'] = 'close'
                del self.headers['Proxy-Connection']
                for key, value in self.headers.items():
                    self.socket.send('{0}: {1}\n'.format(key, value))
                self.socket.send('\n')
                self.read_write(self.socket)
        finally:
            print 'Closing connection...'
            self.socket.close()
            self.connection.close()
The read-select on self.connection never returns, and if you read from that object manually, no data arrives. It looks like you have to read from self.rfile instead.
Note that calling select on self.rfile doesn't return either, but self.rfile.read() does work. That is probably because self.rfile is a Python file-like object, and the server process has already read the start of the POST body from the OS into its buffer.
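If you do want POST bodies forwarded, here is a sketch of a dedicated do_POST (my adaptation of the poster's do_GET, untested) that reads Content-Length bytes from self.rfile and sends them after the headers, instead of mapping do_POST = do_GET:

    def do_POST(self):
        # Sketch only: same flow as do_GET, plus reading and forwarding the body.
        scm, location, path, parameters, query, fragment = urlparse.urlparse(self.path, 'http')
        if scm != 'http' or fragment or not location:
            self.send_error(400, 'bad url {0}'.format(self.path))
            return
        length = int(self.headers.getheader('Content-Length') or 0)
        body = self.rfile.read(length) if length else ''
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            if self.connect_to(location, self.socket):
                self.log_request()
                self.socket.send('{0} {1} {2}\n'.format(self.command, urlparse.urlunparse(('', '', path, parameters, query, '')), self.request_version))
                self.headers['Connection'] = 'close'
                del self.headers['Proxy-Connection']
                for key, value in self.headers.items():
                    self.socket.send('{0}: {1}\n'.format(key, value))
                self.socket.send('\n')
                if body:
                    self.socket.send(body)      # forward the request body
                self.read_write(self.socket)
        finally:
            print 'Closing connection...'
            self.socket.close()
            self.connection.close()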
