Mock HTTP server that performs content negotiation - python

I normally use Python's http.server to mock an HTTP server but this time I need the server to perform content negotiation.
I just need it to return an error if it cannot match the Accept header. Python's server ignores the header.
The server could run in a thread, or in a separate process, e.g. by running python -m http.server.

You can, still using http.server, by defining a custom handler...
import http.server
import socketserver
import threading

class MockHandler(http.server.BaseHTTPRequestHandler):
    protocol_version = 'HTTP/1.1'

    def do_GET(self):
        # Respond 406 Not Acceptable unless the client accepts text/html.
        if 'text/html' not in self.headers['Accept']:
            self.send_response(406)
        else:
            self.send_response(200)
        self.end_headers()
        self.wfile.write(b'some data')

class ThreadedHTTPServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
    pass

server_address = ('localhost', 8888)
server = ThreadedHTTPServer(server_address, MockHandler)
server.daemon_threads = True

# Serve in a background thread so the rest of the test can carry on.
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
(Note: it's an HTTP/1.1 server, since most servers are these days, and it's safe for threaded/concurrent requests, but you can probably simplify if you would like; a stripped-down variant is sketched just below.)
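For instance, if you would rather run the mock in a separate process (analogous to python -m http.server, as the question mentions), a minimal single-threaded sketch could live in its own script; the file name mock_server.py and the .get('Accept', '') guard are my own additions here:

# mock_server.py (hypothetical name): run with "python mock_server.py", stop with Ctrl-C.
import http.server

class MockHandler(http.server.BaseHTTPRequestHandler):
    # HTTP/1.0 defaults: the connection closes after each response, so no
    # Content-Length bookkeeping is needed.
    def do_GET(self):
        # .get() guards against a request that has no Accept header at all.
        if 'text/html' not in self.headers.get('Accept', ''):
            self.send_response(406)
        else:
            self.send_response(200)
        self.end_headers()
        self.wfile.write(b'some data')

if __name__ == '__main__':
    http.server.HTTPServer(('localhost', 8888), MockHandler).serve_forever()

A test could then launch it with subprocess.Popen([sys.executable, 'mock_server.py']) in setUp and terminate() it in tearDown.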
A full example in a small test-suite-of-one along with an example "production" function that you might want to test is below.
import http.server
import socketserver
import threading
import unittest
import urllib.error
import urllib.request


def example_production_function(accept_header):
    req = urllib.request.Request('http://localhost:8888',
                                 headers={'Accept': accept_header})
    try:
        urllib.request.urlopen(req)
        return 'Allowed'
    except urllib.error.HTTPError:
        return 'Not allowed'


class TestServer(unittest.TestCase):

    def test(self):
        class MockHandler(http.server.BaseHTTPRequestHandler):
            protocol_version = 'HTTP/1.1'

            def do_GET(self):
                # Respond 406 Not Acceptable unless the client accepts text/html.
                if 'text/html' not in self.headers['Accept']:
                    self.send_response(406)
                else:
                    self.send_response(200)
                self.end_headers()
                self.wfile.write(b'some data')

        class ThreadedHTTPServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
            pass

        server_address = ('localhost', 8888)
        server = ThreadedHTTPServer(server_address, MockHandler)
        server.daemon_threads = True

        thread = threading.Thread(target=server.serve_forever)
        thread.daemon = True
        thread.start()

        result = example_production_function('text/html')
        self.assertEqual(result, 'Allowed')

        result = example_production_function('application/json')
        self.assertEqual(result, 'Not allowed')

        server.shutdown()
        server.server_close()


if __name__ == '__main__':
    unittest.main()

Related

Close script after receiving server response 200 instead of after request is handled

I wrote a webserver with SimpleHTTPRequestHandler and a small script to send a GET request with an argument to the server, to which the server responds with code 200.
What I want is for the script to close as soon as the server replies with 200 (which should happen when self.send_response(200) is run, so I think the problem might be somewhere around there), instead of staying open until the called file, jg_server_ed.py, has finished.
Webserver code:
# server.py
import http.server
import socketserver
import subprocess
import webbrowser
from urllib.parse import urlparse

class Handler(http.server.SimpleHTTPRequestHandler):
    def do_POST(self):
        print(f"request body: {self.path}")
        return

    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        if self.path.startswith("/?sitename="):
            try:
                body_split = self.path.split("/?sitename=")
                if body_split != "" or []:
                    print(f"Incoming request: {body_split[-1]}")
                    subprocess.call(f"python jg_server_ed.py {body_split[-1]}")
                else: pass
            except Exception as err: print(err)
        else: pass
        return super().do_GET()

if __name__ == "__main__":
    print("Serving on port 8080, IP 0.0.0.0.")
    socketserver.TCPServer(('0.0.0.0', 8080), Handler).serve_forever()
GET request sending script thingy:
# submit_to_server.py
import urllib3

http = urllib3.PoolManager()
url = input("URL: ")
print("Sending request...")
r = http.request('GET', f"http://server_ip_address/?sitename={url}")
if r.status == "200":
    exit()
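The part that keeps the client waiting is most likely subprocess.call, which blocks inside do_GET until jg_server_ed.py exits, so the handler never finishes and the connection stays open. A minimal sketch of a non-blocking variant (the class name NonBlockingHandler is made up, and the extra super().do_GET() call is dropped because it would send a second response on the same connection):

import http.server
import subprocess

class NonBlockingHandler(http.server.BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        if self.path.startswith("/?sitename="):
            sitename = self.path.split("/?sitename=")[-1]
            print(f"Incoming request: {sitename}")
            # Popen returns immediately, unlike subprocess.call, so the handler
            # finishes and the response reaches the client right away.
            subprocess.Popen(["python", "jg_server_ed.py", sitename])

Note also that urllib3 reports r.status as an integer, so the comparison r.status == "200" in submit_to_server.py will never be true; compare against the int 200 instead.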

How can I set a timeout in Python HTTPServer

I have a Python (2.7.13) HTTP server running on Debian, and I want to stop any GET request that takes longer than 10 seconds, but I can't find a solution anywhere.
I already tried all the snippets posted in the following question: How to implement Timeout in BaseHTTPServer.BaseHTTPRequestHandler Python
#!/usr/bin/env python
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import os

class handlr(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-type','text-html')
        self.end_headers()
        self.wfile.write(os.popen('sleep 20 & echo "this took 20 seconds"').read())

def run():
    server_address = ('127.0.0.1', 8080)
    httpd = HTTPServer(server_address, handlr)
    httpd.serve_forever()

if __name__ == '__main__':
    run()
As a test, I'm running a shell command that takes 20 seconds to execute, so I need the server to stop before that.
Put your operation on a background thread, and then wait for that background thread to finish. There isn't a general-purpose safe way to abort threads, so this implementation unfortunately leaves the function running in the background even after the handler has given up waiting on it.
If you can, consider putting a proxy server (such as nginx) in front of your server and letting it handle timeouts for you, or use a more robust HTTP server implementation that offers this as a configuration option. But the answer below should basically cover it.
#!/usr/bin/env python
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import os
import threading

class handlr(BaseHTTPRequestHandler):
    def do_GET(self):
        # Run the slow operation on a background thread, waiting at most 3 seconds.
        result, succeeded = run_with_timeout(
            lambda: os.popen('sleep 20 & echo "this took 20 seconds"').read(),
            timeout=3)
        if succeeded:
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write(result)
        else:
            self.send_response(500)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write('<html><head></head><body>Sad panda</body></html>')
        self.wfile.close()

def run():
    server_address = ('127.0.0.1', 8080)
    httpd = HTTPServer(server_address, handlr)
    httpd.serve_forever()

def run_with_timeout(fn, timeout):
    # Returns (result, True) if fn finished within the timeout, (None, False) otherwise.
    lock = threading.Lock()
    result = [None, False]

    def run_callback():
        r = fn()
        with lock:
            result[0] = r
            result[1] = True

    t = threading.Thread(target=run_callback)
    t.daemon = True
    t.start()
    t.join(timeout)
    with lock:
        return tuple(result)

if __name__ == '__main__':
    run()

Python multi threading HTTP server not working

I am trying to create a multi-threaded web server in Python, but the requests are handled one by one. After searching for a few hours, I found this link, but the accepted answer seems to be incorrect, as the requests there are also handled one by one.
Here is the code:
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import threading
from time import sleep

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        sleep(5)
        message = threading.currentThread().getName()
        self.wfile.write(message)
        self.wfile.write('\n')
        return

class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""

if __name__ == '__main__':
    server = ThreadedHTTPServer(('localhost', 8080), Handler)
    print 'Starting server, use <Ctrl-C> to stop'
    server.serve_forever()
I added "sleep(5)" for 5 second delay to handle the request. After that I send multiple requests but all the requests are handled one by one and each request took 5 seconds. I am unable to find the reason. Help me.
The key requirement here is to be able to have a 5-second delay between the send_response and the data returned. That means you need streaming; you can't use ThreadingMixIn, gunicorn, or any other such hack.
You need something like this:
import time, socket, threading

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = socket.gethostname()
port = 8000
sock.bind((host, port))
sock.listen(1)

HTTP = "HTTP/1.1 200 OK\nContent-Type: text/html; charset=UTF-8\n\n"

class Listener(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self.daemon = True  # stop Python from biting ctrl-C
        self.start()

    def run(self):
        conn, addr = sock.accept()
        conn.send(HTTP)
        # serve up an infinite stream
        i = 0
        while True:
            conn.send("%i " % i)
            time.sleep(0.1)
            i += 1

[Listener() for i in range(100)]
time.sleep(9e9)

Write a simple proxy server with python, just strange chars in browser

I'm trying to write a simple HTTP proxy with Python.
This is my code:
import BaseHTTPServer
import urllib2
import threading

HOST_NAME = "localhost"
PORT_NUMBER = 9090

class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def handle_get_async(self):
        opener = urllib2.build_opener()
        #opener.addheaders = self.headers.items()
        response = opener.open(self.path)
        response_content = response.read()
        response.close()
        self.send_response(200)
        self.send_header("Content-type", response.info().type)
        self.end_headers()
        self.wfile.write(response_content)

    def handle_head_async(s):
        s.send_responde(200)
        s.send_Header("Content-type", "text/html")
        s.end_headers()

    def do_HEAD(s):
        thread = threading.Thread(target=self.handle_head_async)
        thread.start()
        thread.join()

    def do_GET(self):
        thread = threading.Thread(target=self.handle_get_async)
        thread.start()
        thread.join()

if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class((HOST_NAME, PORT_NUMBER), RequestHandler)
    httpd.serve_forever()
So everything works fine, but when I use opener.addheaders = self.headers.items() to copy the headers sent by the browser into the new request, I just receive strange characters in the browser, like this:
Ô½Yo#I¶&ø^¿ÂÓ«:¯”)JÜ—PJn’(q’ZCQ^Nw#ÝEßä)**€z`€æi€™—‹žy¨Ûfè—Æ4ptþ‰ê·ÂEãöüŒ9ÇÌÝé\œŠHeõB$Ý–sŽ}fvì˜Ùq³Ÿ¾«uªƒÛnS];þÍOøÅi¢1>âeÂsª|ÄD‰MsÂs’&:Îo˜ÂƒÃcb"ÊÇ?éÄ9Im‡¸G¼çŽEž;8þÉ‘lÕrGž!¹ªipĘžhž£ìˆ»ÃÀáŽO¦½3mNæTƒw¥÷ò‡#>?©£™jÈælß&žj“¦ø<ßý¸¶óþïêÆôï>ì w?Í!áñÈO?ÿøÇŸ‡;ðâ?…¢|äÅñÉR-"¸æ„ü¾|óðỔ:ñHEÎ[c~×G˜»ò»³®Ñq34aAÉ?8 °õ?8‰äHö~?vÙ$v"9–~O<{”Èø?S‡Ä
Does anyone have an idea what the problem is?
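A likely cause: copying the browser's headers also forwards Accept-Encoding, so the upstream site replies with gzip/deflate-compressed bytes, which the proxy then relays without a Content-Encoding header and the browser renders as garbage. A minimal sketch of a workaround, keeping the asker's urllib2 approach but filtering the forwarded headers (the class name FilteringProxyHandler is made up, and the threading wrapper is dropped for brevity):

import BaseHTTPServer
import urllib2

class FilteringProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_GET(self):
        # Forward the browser's headers, minus Accept-Encoding (so the upstream
        # answers uncompressed) and hop-by-hop headers like Host and Connection.
        forwarded = [(k, v) for k, v in self.headers.items()
                     if k.lower() not in ('accept-encoding', 'host', 'connection')]
        opener = urllib2.build_opener()
        opener.addheaders = forwarded
        response = opener.open(self.path)
        body = response.read()
        response.close()
        self.send_response(200)
        self.send_header("Content-type", response.info().type)
        self.end_headers()
        self.wfile.write(body)

if __name__ == '__main__':
    BaseHTTPServer.HTTPServer(("localhost", 9090), FilteringProxyHandler).serve_forever()

Alternatively, keep forwarding Accept-Encoding and copy the upstream Content-Encoding header back to the browser along with the body.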

Multithreaded web server in python

I'm trying to create multithreaded web server in python, but it only responds to one request at a time and I can't figure out why. Can you help me, please?
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from SocketServer import ThreadingMixIn
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from time import sleep

class ThreadingServer(ThreadingMixIn, HTTPServer):
    pass

class RequestHandler(SimpleHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        sleep(5)
        response = 'Slept for 5 seconds..'
        self.send_header('Content-length', len(response))
        self.end_headers()
        self.wfile.write(response)

ThreadingServer(('', 8000), RequestHandler).serve_forever()
Check this post from Doug Hellmann's blog.
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import threading

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        message = threading.currentThread().getName()
        self.wfile.write(message)
        self.wfile.write('\n')
        return

class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""

if __name__ == '__main__':
    server = ThreadedHTTPServer(('localhost', 8080), Handler)
    print 'Starting server, use <Ctrl-C> to stop'
    server.serve_forever()
I have developed a PIP Utility called ComplexHTTPServer that is a multi-threaded version of SimpleHTTPServer.
To install it, all you need to do is:
pip install ComplexHTTPServer
Using it is as simple as:
python -m ComplexHTTPServer [PORT]
(By default, the port is 8000.)
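On Python 3.7+ the standard library itself also includes http.server.ThreadingHTTPServer, so a minimal threaded file server needs no extra package; a small sketch:

from http.server import ThreadingHTTPServer, SimpleHTTPRequestHandler

# Serves the current directory, handling each request in its own thread.
ThreadingHTTPServer(('0.0.0.0', 8000), SimpleHTTPRequestHandler).serve_forever()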
In Python 3, you can use the code below (HTTPS or HTTP):
from http.server import HTTPServer, BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
import threading

USE_HTTPS = True

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Hello world\t' + threading.currentThread().getName().encode() + b'\t' + str(threading.active_count()).encode() + b'\n')

class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    pass

def run():
    server = ThreadingSimpleServer(('0.0.0.0', 4444), Handler)
    if USE_HTTPS:
        import ssl
        server.socket = ssl.wrap_socket(server.socket, keyfile='./key.pem', certfile='./cert.pem', server_side=True)
    server.serve_forever()

if __name__ == '__main__':
    run()
You will see that this code creates a new thread to deal with every request.
The command below generates a self-signed certificate:
openssl req -x509 -newkey rsa:4096 -nodes -out cert.pem -keyout key.pem -days 365
If you are using Flask, this blog is great.
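For reference, Flask's built-in development server can also handle requests in threads; a minimal sketch (the route and port here are made up):

from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'Hello from a threaded Flask dev server\n'

if __name__ == '__main__':
    # threaded=True asks the development server to handle each request in its
    # own thread (recent Flask versions already default to this).
    app.run(host='0.0.0.0', port=8080, threaded=True)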
It's amazing how many votes these solutions that break streaming are getting. If streaming might be needed down the road, then ThreadingMixIn and gunicorn are no good because they just collect up the response and write it as a unit at the end (which actually does nothing if your stream is infinite).
Your basic approach of combining BaseHTTPServer with threads is fine. But the default BaseHTTPServer settings re-bind a new socket for every listener, which won't work on Linux if all the listeners are on the same port. Change those settings before the serve_forever() call. (Just as you have to set self.daemon = True on a thread so that Ctrl-C still works.)
The following example launches 100 handler threads on the same port, with each handler started through BaseHTTPServer.
import time, threading, socket, SocketServer, BaseHTTPServer

class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path != '/':
            self.send_error(404, "Object not found")
            return
        self.send_response(200)
        self.send_header('Content-type', 'text/html; charset=utf-8')
        self.end_headers()
        # serve up an infinite stream
        i = 0
        while True:
            self.wfile.write("%i " % i)
            time.sleep(0.1)
            i += 1

# Create ONE socket.
addr = ('', 8000)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addr)
sock.listen(5)

# Launch 100 listener threads.
class Thread(threading.Thread):
    def __init__(self, i):
        threading.Thread.__init__(self)
        self.i = i
        self.daemon = True
        self.start()

    def run(self):
        httpd = BaseHTTPServer.HTTPServer(addr, Handler, False)
        # Prevent the HTTP server from re-binding every handler.
        # https://stackoverflow.com/questions/46210672/
        httpd.socket = sock
        httpd.server_bind = httpd.server_close = lambda self: None
        httpd.serve_forever()

[Thread(i) for i in range(100)]
time.sleep(9e9)
A multithreaded HTTPS server in Python 3.7:
from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
import threading
import ssl

hostName = "localhost"
serverPort = 8080

class MyServer(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        self.wfile.write(bytes("<html><head><title>https://pythonbasics.org</title></head>", "utf-8"))
        self.wfile.write(bytes("<p>Request: %s</p>" % self.path, "utf-8"))
        self.wfile.write(bytes("<p>Thread: %s</p>" % threading.currentThread().getName(), "utf-8"))
        self.wfile.write(bytes("<p>Thread Count: %s</p>" % threading.active_count(), "utf-8"))
        self.wfile.write(bytes("<body>", "utf-8"))
        self.wfile.write(bytes("<p>This is an example web server.</p>", "utf-8"))
        self.wfile.write(bytes("</body></html>", "utf-8"))

class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    pass

if __name__ == "__main__":
    webServer = ThreadingSimpleServer((hostName, serverPort), MyServer)
    webServer.socket = ssl.wrap_socket(webServer.socket, keyfile='./privkey.pem', certfile='./certificate.pem', server_side=True)
    print("Server started https://%s:%s" % (hostName, serverPort))
    try:
        webServer.serve_forever()
    except KeyboardInterrupt:
        pass
    webServer.server_close()
    print("Server stopped.")
You can test it in a browser at https://localhost:8080.
Remember that you can generate your own key file and certificate with:
$ openssl req -newkey rsa:2048 -keyout privkey.pem -x509 -days 36500 -out certificate.pem
To learn more about creating self-signed certificates with openssl, see https://www.devdungeon.com/content/creating-self-signed-ssl-certificates-openssl
