Is it possible to update an HTML site hosted with the built-in modules of Python? This is a minimal example from https://pythonbasics.org/webserver/ that I have extended with a meta tag to refresh the page every second. This looks like a hack to me. Is there any possible solution to make this cleaner? I would like to avoid AJAX and other external libraries.
# Python 3 server example
from http.server import BaseHTTPRequestHandler, HTTPServer
import time

hostName = "localhost"
serverPort = 8080

class MyServer(BaseHTTPRequestHandler):
    output = []

    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        self.wfile.write(bytes("<html><head><title>https://pythonbasics.org</title>\n", "utf-8"))
        self.wfile.write(bytes('<meta http-equiv="refresh" content="1">\n</head>', "utf-8"))
        self.wfile.write(bytes("<body>", "utf-8"))
        self.wfile.write(bytes("<p>This is an example web server.</p>", "utf-8"))
        for text in MyServer.output:
            self.wfile.write(bytes(text, "utf-8"))
        self.wfile.write(bytes("</body></html>", "utf-8"))

    def log_message(self, format, *args):
        return

def http_serve():
    webServer = HTTPServer((hostName, serverPort), MyServer)
    print("Server started http://%s:%s" % (hostName, serverPort))
    try:
        webServer.serve_forever()
    except KeyboardInterrupt:
        pass
    webServer.server_close()
    print("Server stopped.")

from threading import Thread
thread_http_serve = Thread(target=http_serve)
thread_http_serve.start()

from time import sleep
for i in range(100):
    text = "<p>sample data: " + str(i) + "</p>\n"
    MyServer.output.append(text)
    sleep(1)
I have a very simple HTTPServer implemented in Python. The code is the following:
import SimpleHTTPServer
import SocketServer as socketserver
import os
import threading

class MyHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    path_to_image = 'RGBWebcam1.png'
    statinfo = os.stat(path_to_image)
    img_size = statinfo.st_size
    print(img_size)

    def do_HEAD(self):
        self.send_response(200)
        self.send_header("Content-type", "image/png")
        self.send_header("Content-length", self.img_size)
        self.end_headers()

    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "image/png")
        self.send_header("Content-length", self.img_size)
        self.end_headers()
        f = open(self.path_to_image, 'rb')
        self.wfile.write(f.read())
        f.close()

class MyServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    def __init__(self, server_address, RequestHandlerClass):
        self.allow_reuse_address = True
        socketserver.TCPServer.__init__(self, server_address, RequestHandlerClass, False)

if __name__ == "__main__":
    HOST, PORT = "192.168.2.10", 9999

    server = MyServer((HOST, PORT), MyHandler)
    server.server_bind()
    server.server_activate()

    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.start()

    while True:
        print "test"
If I connect to the given IP address, the page loads and everything is fine. Now it would be nice if the page refreshed automatically every n seconds.
I am very new to Python and especially new to web coding. I have found LiveReload, however I cannot get my head around how to merge these two libraries together.
Thank you for your help
You'll need a connection to the client if you want the server to tell it to refresh. With a plain HTTP server you send information (HTML) and the client processes it; there is no communication beyond that. Pushing updates would require AJAX or WebSockets to be implemented, both protocols that allow frequent communication.
Since you can't communicate after the response, you should automate the refresh in the content you initially send. In this example we'll say we want the page to refresh every 30 seconds. This is possible to do in either HTML or JavaScript:
<meta http-equiv="refresh" content="30" />
or
setTimeout(function(){
    window.location.reload(1);
}, 30000);
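For completeness, here is a minimal sketch, not part of the original answer, of the JavaScript variant embedded in a page served with the built-in http.server module; the 30-second interval matches the snippet above, and the handler name is just a placeholder.

from http.server import BaseHTTPRequestHandler, HTTPServer

PAGE = """<html>
<head><title>auto-refresh</title></head>
<body>
<p>This page reloads itself every 30 seconds.</p>
<script>
  setTimeout(function () { window.location.reload(1); }, 30000);
</script>
</body>
</html>"""

class RefreshHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        # The reload logic lives in the page itself; the server just serves it.
        self.wfile.write(PAGE.encode("utf-8"))

if __name__ == "__main__":
    HTTPServer(("localhost", 8080), RefreshHandler).serve_forever()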
I am new to web development. I am creating a web app for my home automation project, in which I need bi-directional communication: any theft/security alert from home will be sent from the server to the client, and if the client wants to control the main gate, it sends a POST request to the server. I am still confused about what to use, SSE or WebSockets. My question is: is it possible to develop an app that uses both SSE and traditional (long-polling) HTTP requests from the client (GET/POST)? I have tested each of them individually and they work fine, but I am unable to make them work together. I am using Python's BaseHTTPServer. Or, in the end, do I have to move to WebSockets? Any suggestion will be highly appreciated. My code is here:
import time
import BaseHTTPServer
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import os
import requests
import threading
from threading import Thread
from chk_connection import is_connected
import socket

HOST_NAME = socket.gethostbyname(socket.gethostname())
PORT_NUMBER = 8040  # Maybe set this to 9000.

ajax_count = 0
ajax_count_str = ""
switch = 0
IP_Update_time = 2
keep_alive = 0
connected = False

my_dir = os.getcwd()

class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_HEAD(s):
        s.send_response(200)
        s.send_header("Content-type", "text/html")
        s.end_headers()

    def do_POST(s):
        """Respond to a POST request."""
        global keep_alive
        s.send_response(200)
        s.send_header('Access-Control-Allow-Origin', '*')
        s.send_header("Content-type", "text/html")
        s.end_headers()
        HTTP_request = s.requestline
        if HTTP_request.find('keep_alive') > -1:
            keep_alive += 1
            keep_alive_str = str(keep_alive)
            s.wfile.write(keep_alive_str)  # answering the ajax keep-alive calls

    def do_GET(s):
        """Respond to a GET request."""
        global ajax_count
        global my_dir
        global switch
        global ajax_count_str
        global keep_alive
        s.send_response(200)
        #s.send_header('Access-Control-Allow-Origin', '*')
        s.send_header('content-type', 'text/html')
        s.end_headers()
        print s.headers
        HTTP_request = s.requestline
        index_1 = HTTP_request.index("GET /")
        index_2 = HTTP_request.index(" HTTP/1.1")
        file_name = HTTP_request[index_1 + 5:index_2]
        #print 'file_name:', file_name
        #print 'HTTP_request:', HTTP_request
        #if HTTP_request.find('L2ON') > -1:
        #    print 'sending SSE'
        #    s.wfile.write('event::'.format(time.time()))
        if HTTP_request.find('GET / HTTP/1.1') > -1:
            print 'send main'
            file1 = open('Index.html', 'r')
            file_read = file1.read()
            s.wfile.write(file_read)
        elif file_name.find("/") == -1:
            # bare file name: search the working directory tree for it
            for root, dirs, files in os.walk(my_dir):
                for file in files:
                    if HTTP_request.find(file) > -1:
                        file_path = os.path.join(root, file)
                        file1 = open(file_path, 'r')
                        file_read = file1.read()
                        s.wfile.write(file_read)
                        print 'send', file
        elif file_name.find("/") > -1:
            # path with sub-directories: walk only the requested directory
            slash_indexes = [n for n in xrange(len(file_name)) if file_name.find('/', n) == n]
            length = len(slash_indexes)
            slash = slash_indexes[length - 1]
            file_path = file_name[0:slash]
            root_dir = (my_dir + '/' + file_path + '/')
            for root, dirs, files in os.walk(root_dir):
                for file in files:
                    if HTTP_request.find(file) > -1:
                        image_path = os.path.join(root, file)
                        image = open(image_path, 'r')
                        image_read = image.read()
                        s.wfile.write(image_read)
                        print 'send', file
        #else:
        #    print 'file not found'

class MyHandler_SSE(BaseHTTPRequestHandler):
    print 'SSE events class'

    def do_GET(self):
        print 'this is SSE'
        self.send_response(200)
        self.send_header('content-type', 'text/event-stream')
        self.end_headers()
        while True:
            print 'SSE sent'
            self.wfile.write('event: message\nid: 1\ndata: {0}\ndata:\n\n'.format(time.time()))
            time.sleep(2)

class chk_connection(threading.Thread):
    """This thread checks whether an internet connection is available."""
    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        global connected
        while 1:
            # Inside the chk_connection module, is_connected() works roughly like this:
            #   REMOTE_SERVER = "www.google.com"
            #   def is_connected():
            #       try:
            #           # see if we can resolve the host name -- tells us if
            #           # there is a DNS listening
            #           host = socket.gethostbyname(REMOTE_SERVER)
            #           # connect to the host -- tells us if it is actually reachable
            #           s = socket.create_connection((host, 80), 2)
            #           return True
            #       except:
            #           pass
            #       return False
            connected = is_connected()
            #print 'internet:', connected
            time.sleep(1)

class server_main(threading.Thread):
    """Runs the HTTP server and re-binds if the host IP changes."""
    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        global connected
        server_class = BaseHTTPServer.HTTPServer
        HOST_NAME = socket.gethostbyname(socket.gethostname())
        last_HOST_NAME = HOST_NAME
        httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)
        #http_SSE = server_class((HOST_NAME, PORT_NUMBER), MyHandler_SSE)
        print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
        while 1:
            while is_connected():
                httpd._handle_request_noblock()
                #http_SSE._handle_request_noblock()
                time.sleep(1)
            HOST_NAME = socket.gethostbyname(socket.gethostname())
            if HOST_NAME != last_HOST_NAME:
                print 'Serving at new host:', HOST_NAME
                httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)

def start():
    tx_socket_thread3 = chk_connection()  # checks whether an internet connection is available
    tx_socket_thread3.start()

    tx_socket_thread5 = server_main()
    tx_socket_thread5.start()
    print 's1:', tx_socket_thread5.is_alive()

if __name__ == '__main__':
    start()
I might need to restructure the code, but I don't know how. What I want is: if any event happens on the server side, the server pushes data to the client, while it also keeps responding to GET and POST requests from the client. Help please.
It is definitely possible to develop a web application which uses a mixture of normal HTTP traffic, server-sent events and WebSockets. However, the web server classes in the Python standard library are not designed for this purpose, though one can probably make them work with enough hammering. You should install a proper web server and use its facilities.
Examples include
uWSGI and server-sent events with WSGI applications
Tornado and WebSockets (see the sketch after this list)
Furthermore
Installing Python packages
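As an illustration of the Tornado route, here is a minimal sketch, not taken from the answer: the handler names, the /ws path and the port are placeholders. It serves an ordinary GET handler and a WebSocket endpoint from the same process, so server pushes and normal requests can coexist.

import tornado.ioloop
import tornado.web
import tornado.websocket

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        # Ordinary HTTP GET/POST traffic is handled by RequestHandler classes.
        self.write("Plain HTTP GET handled here")

class EchoWebSocket(tornado.websocket.WebSocketHandler):
    def open(self):
        print("WebSocket opened")

    def on_message(self, message):
        # The server can push data to the client at any time over this socket.
        self.write_message(u"You said: " + message)

    def on_close(self):
        print("WebSocket closed")

app = tornado.web.Application([
    (r"/", MainHandler),
    (r"/ws", EchoWebSocket),
])

if __name__ == "__main__":
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()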
I have currently got this code set up:
import time
import http.server
import socketserver
import mimetypes
import os

HOST_NAME = 'localhost'
PORT = 8000

def load(self):
    with open(self, 'r') as self:
        self = self.read()
    return self.encode('UTF-8')

class MyHandler(http.server.BaseHTTPRequestHandler):
    def do_HEAD(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()

    def do_GET(self):
        if self.path == "/":
            self.send_response(200)
            self.send_header("Content-type", "text/html")
            self.end_headers()
            self.wfile.write(load('index.html'))
        if self.path.endswith('.css'):
            self.send_response(200)
            self.send_header("Content-type", "text/css")
            self.end_headers()
            dest = self.path.replace("/", "")
            self.wfile.write(load(dest))
        if self.path == "/bg.jpg":
            self.send_response(200)
            self.send_header("Content-type", "image/jpeg")
            self.end_headers()

if __name__ == '__main__':
    httpd = socketserver.TCPServer(("", PORT), MyHandler)
    print(time.asctime(), "Server Starts - %s:%s" % ("", PORT))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print(time.asctime(), "Server Stops - %s:%s" % ("", PORT))
My web page seems to be working: index.html and the CSS are loaded when I open it. However, the image is not coming through. Has anyone got any idea why?
To send an image over HTTP, write the image data directly to the socket, just as for any other type of file. You can't use your load() function for this, since it encodes the text as UTF-8. Instead, open the file with mode 'rb', read the bytes from that file handle, and write them directly to self.wfile in the HTTP handler.
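A minimal sketch of that approach, assuming the image sits next to the script as bg.jpg; the handler name and the 404 fallback are illustrative:

import http.server
import socketserver

PORT = 8000

class ImageHandler(http.server.BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path == "/bg.jpg":
            self.send_response(200)
            self.send_header("Content-type", "image/jpeg")
            self.end_headers()
            # Binary mode: the raw bytes go straight to the socket,
            # with no UTF-8 encoding involved.
            with open("bg.jpg", "rb") as f:
                self.wfile.write(f.read())
        else:
            self.send_error(404)

if __name__ == '__main__':
    with socketserver.TCPServer(("", PORT), ImageHandler) as httpd:
        httpd.serve_forever()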
I'm trying to create a multithreaded web server in Python, but it only responds to one request at a time and I can't figure out why. Can you help me, please?
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from SocketServer import ThreadingMixIn
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from time import sleep

class ThreadingServer(ThreadingMixIn, HTTPServer):
    pass

class RequestHandler(SimpleHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        sleep(5)
        response = 'Slept for 5 seconds..'
        self.send_header('Content-length', len(response))
        self.end_headers()
        self.wfile.write(response)

ThreadingServer(('', 8000), RequestHandler).serve_forever()
Check this post from Doug Hellmann's blog.
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import threading

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        message = threading.currentThread().getName()
        self.wfile.write(message)
        self.wfile.write('\n')
        return

class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""

if __name__ == '__main__':
    server = ThreadedHTTPServer(('localhost', 8080), Handler)
    print 'Starting server, use <Ctrl-C> to stop'
    server.serve_forever()
I have developed a PIP Utility called ComplexHTTPServer that is a multi-threaded version of SimpleHTTPServer.
To install it, all you need to do is:
pip install ComplexHTTPServer
Using it is as simple as:
python -m ComplexHTTPServer [PORT]
(By default, the port is 8000.)
In Python 3, you can use the code below (HTTPS or HTTP):
from http.server import HTTPServer, BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
import threading

USE_HTTPS = True

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Hello world\t' + threading.currentThread().getName().encode() + b'\t' + str(threading.active_count()).encode() + b'\n')

class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    pass

def run():
    server = ThreadingSimpleServer(('0.0.0.0', 4444), Handler)
    if USE_HTTPS:
        import ssl
        server.socket = ssl.wrap_socket(server.socket, keyfile='./key.pem', certfile='./cert.pem', server_side=True)
    server.serve_forever()

if __name__ == '__main__':
    run()
You will see that this code creates a new thread to deal with every request.
The command below generates a self-signed certificate:
openssl req -x509 -newkey rsa:4096 -nodes -out cert.pem -keyout key.pem -days 365
If you are using Flask, this blog is great.
It's amazing how many votes these solutions that break streaming are getting. If streaming might be needed down the road, then ThreadingMixIn and gunicorn are no good, because they just collect up the response and write it as a unit at the end (which actually does nothing if your stream is infinite).
Your basic approach of combining BaseHTTPServer with threads is fine. But the default BaseHTTPServer settings re-bind a new socket for every listener, which won't work on Linux if all the listeners are on the same port. Change those settings before the serve_forever() call. (Just as you have to set self.daemon = True on a thread to stop Ctrl-C from being disabled.)
The following example launches 100 handler threads on the same port, with each handler started through BaseHTTPServer.
import time, threading, socket, SocketServer, BaseHTTPServer

class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path != '/':
            self.send_error(404, "Object not found")
            return
        self.send_response(200)
        self.send_header('Content-type', 'text/html; charset=utf-8')
        self.end_headers()

        # serve up an infinite stream
        i = 0
        while True:
            self.wfile.write("%i " % i)
            time.sleep(0.1)
            i += 1

# Create ONE socket.
addr = ('', 8000)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addr)
sock.listen(5)

# Launch 100 listener threads.
class Thread(threading.Thread):
    def __init__(self, i):
        threading.Thread.__init__(self)
        self.i = i
        self.daemon = True
        self.start()

    def run(self):
        httpd = BaseHTTPServer.HTTPServer(addr, Handler, False)

        # Prevent the HTTP server from re-binding or closing the shared socket.
        # https://stackoverflow.com/questions/46210672/
        httpd.socket = sock
        httpd.server_bind = httpd.server_close = lambda self: None
        httpd.serve_forever()

[Thread(i) for i in range(100)]
time.sleep(9e9)
A multithreaded HTTPS server in Python 3.7:
from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
import threading
import ssl

hostName = "localhost"
serverPort = 8080

class MyServer(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        self.wfile.write(bytes("<html><head><title>https://pythonbasics.org</title></head>", "utf-8"))
        self.wfile.write(bytes("<body>", "utf-8"))
        self.wfile.write(bytes("<p>Request: %s</p>" % self.path, "utf-8"))
        self.wfile.write(bytes("<p>Thread: %s</p>" % threading.currentThread().getName(), "utf-8"))
        self.wfile.write(bytes("<p>Thread Count: %s</p>" % threading.active_count(), "utf-8"))
        self.wfile.write(bytes("<p>This is an example web server.</p>", "utf-8"))
        self.wfile.write(bytes("</body></html>", "utf-8"))

class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    pass

if __name__ == "__main__":
    webServer = ThreadingSimpleServer((hostName, serverPort), MyServer)
    webServer.socket = ssl.wrap_socket(webServer.socket, keyfile='./privkey.pem', certfile='./certificate.pem', server_side=True)
    print("Server started https://%s:%s" % (hostName, serverPort))
    try:
        webServer.serve_forever()
    except KeyboardInterrupt:
        pass

    webServer.server_close()
    print("Server stopped.")
You can test it in a browser at https://localhost:8080. Each response shows the request path, the thread that handled it, and the active thread count.
Remember that you can generate your own key file and certificate with:
$ openssl req -newkey rsa:2048 -keyout privkey.pem -x509 -days 36500 -out certificate.pem
To learn more about creating self-signed certificates with openssl: https://www.devdungeon.com/content/creating-self-signed-ssl-certificates-openssl
I'm trying to implement a simple server in Python based on HTTPServer.
How can I extract the domain of the site being served in the current request?
I mean, it could serve several domains, such as site1.com and site2.com; how can I get the domain in this code:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

class MyHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        print "get"
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        # how can I get the host name of the site being served here?
        # site1.com or site2.com?
        domain = ???
        self.wfile.write('<html>Welcome on www.%s.com</html>' % (domain))

if __name__ == "__main__":
    try:
        server = HTTPServer(("", 8070), MyHandler)
        print "started httpserver..."
        server.serve_forever()
    except KeyboardInterrupt:
        print "^C received, shutting down server"
        server.socket.close()
I guess you should be able to read the Host header.
The headers can be accessed from BaseHTTPRequestHandler.headers
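For example, a minimal sketch; the .get() fallback and the port stripping are my additions rather than part of the original answer:

from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

class MyHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        # The Host header carries whatever name the client requested,
        # e.g. "site1.com" or "site2.com:8070".
        host = self.headers.get('Host', '')
        domain = host.split(':')[0]  # drop the port, if any
        self.wfile.write('<html>Welcome on www.%s.com</html>' % domain)

if __name__ == "__main__":
    HTTPServer(("", 8070), MyHandler).serve_forever()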