I have created a simple live video stream from the Raspberry Pi camera. It looks like this:
server.py
from flask import Flask
from flask import render_template
from flask import Response
import cv2

app = Flask(__name__)

@app.route('/')
def index():
    return render_template('index.html')

@app.route('/video_feed')
def video_feed():
    return Response(gen(), mimetype='multipart/x-mixed-replace; boundary=frame')

def gen():
    camera = cv2.VideoCapture(0)
    while True:
        ret, img = camera.read()
        if ret:
            frame = cv2.imencode('.jpg', img)[1].tobytes()
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
        else:
            break

app.run(host='192.168.0.241', port=7070, debug=True)
index.html
<html>
  <head>
    <title>PiCamera stream</title>
  </head>
  <body>
    <h1>Streaming</h1>
    <img src="{{ url_for('video_feed') }}">
  </body>
</html>
Everything works correctly: I enter http://<raspberry_ip>:<port> in the browser and I can see the video.
Now I need to create a mobile app for watching this video, but I am struggling with how to do it. Is there a way to capture the video stream in an iOS app?
Your video is streaming at
http://<raspberry_ip>:<port>/video_feed
and the page that embeds it is at
http://<raspberry_ip>:<port>
Use ngrok if you want to expose your app on a public URL:
http://ngrok.com/
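Since /video_feed is a plain multipart MJPEG endpoint, any MJPEG-capable client can consume the same URL, which is also the usual route for showing it in an iOS app (for example via a web view or an MJPEG viewer library). As a quick sanity check from another machine, here is a minimal sketch that reads the endpoint with OpenCV; the address is a placeholder, and it assumes an OpenCV build with the FFmpeg backend:

import cv2

# Placeholder address; substitute your Pi's IP and port.
stream = cv2.VideoCapture('http://<raspberry_ip>:<port>/video_feed')

while True:
    ok, frame = stream.read()   # pulls the next JPEG part of the multipart stream
    if not ok:
        break
    cv2.imshow('PiCamera stream', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

stream.release()
cv2.destroyAllWindows()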
Related
I am trying to take an RTSP video input, apply analytics to it, and show it as a live stream. I am using OpenCV and Flask for this purpose.
I can show the output successfully. However, I am having issues when I try to access the host from multiple devices.
If I access it from a single device it works like a charm, but when I try to access it from multiple devices at the same time, it breaks.
RTSP = 'rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mp4'

# importing
import cv2
from flask import Flask, render_template, Response

app = Flask(__name__)
cap = cv2.VideoCapture(RTSP)

def gen_frames():
    while True:
        success, frame = cap.read()
        ret, buffer = cv2.imencode('.jpg', frame)
        frame = buffer.tobytes()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

@app.route('/')
def index():
    return render_template('index.html')

@app.route('/video')
def video():
    return Response(gen_frames(), mimetype='multipart/x-mixed-replace; boundary=frame')

if __name__ == "__main__":
    app.run(debug=False, host='0.0.0.0', port=8018)
Need some expert advice. Thanks in advance.
Note - index.html code is given below:
<!DOCTYPE html>
<html>
  <body>
    <h1>Live streaming</h1>
    <div>
      <img src="{{ url_for('video') }}"/>
    </div>
  </body>
</html>
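A common way to let several clients watch the same RTSP source is to read it in one background thread and have each client's generator encode only the most recent frame, instead of every client calling cap.read() on the shared capture. A minimal sketch of that idea, keeping only the streaming route; the names grab_frames and latest_frame are illustrative, not from the code above:

import threading
import time
import cv2
from flask import Flask, Response

RTSP = 'rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mp4'

app = Flask(__name__)
latest_frame = None
lock = threading.Lock()

def grab_frames():
    # Single reader thread: only it ever touches the VideoCapture object.
    global latest_frame
    cap = cv2.VideoCapture(RTSP)
    while True:
        success, frame = cap.read()
        if success:
            with lock:
                latest_frame = frame

def gen_frames():
    # One generator per connected client; all of them encode the
    # same shared frame instead of competing for cap.read().
    while True:
        time.sleep(0.03)                     # roughly 30 fps pacing per client
        with lock:
            if latest_frame is None:
                continue
            ret, buffer = cv2.imencode('.jpg', latest_frame)
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + buffer.tobytes() + b'\r\n')

@app.route('/video')
def video():
    return Response(gen_frames(),
                    mimetype='multipart/x-mixed-replace; boundary=frame')

if __name__ == "__main__":
    threading.Thread(target=grab_frames, daemon=True).start()
    app.run(debug=False, host='0.0.0.0', port=8018, threaded=True)

Each connected client still pays for one JPEG encode per yielded frame, but the RTSP source is only opened and decoded once.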
I have this Flask-SocketIO app which shows Raspberry Pi system info such as temperature, RAM and disk space. The app also has a video streaming component, videoStream.py.
I have added the videoStream.py route to index.py using a Flask blueprint. When I access the app in the browser, the RPi freezes and the error log shows:
> Truncated or oversized response headers received from daemon process
> 'rpiWebServer': /var/www/rpiWebServer.wsgi
Why is this happening?
Is this line correct: videoStreamBp = Blueprint('video_stream', __name__)?
Should I use videopi instead of video_stream?
When I create a standalone app without the blueprint and SocketIO, streaming works perfectly.
UPDATE:
When I remove <img src="{{ url_for(videopi) }}">, the page loads without video, as expected.
index.py
from flask import Flask, render_template, Response, request
from flask_socketio import SocketIO, emit
from threading import Lock
# for temp
import os
import datetime
import ast
import psutil

app = Flask(__name__)

# for socket
async_mode = None
socketio = SocketIO(app, async_mode=async_mode)
# thread = None
thread1 = None
thread_lock = Lock()

from findPath import findPathBp
app.register_blueprint(findPathBp)

from videoStream import videoStreamBp
app.register_blueprint(videoStreamBp)

# GET RAM info
def getSysInfo():
    count = 0
    while True:
        # RAM
        memory = psutil.virtual_memory()
        ramAvailable = round(memory.available/1024.0/1024.0, 1)  # Divide from Bytes -> KB -> MB
        ramTotal = round(memory.total/1024.0/1024.0, 1)
        # Temp
        temp = os.popen("vcgencmd measure_temp").readline()
        cpuTemp = temp.replace("temp=", "")
        cpuTemp = cpuTemp.replace("'C", "°C")
        # DISK
        disk = psutil.disk_usage('/')
        # Divide from Bytes -> KB -> MB -> GB
        diskFree = round(disk.free/1024.0/1024.0/1024.0, 1)
        diskTotal = round(disk.total/1024.0/1024.0/1024.0, 1)
        socketio.sleep(1)
        count += 1
        socketio.emit('sysStat', {'available': ramAvailable, 'total': ramTotal, 'temp': cpuTemp, 'freeDisk': diskFree, 'totalDisk': diskTotal}, namespace='/getSysInfo')

# index route
@app.route("/", methods=['GET', 'POST'])
def index():
    return render_template('index.html', result=timeString)

# socket IO
# Get system info
@socketio.on('connect', namespace='/getSysInfo')
def test_connect():
    global thread1
    with thread_lock:
        if thread1 is None:
            thread1 = socketio.start_background_task(getSysInfo)

if __name__ == "__main__":
    # socketio.run() needs the app as its first argument
    socketio.run(app, host='192.168.225.47', port=80, debug=True, threaded=True)
videoStream.py
from flask import Blueprint, render_template, Response

videoStreamBp = Blueprint('video_stream', __name__)

# Raspberry Pi camera module (requires picamera package)
from camera_pi import Camera

def gen(camera):
    # Video streaming generator function.
    while True:
        frame = camera.get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

@videoStreamBp.route('/videopi')
def video_stream():
    return Response(gen(Camera()),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
index.html
<div class='fifty'>
  <p class='tempConainer'>CPU temperature is: <span id='temp'>Loading..</span></p><br>
  <p class='tempConainer'>RAM available: <span id='ramInfo'>Loading..</span></p>
  <p class='tempConainer'>RAM total: <span id='ramInfo1'>Loading..</span></p><br>
  <p class='tempConainer'>Free disk: <span id='freeDisk'>Loading..</span></p><br>
  <p class='tempConainer'>Total disk: <span id='totalDisk'>Loading..</span></p><br>
</div>
<div class='fifty'>
  <img src="{{url_for(videopi)}}">
</div>
Finally I found that the src attribute of the image was wrong, and I changed it to this:
<img src='/videopi'>
Works like a charm.
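If you would rather keep url_for, the endpoint name has to be passed as a quoted string, and for a blueprint view it is prefixed with the blueprint's name. With the blueprint above registered as 'video_stream' and the view function also named video_stream, the following sketch (based on the code above) should resolve to the same /videopi URL:

<img src="{{ url_for('video_stream.video_stream') }}">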
I am trying to create a video stream from my Raspberry Pi to multiple clients. Flask's built-in server does not handle this well, so I use the cheroot.wsgi Server. I have created a DDNS hostname using No-IP in order to broadcast the video stream over the internet. So far I have managed to serve the video to only one client, even though I use a WSGI server.
Here is the video feeder
from flask import Flask, render_template, Response
from camera import VideoCamera
import RPi.GPIO as gpio

gpio.setmode(gpio.BCM)
gpio.setup(21, gpio.OUT)

app = Flask(__name__)

def gen(camera):
    while True:
        frame = camera.get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n\r\n')

@app.route('/')
def index():
    return render_template('index.html')

@app.route('/video_feed')
def video_feed():
    return Response(gen(VideoCamera()), mimetype='multipart/x-mixed-replace; boundary=frame')

@app.route('/background_process_test')
def background_process_test():
    gpio.output(21, gpio.HIGH)
    print("Hello")
    return ("nothing")

@app.route('/background_process_test2')
def background_process_test2():
    gpio.output(21, gpio.LOW)
    print("Hello")
    return ("nothing")

if __name__ == '__main__':
    app.run()
Here is the WSGI server using cheroot:
try:
    from cheroot.wsgi import Server as WSGIServer, PathInfoDispatcher
except ImportError:
    print("OK")
    from cherrypy.wsgiserver import CherryPyWSGIServer as WSGIServer, WSGIPathInfoDispatcher as PathInfoDispatcher

from main import app

d = PathInfoDispatcher({'/': app})
server = WSGIServer(('0.0.0.0', 5000), d)

if __name__ == '__main__':
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()
The OpenCV module that captures the camera frames:
import cv2

class VideoCamera(object):
    def __init__(self):
        # Using OpenCV to capture from device 0. If you have trouble capturing
        # from a webcam, comment the line below out and use a video file
        # instead.
        self.video = cv2.VideoCapture(0)
        self.video.set(cv2.CAP_PROP_FRAME_WIDTH, 160)
        self.video.set(cv2.CAP_PROP_FRAME_HEIGHT, 200)
        # If you decide to use video.mp4, you must have this file in the folder
        # as the main.py.
        # self.video = cv2.VideoCapture('video.mp4')

    def __del__(self):
        self.video.release()

    def get_frame(self):
        success, image = self.video.read()
        # We are using Motion JPEG, but OpenCV defaults to capturing raw images,
        # so we must encode them into JPEG in order to correctly display the
        # video stream.
        ret, jpeg = cv2.imencode('.jpg', image)
        return jpeg.tobytes()
Finally, the web page that serves the video feed:
<!doctype html>
<html>
  <head>
    <!-- <link rel="shortcut icon" href="1.ico" type="image/x-icon" /> -->
    <title>jindeath</title>
  </head>
  <body>
    hello
    <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
    <script type="text/javascript">
      $(function() {
        $('a#test').bind('click', function() {
          $.getJSON('/background_process_test', function(data) {
            // do nothing
          });
          return false;
        });
      });
      $(function() {
        $('a#test2').bind('click', function() {
          $.getJSON('/background_process_test2', function(data) {
            // do nothing
          });
          return false;
        });
      });
    </script>
    <div class='container'>
      <h3>Test</h3>
      <form>
        <img id="bg" src="{{ url_for('video_feed') }}">
        <a href=# id=test><button class='btn btn-default'>Test</button></a>
        <a href=# id=test2><button class='btn btn-default'>Test</button></a>
      </form>
    </div>
  </body>
</html>
Furthermore, when more than one device connects to the page, my RPi uses 100% of its CPU. Any suggestions?
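Cheroot's WSGI server is already threaded, with a pool of worker threads (10 by default), so it can hold several connections at once; the heavy CPU use comes mostly from each /video_feed client constructing its own VideoCamera() and doing its own JPEG encoding. Below is a minimal sketch of giving cheroot a larger worker pool (the numthreads value of 20 is only an illustration); sharing one camera between clients would additionally need the single-grabber pattern sketched after the RTSP question above:

from cheroot.wsgi import Server as WSGIServer, PathInfoDispatcher

from main import app

d = PathInfoDispatcher({'/': app})
# numthreads sets the size of cheroot's worker-thread pool; each
# connected streaming client occupies one worker for as long as it watches.
server = WSGIServer(('0.0.0.0', 5000), d, numthreads=20)

if __name__ == '__main__':
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()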
I'm developing an application that has a client (HTML & JS) and a server (Flask).
The client will open the webcam (HTML5 API) -> send a video stream to the server -> the server will return other streams together with a JSON/text stream.
I don't want to do polling.
I was researching video streaming, but every article and example I found on the internet uses the webcam through OpenCV or a local video on the server, and does not capture the real-time webcam video on the client and send it to the server.
Here are the main examples that I found.
Server:
from flask import Flask, render_template, Response
import cv2

app = Flask(__name__)

camera = cv2.VideoCapture(0)  # I can't use a local webcam or a local source video; I must receive it over HTTP in some API (Flask) route

def gen_frames():  # generate frame by frame from camera
    while True:
        success, frame = camera.read()  # read the camera frame
        if not success:
            break
        else:
            ret, buffer = cv2.imencode('.jpg', frame)
            frame = buffer.tobytes()
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')  # concat frame one by one and show result

@app.route('/video_feed')
def video_feed():
    """Video streaming route. Put this in the src attribute of an img tag."""
    return Response(gen_frames(),
                    mimetype='multipart/x-mixed-replace; boundary=frame')

@app.route('/')
def index():
    """Video streaming home page."""
    return render_template('index.html')

if __name__ == '__main__':
    app.run(host='0.0.0.0')
Client:
camera.js
//--------------------
// GET USER MEDIA CODE
//--------------------
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);

var video;
var webcamStream;

function startWebcam() {
  if (navigator.getUserMedia) {
    navigator.getUserMedia(
      // constraints
      {
        video: true,
        audio: false
      },
      // successCallback
      function(localMediaStream) {
        video = document.querySelector('video');
        video.src = window.URL.createObjectURL(localMediaStream);
        webcamStream = localMediaStream;
      },
      // errorCallback
      function(err) {
        console.log("The following error occured: " + err);
      }
    );
  } else {
    console.log("getUserMedia not supported");
  }
}
//---------------------
// TAKE A SNAPSHOT CODE
//---------------------
var canvas, context;

function init() {
  // Get the canvas and obtain a context for
  // drawing in it
  canvas = document.getElementById("myCanvas");
  context = canvas.getContext('2d');
}

function snapshot() {
  // Draws current image from the video element into the canvas
  context.drawImage(video, 0, 0, canvas.width, canvas.height);
  webcamStream.stop();

  var dataURL = canvas.toDataURL('image/jpeg', 1.0);
  document.querySelector('#dl-btn').href = dataURL;

  $.ajax({
    type: "POST",
    contentType: false,
    cache: false,
    processData: false,
    async: false,
    url: "/upload",
    data: {
      imgBase64: dataURL
    }
  }).done(function(o) {
    console.log('saved');
    // If you want the file to be visible in the browser
    // - please modify the callback in javascript. All you
    // need is to return the url to the file, you just saved
    // and then put the image in your browser.
  });
}
index.html
<!DOCTYPE html>
<html>
  <head>
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
    <script src="camera.js"></script>
  </head>
  <body onload="init();">
    <h1>Take a snapshot of the current video stream</h1>
    Click on the Start WebCam button.
    <p>
      <button onclick="startWebcam();">Start WebCam</button>
      <button type="submit" id="dl-btn" href="#" download="participant.jpeg" onclick="snapshot();">Take Snapshot</button>
    </p>
    <video onclick="snapshot(this);" width=400 height=400 id="video" controls autoplay></video>
    <p>Screenshots:</p>
    <canvas id="myCanvas" width="400" height="350"></canvas>
  </body>
</html>
Another problem is that I can't do polling with snapshots; I need to send the video stream to the server and work with the frames there.
Does someone know how I can send the webcam video stream to Flask?
Thanks
You should probably use stream_with_context around your stream generator to stream your response. You can't return a regular response because there is nothing that tells the client not to close the connection.
https://flask.palletsprojects.com/en/1.1.x/api/#flask.stream_with_context
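As an illustration of that suggestion, here is a minimal sketch of wrapping a generator in stream_with_context; the /stream route and the placeholder generator are illustrative, not taken from the question's code:

from flask import Flask, Response, stream_with_context

app = Flask(__name__)

def gen():
    # Placeholder generator: in practice this would yield encoded frames.
    for i in range(10):
        yield f"chunk {i}\n"

@app.route('/stream')
def stream():
    # stream_with_context keeps the request context alive while the
    # generator is consumed, so the response is sent chunk by chunk.
    return Response(stream_with_context(gen()), mimetype='text/plain')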
Hey, this might help you: video streaming in Flask.
#!/usr/bin/env python
from flask import Flask, render_template, Response
from camera import Camera

app = Flask(__name__)

@app.route('/')
def index():
    return render_template('index.html')

def gen(camera):
    while True:
        frame = camera.get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

@app.route('/video_feed')
def video_feed():
    return Response(gen(Camera()),
                    mimetype='multipart/x-mixed-replace; boundary=frame')

if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)
This question already has answers here:
I'm trying to use a simple webcam capture upload, upload isn't working? (2 answers)
How do i take picture from client side(html) and save it to server side(Python) (2 answers)
How can I capture an image via the user's webcam using getUserMedia? (3 answers)
Closed 4 years ago.
First of all, sorry about my not-so-good English skills; I appreciate corrections.
I have tried to build a Python program that can access the camera through a Flask web service. This is the code:
from flask import Flask, render_template, Response
import cv2

app = Flask(__name__)

@app.route('/')
def index():
    return render_template('index.html')

def gen():
    i = 1
    while i < 10:
        # encode the counter so that bytes are concatenated with bytes
        yield (b'--frame\r\n'
               b'Content-Type: text/plain\r\n\r\n' + str(i).encode() + b'\r\n')
        i += 1

def get_frame():
    camera_port = 0
    ramp_frames = 100
    camera = cv2.VideoCapture(camera_port)  # this makes a web cam object
    i = 1
    while True:
        retval, im = camera.read()
        imgencode = cv2.imencode('.jpg', im)[1]
        stringData = imgencode.tobytes()
        # the parts are JPEG data, so advertise image/jpeg rather than text/plain
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + stringData + b'\r\n')
        i += 1
    del (camera)

@app.route('/calc')
def calc():
    return Response(get_frame(), mimetype='multipart/x-mixed-replace; boundary=frame')

if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=False, threaded=True)
And the html code:
<html>
  <head>
    <title>Video Streaming Demonstration</title>
  </head>
  <body>
    <h1>Video Streaming Demonstration</h1>
    <img src="{{ url_for('calc') }}">
    <!-- <h1>{{ url_for('calc') }}</h1> -->
  </body>
</html>
The problem I have with this is that when I run it on my laptop and Flask deploys, accessing the service from a different device doesn't use that device's camera, but activates my laptop's webcam.
Is it possible to make the service use the camera of the device from which I access it (the client) instead of the laptop's (the server's) camera?
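Not with this code as written: cv2.VideoCapture(0) always opens a camera attached to the machine running Flask, so every visitor sees the server's webcam. To use the client's camera, the browser has to capture it with getUserMedia (as in the camera.js example earlier in this thread) and send the frames to Flask over HTTP or a WebSocket. A minimal sketch of the receiving side, assuming the browser POSTs one raw JPEG frame per request to a hypothetical /upload route:

import os
from flask import Flask, request

app = Flask(__name__)
os.makedirs('uploads', exist_ok=True)   # hypothetical folder for received frames

@app.route('/upload', methods=['POST'])
def upload():
    # The browser is assumed to POST raw JPEG bytes for one frame per request.
    frame_bytes = request.get_data()
    with open(os.path.join('uploads', 'latest_frame.jpg'), 'wb') as f:
        f.write(frame_bytes)
    return 'ok'

if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=False)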