This is a 4-channel video streaming program.
When running with QThread, if you stream 4 at the same time, the speed of the video will slow down and CPU usage will be 100%.
Can I change this to multiprocessing?
I want to speed up a 4 channel cctv program.
threadMode.py
from PyQt5.QtCore import QThread, pyqtSignal, Qt
from PyQt5.QtGui import QImage
import cv2
class StreamingThread(QThread):
    """QThread that reads frames from an RTSP/RTMP source and emits them as QImages.

    setRtsp() and setSize() must be called before start().  Frames are
    converted to RGB, wrapped in a QImage, scaled to the requested size and
    delivered through the changePixmap signal.
    """
    changePixmap = pyqtSignal(QImage)

    def __init__(self):
        super(StreamingThread, self).__init__()
        self.running = True   # loop flag checked by run(), cleared by stop()
        self.camUrl = None    # stream URL, set via setRtsp()
        self.Qsize = None     # target QSize for scaling, set via setSize()
        self.cap = None       # cv2.VideoCapture, created in run()

    def setRtsp(self, camUrl):
        """Set the RTSP/RTMP URL to open when the thread starts."""
        self.camUrl = camUrl

    def setSize(self, Qsize):
        """Set the QSize the emitted frames are scaled to."""
        self.Qsize = Qsize

    def run(self):
        try:
            self.cap = cv2.VideoCapture(self.camUrl)
            if not self.cap.isOpened():
                # BUG FIX: the original silently did nothing when the URL
                # could not be opened; report it so the failure is visible.
                print("RTSP(RTMP) Video Streaming Fail")
                return
            while self.running:
                success, frame = self.cap.read()
                if success:
                    rgbImage = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                    h, w, ch = rgbImage.shape
                    bytesPerLine = ch * w
                    convertToQtFormat = QImage(rgbImage.data, w, h, bytesPerLine, QImage.Format_RGB888)
                    p = convertToQtFormat.scaled(self.Qsize, Qt.KeepAspectRatio)
                    self.changePixmap.emit(p)
                else:
                    print("RTSP(RTMP) Video Streaming Fail")
                    self.stop()
        except Exception as e:
            print(e)
            self.stop()
        finally:
            # BUG FIX: release the capture so the network connection /
            # device handle is not leaked when the thread ends.
            if self.cap is not None:
                self.cap.release()

    def stop(self):
        """Request the streaming loop to end and quit the thread's event loop."""
        if self.running:
            self.running = False
            print("Streaming Stop")
            self.quit()
Main.py
def setChannel1(self):
    """Handle a selection change in channel-1's combo box.

    Index 0 means "no channel": the running stream thread is killed and the
    display label is cleared one second later.  Any other index looks up the
    camera URL for that entry and restarts the streaming thread with it.
    """
    # Index 0: No Channel
    if self.chComboBox1.currentIndex() == 0:
        # NOTE(review): QThread.terminate() kills the thread abruptly and is
        # discouraged by the Qt docs — a cooperative stop() would be safer.
        self.mStreamingThread1.terminate()
        # Clear the display shortly after, outside this event handler.
        sched = BackgroundScheduler()
        sched.add_job(self.clearChannel, 'date', run_date=datetime.datetime.now() + datetime.timedelta(seconds=1), args=[self.cctvStreaming1])
        sched.start()
    else:
        ip, url, channel, boxId = self.findUrl(self.chComboBox1)
        if url != '' and channel != '':
            # Stop any previous stream before reconfiguring the thread.
            self.mStreamingThread1.terminate()
            self.mStreamingThread1.wait(1)
            self.mStreamingThread1.setRtsp(url)
            self.mStreamingThread1.setSize(self.cctvStreaming1.size())
            # NOTE(review): connect() here adds a new connection on every
            # channel switch — repeated calls may stack duplicate slots.
            self.mStreamingThread1.changePixmap.connect(self.setImage1)
            self.mStreamingThread1.start()
            self.show()
            logger.info("Channel1 Streaming Success")
#pyqtSlot(QImage)
def setImage1(self, image):
    """Slot: render the received QImage on channel-1's display label."""
    pixmap = QPixmap.fromImage(image)
    self.cctvStreaming1.setPixmap(pixmap)
Related
I'm a bit stuck here. I would like to display a webcam live feed in a PyQt5 window.
When i push the button the feed has to start, button turns green and text changes to "Stop camera" , on the next click the feed has to stop and the button has to revert to its original status and the feed is replaced with an image.
At the moment I only get a still image.
As soon as I get this working I would like to add some threading.
Here is the code (updated):
import os
import threading
import timeit
import cv2
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from PyQt5.QtCore import Qt, QThread, pyqtSignal
from PyQt5.QtGui import *
from PyQt5.uic import loadUi
class Worker1(QThread):
    """Webcam capture thread: reads frames, mirrors them, and emits QImages."""
    ImageUpdate = pyqtSignal(QImage)

    def run(self):
        self.ThreadActive = True
        self.Capture = cv2.VideoCapture(0)
        while self.ThreadActive:
            ret, frame = self.Capture.read()
            if ret:
                Image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                FlippedImage = cv2.flip(Image, 1)  # mirror horizontally (selfie view)
                ConvertToQtFormat = QImage(FlippedImage.data, FlippedImage.shape[1], FlippedImage.shape[0], QImage.Format_RGB888)
                Pic = ConvertToQtFormat.scaled(640, 480, Qt.KeepAspectRatio)
                self.ImageUpdate.emit(Pic)
        self.Capture.release()
        cv2.destroyAllWindows()

    def stop(self):
        """Ask run() to finish and wait for it to exit cleanly.

        BUG FIX: the original called self.terminate() immediately after
        clearing the flag, which can kill the thread before
        Capture.release() runs and leak the camera handle.  wait() lets the
        loop exit and release the device.
        """
        self.ThreadActive = False
        self.wait()
class FaceIdWindow(QtWidgets.QMainWindow):
    """Main window: toggles the webcam preview and hosts face workflows."""

    def __init__(self):
        super(FaceIdWindow, self).__init__()
        self.ui = loadUi("uidesign/facereco/FaceId.ui", self)
        self.ui.cmdChoosePicture.clicked.connect(self.ChoosePicture)
        self.ui.cmdStartCamera.clicked.connect(self.StartCamera)
        self.ui.cmdTrainFace.clicked.connect(self.TrainFace)
        self.ui.cmdProcess.clicked.connect(self.Process)
        self.status_camera = "STOPPED"  # toggled by StartCamera()

    def StartCamera(self):
        """Toggle the camera feed.

        First click starts the Worker1 capture thread and turns the button
        green; second click stops the thread and restores the placeholder
        image and default button style.
        """
        # BUG FIX: removed the duplicated `start = timeit.default_timer()` line.
        start = timeit.default_timer()
        print("Start StartCamera\n")
        print(self.status_camera)
        if self.status_camera == "STOPPED":
            self.status_camera = "STARTED"
            self.ui.cmdStartCamera.setStyleSheet("background-color: green")
            self.ui.cmdStartCamera.setText("Stop camera")
            self.Worker1 = Worker1()
            # BUG FIX: connect the signal before start() so no frame emitted
            # during startup is missed.
            self.Worker1.ImageUpdate.connect(self.ImageUpdateSlot)
            self.Worker1.start()
        else:
            self.status_camera = "STOPPED"
            self.Worker1.stop()
            image_path = str(os.getcwd())
            image_path = image_path + "/assets/clipart/clipartfaceid2.png"
            self.lblPicture.setPixmap(QtGui.QPixmap(image_path))
            self.ui.cmdStartCamera.setStyleSheet("background-color: ")
            self.ui.cmdStartCamera.setText("Start camera")
            print("Stop StartCamera\n")
        end = timeit.default_timer()
        print("Process Time: ", (end - start))

    def ImageUpdateSlot(self, Image):
        """Slot: paint the latest captured frame onto the picture label."""
        self.lblPicture.setPixmap(QPixmap.fromImage(Image))
Any suggestions ?
Cheers , John
It seems I wasn't starting and stopping my thread in the right place. The code is updated, but if anyone has any improvements, don't hesitate! Cheers, John
I have a task to process a streaming video from Hikvision IP cam using OpenCV
I try this
RTSP
"""
# Open the Hikvision stream directly over RTSP.
cap = cv2.VideoCapture()
# BUG FIX: credentials are separated from the host by '@', not '#'
# ('#' starts a URL fragment, so the host part would never be parsed).
cap.open("rtsp://yourusername:yourpassword@172.16.30.248:555/Streaming/channels/1/")
and this
using API Hikvision
"""
# Pull a snapshot through the Hikvision ISAPI client instead of RTSP.
cam = Client('http://192.168.1.10', 'admin', 'password', timeout=30)
cam.count_events = 2
response = cam.Streaming.channels[101].picture(method='get', type='opaque_data')
# BUG FIX: `f` was used without ever being opened; write the snapshot to the
# file that cv2.imread() loads below, and close it before reading.
with open('screen.jpg', 'wb') as f:
    for chunk in response.iter_content(chunk_size=1024):
        if chunk:
            f.write(chunk)
img = cv2.imread('screen.jpg')
cv2.imshow("show", img)
cv2.waitKey(1)
In the first case, I have a delay between realtime and cap.read() about 9-20 seconds.
I tried to work around it with the following "hack", but it gave no results.
"""
class CameraBufferCleanerThread(threading.Thread):
    """Continuously drains a camera's internal buffer, keeping only the newest frame.

    Reading as fast as possible prevents the driver-side buffer from filling
    and introducing multi-second latency.  The latest successfully read frame
    is exposed as .last_frame.
    """

    def __init__(self, camera, name='camera-buffer-cleaner-thread'):
        self.camera = camera      # any object with a read() -> (ok, frame) method
        self.last_frame = None    # newest successfully read frame
        self.running = True       # BUG FIX: flag so the loop can be stopped
        super(CameraBufferCleanerThread, self).__init__(name=name)
        # BUG FIX: daemonize so this endless reader does not block
        # interpreter shutdown.
        self.daemon = True
        self.start()

    def run(self):
        while self.running:
            ret, frame = self.camera.read()
            # BUG FIX: only keep successful reads — the original overwrote
            # last_frame with None whenever a read failed.
            if ret:
                self.last_frame = frame

    def stop(self):
        """Ask the drain loop to exit; join() afterwards to wait for it."""
        self.running = False
"""
The second case shows frames with a delay of 1-2 seconds, which is acceptable, but fps = 1, which is not very good.
Are there any options that can help you get a stream with low latency and normal fps?
In nathancys's post I found a working solution. It is done.
I used simple modification his code for my case for getting frame in main function.
from threading import Thread
import cv2, time
from threading import Thread
import cv2, time
class ThreadedCamera(object):
    """Reads a stream on a daemon thread so the caller never blocks on read().

    The newest frame is always available as .frame and the result of the most
    recent read as .status.
    """

    def __init__(self, src=0):
        self.capture = cv2.VideoCapture(src)
        # Keep the driver buffer minimal so .frame stays close to real time.
        self.capture.set(cv2.CAP_PROP_BUFFERSIZE, 1)
        self.FPS = 1/100                 # polling period in seconds
        self.FPS_MS = int(self.FPS * 1000)
        self.running = True              # BUG FIX: flag so the loop can be stopped
        # First initialisation of self.status and self.frame
        (self.status, self.frame) = self.capture.read()
        # Start frame retrieval thread
        self.thread = Thread(target=self.update, args=())
        self.thread.daemon = True
        self.thread.start()

    def update(self):
        """Background loop: keep overwriting .frame with the newest capture."""
        while self.running:
            if self.capture.isOpened():
                (self.status, self.frame) = self.capture.read()
            # BUG FIX: sleep even when the capture is closed; the original
            # busy-spun a full core in that case.
            time.sleep(self.FPS)

    def stop(self):
        """Stop the polling loop and release the capture device (new, optional)."""
        self.running = False
        self.thread.join(timeout=1)
        self.capture.release()
if __name__ == '__main__':
    # BUG FIX: RTSP credentials are separated from the host with '@', not '#'
    # ('#' begins a URL fragment, so the host would never be parsed).
    src = 'rtsp://admin:password@192.168.7.100:554/ISAPI/Streaming/Channels/101'
    threaded_camera = ThreadedCamera(src)
    while True:
        try:
            cv2.imshow('frame', threaded_camera.frame)
            cv2.waitKey(threaded_camera.FPS_MS)
        except AttributeError:
            # .frame may not exist yet while the first read is in flight.
            pass
I have written an application in PyQt5. I am basically displaying a camera feed (in this case my web cam), but the problem is that the frame size keeps on increasing at run time and ultimately goes out of my laptop screen. I'm unable to figure out what the problem is.
Can anyone please explain what I'm doing wrong here?
Below is the code snippet.
from PyQt5 import QtCore, QtGui, QtWidgets
from threading import Thread
from collections import deque
from datetime import datetime
import time
import sys
import cv2
import imutils
class CameraWidget(QtWidgets.QWidget):
    """Independent camera feed.

    Uses threading to grab IP camera frames in the background.

    @param width - Width of the video frame
    @param height - Height of the video frame
    @param stream_link - IP/RTSP/Webcam link
    @param aspect_ratio - Whether to maintain frame aspect ratio or force into frame
    """

    def __init__(self, width=0, height=0, aspect_ratio=False, parent=None, deque_size=1):
        super(CameraWidget, self).__init__(parent)
        # Initialize deque used to store frames read from the stream
        self.deque = deque(maxlen=deque_size)
        self.maintain_aspect_ratio = aspect_ratio
        self.camera_stream_link = 0   # 0 = default local webcam
        # Flag to check if camera is valid/working
        self.online = False
        self.capture = None
        self.video_frame = QtWidgets.QLabel()
        self.load_network_stream()
        # Start background frame grabbing
        self.get_frame_thread = Thread(target=self.get_frame, args=())
        self.get_frame_thread.daemon = True
        self.get_frame_thread.start()
        # Periodically set video frame to display
        # NOTE(review): QTimer.start() takes milliseconds as an int; .5 is
        # effectively 0 ms, so the timer fires as fast as the event loop
        # allows — likely should be e.g. 33 for ~30 fps.  Confirm intent.
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.set_frame)
        self.timer.start(.5)
        print('Started camera: {}'.format(self.camera_stream_link))

    def load_network_stream(self):
        """Verifies stream link and open new stream if valid"""
        def load_network_stream_thread():
            # Runs off the GUI thread so a slow/invalid link doesn't block.
            if self.verify_network_stream(self.camera_stream_link):
                self.capture = cv2.VideoCapture(self.camera_stream_link)
                self.online = True
        self.load_stream_thread = Thread(target=load_network_stream_thread, args=())
        self.load_stream_thread.daemon = True
        self.load_stream_thread.start()

    def verify_network_stream(self, link):
        """Attempts to receive a frame from given link"""
        cap = cv2.VideoCapture(link)
        if not cap.isOpened():
            return False
        cap.release()
        return True

    def get_frame(self):
        # time.sleep(5)
        """Reads frame, resizes, and converts image to pixmap"""
        while True:
            try:
                if self.capture.isOpened() and self.online:
                    # Read next frame from stream and insert into deque
                    status, frame = self.capture.read()
                    if status:
                        self.deque.append(frame)
                    else:
                        # Read failed: mark offline so the branch below retries.
                        self.capture.release()
                        self.online = False
                else:
                    # Attempt to reconnect
                    print('attempting to reconnect', self.camera_stream_link)
                    self.load_network_stream()
                    self.spin(2)
                self.spin(.001)
            except AttributeError:
                # self.capture may still be None before the loader thread runs.
                pass

    def spin(self, seconds):
        """Pause for set amount of seconds, replaces time.sleep so program doesnt stall"""
        time_end = time.time() + seconds
        while time.time() < time_end:
            QtWidgets.QApplication.processEvents()

    def set_frame(self):
        """Sets pixmap image to video frame"""
        if not self.online:
            self.spin(1)
            return
        if self.deque and self.online:
            # Grab latest frame
            frame = self.deque[-1]
            # Keep frame aspect ratio
            # NOTE(review): screen_width/screen_height are only set by
            # set_frame_params(); this raises AttributeError if set_frame()
            # runs before the main window has called it.
            if self.maintain_aspect_ratio:
                self.frame = imutils.resize(frame, width=self.screen_width)
            # Force resize
            else:
                self.frame = cv2.resize(frame, (self.screen_width, self.screen_height))
            self.frame = cv2.cvtColor(self.frame, cv2.COLOR_BGR2RGB)
            h, w, ch = self.frame.shape
            bytesPerLine = ch * w
            # Convert to pixmap and set to video frame
            self.img = QtGui.QImage(self.frame, w, h, bytesPerLine, QtGui.QImage.Format_RGB888)
            self.pix = QtGui.QPixmap.fromImage(self.img)
            self.video_frame.setPixmap(self.pix)

    def set_frame_params(self, width, height):
        # Target display size, pushed in from the main window on show/resize.
        self.screen_width = width
        self.screen_height = height

    def get_video_frame(self):
        # NOTE(review): setScaledContents(True) makes the QLabel scale its
        # pixmap to the label size; combined with layout-driven resizing this
        # feedback can produce the ever-growing frame described above.
        self.video_frame.setScaledContents(True)
        return self.video_frame
class MainWindow(QtWidgets.QMainWindow):
    """Top-level window hosting a single CameraWidget inside a framed layout."""

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        # Middle frame
        self.mid_frame = QtWidgets.QFrame()
        self.mid_frame.setStyleSheet("background-color: rgb(153, 187, 255)")
        self.camera = CameraWidget()
        # Create camera widgets
        print('Creating Camera Widgets...')
        self.video_frame = self.camera.get_video_frame()
        self.mid_layout = QtWidgets.QHBoxLayout()
        self.mid_layout.addWidget(self.video_frame)
        self.mid_frame.setLayout(self.mid_layout)
        # Stack the frame into the central widget with no margins/spacing.
        self.widget = QtWidgets.QWidget()
        self.layout = QtWidgets.QVBoxLayout()
        self.layout.addWidget(self.mid_frame)
        self.layout.setContentsMargins(0,0,0,0)
        self.layout.setSpacing(0)
        self.widget.setLayout(self.layout)
        self.setCentralWidget(self.widget)

    def event(self, e):
        # Forward show/resize geometry to the camera so frames are resized to
        # the visible area (minus a 10px margin on each dimension).
        if e.type() in (QtCore.QEvent.Show, QtCore.QEvent.Resize):
            print("resize ", self.mid_frame.width(), self.mid_frame.height())
            self.camera.set_frame_params(self.mid_frame.width()-10, self.mid_frame.height()-10)
        return QtWidgets.QMainWindow.event(self, e)
if __name__ == '__main__':
    # Create main application window
    app = QtWidgets.QApplication([])
    app.setStyle(QtWidgets.QStyleFactory.create("Cleanlooks"))
    w = MainWindow()
    w.showMaximized()
    # Run the Qt event loop until the window closes.
    sys.exit(app.exec_())
I'm trying to display an rtsp stream via kivy video player, this runs fine but in my video I get a 2 or 3 second delay in the stream which I would ideally like to eliminate to 0.5 to 1 seconds.
Here's what I have:
from kivy.app import App
from kivy.uix.video import Video
class TestApp(App):
    """Minimal Kivy app that plays an RTSP stream with the stock Video widget."""

    def build(self):
        video = Video(source='rtsp://my-stream-address', state='play')
        video.size = (720, 320)
        # Keep the widget invisible until the first texture arrives,
        # avoiding a black rectangle while the stream buffers.
        video.opacity = 0
        video.state = 'play'
        video.bind(texture=self._play_started)
        return video

    def _play_started(self, instance, value):
        # First frame decoded — reveal the widget.
        instance.opacity = 1


if __name__ == '__main__':
    TestApp().run()
EDIT
I have a working solution to the video streaming BUT I don't know how to get this into my kivy gui.
Here's my streaming solution:
from threading import Thread
import cv2, time
class ThreadedCamera(object):
    """Reads an RTSP/webcam stream on a daemon thread; newest frame in .frame."""

    def __init__(self, src=0):
        self.capture = cv2.VideoCapture(src)
        # Short driver buffer keeps .frame close to real time.
        self.capture.set(cv2.CAP_PROP_BUFFERSIZE, 2)
        self.FPS = 1/30                  # polling period in seconds
        self.FPS_MS = int(self.FPS * 1000)
        self.running = True              # BUG FIX: flag so the loop can be stopped
        # Start frame retrieval thread
        self.thread = Thread(target=self.update, args=())
        self.thread.daemon = True
        self.thread.start()

    def update(self):
        """Background loop: keep overwriting .frame with the newest capture."""
        while self.running:
            if self.capture.isOpened():
                (self.status, self.frame) = self.capture.read()
            # BUG FIX: sleep even when the capture is closed; the original
            # busy-spun a full core in that case.
            time.sleep(self.FPS)

    def show_frame(self):
        """Display the most recent frame (raises AttributeError before the first read)."""
        cv2.imshow('frame', self.frame)
        cv2.waitKey(self.FPS_MS)

    def stop(self):
        """Stop polling and release the capture device (new, optional)."""
        self.running = False
        self.thread.join(timeout=1)
        self.capture.release()
if __name__ == '__main__':
    src = 'rtsp://my-stream-address'
    threaded_camera = ThreadedCamera(src)
    while True:
        try:
            threaded_camera.show_frame()
        except AttributeError:
            # .frame doesn't exist yet — keep polling until the first read.
            pass
EDIT 2
I have also found this implementation of a kivy video widget not using the built in Video widget. I'm still unsure how to combine my working solution with a Kivy widget but perhaps this can help someone help me:
class KivyCamera(Image):
    """Kivy Image widget that polls an OpenCV capture and paints frames as textures."""

    source = ObjectProperty()
    fps = NumericProperty(30)

    def __init__(self, **kwargs):
        super(KivyCamera, self).__init__(**kwargs)
        self._capture = None
        if self.source is not None:
            self._capture = cv2.VideoCapture(self.source)
        # Poll for new frames on the Kivy clock at the requested fps.
        Clock.schedule_interval(self.update, 1.0 / self.fps)

    def on_source(self, *args):
        # Kivy property callback: reopen the capture when `source` changes.
        if self._capture is not None:
            self._capture.release()
        self._capture = cv2.VideoCapture(self.source)

    # BUG FIX: the decorator was mangled to the comment '#property', which
    # turned `capture` into a plain method — update()'s self.capture.read()
    # would then fail.  Restore the @property decorator.
    @property
    def capture(self):
        return self._capture

    def update(self, dt):
        ret, frame = self.capture.read()
        if ret:
            # OpenCV frames are top-down BGR; flip vertically for GL textures.
            buf1 = cv2.flip(frame, 0)
            buf = buf1.tostring()
            image_texture = Texture.create(
                size=(frame.shape[1], frame.shape[0]), colorfmt="bgr"
            )
            image_texture.blit_buffer(buf, colorfmt="bgr", bufferfmt="ubyte")
            self.texture = image_texture
My initial question was for Kivy Video player widget. But now the solution I am finding is using threading with OpenCV, so I have changed the tags on this question and will accept any Kivy implementation of this solution.
I have the following class for streaming video through RTSP using OpenCV 3.1 with Python 3.4.3. Everything works fine, but if the camera is suddenly disconnected while running (i.e. the camera is unplugged), the program hangs at self.capture.read() and never returns the False value (or any value, for that matter) needed to handle closing the connection. That's my understanding of handling sudden disconnections for VideoCapture in OpenCV. Is there a better way?
"""Classes for video processing"""
import cv2
from PyQt5 import QtGui, QtCore
from settings import CAMERA_IP_ADDRESS, CAMERA_PORT, FRAME_RATE
from structures import MessageLevel
class VideoStream(QtCore.QObject):
    """Class for displaying and recording video data from an RTSP camera."""

    logEvent = QtCore.pyqtSignal(MessageLevel, str)
    video_frame = QtCore.pyqtSignal(QtGui.QPixmap)
    # BUG FIX: this signal was emitted in set_address_ip() but never
    # declared, raising AttributeError at runtime.
    address_ip_signal = QtCore.pyqtSignal(str)

    def __init__(self, parent=None):
        super(VideoStream, self).__init__(parent)
        self.connected = False
        self.stream = False
        # BUG FIX: the QTimer was a class attribute, created at import time
        # (possibly before the QApplication exists) and shared by every
        # instance; make it per-instance instead.
        self.video_timer = QtCore.QTimer()
        self.video_timer.timeout.connect(self.stream_loop)
        self.video_width = 1280
        self.video_height = 720
        self.frame_rate_milliseconds = int(round(1/FRAME_RATE * 1000))
        self.save_path = None
        self.address_ip = None
        self.capture = None
        self.video_file = None
        self.url = 'rtsp://ip_path_to_camera'

    def setup(self):
        """Function to set defaults and update GUI, should only be called once during GUI setup"""
        # Initialize variables
        self.set_address_ip(CAMERA_IP_ADDRESS)

    def connect(self):
        """Connect to rtsp video stream and start the timed read loop."""
        if not self.connected:
            self.video_file = cv2.VideoWriter(self.save_path, cv2.VideoWriter_fourcc('X', 'V', 'I', 'D'),
                                              FRAME_RATE, (self.video_width, self.video_height))
            self.capture = cv2.VideoCapture(self.url)
            if self.capture.isOpened():
                self.connected = True
                self.stream = True
                self.video_timer.start(self.frame_rate_milliseconds)
            else:
                print('Device failed to connect')

    def stop(self):
        """Stop streaming and release the capture device and writer."""
        self.pause()
        self.connected = False
        self.capture.release()
        self.video_file = None

    def pause(self):
        """Stop the timer-driven read loop without disconnecting."""
        self.video_timer.stop()
        self.stream = False

    def stream_loop(self):
        """Timer slot: read one frame, record it, and emit it for display."""
        if self.stream:
            ret, frame = self.capture.read()
            if ret:
                self.video_file.write(frame)
                frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                image = QtGui.QImage(frame, self.video_width, self.video_height, QtGui.QImage.Format_RGB888)
                self.video_frame.emit(QtGui.QPixmap.fromImage(image))
            else:
                # BUG FIX: corrected typo in the user-facing message
                # ('diconnected' -> 'disconnected').
                print('Device disconnected')
                self.stop()

    def set_address_ip(self, ip_address):
        """Store the camera IP and broadcast it to listeners."""
        self.address_ip = ip_address
        self.address_ip_signal.emit(self.address_ip)

    def log_message(self, level, message):
        """Forward a log message to listeners via logEvent."""
        self.logEvent.emit(level, message)