Constant camera grabbing with OpenCV & Python multiprocessing

I want to constantly read images from an OpenCV camera in Python and read the latest image from the main program. This is needed because of problematic hardware.
After messing around with threads and getting very low efficiency (duh!), I'd like to switch to multiprocessing.
Here's the threading version:
import cv2
from threading import Thread

class WebcamStream:
    # initialization method
    def __init__(self, stream_id=0):
        self.stream_id = stream_id  # default is 0 for main camera

        # opening video capture stream
        self.camera = cv2.VideoCapture(self.stream_id)
        self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, 3840)
        self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 2880)
        if self.camera.isOpened() is False:
            print("[Exiting]: Error accessing webcam stream.")
            exit(0)

        # reading a single frame from camera stream for initializing
        _, self.frame = self.camera.read()

        # self.stopped starts as True; start() sets it to False
        self.stopped = True

        # thread instantiation
        self.t = Thread(target=self.update, args=())
        self.t.daemon = True  # daemon threads run in background

    # method to start thread
    def start(self):
        self.stopped = False
        self.t.start()

    # method passed to thread to read next available frame
    def update(self):
        while True:
            if self.stopped is True:
                break
            _, self.frame = self.camera.read()
        self.camera.release()

    # method to return latest read frame
    def read(self):
        return self.frame

    # method to stop reading frames
    def stop(self):
        self.stopped = True
And the main program:
if __name__ == "__main__":
    main_camera_stream = WebcamStream(stream_id=0)
    main_camera_stream.start()
    frame = main_camera_stream.read()
Can someone please help me translate this to multiprocessing land?
Thanks!

I've written several solutions to similar problems, but it's been a little while so here we go:
I would use shared_memory as a buffer to read frames into, which can then be read by another process. My first inclination is to initialize the camera and read frames in the child process, because that seems like it would be a "set it and forget it" kind of thing.
import numpy as np
import cv2
from multiprocessing import Process, Queue
from multiprocessing.shared_memory import SharedMemory

def produce_frames(q):
    # get the first frame to calculate size of buffer
    cap = cv2.VideoCapture(0)
    success, frame = cap.read()
    shm = SharedMemory(create=True, size=frame.nbytes)
    framebuffer = np.ndarray(frame.shape, frame.dtype, buffer=shm.buf)  # could also maybe use array.array instead of numpy, but I'm familiar with numpy
    framebuffer[:] = frame  # in case you need to send the first frame to the main process
    q.put(shm)          # send the buffer back to main
    q.put(frame.shape)  # send the array details
    q.put(frame.dtype)
    try:
        while True:
            cap.read(framebuffer)
    except KeyboardInterrupt:
        pass
    finally:
        shm.close()   # call this in all processes where the shm exists
        shm.unlink()  # call from only one process

def consume_frames(q):
    shm = q.get()    # get the shared buffer
    shape = q.get()
    dtype = q.get()
    framebuffer = np.ndarray(shape, dtype, buffer=shm.buf)  # reconstruct the array
    try:
        while True:
            cv2.imshow("window title", framebuffer)
            cv2.waitKey(100)
    except KeyboardInterrupt:
        pass
    finally:
        shm.close()

if __name__ == "__main__":
    q = Queue()
    producer = Process(target=produce_frames, args=(q,))
    producer.start()
    consume_frames(q)
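If you want a cleaner shutdown than Ctrl-C, a small variation (my sketch, not part of the original answer; the event name is illustrative) is to pass a multiprocessing.Event into the producer and set it from the main process:

from multiprocessing import Event

def produce_frames(q, stop_event):
    cap = cv2.VideoCapture(0)
    success, frame = cap.read()
    shm = SharedMemory(create=True, size=frame.nbytes)
    framebuffer = np.ndarray(frame.shape, frame.dtype, buffer=shm.buf)
    framebuffer[:] = frame
    q.put(shm)
    q.put(frame.shape)
    q.put(frame.dtype)
    try:
        while not stop_event.is_set():  # loop until the main process asks us to stop
            cap.read(framebuffer)
    finally:
        cap.release()
        shm.close()   # close in every process that opened the shm
        shm.unlink()  # unlink from exactly one process

# in __main__:
#   stop_event = Event()
#   producer = Process(target=produce_frames, args=(q, stop_event))
#   producer.start()
#   consume_frames(q)
#   stop_event.set()
#   producer.join()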

Related

Set a custom timeout for OpenCV 'VideoCapture.read' function

I am working on a script that reads an rtsp stream from a camera. The problem is that sometimes the connection is not perfect and the frames take a while to arrive.
From what I have found, the read function from cv2.VideoCapture does not have a timeout that we can modify without recompiling, and the default (30 seconds) is way too much for what I need.
I tried two approaches, one using threading and the other using multiprocessing.
The former didn't work as expected since I cannot kill the thread fast enough and the script dies. The latter means that I am creating and destroying processes at a rate of 1/fps when everything is working, which I don't think is a good idea.
The following is a minimum working example. When proc = True, it uses multiprocessing, and when proc = False, it uses threading. The delay of the read function can be mimicked by setting TIMESLEEP > 0.
import cv2
import time
import queue
import psutil
import threading
import multiprocessing as mp

TIMESLEEP = 0

class FrameThread(threading.Thread):
    def __init__(self, func, res):
        super().__init__()
        self.daemon = True
        self.res = res
        self.func = func

    def run(self):
        time.sleep(TIMESLEEP)
        self.res.put(self.func)

def putframe(func, res):
    time.sleep(TIMESLEEP)
    res.put(func)

class Test(object):
    def __init__(self, url, proc = True):
        self.url = url
        self.black = [1, 2, 3]
        self.fps = 10
        self.proc = proc
        self._rq = mp.Queue() if self.proc else queue.Queue()

    def _timeout_func(self, func, timeout = 10):
        if self.proc:
            _proc = mp.Process(target = putframe, args = (func, self._rq))
            _proc.start()
        else:
            FrameThread(func, self._rq).start()
        try:
            t1 = time.time()
            ret, frame = self._rq.get(block = True, timeout = timeout)
            diff_fps = 1 / self.fps - (time.time() - t1)
            time.sleep(diff_fps if diff_fps > 0 else 0)
            if self.proc:
                _proc.terminate()
            frame = frame if ret else self.black.copy()
        except queue.Empty:
            diff_fps = 1 / self.fps - timeout
            time.sleep(diff_fps if diff_fps > 0 else 0)
            if self.proc:
                _proc.terminate()
            ret, frame = True, self.black.copy()
        return ret, frame

    def run(self):
        cap = cv2.VideoCapture(self.url)
        while True:
            ret, frame = self._timeout_func(cap.read(), timeout = 0.1)
            if not ret:
                break
            print(self.proc if self.proc else len(psutil.Process().threads()), end='\r')

proc = False
test = Test('./video.mp4', proc = proc)
test.run()
Do you guys have any other idea or approach to do this, or any improvement on the above code?
Thanks!
I have not tried this sort of script, but I saw a similar kind of question and would suggest using the VLC Python bindings (you can install them with pip install python-vlc) to play the stream:
import vlc
import time

player = vlc.MediaPlayer('rtsp://:8554/output.h264')
player.play()
Then take a snapshot every second or so:
while 1:
    time.sleep(1)
    player.video_take_snapshot(0, '.snapshot.tmp.png', 0, 0)
And then you can use SimpleCV or something for processing (just load the image file '.snapshot.tmp.png' into your processing library).
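For the processing step, here is a minimal sketch (my illustration, assuming OpenCV instead of SimpleCV) that repeatedly loads the snapshot file written above:

import cv2
import time

while True:
    time.sleep(1)
    img = cv2.imread('.snapshot.tmp.png')  # read the latest snapshot from disk
    if img is None:
        continue  # snapshot not written yet (or currently being written)
    # ... run whatever processing you need on img here ...
    cv2.imshow('snapshot', img)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break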

Writing to a Multiprocessing Array as image buffer

I am looking to use a shared memory array as a buffer for images in numpy format.
So far the videos have been black screens, as I'm unsure of how to update the shared memory array in the child process.
The queue is loaded with the index of the buffers, and the child process "vid_writer" uses that to write out the frame to the video.
So far the assignment in the add_frame_obj method is not actually updating the shared memory.
Do I need to tell the child process to "update" the buffer var, or am I missing something?
import numpy as np
import cv2
from multiprocessing import Process, Event, Lock, Array, Manager

class VidWriterArray:
    def __init__(self, directory, vid_name='outvid.avi', fps=30, framesize=(1280, 720)):
        # Control---
        print(f'Creating {__class__.__name__} object...', end='')
        self.stop_req = Event()
        self.stop_ack = Event()
        self.started = False
        # Data Objects---
        self.filename = directory + '/' + vid_name
        self.vid_fps = fps
        self.vid_framesize = framesize
        arrayshape = (framesize[1], framesize[0], 3)
        self.queue = Manager().Queue()
        self.mp_arrays = ((Array('I', int(np.prod(arrayshape)), lock=Lock())) for _ in range(5))  # create 5 buffers
        self.buffer = [(m, np.frombuffer(m.get_obj(), dtype='I').reshape(arrayshape), Event()) for m in self.mp_arrays]
        self.process = Process(name='VidWriter', target=self.vid_writer,
                               args=(self.queue, self.buffer, self.filename, self.vid_framesize, self.vid_fps,
                                     self.stop_req, self.stop_ack,))
        self.process.daemon = True  # Set process to daemon to force process closed if calling thread is terminated.
        print(f'\rCreating {__class__.__name__} object...Done')

    def add_frame_obj(self, img):
        for buff_index in range(len(self.buffer)):
            m_arr, buff, buff_rdy = self.buffer[buff_index]
            if m_arr.acquire(block=False) and not buff_rdy.is_set():  # acquired lock
                buff = img
                buff_rdy.set()
                m_arr.release()
                self.queue.put(buff_index)
                print(f'Placed image in {buff_index}')
                return

    def start(self):
        """
        Call to start parallel process object
        :return: nothing
        """
        self.stop_req.clear()  # Reset Flag
        self.stop_ack.clear()  # Reset Flag
        print(f'Starting {__class__.__name__} on parallel process...')
        self.process.start()
        print(f'{__class__.__name__} process started...PID: {self.process.pid}')
        if self.process.is_alive():
            self.started = True  # set a fast property to check

    def stop(self):
        try:
            self.stop_req.set()
            print(f'Stop request sent to {__class__.__name__}...', end='')
            if self.started:
                self.stop_ack.wait(5)  # Wait up to 5 seconds for reply
                self.stop_req.clear()
                wait_false(self.stop_req, 5)
                self.process.join(5)  # Join the thread
        except Exception as e:
            print(e)
        if self.process.is_alive():
            print(f'\rStop request sent to {__class__.__name__}...Failed! Process has failed to terminate!')
        else:
            print(f'\rStop request sent to {__class__.__name__}...Done! Exited successfully with code: '
                  f'{self.process.exitcode}')

    @staticmethod
    def vid_writer(q, buffer, filename, framesize, fps, stop_req, stop_ack):
        import queue
        # create VideoWriter with opencv
        output = cv2.VideoWriter(filename, cv2.VideoWriter_fourcc(*'DIVX'), fps, framesize)
        while not stop_req.is_set():
            # grab videoFrame Object from the buffer, timeout 2s to ensure no deadlock
            try:
                img_rdy_index = q.get(timeout=1)
                m, img, buff_rdy = buffer[img_rdy_index]
                if m.acquire(timeout=1):
                    buff_rdy.clear()
                    vid_shape = (framesize[0], framesize[1])
                    output.write(img)  # Append image into the video
                    m.release()
            except queue.Empty:
                # catch empty queue exception and do nothing to allow thread to continue
                pass
        stop_ack.set()        # Set ack flag
        output.release()      # Finish video writing
        wait_false(stop_req, 5)  # Handshake stop_req flag with timeout
        stop_ack.clear()
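For what it's worth, the likely reason the shared memory never updates is that in add_frame_obj the statement buff = img only rebinds the local name buff rather than writing into the shared-memory-backed array. A tiny sketch of the difference (my illustration, mirroring the Array/frombuffer pattern above but with a small shape):

import numpy as np
from multiprocessing import Array, Lock

arrayshape = (4, 4, 3)                                    # small shape just for illustration
m_arr = Array('I', int(np.prod(arrayshape)), lock=Lock())
buff = np.frombuffer(m_arr.get_obj(), dtype='I').reshape(arrayshape)
img = np.ones(arrayshape, dtype='I')                      # stand-in for a real frame

# buff = img   # would only rebind the local name; the shared Array would stay all zeros
buff[:] = img  # in-place copy: writes img's data into the shared-memory-backed array

print(np.frombuffer(m_arr.get_obj(), dtype='I').sum())    # 48 -> the shared memory really changed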

How to have a thread/process always active and request the updated value of one of its variables every certain time interval in another thread/process

import numpy as np
import cv2
import multiprocessing
import time
import random

finish_state = multiprocessing.Event()

# function that requests frames
def actions_func(frame):
    while True:
        time.sleep(random.randint(1,5))
        cv2.imshow('requested_frame_1',frame)
        time.sleep(random.randint(1,5))
        cv2.imshow('requested_frame_2',frame)
        if cv2.waitKey(1) & 0xFF == ord('q'): break

# function that keeps the camera always on and should return the frame value with the last image only when requested
def capture_cam():
    cap = cv2.VideoCapture(1)
    if (cap.isOpened() == False):
        print("Unable to read camera feed")

    # Default resolutions of the frame are obtained. The default resolutions are system dependent.
    # We convert the resolutions from float to integer.
    frame_width = int(cap.get(3))
    frame_height = int(cap.get(4))

    while(True):
        ret, frame = cap.read()
        if ret == True:
            cv2.imshow('frame',frame)
            if cv2.waitKey(1) & 0xFF == ord('q'): break
        else:
            break

def main_process(finish_state):
    thr1, frame = multiprocessing.Process(target=capture_cam)
    thr1.start()

    thr2 = multiprocessing.Process(target=actions_func, args=(frame,))
    thr2.start()

if __name__ == '__main__':
    main_process(finish_state)
    print("continue the code with other things after all threads/processes except the main one were closed with the loop that started them... ")
I want a webcam to be open all the time capturing images; for this I have created thread1, which is supposed to run all the time regardless of the rest of the program.
What I need is to fix this program so that it asks for frames from the function that always runs on thread1.
The problem is that I don't know when it will be time to ask thread1 for the last frame it captured; to represent that I put random.randint(1,5), although in reality I won't know the maximum or minimum time at which the last frame will be requested from thread1.
The truth is that I'm getting tangled up with this program, and I really don't know if it's convenient to create a thread2 to make the frame requests or if it's better to just have thread1 and make the frame requests from the main thread.
Although I say threads, they are actually parallel processes; I tried with threads, but I think it is more convenient to use processes, right?
Traceback (most recent call last):
  File "request_frames_thread.py", line 58, in <module>
    main_process(finish_state)
  File "request_frames_thread.py", line 50, in main_process
    thr1, frame = multiprocessing.Process(target=capture_cam)
TypeError: cannot unpack non-iterable Process object
I would have the main process create a full-duplex multiprocessing.Pipe instance, which returns two multiprocessing.connection.Connection instances, and pass one connection to each of your processes. These connections serve as a simple two-way vehicle for sending and receiving objects between the processes. I would have the capture_cam process start a daemon thread (it will terminate when all your regular threads terminate, so it can sit in an infinite loop) that is passed one of these connections to handle requests for the latest frame, which is stored in a global variable.
The only requirement is that a frame be serializable by the pickle module.
import multiprocessing
from threading import Thread
import time
import random
import cv2  # needed for VideoCapture/imshow below

# function that requests frames
def actions_func(conn):
    try:
        while True:
            time.sleep(random.randint(1,5))
            # Ask for latest frame by sending any message:
            conn.send('frame')
            frame = conn.recv()  # This is the response
            cv2.imshow('requested_frame_1',frame)
            time.sleep(random.randint(1,5))
            # Ask for latest frame by sending any message:
            conn.send('frame')
            frame = conn.recv()  # This is the response
            cv2.imshow('requested_frame_2',frame)
            if cv2.waitKey(1) & 0xFF == ord('q'): break
    except BrokenPipeError:
        # The capture_cam process has terminated.
        pass

def handle_frame_requests(conn):
    try:
        while True:
            # Any message coming in is a request for the latest frame:
            request = conn.recv()
            conn.send(frame)  # The frame must be pickle-able
    except EOFError:
        # The actions_func process has ended
        # and its connection has been closed.
        pass

# function that keeps the camera always on and should return the frame value with the last image only when requested
def capture_cam(conn):
    global frame

    frame = None
    # start daemon thread to handle frame requests:
    Thread(target=handle_frame_requests, args=(conn,), daemon=True).start()
    cap = cv2.VideoCapture(1)
    if (cap.isOpened() == False):
        print("Unable to read camera feed")

    # Default resolutions of the frame are obtained. The default resolutions are system dependent.
    # We convert the resolutions from float to integer.
    frame_width = int(cap.get(3))
    frame_height = int(cap.get(4))

    while(True):
        ret, frame = cap.read()
        if ret == True:
            cv2.imshow('frame',frame)
            if cv2.waitKey(1) & 0xFF == ord('q'): break
        else:
            break

def main_process(finish_state):
    conn1, conn2 = multiprocessing.Pipe(duplex=True)
    p1 = multiprocessing.Process(target=capture_cam, args=(conn1,))
    p1.start()

    p2 = multiprocessing.Process(target=actions_func, args=(conn2,))
    p2.start()

if __name__ == '__main__':
    finish_state = multiprocessing.Event()
    main_process(finish_state)

OpenCV Python IP Camera live image

At the moment I am reading an IP camera's live image using the following code:
def livestream(self):
    print("start")
    stream = urlopen('http://192.168.4.1:81/stream')
    bytes = b''
    while True:
        try:
            bytes += stream.read(1024)
            a = bytes.find(b'\xff\xd8')
            b = bytes.find(b'\xff\xd9')
            if a != -1 and b != -1:
                jpg = bytes[a:b+2]
                bytes = bytes[b+2:]
                getliveimage = cv2.imdecode(np.frombuffer(jpg, dtype=np.uint8), cv2.IMREAD_COLOR)
                livestreamrotated1 = cv2.rotate(getliveimage, cv2.ROTATE_90_CLOCKWISE)  # here I am rotating the image
                print(type(livestreamrotated1))  # type at this point is <class 'numpy.ndarray'>
                cv2.imshow('video',livestreamrotated1)
                if cv2.waitKey(1) == 27:  # if user hit esc
                    exit(0)  # exit program
        except Exception as e:
            print(e)
            print("failed at this point")
Now I want to integrate the resulting image into a Kivy GUI and get rid of the while loop, since it freezes my GUI. Unfortunately, the loop is necessary to reassemble the image byte by byte. I would like to use cv2.VideoCapture instead and schedule this multiple times per second. This is not working at all; I am not able to capture the image from the live stream this way... where am I wrong?
cap = cv2.VideoCapture('http://192.168.4.1:81/stream?dummy.jpg')
ret, frame = cap.read()
cv2.imshow('stream',frame)
I read in some other post that a file ending like "dummy.jpg" would be necessary at this point, but it is still not working; the program freezes.
Please help. Thank you in advance!
If you want to decouple your reading loop from your GUI loop you can use multithreading to separate the code. You can have a thread running your livestream function and dumping the image out to a global image variable where your GUI loop can pick it up and do whatever to it.
I can't really test out the livestream part of the code, but something like this should work. The read function is an example of how to write a generic looping function that will work with this code.
import cv2
import time
import threading
import numpy as np
from urllib.request import urlopen  # needed for the livestream func below

# generic threading class
class Reader(threading.Thread):
    def __init__(self, func, *args):
        threading.Thread.__init__(self, target = func, args = args);
        self.start();

# globals for managing shared data
g_stop_threads = False;
g_lock = threading.Lock();
g_frame = None;

# reads frames from vidcap and stores them in g_frame
def read():
    # grab globals
    global g_stop_threads;
    global g_lock;
    global g_frame;

    # open vidcap
    cap = cv2.VideoCapture(0);

    # loop
    while not g_stop_threads:
        # get a frame from camera
        ret, frame = cap.read();

        # replace the global frame
        if ret:
            with g_lock:
                # copy so that we can quickly drop the lock
                g_frame = np.copy(frame);

        # sleep so that someone else can use the lock
        time.sleep(0.03); # in seconds

# your livestream func
def livestream():
    # grab globals
    global g_stop_threads;
    global g_lock;
    global g_frame;

    # open stream
    stream = urlopen('http://192.168.4.1:81/stream')
    bytes = b''

    # process stream into opencv image
    while not g_stop_threads:
        try:
            bytes += stream.read(1024)
            a = bytes.find(b'\xff\xd8')
            b = bytes.find(b'\xff\xd9')
            if a != -1 and b != -1:
                jpg = bytes[a:b+2]
                bytes = bytes[b+2:]
                getliveimage = cv2.imdecode(np.frombuffer(jpg, dtype=np.uint8), cv2.IMREAD_COLOR)
                livestreamrotated1 = cv2.rotate(getliveimage, cv2.ROTATE_90_CLOCKWISE) # here I am rotating the image

                # acquire lock and replace image
                with g_lock:
                    g_frame = livestreamrotated1;

                # sleep to allow other threads to get the lock
                time.sleep(0.03); # in seconds
        except Exception as e:
            print(e)
            print("failed at this point")

def main():
    # grab globals
    global g_stop_threads;
    global g_lock;
    global g_frame;

    # start a thread
    # reader = Reader(read);
    reader = Reader(livestream);

    # show frames from g_frame
    my_frame = None;
    while True:
        # grab lock
        with g_lock:
            # show
            if not g_frame is None:
                # copy so we can dump the lock as fast as possible
                my_frame = np.copy(g_frame);

        # now we can do all the slow manipulation / gui stuff here without the lock
        if my_frame is not None:
            cv2.imshow("Frame", my_frame);

        # break out if 'q' is pressed
        if cv2.waitKey(1) == ord('q'):
            break;

    # stop the threads
    g_stop_threads = True;

if __name__ == "__main__":
    main();
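Since the original goal was to show the frame inside a Kivy GUI without a blocking loop, here is a rough sketch (my own addition, assuming it lives in the same file as the code above so it can reuse Reader, livestream, g_frame and g_lock) of polling the shared frame with Kivy's Clock instead of the cv2.imshow loop:

from kivy.app import App
from kivy.clock import Clock
from kivy.uix.image import Image
from kivy.graphics.texture import Texture

class LiveApp(App):
    def build(self):
        Reader(livestream)  # start the background reader thread from above
        self.img = Image()
        Clock.schedule_interval(self.update_texture, 1.0 / 30.0)  # poll ~30x per second
        return self.img

    def update_texture(self, dt):
        global g_frame, g_lock
        with g_lock:
            if g_frame is None:
                return
            frame = np.copy(g_frame)  # copy, then drop the lock quickly
        frame = cv2.flip(frame, 0)    # OpenCV rows are top-down, Kivy textures bottom-up
        h, w = frame.shape[:2]
        tex = Texture.create(size=(w, h), colorfmt='bgr')
        tex.blit_buffer(frame.tobytes(), colorfmt='bgr', bufferfmt='ubyte')
        self.img.texture = tex

# to use it, replace the call to main() at the bottom with:
#   LiveApp().run()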

How to use multi-threading and multiprocessing to read frames, encode and send them in Python?

I'm using OpenCV to process 2 videos. I have to read frames from these videos, JSON-encode them, and send them using a Python Kafka producer to two different Kafka topics at the same time. So far I have used the threading module to read frames. Since encoding is happening, I don't know how to use multiprocessing there. How can I use multithreading and multiprocessing to optimize this?
from threading import Thread
import cv2, time
import base64
from json import dumps
from kafka import KafkaProducer
from multiprocessing import Process

class VideoStreamWidget(object):
    def __init__(self, src=0):
        self.capture = cv2.VideoCapture(src)
        self.producer = KafkaProducer(bootstrap_servers='ip address',
                                      value_serializer=lambda m: dumps(m).encode('utf-8'))
        # Start the thread to read frames from the video stream
        self.thread = Thread(target=self.update, args=())
        self.thread.daemon = True
        self.thread.start()

    def update(self):
        # Read the next frame from the stream in a different thread
        while True:
            if self.capture.isOpened():
                (self.status, self.frame) = self.capture.read()
            time.sleep(.01)

    def show_frame(self, cam):
        # Display frames in main program
        cv2.imshow(cam, self.frame)
        key = cv2.waitKey(1)
        if key == ord('q'):
            self.capture.release()
            cv2.destroyAllWindows()
            exit(1)

    def image_producer(self, cam, topic):
        #compress_rate=90
        # encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), compress_rate]
        ret, buffer = cv2.imencode('.jpg', self.frame)
        jstr = {"image": base64.b64encode(buffer).decode('ascii')}
        self.producer.send(topic, value=jstr)
        print("Frame sent")
        print(f"{cam}")

if __name__ == '__main__':
    video_stream_widget1 = VideoStreamWidget('rtsp://link')
    video_stream_widget2 = VideoStreamWidget('rtsp://link')
    while True:
        try:
            topic1 = "test"
            p1 = Process(target=video_stream_widget1.image_producer, args=('cam1', topic1))
            p1.start()
            topic2 = "quickstart-events"
            p2 = Process(target=video_stream_widget2.image_producer, args=('cam2', topic2))
            p2.start()
            p1.join()
            p2.join()
        except AttributeError:
            pass
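One direction worth sketching (my own illustration, not from the post): keep the frame-reading threads and the KafkaProducer in the main process, and offload only the CPU-bound JPEG/base64 encoding to a process pool, since the producer object generally cannot be pickled across processes. A minimal sketch assuming the VideoStreamWidget class above:

from concurrent.futures import ProcessPoolExecutor

def encode_frame(frame):
    # CPU-bound work done in a worker process
    ret, buffer = cv2.imencode('.jpg', frame)
    return {"image": base64.b64encode(buffer).decode('ascii')}

if __name__ == '__main__':
    widget1 = VideoStreamWidget('rtsp://link')
    widget2 = VideoStreamWidget('rtsp://link')
    with ProcessPoolExecutor(max_workers=2) as pool:
        while True:
            try:
                # encode both frames in parallel in worker processes
                fut1 = pool.submit(encode_frame, widget1.frame)
                fut2 = pool.submit(encode_frame, widget2.frame)
                # send from the main process, which owns the KafkaProducer objects
                widget1.producer.send("test", value=fut1.result())
                widget2.producer.send("quickstart-events", value=fut2.result())
                time.sleep(0.05)  # crude pacing
            except AttributeError:
                pass  # frames not ready yet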
