I'm trying to read video into a buffer using threads in OpenCV and I get "Assertion fctx->async_lock failed at libavcodec/pthread_frame.c:167" the reason I want to use threads to do this is so I can read a lot of frames into a list at a time so it's fast. I need all the frames and can not skip any frames. I thought multi-threading would be the way to go. my code works single-threaded aka with "buffer_refill = 1".
import threading
import cv2
cap = cv2.VideoCapture('data/temp/testing/test.mp4')
frames = []
total_frames = cap.get(cv2.CAP_PROP_FRAME_COUNT)
frames_left = total_frames
threads = []
print(total_frames)
min_buffer = 120
buffer_refill = 60
def buffer():
print('buffer')
frame = cap.grab()
ret, frame = cap.retrieve()
frames.append(frame)
def get_buffer(num):
global frames_left
for i in range(num):
frames_left -= 1
if frames_left > 0:
t = threading.Thread(target=buffer)
t.start()
threads.append(t)
for thread in threads:
thread.join()
print('block')
while(cap.isOpened()):
if frames_left > 0 and len(frames) < min_buffer:
get_buffer(buffer_refill)
else:
cv2.imshow('Frame',frames[0])
frames.pop(0)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cap.release()
cv2.destroyAllWindows()
Below given is my code. I am trying to scan barcodes and display it using OpenCV. The program works well but there is a huge lag in fps when grabbing frames from drone camera as RTMP stream. Due to the same I am trying to use multi processing method.
import pandas as pd
import cv2
import numpy as np
from pyzbar.pyzbar import decode
from pyzbar.pyzbar import ZBarSymbol
import time
import multiprocessing
global frame
def barcode(frame):
for barcode in decode(frame, symbols=[ZBarSymbol.CODE128]):
myData = barcode.data.decode('utf-8')
pts = np.array([barcode.polygon],np.int32)
pts = pts.reshape((-1,1,2))
cv2.polylines(frame, [pts], True, (255,0,255),5)
pts2 = barcode.rect
akash = []
akash.append(myData)
cv2.putText(frame, myData, (pts2[0], pts2[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (255,99,71), 2)
p1 = multiprocessing.Process(target = barcode)
cv2.namedWindow("Result", cv2.WINDOW_NORMAL)
vid = cv2.VideoCapture(0)
if __name__ == '__main__':
while(True):
ret, frame = vid.read()
if frame is not None:
p1.start()
cv2.imshow('Result',frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
vid.release()
vid.destroyAllWindows()
and the error is
AssertionError Traceback (most recent call last)
<ipython-input-1-df50d7c70cda> in <module>
27 ret, frame = vid.read()
28 if frame is not None:
---> 29 p1.start()
30 cv2.imshow('Result',frame)
31 if cv2.waitKey(1) & 0xFF == ord('q'):
C:\ProgramData\Anaconda3\lib\multiprocessing\process.py in start(self)
113 '''
114 self._check_closed()
--> 115 assert self._popen is None, 'cannot start a process twice'
116 assert self._parent_pid == os.getpid(), \
117 'can only start a process object created by current process'
AssertionError: cannot start a process twice
Try not to create processes inside loops. The best way to use processes is to create n processes outside and then, with the help of Queues, access and push data.
In the following code, I created 5 processes which would run infinitely and try to fetch data from inQ queue.
Then do all the processing that you were doing.
After that, I'm pushing it to outQ queue, which we'll use later to show the results.
In the main, I am simply reading the data from the opencv vid object and the pushing to the inQ which our Processes will use to fetch frame.
Next, I'm just fetching the results. This way appears better to me as we don't have to create processes in every iteration as well as we have multiple processes ready to process the data at all times.
You can also set the buffer limit for the queue if you want. Also, with live streams, try to have a skipFrame parameter to skip a few frames. That would boost up the fps.
import cv2
import numpy as np
from pyzbar.pyzbar import decode
from pyzbar.pyzbar import ZBarSymbol
import time
from multiprocessing import Process, Queue
inQ = Queue()
outQ = Queue()
def barcode():
global inQ
global outQ
try:
print("Solving..")
frame = inQ.get()
for barcode in decode(frame, symbols=[ZBarSymbol.CODE128]):
myData = barcode.data.decode('utf-8')
pts = np.array([barcode.polygon],np.int32)
pts = pts.reshape((-1,1,2))
cv2.polylines(frame, [pts], True, (255,0,255),5)
pts2 = barcode.rect
akash = []
akash.append(myData)
cv2.putText(frame, myData, (pts2[0], pts2[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (255,99,71), 2)
outQ.put(frame)
except Exception as e:
print(e)
for _ in range(5): # configure yourself
Process(target = barcode).start()
cv2.namedWindow("Result", cv2.WINDOW_NORMAL)
if __name__ == '__main__':
print("Inside main")
vid = cv2.VideoCapture(0)
while vid.isOpened():
print("While...")
ret, frame = vid.read()
if ret:
try:
inQ.put(frame)
except Exception as e:
print(e)
try:
output = outQ.get()
cv2.imshow("Result", output)
except Exception as e:
print(e)
vid.release()
vid.destroyAllWindows()
I am trying to interface my OpenCV program with my Raspberry Pi PiCamera. Every time I use OpenCV to capture video, it drastically drops the FPS. When I capture video using PiCamera's Library, everything is fine and smooth.
Why is this happening?
Is there a way to fix it?
This is my code:
import time
import RPi.GPIO as GPIO
from PCA9685 import PCA9685
import numpy as np
import cv2
try:
cap = cv2.VideoCapture(0)
cap.set(cv2.CAP_PROP_FPS, 90)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 800)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 700)
while(True):
ret, frame = cap.read()
cv2.imshow('frame',frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything is done, release the capture
except:
pwm.exit_PCA9685()
print ("\nProgram end")
exit()
cap.release()
cv2.destroyAllWindows()
This is the error I'm getting:
First of all, those are warnings not errors.
Reduce the video dimension. Specify the dimension.
cv2.VideoCapture has some problems as it buffers the frames, and the frames are queued so if you're doing some processing and the speed is less than the bandwidth of VideoCapture the video will be slowed down.
So, here is a bufferless VideoCapture.
video_capture_Q_buf.py
import cv2, queue as Queue, threading, time
is_frame = True
# bufferless VideoCapture
class VideoCaptureQ:
def __init__(self, name):
self.cap = cv2.VideoCapture(name)
self.q = Queue.Queue()
t = threading.Thread(target=self._reader)
t.daemon = True
t.start()
# read frames as soon as they are available, keeping only most recent one
def _reader(self):
while True:
ret, frame = self.cap.read()
if not ret:
global is_frame
is_frame = False
break
if not self.q.empty():
try:
self.q.get_nowait() # discard previous (unprocessed) frame
except Queue.Empty:
pass
self.q.put(frame)
def read(self):
return self.q.get()
Using it:
test.py
import video_capture_Q_buf as vid_cap_q # import as alias
from video_capture_Q_buf import VideoCaptureQ # class import
import time
cap = VideoCaptureQ(vid_path)
while True:
t1 = time.time()
if vid_cap_q.is_frame == False:
print('no more frames left')
break
try:
ori_frame = cap.read()
# do your stuff
except Exception as e:
print(e)
break
t2 = time.time()
print(f'FPS: {1/(t2-t1)}')
I am trying to run a function which takes frames from various camera feeds and saves them to disk.
I have three cameras connected to my laptop USB ports and trying to start all the cameras at once and trying to perform some action on those captured images/frames. For simplicity sake, i am just saving them to disk. Here is the below code:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from multiprocessing import Process
import cv2
#from scipy.stats import math
import datetime
count = 0
def saving_frame(frame):
count = 0
date_string = datetime.datetime.now().strftime("%Y-%m-%d-%H:%M")
count = count +1
cv2.imwrite('Frame_'+str(count)+date_string+'.png',frame)
return
def runInParallel(*fns):
proc = []
for fn in fns:
p = Process(target=fn)
p.start()
proc.append(p)
for p in proc:
p.join()
video_capture_1 = cv2.VideoCapture(0)
video_capture_2 = cv2.VideoCapture(1)
video_capture_3 = cv2.VideoCapture(2)
print ('Start Rec')
while True:
ret_1, frame_1 = video_capture_1.read()
ret_2, frame_2 = video_capture_2.read()
ret_3, frame_3 = video_capture_3.read()
runInParallel(saving_frame(frame_1),saving_frame(frame_2),saving_frame(frame_3))
cv2.imshow('frame1', frame_1)
cv2.imshow('frame2', frame_2)
cv2.imshow('frame3', frame_3)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
video_capture_1.release()
video_capture_2.release()
video_capture_3.release()
cv2.destroyAllWindows()
However, the above code is not saving any frames. Could you please help me with this?
I use Opencv to display real-time frames, When a remote flag variable(which is saved in firebase in this case) changes from False to True, the program saves current frame. But polling flag value makes the program slow and the frames are not smoothly displayed.So I am wondering if it is possible to make it "event-driven".
Here is what I want to do:
One process captures the frames and display them, like:
cap = cv2.VideoCapture(0)
while(True):
ret, frame = cap.read()
frame = cv2.flip(frame,1)
# Display the resulting frame
cv2.imshow('frame',frame)
Another process gets the variable value, and when it equals True, I send a signal to the main process to save the current frame.
I tried to use multiprocessing and pipe like:
def is_asked(conn):
while(True):
isAsked = firebase.get('/test/isAsked', None)
conn.send(isAsked)
if __name__ == '__main__':
parent_conn, child_conn = Pipe()
p = Process(target=is_asked, args=(child_conn,))
p.start()
cap = cv2.VideoCapture(0)
while(True):
ret, frame = cap.read()
frame = cv2.flip(frame,1)
# Display the frame
cv2.imshow('frame',frame)
recvSignal = parent_conn.recv()
print recvSignal
if cv2.waitKey(1) & recvSignal == True:
## do something like take screenshot
cap.release()
cv2.destroyAllWindows()
p.join()
But then I realized parent_conn.recv() could be blocked by send() in is_asked, it doesn's make much difference with directly polling.
So is there any "event-driven" way to notify the main process only when the variable changes to True?