I'm using the tremendous picamera package on my RPi4. In particular, the server side code
import socket
import time
import picamera
camera = picamera.PiCamera()
camera.resolution = (640, 480)
camera.framerate = 24
server_socket = socket.socket()
server_socket.bind(('0.0.0.0', 8000))
server_socket.listen(0)
# Accept a single connection and make a file-like object out of it
connection = server_socket.accept()[0].makefile('wb')
try:
    camera.start_recording(connection, format='h264')
    camera.wait_recording(60)
    camera.stop_recording()
finally:
    connection.close()
    server_socket.close()
as offered at PiCamera Basic Recipes. This code works just fine.
I modified the code so that it records forever instead of only 60 seconds, and so that the server socket keeps listening forever, like so:
import socket
import time
import picamera
camera = picamera.PiCamera()
camera.resolution = (640, 480)
camera.framerate = 30
server_socket = socket.socket()
server_socket.bind(('0.0.0.0', 8000))
server_socket.listen(0)
while True:  # Keep listening forever.
    client_socket, client_address = server_socket.accept()
    print(f"New connection from {client_address}.")
    connection = client_socket.makefile('wb')
    try:
        camera.start_recording(connection, format='h264')
        while True:  # To never stop streaming.
            camera.wait_recording(1)
        camera.stop_recording()
    except BrokenPipeError as error:
        print(f"Error {error} occurred.")
        # camera.stop_recording()  # If I use this line I get ConnectionResetError, and if not, I get PiCameraAlreadyRecording.
    finally:
        pass
        # connection.close()
        # server_socket.close()
Unfortunately, I now cannot reconnect after a client drops because of camera.stop_recording(), as described in the comments above: if I call it I get a ConnectionResetError, and if I don't I get PiCameraAlreadyRecording.
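For reference, this is roughly the restructuring I am aiming for: when the client drops, stop the recording inside its own try/except so the camera is free again before the outer loop accepts the next client. This is only a sketch of the idea (the extra error handling is my guess), not something that works for me yet:
import picamera  # same camera and server_socket setup as above

while True:  # Keep listening forever.
    client_socket, client_address = server_socket.accept()
    connection = client_socket.makefile('wb')
    try:
        camera.start_recording(connection, format='h264')
        while True:  # To never stop streaming.
            camera.wait_recording(1)
    except (BrokenPipeError, ConnectionResetError) as error:
        print(f"Client disconnected: {error}")
    finally:
        try:
            # Guess: stopping may fail again while flushing to the dead socket,
            # so ignore that and just make sure the camera is no longer recording.
            camera.stop_recording()
        except Exception as stop_error:
            print(f"Ignored while stopping: {stop_error}")
        connection.close()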
On the client side I use the code
import numpy as np
import cv2
import sys
import datetime
server = 'http://192.168.178.54:8000/'
video_capture_0 = cv2.VideoCapture(server)
fourcc = cv2.VideoWriter_fourcc(*"mp4v") # Fourcc's that work under Ubuntu 22.04: mp4v, ...
date_time_now = datetime.datetime.now()
yyyymmddHHMMSS = date_time_now.strftime("%Y-%m-%d_%H%M%S")
video_write_0 = cv2.VideoWriter(f"{yyyymmddHHMMSS}.mp4", fourcc, 30.0, (640, 480)) # To save the stream to a file.
while True:
    ret0, frame0 = video_capture_0.read()
    if ret0:
        frame0 = cv2.flip(frame0, 0)  # Flip image vertically.
        cv2.imshow('Test 0', frame0)
        video_write_0.write(frame0)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
video_capture_0.release()
video_write_0.release()
cv2.destroyAllWindows()
How do I fix this?
Related
I am making a Python client-server application which broadcasts the screen of the server to the clients, which is then saved into a file.
There is a problem when converting the numpy array to string bytes and then decoding it back into a numpy array:
Traceback (most recent call last):
File "D:\Development\Sockets\client.py", line 31, in
out.write(msg)
cv2.error: OpenCV(4.6.0) :-1: error: (-5:Bad argument) in function 'write'
Overload resolution failed:
image data type = 19 is not supported
Expected Ptr<cv::UMat> for argument 'image'
SERVER.PY
import numpy as np
import socket
import threading
import cv2
import pyautogui
from config import *
SERVER = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
SERVER.bind((HOST, PORT))
webcam = cv2.VideoCapture(0)
def capture_now():
    while True:
        # Capture the screen
        img = pyautogui.screenshot()
        img = np.array(img)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        _, frame = webcam.read()
        fr_height, fr_width, _ = frame.shape
        img[0:fr_height, 0:fr_width, :] = frame[0:fr_height, 0:fr_width, :]
        cv2.imshow('frame', img)
        return img

def handle_client(conn, addr):
    print(f'[NEW CONNECTION] {addr} connected')
    connected = True
    while connected:
        msg = str(capture_now()).encode(FORMAT)
        msg_length = str(len(msg)).encode(FORMAT)
        msg_length += b' ' * (HEADER - len(msg_length))
        conn.sendall(msg_length)
        conn.sendall(msg)
    print('CONNECTION CLOSED')
    conn.close()

def start():
    SERVER.listen()
    print(f'[STARTED] server started listening on {HOST}')
    while True:
        conn, addr = SERVER.accept()
        thread = threading.Thread(target=handle_client, args=(conn, addr))
        thread.start()
        print(f'[ACTIVE CONNECTIONS] {threading.active_count() - 1}')

start()
CLIENT.PY
import cv2
import socket
import numpy as np
from config import *
import pyautogui
SCREEN_SIZE = tuple(pyautogui.size())
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('record.avi', fourcc, 20.0, (SCREEN_SIZE))
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
    client.connect((HOST, PORT))
    # client.sendall(b'Hello, World!')
    while True:
        msg_length = int(client.recv(HEADER).decode(FORMAT))
        if msg_length:
            msg = client.recv(msg_length)
            msg = msg.decode(FORMAT)
            # print(type(msg))
            # print(msg)
            # msg = np.vectorize(lambda msg: msg.decode(FORMAT))
            msg = np.array(msg)
            print(msg)
            if msg == BREAK_CONNECTION:
                connected = False
                print(msg)
            out.write(msg)
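For context, my understanding is that str(capture_now()).encode(FORMAT) only sends a truncated text representation of the array, which is why decoding it back fails. The direction I think I need is to send the raw pixel bytes together with the shape and dtype, roughly like this (the helper names and the 64-byte header are my own invention, not part of the code above):
import numpy as np

def frame_to_bytes(frame):
    # Prepend a small fixed-size text header with shape and dtype so the
    # receiver can rebuild the array from the raw bytes.
    header = f"{frame.shape[0]},{frame.shape[1]},{frame.shape[2]},{frame.dtype}".encode()
    return header.ljust(64) + frame.tobytes()

def bytes_to_frame(payload):
    header, raw = payload[:64], payload[64:]
    h, w, c, dtype = header.decode().strip().split(",")
    return np.frombuffer(raw, dtype=dtype).reshape(int(h), int(w), int(c))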
I have to make a project that shares screen and webcam video of multiple clients at the same time. The screen sharing and webcam sharing work separately but I can't combine them and make them work at the same time. I copied and pasted the code and then made changes to it, so I don't understand all of the code that well.
Server:
import socket, cv2, pickle, struct
import imutils
import threading
import pyshine as ps
import cv2
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_name = socket.gethostname()
host_ip = socket.gethostbyname(host_name)
print('HOST IP:', host_ip)
port = 9999
socket_address = (host_ip, port)
server_socket.bind(socket_address)
server_socket.listen()
print("Listening at", socket_address)
def show_client(addr, client_socket):
    try:
        print('CLIENT {} CONNECTED!'.format(addr))
        if client_socket:  # if a client socket exists
            data = b""
            payload_size = struct.calcsize("Q")
            while True:
                while len(data) < payload_size:
                    packet = client_socket.recv(4 * 1024)  # 4K
                    if not packet:
                        break
                    data += packet
                packed_msg_size = data[:payload_size]
                data = data[payload_size:]
                msg_size = struct.unpack("Q", packed_msg_size)[0]
                while len(data) < msg_size:
                    data += client_socket.recv(4 * 1024)
                frame_data = data[:msg_size]
                data = data[msg_size:]
                frame = pickle.loads(frame_data)
                text = f"CLIENT: {addr}"
                frame = ps.putBText(frame, text, 10, 10, vspace=10, hspace=1, font_scale=0.7,
                                    background_RGB=(255, 0, 0), text_RGB=(255, 250, 250))
                cv2.imshow(f"FROM {addr}", frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord('q'):
                    break
            client_socket.close()
    except Exception as e:
        print(f"CLIENT {addr} DISCONNECTED")
        pass

def run_show_client():
    while True:
        client_socket, addr = server_socket.accept()
        thread = threading.Thread(target=show_client, args=(addr, client_socket))
        thread.start()
        print("TOTAL CLIENTS ", threading.activeCount() - 1)
Screen sharing client:
import socket, cv2, pickle, struct
import imutils
import pyautogui
import numpy as np
def student_screen_show():
    camera = True
    if camera == True:
        vid = cv2.VideoCapture(0)
    else:
        vid = cv2.VideoCapture('videos/mario.mp4')

    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    host_ip = ' '
    port = 9999
    client_socket.connect((host_ip, port))

    resolution = (1920, 1080)
    codec = cv2.VideoWriter_fourcc(*"XVID")
    filename = "Recording.avi"
    fps = 60.0
    out = cv2.VideoWriter(filename, codec, fps, resolution)

    if client_socket:
        while vid.isOpened():
            try:
                img = pyautogui.screenshot()
                frame = np.array(img)
                frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                out.write(frame)
                frame = imutils.resize(frame, width=380)
                a = pickle.dumps(frame)
                message = struct.pack("Q", len(a)) + a
                client_socket.sendall(message)
                cv2.imshow(f"TO: {host_ip}", frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord("q"):
                    client_socket.close()
            except:
                print('VIDEO FINISHED!')
                break
Webcam sharing client:
import socket, cv2, pickle, struct
import imutils
def student_show():
    camera = True
    if camera == True:
        vid = cv2.VideoCapture(0)
    else:
        vid = cv2.VideoCapture('videos/mario.mp4')

    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    host_ip = ' '
    port = 9999
    client_socket.connect((host_ip, port))

    if client_socket:
        while vid.isOpened():
            try:
                img, frame = vid.read()
                frame = imutils.resize(frame, width=380)
                a = pickle.dumps(frame)
                message = struct.pack("Q", len(a)) + a
                client_socket.sendall(message)
                cv2.imshow(f"TO: {host_ip}", frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord("q"):
                    client_socket.close()
            except:
                print('VIDEO FINISHED!')
                break
So what I have to do is merge the code so that instead of only running screen sharing or webcam sharing, the same client shares both at the same time. I didn't write the IP address in the code shown here, but I do use my actual IP address in my code; I just felt weird sharing it.
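One idea I am considering for the merge (just a sketch of a protocol change on my side, not code from the files above) is to prefix every pickled frame with a one-byte tag so the server can tell screen frames and webcam frames apart on the same connection:
import pickle
import struct

SCREEN, WEBCAM = 0, 1  # tag values I made up for this sketch

def send_tagged_frame(sock, frame, source):
    payload = pickle.dumps(frame)
    # 1-byte source tag, then the usual 8-byte "Q" length prefix, then the frame.
    sock.sendall(struct.pack("B", source) + struct.pack("Q", len(payload)) + payload)
The server would then read that extra byte before the existing length prefix and show one window per (addr, source) pair.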
I have the following two files:
SERVER.PY
import socket
import cv2
import pickle
import struct
HOST = '192.168.0.17'
PORT = 8083
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Socket created')
s.bind((HOST, PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')
conn, addr = s.accept()
data = b''
payload_size = struct.calcsize("L")
while True:
    while len(data) < payload_size:
        data += conn.recv(4096)
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("L", packed_msg_size)[0]
    while len(data) < msg_size:
        data += conn.recv(4096)
    frame_data = data[:msg_size]
    data = data[msg_size:]
    frame = pickle.loads(frame_data)
    print(frame.size)
    cv2.imshow('frame', frame)
    cv2.waitKey(10)
CLIENT.PY
import cv2
import socket
import pickle
import struct
cap = cv2.VideoCapture(0)
clientsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
clientsocket.connect(('192.168.0.17', 8083))
while True:
    ret, frame = cap.read()
    data = pickle.dumps(frame)
    clientsocket.sendall(struct.pack("L", len(data)) + data)
When I run this on the same computer on localhost, it works perfectly fine. However, when I run the server on one computer and the client on another one (same local network), I get the following error:
File "C:/Users//Desktop/server.py", line 35, in
frame=pickle.loads(frame_data)
_pickle.UnpicklingError: invalid load key, '\x00'.
Maybe I should mention that I am running the server.py file on a Windows machine and the client.py on a Linux machine. The connection works but as soon as I connect, I get the error message above. Any ideas?
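One thing I am unsure about is the struct format: as far as I know, native "L" has a platform-dependent size (typically 4 bytes on Windows and 8 bytes on 64-bit Linux), so the length prefix written by the client and the one read by the server may not be the same length. A quick check I could run on both machines:
import struct

print(struct.calcsize("L"))   # native size: platform dependent
print(struct.calcsize("=L"))  # standard size: always 4 bytes
print(struct.calcsize("!L"))  # network byte order: always 4 bytes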
I am getting the camera feed from a camera on a Raspberry Pi to the computer over a TCP socket, to use it with OpenCV. It works well with minimal delay. However, sometimes the frame freezes, and after a while the feed comes back or the OpenCV window crashes. I have looked everywhere and tried multiple things, but I just don't know what is causing it.
Server (PC):
import socket
import struct
import numpy as np
import cv2
host = "192.168.0.12"
portCar = 8010
# Camera socket
camS = socket.socket()
camS.bind((host, portCar))
camS.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Listen for camera
camS.listen(0)
print("Waiting for camera connection...")
camCon = camS.accept()[0]
camFile = camCon.makefile("rb")
print("Connection made with camera")
camS.settimeout(0.00001)
numOfBytes = struct.calcsize("<L")
try:
    while True:
        camS.setblocking(False)
        imageLength = struct.unpack("<L", camFile.read(numOfBytes))[0]
        if imageLength == 0:
            break
        nparr = np.frombuffer(camFile.read(imageLength), np.uint8)
        frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
        cv2.imshow('RC Car Video stream', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    camFile.close()
    camS.close()
    cv2.destroyAllWindows()
    print("Server - Camera connection closed")
Client (PI):
import io
import socket
import struct
import time
import picamera
client_socket = socket.socket()
client_socket.connect(('192.168.0.12', 8010))
client_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
connection = client_socket.makefile('wb')
class SplitFrames(object):
    def __init__(self, connection):
        self.connection = connection
        self.stream = io.BytesIO()

    def write(self, buf):
        if buf.startswith(b'\xff\xd8'):
            # Start of a new JPEG frame; send the previous frame's length
            # and then its data.
            size = self.stream.tell()
            if size > 0:
                self.connection.write(struct.pack('<L', size))
                self.connection.flush()
                self.stream.seek(0)
                self.connection.write(self.stream.read(size))
                self.stream.seek(0)
        self.stream.write(buf)

try:
    output = SplitFrames(connection)
    with picamera.PiCamera(resolution='VGA', framerate=30) as camera:
        time.sleep(2)
        camera.rotation = 180
        camera.start_recording(output, format='mjpeg')
        camera.wait_recording(2000)
        camera.stop_recording()
        # Write the terminating 0-length to the connection to let the
        # server know we're done
        connection.write(struct.pack('<L', 0))
finally:
    connection.close()
    client_socket.close()
    print("Client - Connection closed")
Any help will be greatly appreciated.
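For clarity, the wire format here is a 4-byte little-endian length prefix followed by one JPEG frame, with a zero length marking the end of the stream. To rule out short reads as the cause of the freezes, I have been thinking about adding a helper like this on the receiving side (my own addition, not part of the code above):
def read_exact(stream, n):
    # Keep reading until exactly n bytes have arrived, or return None if the
    # stream ended early.
    buf = b""
    while len(buf) < n:
        chunk = stream.read(n - len(buf))
        if not chunk:
            return None
        buf += chunk
    return buf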
What I am trying to do: record video and then send it from one PC to another with a socket stream. For that I use the OpenCV library (cv2), NumPy and socket.
The problem: when I send the frames, only part of each frame arrives (43776 of 921600 bytes), which leads to an error afterwards when I try to display the frame on the other PC.
The Server:
#!/usr/bin/python
import socket
import cv2
import numpy as np
ip = "XXX.XXX.X.XXX"
def Test(Given_List):
    y = 0
    temp = []
    Frame_List = []
    for kappa in range(480):
        Frame_List.append([])
    for each in Given_List:
        if len(temp) < 3:
            temp.append(each)
        if len(temp) >= 3:
            Frame_List[y].append(temp)
            temp = []
            if len(Frame_List[y]) >= 640:
                y += 1
    return Frame_List

while True:
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_socket.connect((ip, 50000))
    data = client_socket.recv(10240000)
    s = np.fromstring(data, dtype=np.uint8)
    nice = np.asarray(Test(s))
    cv2.imshow('frame', nice)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
The Client:
#!/usr/bin/python
import socket
import cv2
#Receiver ip
ip = "XXX.XXX.X.XXX"
port = 50000
#Set up socket and stuff
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("XXX.XXX.X.XXX", port))
server.listen(1)
#Define the cam stuff
cap = cv2.VideoCapture(0)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480)
while cap.isOpened():
    s, add = server.accept()
    ret, frame = cap.read()
    n = frame.tobytes()
    if ret:
        s.sendall(n)
    else:
        break

# Release everything if job is finished
cap.release()
out.release()
s.close()
It seems like the amount of data exceeds the maximum amount of the socket, so I changed the video resolution to 320x240 and now it works fine.
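For reference, my understanding of the underlying behaviour is that a single recv() call only returns the bytes that have arrived so far, not necessarily a whole frame, so the receive side still has to loop until a full frame has been collected. A minimal sketch of that, assuming the original 640x480 BGR uint8 frames:
import numpy as np

FRAME_BYTES = 640 * 480 * 3  # one uint8 BGR frame at the original resolution

def recv_frame(sock):
    # Keep calling recv() until a full frame's worth of bytes has arrived.
    chunks = []
    remaining = FRAME_BYTES
    while remaining > 0:
        chunk = sock.recv(remaining)
        if not chunk:
            raise ConnectionError("socket closed mid-frame")
        chunks.append(chunk)
        remaining -= len(chunk)
    return np.frombuffer(b"".join(chunks), dtype=np.uint8).reshape(480, 640, 3)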