Socket Stream - Python (Cutting part of the message)

What I am trying to do: record video and send it from one PC to another over a socket stream. For that I use the OpenCV library (cv2), NumPy and socket.
The problem: when I send the frames, only part of each one arrives (43776 of 921600 bytes), which then causes an error when I try to display the frame on the other PC.
The Server:
#!/usr/bin/python
import socket
import cv2
import numpy as np

ip = "XXX.XXX.X.XXX"

def Test(Given_List):
    y = 0
    temp = []
    Frame_List = []
    for kappa in range(480):
        Frame_List.append([])
    for each in Given_List:
        if len(temp) < 3:
            temp.append(each)
        if len(temp) >= 3:
            Frame_List[y].append(temp)
            temp = []
            if len(Frame_List[y]) >= 640:
                y += 1
    return Frame_List

while True:
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_socket.connect((ip, 50000))
    data = client_socket.recv(10240000)
    s = np.fromstring(data, dtype=np.uint8)
    nice = np.asarray(Test(s))
    cv2.imshow('frame', nice)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
The Client:
#!/usr/bin/python
import socket
import cv2

# Receiver ip
ip = "XXX.XXX.X.XXX"
port = 50000

# Set up socket and stuff
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("XXX.XXX.X.XXX", port))
server.listen(1)

# Define the cam stuff
cap = cv2.VideoCapture(0)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480)

while(cap.isOpened()):
    s, add = server.accept()
    ret, frame = cap.read()
    n = frame.tobytes()
    if ret:
        s.sendall(n)
    else:
        break

# Release everything if job is finished
cap.release()
out.release()
s.close()

It seems like the amount of data exceeds what the socket delivers in a single recv() call, so I changed the video resolution to 320x240 and now it works fine.
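For reference, TCP is a byte stream: a single recv() returns only whatever bytes have arrived so far, not the whole message, so the receiving side has to keep reading until it has the full frame. Below is a minimal sketch of that idea, assuming the sending side accepts one connection and keeps writing 640x480x3-byte frames to it; recv_exact is a hypothetical helper name, not something from the original code.

import socket
import numpy as np
import cv2

FRAME_BYTES = 640 * 480 * 3  # one raw BGR frame (921600 bytes)

def recv_exact(sock, n):
    """Keep calling recv() until exactly n bytes have been collected."""
    chunks = []
    remaining = n
    while remaining > 0:
        chunk = sock.recv(min(remaining, 4096))
        if not chunk:  # connection closed before the frame completed
            raise ConnectionError("socket closed mid-frame")
        chunks.append(chunk)
        remaining -= len(chunk)
    return b"".join(chunks)

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(("XXX.XXX.X.XXX", 50000))
while True:
    data = recv_exact(client_socket, FRAME_BYTES)
    # reshape the flat buffer straight into a 480x640x3 image, replacing the Test() loop
    frame = np.frombuffer(data, dtype=np.uint8).reshape((480, 640, 3))
    cv2.imshow('frame', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break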

Related

Keep PiCamera object always active after client connection breaks

I'm using the tremendous picamera package on my RPi4. In particular, the server side code
import socket
import time
import picamera

camera = picamera.PiCamera()
camera.resolution = (640, 480)
camera.framerate = 24

server_socket = socket.socket()
server_socket.bind(('0.0.0.0', 8000))
server_socket.listen(0)

# Accept a single connection and make a file-like object out of it
connection = server_socket.accept()[0].makefile('wb')
try:
    camera.start_recording(connection, format='h264')
    camera.wait_recording(60)
    camera.stop_recording()
finally:
    connection.close()
    server_socket.close()
as offered at PiCamera Basic Recipes. This code works just fine.
I modified the code so that it records forever instead of only 60 seconds, and so that the server socket keeps listening forever, like so:
import socket
import time
import picamera

camera = picamera.PiCamera()
camera.resolution = (640, 480)
camera.framerate = 30

server_socket = socket.socket()
server_socket.bind(('0.0.0.0', 8000))
server_socket.listen(0)

while True:  # Keep listening forever.
    client_socket, client_address = server_socket.accept()
    print(f"New connection from {client_address}.")
    connection = client_socket.makefile('wb')
    try:
        camera.start_recording(connection, format='h264')
        while True:  # To never stop streaming.
            camera.wait_recording(1)
        camera.stop_recording()
    except BrokenPipeError as error:
        print(f"Error {error} occured.")
        # camera.stop_recording()  # If I use this line I get ConnectionResetError, and if not, I get PiCameraAlreadyRecording.
    finally:
        pass
        # connection.close()
        # server_socket.close()
I unfortunately now have the problem that I cannot seem to reconnect to the PiCamera object because of the camera.stop_recording(), as described in the comments thereafter. I get a ConnectionResetError or PiCameraAlreadyRecording error.
On the client side I use the code
import numpy as np
import cv2
import sys
import datetime

server = 'http://192.168.178.54:8000/'
video_capture_0 = cv2.VideoCapture(server)
fourcc = cv2.VideoWriter_fourcc(*"mp4v")  # Fourcc's that work under Ubuntu 22.04: mp4v, ...
date_time_now = datetime.datetime.now()
yyyymmddHHMMSS = date_time_now.strftime("%Y-%m-%d_%H%M%S")
video_write_0 = cv2.VideoWriter(f"{yyyymmddHHMMSS}.mp4", fourcc, 30.0, (640, 480))  # To save the stream to a file.

while True:
    ret0, frame0 = video_capture_0.read()
    if ret0:
        frame0 = cv2.flip(frame0, 0)  # Flip image vertically.
        cv2.imshow('Test 0', frame0)
        video_write_0.write(frame0)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

video_capture_0.release()
video_write_0.release()
cv2.destroyAllWindows()
How do I fix this?
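In case it helps to make the intent concrete: one common pattern for this situation is to stop the recording inside the exception handler (swallowing any error raised on the already-dead socket), close the connection, and only then loop back to accept() so the camera is free for the next client. A minimal sketch of that idea, not tested on a Pi:

import socket
import picamera

camera = picamera.PiCamera()
camera.resolution = (640, 480)
camera.framerate = 30

server_socket = socket.socket()
server_socket.bind(('0.0.0.0', 8000))
server_socket.listen(0)

while True:
    client_socket, client_address = server_socket.accept()
    print(f"New connection from {client_address}.")
    connection = client_socket.makefile('wb')
    try:
        camera.start_recording(connection, format='h264')
        while True:
            camera.wait_recording(1)
    except (BrokenPipeError, ConnectionResetError) as error:
        print(f"Client disconnected: {error}")
    finally:
        # Free the camera for the next client even if the socket is already
        # dead; errors raised while flushing to the broken pipe are ignored.
        try:
            camera.stop_recording()
        except Exception:
            pass
        connection.close()
        client_socket.close()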

Sending webcam video and screen sharing at the same time using python, opencv

I have to make a project that shares screen and webcam video of multiple clients at the same time. The screen sharing and webcam sharing work separately but I can't combine them and make them work at the same time. I copied and pasted the code and then made changes to it, so I don't understand all of the code that well.
Server:
import socket, cv2, pickle, struct
import imutils
import threading
import pyshine as ps
import cv2

server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_name = socket.gethostname()
host_ip = socket.gethostbyname(host_name)
print('HOST IP:', host_ip)
port = 9999
socket_address = (host_ip, port)
server_socket.bind(socket_address)
server_socket.listen()
print("Listening at", socket_address)

def show_client(addr, client_socket):
    try:
        print('CLIENT {} CONNECTED!'.format(addr))
        if client_socket:  # if a client socket exists
            data = b""
            payload_size = struct.calcsize("Q")
            while True:
                while len(data) < payload_size:
                    packet = client_socket.recv(4 * 1024)  # 4K
                    if not packet: break
                    data += packet
                packed_msg_size = data[:payload_size]
                data = data[payload_size:]
                msg_size = struct.unpack("Q", packed_msg_size)[0]
                while len(data) < msg_size:
                    data += client_socket.recv(4 * 1024)
                frame_data = data[:msg_size]
                data = data[msg_size:]
                frame = pickle.loads(frame_data)
                text = f"CLIENT: {addr}"
                frame = ps.putBText(frame, text, 10, 10, vspace=10, hspace=1, font_scale=0.7,
                                    background_RGB=(255, 0, 0), text_RGB=(255, 250, 250))
                cv2.imshow(f"FROM {addr}", frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord('q'):
                    break
            client_socket.close()
    except Exception as e:
        print(f"CLINET {addr} DISCONNECTED")
        pass

def run_show_client():
    while True:
        client_socket, addr = server_socket.accept()
        thread = threading.Thread(target=show_client, args=(addr, client_socket))
        thread.start()
        print("TOTAL CLIENTS ", threading.activeCount() - 1)
Screen sharing client:
import socket, cv2, pickle, struct
import imutils
import pyautogui
import numpy as np

def student_screen_show():
    camera = True
    if camera == True:
        vid = cv2.VideoCapture(0)
    else:
        vid = cv2.VideoCapture('videos/mario.mp4')
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    host_ip = ' '
    port = 9999
    client_socket.connect((host_ip, port))
    resolution = (1920, 1080)
    codec = cv2.VideoWriter_fourcc(*"XVID")
    filename = "Recording.avi"
    fps = 60.0
    out = cv2.VideoWriter(filename, codec, fps, resolution)
    if client_socket:
        while (vid.isOpened()):
            try:
                img = pyautogui.screenshot()
                frame = np.array(img)
                frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                out.write(frame)
                frame = imutils.resize(frame, width=380)
                a = pickle.dumps(frame)
                message = struct.pack("Q", len(a)) + a
                client_socket.sendall(message)
                cv2.imshow(f"TO: {host_ip}", frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord("q"):
                    client_socket.close()
            except:
                print('VIDEO FINISHED!')
                break
Webcam sharing client:
import socket, cv2, pickle, struct
import imutils

def student_show():
    camera = True
    if camera == True:
        vid = cv2.VideoCapture(0)
    else:
        vid = cv2.VideoCapture('videos/mario.mp4')
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    host_ip = ' '
    port = 9999
    client_socket.connect((host_ip, port))
    if client_socket:
        while (vid.isOpened()):
            try:
                img, frame = vid.read()
                frame = imutils.resize(frame, width=380)
                a = pickle.dumps(frame)
                message = struct.pack("Q", len(a)) + a
                client_socket.sendall(message)
                cv2.imshow(f"TO: {host_ip}", frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord("q"):
                    client_socket.close()
            except:
                print('VIDEO FINISHED!')
                break
So what I have to do is merge the code so that instead of running only screen sharing or only webcam sharing, the same client shares both at the same time. Although the IP address is left blank in the code shown here, I do use my real IP address in my own code; I just felt weird sharing it.
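One straightforward way to send both streams at once, sketched below, is to run the two client functions above in separate threads, each opening its own socket to the server; the server already starts a thread per accepted connection in show_client(), so it would simply display two windows for that client. This is only a sketch of the idea, not tested against the code above, and the cv2.imshow/cv2.waitKey preview calls inside each function may need to be removed or moved to the main thread, since OpenCV GUI calls are not reliably safe from worker threads.

import threading

# student_show() and student_screen_show() are the two functions shown above;
# each one opens its own connection to the same server.
if __name__ == '__main__':
    webcam_thread = threading.Thread(target=student_show, daemon=True)
    screen_thread = threading.Thread(target=student_screen_show, daemon=True)
    webcam_thread.start()
    screen_thread.start()
    webcam_thread.join()
    screen_thread.join()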

Using Python, a Raspberry Pi and a mobile network for live webcam streaming without big lags?

Is it possible to write a webcam streaming script in Python for mobile networks? The biggest requirement is that it should be as close to real time as possible, without big lags and delay. I have tried some standard UDP examples I found on Google. While on my private WiFi it works perfectly at a 320x240 resolution.
But as soon as I switch to my LTE surf stick, where I have about 3-4 Mbit/s of upload, the picture lags extremely. It has a big delay and a lot of frame drops.
I wonder why, because 3 Mbit/s should be enough...
So my guess is that I need some kind of compression? Or am I missing something essential here, and is it not even possible without a lot of buffering, which would make real time impossible?
Here is the code I use for the Raspberry:
import socket
import cv2 as cv

addr = ('myserver.xx', 1331)
buf = 512
width = 320
height = 240

cap = cv.VideoCapture(0)
cap.set(3, width)
cap.set(4, height)
cap.set(cv.CAP_PROP_FPS, 25)
cap.set(cv.CAP_PROP_FOURCC, cv.VideoWriter.fourcc('M','J','P','G'))

code = 'start'
code = ('start' + (buf - len(code)) * 'a').encode('utf-8')

if __name__ == '__main__':
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    while(cap.isOpened()):
        ret, frame = cap.read()
        #frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
        if ret:
            s.sendto(code, addr)
            data = frame.tostring()
            for i in range(0, len(data), buf):
                s.sendto(data[i:i+buf], addr)
            # cv.imshow('send', frame)
            # if cv.waitKey(1) & 0xFF == ord('q'):
            #     break
        else:
            break
    # s.close()
    # cap.release()
    # cv.destroyAllWindows()
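For scale: a raw 320x240 BGR frame is 230,400 bytes, so 25 uncompressed frames per second is roughly 46 Mbit/s, far above a 3-4 Mbit/s LTE uplink, which by itself explains the lag. JPEG-compressing each frame with cv2.imencode before sending typically brings this down to well under 1 Mbit/s at this resolution. A minimal sketch of the sending side under that assumption (same addressing scheme as above; the quality value 70 is just an example):

import socket
import cv2 as cv

addr = ('myserver.xx', 1331)
buf = 512

cap = cv.VideoCapture(0)
cap.set(cv.CAP_PROP_FRAME_WIDTH, 320)
cap.set(cv.CAP_PROP_FRAME_HEIGHT, 240)
cap.set(cv.CAP_PROP_FPS, 25)

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while cap.isOpened():
    ret, frame = cap.read()
    if not ret:
        break
    # JPEG-encode the frame; quality trades bandwidth against image quality
    ok, jpeg = cv.imencode('.jpg', frame, [int(cv.IMWRITE_JPEG_QUALITY), 70])
    if not ok:
        continue
    data = jpeg.tobytes()
    # announce the size of the coming frame, then send it in buf-sized chunks
    s.sendto(str(len(data)).encode('utf-8'), addr)
    for i in range(0, len(data), buf):
        s.sendto(data[i:i + buf], addr)

The receiver then reassembles the chunks and decodes with cv.imdecode; since UDP can drop or reorder packets, a real setup would also need some per-frame sequence check.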

Camera feed sent through TCP socket gets stuck sometimes

I am getting the camera feed from a camera on a Raspberry Pi to the computer using a TCP socket, to use it with OpenCV. It works well with minimal delay. However, sometimes the frame gets stuck (freezes) and after a while the feed comes back, or the OpenCV window crashes. I have looked everywhere and tried multiple things, but I just don't know what is causing it.
Server (PC):
import socket
import struct
import numpy as np
import cv2

host = "192.168.0.12"
portCar = 8010

# Camera socket
camS = socket.socket()
camS.bind((host, portCar))
camS.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

# Listen for camera
camS.listen(0)
print("Waiting for camera connection...")
camCon = camS.accept()[0]
camFile = camCon.makefile("rb")
print("Connection made with camera")
camS.settimeout(0.00001)
numOfBytes = struct.calcsize("<L")

try:
    while(True):
        camS.setblocking(False)
        imageLength = struct.unpack("<L", camFile.read(numOfBytes))[0]
        if imageLength == 0:
            break
        nparr = np.frombuffer(camFile.read(imageLength), np.uint8)
        frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
        cv2.imshow('RC Car Video stream', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    camFile.close()
    camS.close()
    cv2.destroyAllWindows()
    print("Server - Camera connection closed")
Client (PI):
import io
import socket
import struct
import time
import picamera

client_socket = socket.socket()
client_socket.connect(('192.168.0.12', 8010))
client_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
connection = client_socket.makefile('wb')

class SplitFrames(object):
    def __init__(self, connection):
        self.connection = connection
        self.stream = io.BytesIO()

    def write(self, buf):
        if buf.startswith(b'\xff\xd8'):
            size = self.stream.tell()
            if size > 0:
                self.connection.write(struct.pack('<L', size))
                self.connection.flush()
                self.stream.seek(0)
                self.connection.write(self.stream.read(size))
                self.stream.seek(0)
        self.stream.write(buf)

try:
    output = SplitFrames(connection)
    with picamera.PiCamera(resolution='VGA', framerate=30) as camera:
        time.sleep(2)
        camera.rotation = 180
        camera.start_recording(output, format='mjpeg')
        camera.wait_recording(2000)
        camera.stop_recording()
    # Write the terminating 0-length to the connection to let the
    # server know we're done
    connection.write(struct.pack('<L', 0))
finally:
    connection.close()
    client_socket.close()
    print("Client - Connection closed")
Any help will be greatly appreciated.
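One detail that stands out in the server above is that camS.settimeout(0.00001) and camS.setblocking(False) are applied to the listening socket inside the loop, while the actual reads go through camFile; mixing a timeout and non-blocking mode into a makefile()-based reader is a plausible source of intermittent stalls, though that is only a guess without reproducing the setup. For comparison, a minimal fully blocking receive loop for the same '<L' length-prefixed MJPEG protocol could look like this:

import socket
import struct
import numpy as np
import cv2

camS = socket.socket()
camS.bind(("192.168.0.12", 8010))
camS.listen(0)
camCon = camS.accept()[0]
camFile = camCon.makefile("rb")  # blocking, buffered reader over the socket
numOfBytes = struct.calcsize("<L")

try:
    while True:
        header = camFile.read(numOfBytes)
        if len(header) < numOfBytes:      # connection closed mid-stream
            break
        imageLength = struct.unpack("<L", header)[0]
        if imageLength == 0:              # sender's explicit end-of-stream marker
            break
        jpeg = camFile.read(imageLength)  # buffered read blocks until all bytes arrive
        frame = cv2.imdecode(np.frombuffer(jpeg, np.uint8), cv2.IMREAD_COLOR)
        if frame is not None:
            cv2.imshow('RC Car Video stream', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    camFile.close()
    camS.close()
    cv2.destroyAllWindows()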

Python TCP socket send receive large delay

I used Python sockets to make a server on my Raspberry Pi 3 (Raspbian) and a client on my laptop (Windows 10). The server streams images to the laptop at a rate of 10 fps, and can reach 15 fps if I push it. The problem is that when I want the laptop to send back a command based on the image, the frame rate drops sharply to 3 fps. The process is like this:
Pi sends img => Laptop receives img => Quick process => Send command based on process result => Pi receives command, prints it => Pi sends img => ...
The processing time for each frame does not cause this (0.02 s at most per frame), so currently I am at a loss as to why the frame rate drops so much. The image is quite large, around 200 kB, and the command is only a short string of 3 bytes. The image is in matrix form and is pickled before sending, while the command is sent as-is.
Can someone please explain to me why sending back such a short command would make the frame rate drop so much? And if possible, a solution for this problem. I tried making two servers, one dedicated to sending images and one to receiving commands, but the result is the same.
Server:
import socket
import pickle
import time
import cv2
import numpy as np
from picamera.array import PiRGBArray
from picamera import PiCamera
from SendFrameInOO import PiImageServer

def main():
    # initialize the server and time stamp
    ImageServer = PiImageServer()
    ImageServer2 = PiImageServer()
    ImageServer.openServer('192.168.0.89', 50009)
    ImageServer2.openServer('192.168.0.89', 50002)

    # Initialize the camera object
    camera = PiCamera()
    camera.resolution = (320, 240)
    camera.framerate = 10  # it seems this cannot go higher than 10
                           # unless special measures are taken, which may
                           # reduce image quality
    camera.exposure_mode = 'sports'  # reduce blur
    rawCapture = PiRGBArray(camera)

    # allow the camera to warmup
    time.sleep(1)

    # capture frames from the camera
    print('<INFO> Preparing to stream video...')
    timeStart = time.time()
    for frame in camera.capture_continuous(rawCapture, format="bgr",
                                           use_video_port=True):
        # grab the raw NumPy array representing the image, then initialize
        # the timestamp and occupied/unoccupied text
        image = frame.array
        imageData = pickle.dumps(image)
        ImageServer.sendFrame(imageData)  # send the frame data

        # receive command from laptop and print it
        command = ImageServer2.recvCommand()
        if command == 'BYE':
            print('BYE received, ending stream session...')
            break
        print(command)

        # clear the stream in preparation for the next one
        rawCapture.truncate(0)

    print('<INFO> Video stream ended')
    ImageServer.closeServer()

    elapsedTime = time.time() - timeStart
    print('<INFO> Total elapsed time is: ', elapsedTime)

if __name__ == '__main__': main()
Client:
from SupFunctions.ServerClientFunc import PiImageClient
import time
import pickle
import cv2

def main():
    # Initialize
    result = 'STP'
    ImageClient = PiImageClient()
    ImageClient2 = PiImageClient()

    # Connect to server
    ImageClient.connectClient('192.168.0.89', 50009)
    ImageClient2.connectClient('192.168.0.89', 50002)
    print('<INFO> Connection established, preparing to receive frames...')
    timeStart = time.time()

    # Receiving and processing frames
    while(1):
        # Receive and unload a frame
        imageData = ImageClient.receiveFrame()
        image = pickle.loads(imageData)
        cv2.imshow('Frame', image)
        key = cv2.waitKey(1) & 0xFF

        # Exit when q is pressed
        if key == ord('q'):
            ImageClient.sendCommand('BYE')
            break

        ImageClient2.sendCommand(result)

    ImageClient.closeClient()
    elapsedTime = time.time() - timeStart
    print('<INFO> Total elapsed time is: ', elapsedTime)
    print('Press any key to exit the program')
    #cv2.imshow('Picture from server', image)
    cv2.waitKey(0)

if __name__ == '__main__': main()
PiImageServer and PiImageClient:
import socket
import pickle
import time

class PiImageClient:
    def __init__(self):
        self.s = None
        self.counter = 0

    def connectClient(self, serverIP, serverPort):
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.connect((serverIP, serverPort))

    def closeClient(self):
        self.s.close()

    def receiveOneImage(self):
        imageData = b''
        lenData = self.s.recv(8)
        length = pickle.loads(lenData)  # should be 921764 for 640x480 images
        print('Data length is:', length)
        while len(imageData) < length:
            toRead = length-len(imageData)
            imageData += self.s.recv(4096 if toRead>4096 else toRead)
            #if len(imageData)%200000 <= 4096:
            #    print('Received: {} of {}'.format(len(imageData), length))
        return imageData

    def receiveFrame(self):
        imageData = b''
        lenData = self.s.recv(8)
        length = pickle.loads(lenData)
        print('Data length is:', length)
        '''length = 921764 # for 640x480 images
        length = 230563 # for 320x240 images'''
        while len(imageData) < length:
            toRead = length-len(imageData)
            imageData += self.s.recv(4096 if toRead>4096 else toRead)
            #if len(imageData)%200000 <= 4096:
            #    print('Received: {} of {}'.format(len(imageData), length))
        self.counter += 1
        if len(imageData) == length:
            print('Successfully received frame {}'.format(self.counter))
        return imageData

    def sendCommand(self, command):
        if len(command) != 3:
            print('<WARNING> Length of command string is different from 3')
        self.s.send(command.encode())
        print('Command {} sent'.format(command))

class PiImageServer:
    def __init__(self):
        self.s = None
        self.conn = None
        self.addr = None
        #self.currentTime = time.time()
        self.currentTime = time.asctime(time.localtime(time.time()))
        self.counter = 0

    def openServer(self, serverIP, serverPort):
        print('<INFO> Opening image server at {}:{}'.format(serverIP,
                                                            serverPort))
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.bind((serverIP, serverPort))
        self.s.listen(1)
        print('Waiting for client...')
        self.conn, self.addr = self.s.accept()
        print('Connected by', self.addr)

    def closeServer(self):
        print('<INFO> Closing server...')
        self.conn.close()
        self.s.close()
        #self.currentTime = time.time()
        self.currentTime = time.asctime(time.localtime(time.time()))
        print('Server closed at', self.currentTime)

    def sendOneImage(self, imageData):
        print('<INFO> Sending only one image...')
        imageDataLen = len(imageData)
        lenData = pickle.dumps(imageDataLen)
        print('Sending image length')
        self.conn.send(lenData)
        print('Sending image data')
        self.conn.send(imageData)

    def sendFrame(self, frameData):
        self.counter += 1
        print('Sending frame ', self.counter)
        frameDataLen = len(frameData)
        lenData = pickle.dumps(frameDataLen)
        self.conn.send(lenData)
        self.conn.send(frameData)

    def recvCommand(self):
        commandData = self.conn.recv(3)
        command = commandData.decode()
        return command
I believe the problem is two-fold. First, you are serializing all activity: the server sends a complete image, then instead of continuing on to send the next image (which would better fit the definition of "streaming"), it stops and waits for all bytes of the previous image to make their way across the network to the client, for the client to receive them, unpickle the image and send a response, and for that response to travel back across the wire to the server.
Is there a reason you need them to be in lockstep like this? If not, try to parallelize the two sides. Have your server create a separate thread to listen for commands coming back (or simply use select to determine when the command socket has something to receive).
Second, you are likely being bitten by Nagle's algorithm (https://en.wikipedia.org/wiki/Nagle%27s_algorithm), which is intended to prevent sending numerous packets with small payloads (but lots of overhead) across the network. Your client-side kernel has received your three bytes of command data and buffered them, waiting for you to provide more data before it sends anything to the server (it will eventually send it anyway, after a delay). To change that, use the TCP_NODELAY socket option on the client side (see https://stackoverflow.com/a/31827588/1076479).
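For concreteness, both suggestions could be grafted onto the PiImageClient/PiImageServer classes from the question along these lines. This is only a sketch: the method names connectCommandClient and pollCommand are made up here, and the functions are written with self so they can be pasted into the respective classes.

import select
import socket

# Client side (laptop): disable Nagle's algorithm on the command connection so
# the 3-byte command is pushed out immediately instead of being buffered.
def connectCommandClient(self, serverIP, serverPort):
    self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    self.s.connect((serverIP, serverPort))

# Server side (Pi): check for a pending command without blocking, so the
# capture loop keeps sending frames while no command has arrived yet.
def pollCommand(self, timeout=0.0):
    ready, _, _ = select.select([self.conn], [], [], timeout)
    if not ready:
        return None
    data = self.conn.recv(3)
    return data.decode() if data else None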
