I am trying to stream my webcam into a remote desktop service (AWS, in this case). I managed to get it working, but I am getting a lot of dropped frames. Someone suggested that the problem is that I am not encoding the webcam feed: I am sending raw camera data, something like 327 Mbps, which is far more than the network can push through at once. At least, that is how I understand it.
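Doing the rough math myself (I'm assuming a 1280x720 BGR feed at 30 fps here; I haven't checked what my camera actually delivers, so the exact figure will vary):

# Back-of-the-envelope bitrate for raw, uncompressed BGR frames.
# These are assumed numbers, not measured from my camera.
width, height, channels, fps = 1280, 720, 3, 30
bits_per_second = width * height * channels * 8 * fps
print(bits_per_second / 1e6, "Mbps")  # ~663 Mbps at these settings

Even at lower resolutions this lands in the hundreds of Mbps, so the number I was told seems plausible.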
I am pretty new to Python and don't really know how to approach this; any help would be greatly appreciated!
These are the two scripts that I run to connect to my instance via sockets.
On the server side:
server_side.py
import pickle
import socket
import struct

import cv2

HOST = ''
PORT = somenumber

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Socket created')
s.bind((HOST, PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')

conn, addr = s.accept()

data = b''
# "!L" is a fixed 4-byte unsigned int in network byte order; the native
# "L" varies between platforms (4 bytes on some, 8 on others), which
# desyncs the stream when the client and server machines differ
payload_size = struct.calcsize("!L")

while True:
    # Read until we have the 4-byte length header
    while len(data) < payload_size:
        data += conn.recv(4096)
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("!L", packed_msg_size)[0]

    # Read until we have the whole frame
    while len(data) < msg_size:
        data += conn.recv(4096)
    frame_data = data[:msg_size]
    data = data[msg_size:]

    # Deserialize and display the frame
    frame = pickle.loads(frame_data)
    cv2.imshow('frame', frame)
    cv2.waitKey(1)
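If encoding the frames is the fix, I'm guessing the receiving end would swap pickle.loads for something like cv2.imdecode. This is just a sketch from reading the OpenCV docs, assuming the client sends JPEG bytes instead of a pickled array:

import numpy as np

# Replace frame = pickle.loads(frame_data) with a JPEG decode
# (assumes frame_data now holds the bytes produced by cv2.imencode)
jpeg_buffer = np.frombuffer(frame_data, dtype=np.uint8)
frame = cv2.imdecode(jpeg_buffer, cv2.IMREAD_COLOR)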
On the client side:
client_side.py
import pickle
import socket
import struct

import cv2

cap = cv2.VideoCapture(0)
clientsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
clientsocket.connect(('my_IP', somenumber))

while True:
    ret, frame = cap.read()
    if not ret:
        break  # camera returned no frame; stop sending
    # Serialize the raw frame
    data = pickle.dumps(frame)
    # Prefix each frame with its length ("!L" must match the server side)
    message_size = struct.pack("!L", len(data))
    clientsocket.sendall(message_size + data)
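From what I've read, cv2.imencode can compress each frame to JPEG before it goes on the wire, which should cut the bitrate by an order of magnitude or more. This is what I'd try in place of the pickle.dumps line (the quality value of 80 is an arbitrary guess on my part):

# Compress the frame to JPEG instead of pickling the raw array.
# Quality 80 is a starting point: lower = less bandwidth, higher = sharper.
ok, jpeg = cv2.imencode('.jpg', frame, [cv2.IMWRITE_JPEG_QUALITY, 80])
if ok:
    data = jpeg.tobytes()
    clientsocket.sendall(struct.pack("!L", len(data)) + data)

Is this the right approach for fixing the dropped frames, or is there a better way to stream this?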