OpenCV/PyQt app crashes when trying to transfer data over a socket

Asked by tvz2xvvm on 2022-12-13

I'm trying to build a simple video chat application for two participants. JoinClient is a PyQt UI where the user enters a code to join a meeting; it then opens CallClient, where the actual call takes place. I grab a video frame with cv2.VideoCapture(0), process it with PIL and PyQt, and emit the resulting QPixmap for local display. To the server, however, I send the raw frame, and the server forwards it to the second participant. I do something similar for audio using pyaudio with an input/output stream. The application worked fine before I added the socket code, but with the sockets in place it stops responding as soon as CallClient opens. I don't understand why this happens or how to fix it.
client.py

from PyQt5 import QtCore, QtGui, QtWidgets
import meetingui
import joinui
from PIL.ImageQt import ImageQt
from PIL import Image
import cv2
import pyaudio
import network as net

class Worker(QtCore.QObject):
    finished = QtCore.pyqtSignal()
    cam_signal = QtCore.pyqtSignal(bool)
    mic_signal = QtCore.pyqtSignal(bool)
    p1video_feed_signal = QtCore.pyqtSignal(QtGui.QPixmap)
    p2video_feed_signal = QtCore.pyqtSignal(QtGui.QPixmap)

    def __init__(self, parent=None):
        QtCore.QObject.__init__(self, parent=parent)
        self.leave_call = False
        self.camera_status = False
        self.mic_status = False
        self.audio_interface = pyaudio.PyAudio()
        self.chunk = 2048
        self.sample_format = pyaudio.paInt16
        self.channels = 2
        self.fs = 44100

    def run(self, name, code):
        network = net.Network({'name': name, 'code': code})
        person = {}
        while not self.leave_call:
            # Get self mic data
            if self.mic_status:
                audio_data = self.stream.read(self.chunk)
                person['audio'] = audio_data
            else:
                person['audio'] = None
            # get self camera data
            if self.camera_status and self.video_feed.isOpened():
                check, self.p1frame = self.video_feed.read()
                if check:
                    self.p1frame = cv2.cvtColor(self.p1frame, cv2.COLOR_BGR2RGB)
                    self.p1frame = cv2.flip(self.p1frame, 1)
                    self.PILp1frame = Image.fromarray(self.p1frame).convert('RGB')
                    self.Qtp1frame = ImageQt(self.PILp1frame)
                    self.p1video_feed_signal.emit(QtGui.QPixmap.fromImage(self.Qtp1frame))
                    person['video_frame'] = self.p1frame
                else:
                    person['video_frame'] = None
            else:
                person['video_frame'] = None
            # send self data and receive person2 data
            p2 = network.send(person)
            if p2:
                if p2['audio']:
                    self.stream.write(p2['audio'])
                if p2['video_frame']:
                    self.PILp2frame = Image.fromarray(p2['video_frame']).convert('RGB')
                    self.Qtp2frame = ImageQt(self.PILp2frame)
                    self.p2video_feed_signal.emit(QtGui.QPixmap.fromImage(self.Qtp2frame))
                else:
                    self.p2video_feed_signal.emit(QtGui.QPixmap('Icons\\img_avatar.png'))
            else:
                break
        self.finished.emit()

    def leave(self):
        self.audio_interface.terminate()
        self.leave_call = True

    def toggle_cam(self):
        if not self.camera_status:
            self.video_feed = cv2.VideoCapture(0)
            self.camera_status = True
        else:
            self.camera_status = False
            self.video_feed.release()
            self.p1video_feed_signal.emit(QtGui.QPixmap('Icons\\img_avatar.png'))
        self.cam_signal.emit(self.camera_status)

    def toggle_mic(self):
        if not self.mic_status:
            self.stream = self.audio_interface.open(format=self.sample_format,
                                                    channels=self.channels,
                                                    rate=self.fs,
                                                    frames_per_buffer=self.chunk,
                                                    input=True,
                                                    output=True)
            self.mic_status = True
        else:
            self.mic_status = False
            self.stream.stop_stream()
            self.stream.close()
        self.mic_signal.emit(self.mic_status)

class CallClient(QtWidgets.QWidget, meetingui.Ui_MeetingWindow):
    stop_signal = QtCore.pyqtSignal()
    toggle_cam_signal = QtCore.pyqtSignal()
    toggle_mic_signal = QtCore.pyqtSignal()

    def __init__(self, code, name, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.setupUi(self)

        self.P1_name = name
        self.MeetCode = code

        self.setWindowTitle(f'Video Call App (Meeting Code - {code})')

        # Signals and Slots

        self.thread = QtCore.QThread()
        self.worker = Worker()

        self.stop_signal.connect(self.worker.leave)
        self.toggle_cam_signal.connect(self.worker.toggle_cam)
        self.toggle_mic_signal.connect(self.worker.toggle_mic)

        self.worker.moveToThread(self.thread)

        self.thread.started.connect(lambda: self.worker.run(self.P1_name, self.MeetCode))

        self.worker.cam_signal.connect(lambda status: self.handleCam(status))
        self.worker.mic_signal.connect(lambda status: self.handleMic(status))

        self.worker.p1video_feed_signal.connect(lambda video_feed: self.setP1Frames(video_feed))
        self.worker.p2video_feed_signal.connect(lambda video_feed: self.setP2Frames(video_feed))

        self.worker.finished.connect(self.thread.quit)
        self.worker.finished.connect(self.worker.deleteLater)
        self.thread.finished.connect(self.thread.deleteLater)
        self.thread.finished.connect(self.close)

        self.thread.start()

        self.leaveBtn.clicked.connect(self.stop_thread)
        self.cameraBtn.clicked.connect(self.toggle_cam)
        self.micBtn.clicked.connect(self.toggle_mic)

    def stop_thread(self):
        self.stop_signal.emit()
        self.close()

    def toggle_cam(self):
        self.toggle_cam_signal.emit()

    def toggle_mic(self):
        self.toggle_mic_signal.emit()

    def handleCam(self, cam_status):
        if cam_status:
            cam_icon = QtGui.QIcon()
            cam_icon.addPixmap(QtGui.QPixmap("Icons\\icons8-camera-96.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
            self.cameraBtn.setIcon(cam_icon)
            self.cameraBtn.setStyleSheet(
                "border:none;border-radius: 35px;padding: 10px;background-color: rgb(255, 255, 255);")
        else:
            cam_icon = QtGui.QIcon()
            cam_icon.addPixmap(QtGui.QPixmap("Icons\\no-cam-icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
            self.cameraBtn.setIcon(cam_icon)
            self.cameraBtn.setStyleSheet(
                "border:none;border-radius: 35px;padding: 10px;background-color: rgb(204, 0, 0);")

    def handleMic(self, mic_status):
        if mic_status:
            mic_icon = QtGui.QIcon()
            mic_icon.addPixmap(QtGui.QPixmap("Icons\\icons8-microphone-96.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
            self.micBtn.setIcon(mic_icon)
            self.micBtn.setStyleSheet(
                "border:none;border-radius: 35px;padding: 10px;background-color: rgb(255, 255, 255);")
        else:
            mic_icon = QtGui.QIcon()
            mic_icon.addPixmap(QtGui.QPixmap("Icons\\no-mic-icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
            self.micBtn.setIcon(mic_icon)
            self.micBtn.setStyleSheet(
                "border:none;border-radius: 35px;padding: 10px;background-color: rgb(204, 0, 0);")

    def setP1Frames(self, pixmap):
        self.Person1_Self.setPixmap(pixmap)

    def setP2Frames(self, pixmap):
        self.Person2_Opposite.setPixmap(pixmap)

class JoinClient(QtWidgets.QWidget, joinui.Ui_JoinClient):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.setupUi(self)

        self.setWindowFlag(QtCore.Qt.FramelessWindowHint)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground)

        self.joinbtn.clicked.connect(self.connect_to_meeting)

        self.x_btn.clicked.connect(self.btn_close_clicked)
        self.minus_btn.clicked.connect(self.btn_minus_clicked)

    def mousePressEvent(self, event):
        self.start = self.mapToGlobal(event.pos())
        self.pressing = True

    def mouseMoveEvent(self, event):
        if self.pressing:
            self.end = self.mapToGlobal(event.pos())
            self.movement = self.end - self.start
            self.setGeometry(self.mapToGlobal(self.movement).x(),
                             self.mapToGlobal(self.movement).y(),
                             self.width(),
                             self.height())
            self.start = self.end

    def mouseReleaseEvent(self, QMouseEvent):
        self.pressing = False

    def btn_close_clicked(self):
        self.close()

    def btn_minus_clicked(self):
        self.showMinimized()

    def connect_to_meeting(self):
        print(self.username_edit.text())
        print(self.code_edit.text())

        self.meetClient = CallClient(self.code_edit.text(), self.username_edit.text())
        self.meetClient.show()

        self.close()

if __name__ == '__main__':
    import sys

    app = QtWidgets.QApplication(sys.argv)
    main_client = JoinClient()
    main_client.show()
    sys.exit(app.exec_())

network.py

import socket
import pickle

class Network:
    def __init__(self, data: dict):
        self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.server = "192.168.1.38"
        self.port = 5555
        self.addr = (self.server, self.port)
        self.connect(data)

    def connect(self, data: dict):
        try:
            self.client.connect(self.addr)
            self.client.send(pickle.dumps(data))
            print('connected :D')
        except:
            pass

    def send(self, data):
        try:
            self.client.send(pickle.dumps(data))
            return pickle.loads(self.client.recv(2048*500))
        except socket.error as e:
            print(e)

server.py

import socket
import threading
from models import Meeting
import pickle

server = "192.168.1.38"
port = 5555

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

try:
    s.bind((server, port))
except socket.error as e:
    str(e)

s.listen(2)
print("Waiting for a connection, Server Started")

meetings = []

def threaded_client(conn):
    initials = pickle.loads(conn.recv(2048))
    currentMeeting = None
    currentMeetingIndex = 0
    p = 'p1'
    for meeting in meetings:
        if initials['code'] == meeting.code:
            currentMeeting = meeting
            p = 'p2'
            currentMeetingIndex = meetings.index(currentMeeting)
            meetings[currentMeetingIndex].P2 = {'name': initials['name'], 'video_frame': None, 'audio': None}
    if not currentMeeting:
        currentMeeting = Meeting({'name': initials['name'], 'video_frame': None, 'audio': None}, {'name': None, 'video_frame': None, 'audio': None}, initials['code'])
        meetings.append(currentMeeting)
        currentMeetingIndex = meetings.index(currentMeeting)
    reply = ""
    while True:
        try:
            data = pickle.loads(conn.recv(2048*500))

            if not data:
                print("Disconnected")
                if p == 'p1':
                    meetings[currentMeetingIndex].P1 = {'name': None, 'video_frame': None, 'audio': None}
                else:
                    meetings[currentMeetingIndex].P2 = {'name': None, 'video_frame': None, 'audio': None}
                break
            else:
                if p == 'p1':
                    meetings[currentMeetingIndex].P1['video_frame'] = data['video_frame']
                    meetings[currentMeetingIndex].P1['audio'] = data['audio']

                    reply = meetings[currentMeetingIndex].P2
                else:
                    meetings[currentMeetingIndex].P2['video_frame'] = data['video_frame']
                    meetings[currentMeetingIndex].P2['audio'] = data['audio']

                    reply = meetings[currentMeetingIndex].P1

                print("Received: ", data)
                print("Sending : ", reply)

            conn.sendall(pickle.dumps(reply))
        except:
            break

    if not currentMeeting.P1['name'] and not currentMeeting.P2['name']:
        meetings.remove(currentMeeting)
        print('meet ended')

    print("Lost connection")
    print(meetings)
    conn.close()

while True:
    conn, addr = s.accept()
    print("Connected to:", addr)

    if conn:
        new_thread = threading.Thread(target=threaded_client, args=(conn,))
        new_thread.start()

kcugc4gi 1#

One inconvenience of TCP is that it is a stream protocol, not a packet protocol. The bytes all get through eventually, but TCP does not honor your packet boundaries. If you send a 2048-byte chunk and then a 1024-byte chunk, depending on network traffic you might receive 500 bytes, then 1000, then 600, then the final 972. You cannot assume you are receiving a complete pickle, and you cannot assume you are receiving only one pickle -- several can get combined. You have to send some kind of header that lets the other end reconstruct the packets.
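A common form of such a header is a fixed-size length prefix: pickle the object, send its length first, and on the receiving side read exactly that many bytes before calling pickle.loads. Below is a minimal sketch of that idea; the helper names (send_msg, recv_exact, recv_msg) and the 4-byte big-endian prefix are my own choices, not part of the code above.

import pickle
import struct

HEADER = struct.Struct('!I')  # 4-byte unsigned length, network byte order

def send_msg(sock, obj):
    # Pickle the object and send it prefixed with its length.
    payload = pickle.dumps(obj)
    sock.sendall(HEADER.pack(len(payload)) + payload)

def recv_exact(sock, n):
    # Keep calling recv() until exactly n bytes have arrived (or the peer closes).
    buf = b''
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            return None
        buf += chunk
    return buf

def recv_msg(sock):
    # Read one length-prefixed message and unpickle it.
    header = recv_exact(sock, HEADER.size)
    if header is None:
        return None
    (length,) = HEADER.unpack(header)
    payload = recv_exact(sock, length)
    if payload is None:
        return None
    return pickle.loads(payload)

With helpers like these, Network.send would become send_msg(self.client, data) followed by return recv_msg(self.client), and the server loop would call recv_msg(conn) and send_msg(conn, reply) instead of the raw recv(2048*500) and sendall(pickle.dumps(reply)) calls.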
