import cv2
import numpy as np

from PySide6.QtCore import QTimer, Signal, QThread, Qt, QSize
from PySide6.QtGui import QImage, QPixmap
from PySide6.QtWidgets import QWidget, QLabel

from conf import Config
from video_ui import Ui_Form


class Video(QWidget):
    """Widget that displays frames captured from a video device.

    A background :class:`VideoThread` reads frames from the OpenCV
    capture opened here and emits them as ``QPixmap``s, which are shown
    on ``self.ui.label``.
    """

    # Emitted from closeEvent so the owner can react to the window closing.
    close_signal = Signal()

    def __init__(self, conf: Config):
        super().__init__()
        self.ui = Ui_Form()  # instantiate the generated UI class
        self.ui.setupUi(self)
        self.conf = conf

        # Allow the widget to gain focus via keyboard (Tab) or mouse click.
        self.setFocusPolicy(Qt.FocusPolicy.StrongFocus)
        # Enable mouse-move tracking (without a pressed button) per config.
        self.ui.label.setMouseTracking(self.conf.track)
        self.setMouseTracking(self.conf.track)
        # A 1x1 minimum lets the label shrink as well as grow with the window.
        self.ui.label.setMinimumSize(1, 1)

        # Overlay shown while waiting for the capture thread to exit.
        self.label_wait = QLabel("等待进程退出...")
        self.label_wait.setFixedSize(220, 80)
        self.label_wait.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.label_wait.setStyleSheet("QLabel {"
                                      " background-color:#E6A23C;"
                                      " border-radius: 4px;"
                                      " color:#ffffff;"
                                      "}")
        self.ui.gridLayout.addWidget(self.label_wait, 1, 1, 1, 1)
        self.label_wait.hide()

        # Open the capture device and request the configured input
        # resolution (i.e. the source screen resolution) at 60 fps.
        self.cap = cv2.VideoCapture(self.conf.screen.device)
        self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, self.conf.screen.width)
        self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, self.conf.screen.height)
        self.cap.set(cv2.CAP_PROP_FPS, 60)

        self.video_thread_finished = False
        self.videoThread = VideoThread(self)
        self.videoThread.frame_signal.connect(self.ui.label.setPixmap)
        self.videoThread.finished.connect(self.close)
        self.videoThread.start()

    def video_size(self) -> QSize:
        """Return the preferred size of the video display label."""
        return self.ui.label.sizeHint()

    def stop(self):
        """Hide the video, show the wait overlay and stop the capture thread."""
        self.ui.label.hide()
        self.label_wait.show()
        self.videoThread.stop()

    def closeEvent(self, event):
        """Ensure the capture thread terminates, then notify listeners.

        Previously the thread was only stopped via stop(); closing the
        window directly left the loop running against a destroyed widget
        and kept the device open. stop() on the thread is idempotent, so
        calling it here is always safe.
        """
        self.video_thread_finished = True
        self.videoThread.stop()
        self.close_signal.emit()


class VideoThread(QThread):
    """Worker thread that reads frames from the parent's capture device.

    Each frame is converted BGR->RGB, scaled to the parent widget's
    current size and emitted as a ``QPixmap`` on ``frame_signal``.

    NOTE(review): Qt documents QPixmap as safe to use only in the GUI
    thread; building it here appears to work with PySide6, but emitting
    a QImage and converting in the GUI thread would be the strictly
    correct design. Left as-is because the signal signature (and the
    direct connection to ``QLabel.setPixmap``) is part of the interface.
    """

    frame_signal = Signal(QPixmap)

    def __init__(self, parent: Video):
        super().__init__()
        self.parent = parent  # the owning Video widget (holds cap and conf)
        self.flag = True      # loop guard; cleared by stop()

    def run(self):
        # The aspect-ratio policy never changes at runtime; hoist it out
        # of the per-frame loop.
        aspect_mode = (
            Qt.AspectRatioMode.KeepAspectRatio
            if self.parent.conf.screen.KeepAspectRatio
            else Qt.AspectRatioMode.IgnoreAspectRatio
        )
        while self.flag:
            if self.parent.cap.isOpened():
                ret, frame = self.parent.cap.read()
                if ret:
                    rgb_image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                    h, w, ch = rgb_image.shape
                    bytes_per_line = ch * w
                    img = QImage(rgb_image.data, w, h, bytes_per_line, QImage.Format.Format_RGB888)
                    pixmap = QPixmap.fromImage(
                        img.scaled(
                            self.parent.width(),
                            self.parent.height(),
                            aspect_mode,
                            Qt.TransformationMode.SmoothTransformation,
                        )
                    )
                    self.frame_signal.emit(pixmap)
                else:
                    # Read failed: release so the reopen branch below
                    # recreates the capture on the next iteration.
                    self.parent.cap.release()
            else:
                # Device lost or not yet open: back off briefly instead
                # of busy-spinning (the old code pegged a core retrying
                # VideoCapture() in a tight loop), then try to reopen.
                self.msleep(100)
                self.parent.cap = cv2.VideoCapture(self.parent.conf.screen.device)
        # Loop exited via stop(): release the device (safe even if the
        # capture was already released above — release() is idempotent).
        self.parent.cap.release()

    def stop(self):
        """Ask the run() loop to exit; safe to call multiple times."""
        self.flag = False

