import cv2
from ultralytics import YOLO
from deep_sort_realtime.deepsort_tracker import DeepSort
import numpy as np


# Load the YOLO detection model from a local weights file.
# NOTE(review): the original comment said "YOLOv8" but the weights file is
# named yolo11n.pt — confirm which model family is actually intended.
model = YOLO('../ultralytics/assets/yolo11n.pt')

# Initialize the DeepSORT tracker: a track is dropped after 5 consecutive
# frames without a matching detection (max_age), and confirmed after 2
# consecutive matched frames (n_init).
tracker = DeepSort(max_age=5, n_init=2)


def detect_hand_movement(video_path):
    """Detect and track hands in a video, annotating per-track movement.

    Opens ``video_path`` with OpenCV, runs YOLO detection on every frame
    (class id 0 — assumed to be the hand class, TODO confirm against the
    model's label map), feeds the detections to DeepSORT, and draws each
    confirmed track's bounding box, id, and frame-to-frame displacement /
    direction. Displays the annotated frames; press 'q' to quit.

    Args:
        video_path: Path to a video file, or a camera index, accepted by
            ``cv2.VideoCapture``.
    """
    cap = cv2.VideoCapture(video_path)
    prev_frame_tracks = {}  # track_id -> previous-frame center (cx, cy)

    try:
        while cap.isOpened():
            ret, frame = cap.read()
            if not ret:
                break

            # Hand detection with YOLO (class 0 assumed to be "hand").
            results = model(frame, classes=0)
            detections = []
            for result in results:
                boxes = result.boxes.cpu().numpy()
                for box in boxes:
                    x1, y1, x2, y2 = box.xyxy[0].astype(int)
                    # DeepSORT expects (left, top, width, height).
                    detections.append(
                        ([x1, y1, x2 - x1, y2 - y1], box.conf[0], box.cls[0])
                    )

            # Object tracking with DeepSORT.
            tracks = tracker.update_tracks(detections, frame=frame)

            # track_id -> (center, top-left corner); the corner is kept so the
            # movement label below can be drawn next to the correct box.
            current_frame_tracks = {}
            for track in tracks:
                if not track.is_confirmed():
                    continue
                track_id = track.track_id
                x1, y1, x2, y2 = map(int, track.to_ltrb())
                center = ((x1 + x2) // 2, (y1 + y2) // 2)
                current_frame_tracks[track_id] = (center, (x1, y1))

                cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)
                cv2.putText(frame, str(track_id), (x1, y1 - 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 0), 2)

            # Analyze hand movement relative to the previous frame.
            for track_id, (center, corner) in current_frame_tracks.items():
                if track_id not in prev_frame_tracks:
                    continue
                prev_center = prev_frame_tracks[track_id]
                dx = center[0] - prev_center[0]
                dy = center[1] - prev_center[1]
                distance = np.hypot(dx, dy)
                direction = np.degrees(np.arctan2(dy, dx))

                # BUG FIX: anchor the label to *this* track's own corner.
                # The original referenced x1/y1 here, which were whatever
                # values the drawing loop left behind from its last track,
                # so labels were stacked on the wrong box.
                cv2.putText(frame, f"Move: {distance:.2f}px, Dir: {direction:.2f}deg",
                            (corner[0], corner[1] - 30),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)

            prev_frame_tracks = {
                tid: ctr for tid, (ctr, _corner) in current_frame_tracks.items()
            }

            # Show the annotated frame.
            cv2.imshow('Hand Movement Detection', frame)

            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    finally:
        # Release the capture and close windows even if an exception occurs
        # mid-stream (the original leaked the capture on error).
        cap.release()
        cv2.destroyAllWindows()


if __name__ == "__main__":
    video_path = 'your_video_path.mp4'  # 替换为你的视频文件路径
    detect_hand_movement(0)