import cv2
import mediapipe as mp
import math

# Initialize the MediaPipe hand-landmark model.
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
hands = mp_hands.Hands(min_detection_confidence=0.7, min_tracking_confidence=0.5)

# Landmark index constants (MediaPipe 21-point hand model).
# Coordinates are normalized to [0, 1]; y grows downward in image space.
# NOTE(review): in MediaPipe's numbering, 6/10/14/18 are the PIP joints and
# 2 is the thumb MCP — the *_MCP names below are kept but verify the intent.
WRIST = 0
THUMB_TIP, THUMB_MCP = 4, 2    # thumb tip, base joint
INDEX_TIP, INDEX_MCP = 8, 6     # index finger
MIDDLE_TIP, MIDDLE_MCP = 12, 10 # middle finger
RING_TIP, RING_MCP = 16, 14     # ring finger
PINKY_TIP, PINKY_MCP = 20, 18   # pinky finger

def get_distance(p1, p2):
    """Return the Euclidean distance between two landmark points.

    Both arguments are objects exposing normalized ``x`` and ``y``
    attributes (e.g. MediaPipe landmarks).
    """
    dx = p2.x - p1.x
    dy = p2.y - p1.y
    return math.hypot(dx, dy)

def detect_gesture(hand_landmarks):
    """Classify one hand's landmarks into a named gesture.

    Args:
        hand_landmarks: MediaPipe landmark list for a single hand;
            ``hand_landmarks.landmark`` holds 21 points with normalized
            ``x``/``y`` in [0, 1] (y grows downward in image space).

    Returns:
        str: one of "OK", "Thumbs Up", "Open Hand", "Fist", or "Unknown".
    """
    landmarks = hand_landmarks.landmark
    thumb_tip = landmarks[THUMB_TIP]
    index_tip = landmarks[INDEX_TIP]

    # 1. Count extended fingers: a finger counts as open when its tip is
    #    far enough from its base joint. The 0.1 threshold is in
    #    normalized coordinates, so it is roughly distance-independent.
    fingers_open = 0
    for tip, mcp in [(INDEX_TIP, INDEX_MCP),
                     (MIDDLE_TIP, MIDDLE_MCP),
                     (RING_TIP, RING_MCP),
                     (PINKY_TIP, PINKY_MCP)]:
        if get_distance(landmarks[tip], landmarks[mcp]) > 0.1:
            fingers_open += 1

    # 2. Thumbs-up: thumb tip above its base joint (smaller y = higher).
    thumb_vert = thumb_tip.y < landmarks[THUMB_MCP].y

    # 3. OK sign: index tip touches thumb tip while the remaining three
    #    fingers stay curled.
    ok_distance = get_distance(index_tip, thumb_tip)
    other_closed = all([
        get_distance(landmarks[MIDDLE_TIP], landmarks[MIDDLE_MCP]) < 0.1,
        get_distance(landmarks[RING_TIP], landmarks[RING_MCP]) < 0.1,
        get_distance(landmarks[PINKY_TIP], landmarks[PINKY_MCP]) < 0.1
    ])

    # 4. Resolve in priority order: the most specific gesture wins.
    if ok_distance < 0.05 and other_closed:
        return "OK"
    elif thumb_vert and fingers_open == 0:
        return "Thumbs Up"
    elif fingers_open >= 3:
        return "Open Hand"
    elif fingers_open <= 1:
        return "Fist"
    return "Unknown"

# Video-stream processing: read webcam frames, detect hands, classify the
# gesture, and overlay the label until the stream ends or Esc is pressed.
cap = cv2.VideoCapture(0)
try:
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            break

        # Mirror the image so on-screen motion matches the user's motion.
        frame = cv2.flip(frame, 1)
        # MediaPipe expects RGB input; OpenCV captures BGR.
        results = hands.process(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))

        if results.multi_hand_landmarks:
            for hand_landmarks in results.multi_hand_landmarks:
                mp_drawing.draw_landmarks(frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)

                gesture = detect_gesture(hand_landmarks)
                cv2.putText(frame, gesture, (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)

        cv2.imshow('Gesture Recognition', frame)
        if cv2.waitKey(1) == 27:  # Esc key
            break
finally:
    # Release camera and GUI resources even if the loop raises.
    cap.release()
    cv2.destroyAllWindows()
