import math
import cv2
import numpy as np
import mediapipe as mp
from . import fingerutils

mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils

def mediapipe_gestures(image, servo):
    """Detect hands in a BGR frame and drive the servo from finger angles.

    Parameters
    ----------
    image : np.ndarray
        BGR frame of shape (H, W, 3), as produced by OpenCV capture.
        The caller's array is not modified.
    servo : object
        Controller exposing ``dirver(hand_list)`` (name kept to match the
        existing servo API); called only when at least one hand is detected.

    Returns
    -------
    np.ndarray
        A new BGR frame with hand landmarks drawn on it (or an unannotated
        copy when no hands are found).
    """
    # Reuse a single Hands graph across calls: building it per frame (as the
    # original per-call `with` block did) is expensive and resets the
    # tracker, which made min_tracking_confidence meaningless.
    hands = getattr(mediapipe_gestures, "_hands", None)
    if hands is None:
        hands = mp_hands.Hands(min_detection_confidence=0.5,
                               min_tracking_confidence=0.5)
        mediapipe_gestures._hands = hands

    h, w, _ = image.shape

    # MediaPipe expects RGB. Mark the buffer read-only AFTER the conversion:
    # cvtColor returns a fresh writeable array, so flagging the input first
    # (as the original did) had no effect on processing and left the
    # caller's array permanently read-only.
    rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    rgb.flags.writeable = False
    results = hands.process(rgb)
    rgb.flags.writeable = True
    out = cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR)

    if not (results.multi_hand_landmarks and results.multi_handedness):
        return out

    hand_list = {"left": [], "right": []}
    for hand_landmarks, handedness in zip(results.multi_hand_landmarks,
                                          results.multi_handedness):
        # Landmark coordinates scaled from normalized [0, 1] to pixels.
        clm_list = [[point.x * w, point.y * h]
                    for point in hand_landmarks.landmark]
        angle_list = fingerutils.get_fingers_angle(clm_list)  # finger bend angles
        # NOTE(review): classification index 1 is assumed to mean the right
        # hand — confirm against MediaPipe's handedness labels (they assume
        # a mirrored selfie view) for this camera setup.
        if handedness.classification[0].index == 1:
            hand_list["right"] = angle_list
        else:
            hand_list["left"] = angle_list
        mp_drawing.draw_landmarks(out, hand_landmarks,
                                  mp_hands.HAND_CONNECTIONS)
    servo.dirver(hand_list)
    return out