from multiprocessing import process
from queue import Queue
import math
import cv2
import numpy
import mediapipe as mp
import threading
import numpy as np
import ctypes
from lib_c import obj_angle_c
from colors import Color_recognition
from algorithm_wrappers import Algorithm_wrapper, Hand_Algorithm
import multiprocessing
from config import queue_dict_data_markers

# Module-level thread queue; not referenced anywhere in this file —
# presumably consumed by other modules that import it (TODO confirm).
queue_hand_rec_thread = Queue()


class Hand_rec:
    """Capture webcam frames, run MediaPipe hand detection, and publish
    per-hand landmark pixel coordinates on ``queue_dict_data_markers``.

    ``main()`` owns the capture loop; ``show_image()`` annotates the frame
    and pushes one landmark dict per frame onto the shared queue.
    """

    def __init__(self):
        # MediaPipe drawing / hands sub-modules.
        self.mp_drawing = mp.solutions.drawing_utils
        self.mp_drawing_styles = mp.solutions.drawing_styles
        self.mp_hands = mp.solutions.hands
        # Frame dimensions, filled in by main() once the camera is open.
        self.width = 0
        self.height = 0
        self.padding = 20
        self.line_size = 5
        self.i_max = 0
        self.i_min = 10000
        self.obj_color = Color_recognition()

    def main(self):
        """Run the capture/detect/annotate loop on camera 0 until ESC."""
        self.cap = cv2.VideoCapture(0)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        with self.mp_hands.Hands(
            min_detection_confidence=0.5, min_tracking_confidence=0.5
        ) as hands:
            while self.cap.isOpened():
                success, image = self.cap.read()
                if not success:
                    print("Ignoring empty camera frame.")
                    continue
                # Mirror the frame and convert BGR -> RGB for MediaPipe.
                image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
                # Mark read-only during inference (lets MediaPipe avoid a copy).
                image.flags.writeable = False
                results = hands.process(image)
                # BUG FIX: restore writability BEFORE drawing — cv2.putText /
                # draw_landmarks fail on a read-only numpy array, so the
                # original order (draw first, then set writeable) was broken.
                image.flags.writeable = True
                self.show_image(image, results)
                image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
                cv2.imshow("MediaPipe Hands", image)
                if cv2.waitKey(5) & 0xFF == 27:  # ESC quits
                    break
            self.cap.release()

    def show_image(self, image, results):
        """Draw landmark indices and skeleton for each detected hand, then
        publish ``{handedness_label: [(px, py, label), ...]}`` for the frame.
        """
        dict_all_arm = {}
        hand_orientation = ""
        if results.multi_hand_landmarks:
            for id_hand, hand_landmarks in enumerate(results.multi_hand_landmarks):
                list_arm = []
                # `idx` instead of `id` — avoid shadowing the builtin.
                for idx, lm in enumerate(hand_landmarks.landmark):
                    if results.multi_handedness:
                        hand_orientation = (
                            results.multi_handedness[id_hand].classification[0].label
                        )
                    h, w, _ = image.shape
                    # Normalized landmark -> pixel coordinates.
                    cx, cy = int(lm.x * w), int(lm.y * h)
                    list_arm.append((cx, cy, hand_orientation))
                    cv2.putText(
                        image,
                        str(idx),
                        (cx, cy),
                        cv2.FONT_HERSHEY_SIMPLEX,
                        1,
                        (0, 255, 255),
                    )
                self.mp_drawing.draw_landmarks(
                    image,
                    hand_landmarks,
                    self.mp_hands.HAND_CONNECTIONS,
                    self.mp_drawing_styles.get_default_hand_landmarks_style(),
                    self.mp_drawing_styles.get_default_hand_connections_style(),
                )
                # Key by the handedness label ("Left"/"Right").
                dict_all_arm[list_arm[0][2]] = list_arm
            # BUG FIX: publish once per frame, after ALL hands are collected —
            # the original put() inside the per-hand loop pushed a
            # partially-filled dict for every detected hand.
            queue_dict_data_markers.put(dict_all_arm)
            # Doesn't work
            # self.obj_color.set_frame(image.copy(), dict_all_arm)
            # self.obj_color.main()


# BUG FIX: the __main__ guard is mandatory for multiprocessing on
# spawn-start platforms (Windows/macOS) — without it, each child process
# re-imports this module and recursively spawns more processes.
if __name__ == "__main__":
    obj_hand_rec = Algorithm_wrapper()
    obj = Hand_rec()
    # Camera/recognition loop in one process, algorithm loop in another.
    process_cam = multiprocessing.Process(target=obj.main)
    process_cam.start()
    process_alg = multiprocessing.Process(target=obj_hand_rec.main_loop)
    process_alg.start()
    # Keep the parent alive until both workers finish.
    process_cam.join()
    process_alg.join()
