#!coding:utf-8
import cv2
import numpy as np
import time
import threading
from apriltag_detector import ApriltagDetector
from pupper.config import Command, State, BehaviorState, Configuration
from pupper.controller import Controller
from pupper.inverse_kinematics import four_legs_inverse_kinematics
from pupper.hardware_interface import HardwareInterface

# Shared mutable objects: written by the camera loop in detect() (main thread)
# and read by the controller loop in main() (daemon thread).
command = Command()
state = State()


def main():
    """Run the gait control loop.

    Every ``config.dt`` seconds: run one controller step on the shared
    ``state``/``command`` objects, convert the resulting foot locations to
    joint angles via inverse kinematics, and push them to the servos.
    Runs forever; intended to be started as a daemon thread.
    """

    # Initialize controller, state, and hardware objects.
    config = Configuration()
    controller = Controller(
        config,
    )
    state.behavior_state = BehaviorState.REST

    hardware_interface = HardwareInterface()

    # Initial stance: default foot positions shifted by the commanded height.
    state.foot_locations = (
        config.default_stance
        + np.array([0, 0, command.height])[:, np.newaxis]
    )
    last_loop = time.time()

    print("Summary of gait parameters:")
    print("overlap time: ", config.overlap_time)
    print("swing time: ", config.swing_time)
    print("z clearance: ", config.z_clearance)
    print("x shift: ", config.x_shift)

    while True:
        now = time.time()
        remaining = config.dt - (now - last_loop)
        if remaining > 0:
            # Sleep out the rest of the control period instead of
            # busy-spinning: the original bare `continue` pegged a CPU core
            # at 100% while waiting for the next tick.
            time.sleep(remaining)
            continue
        last_loop = now

        # One step of gait/posture computation.
        controller.run(state, command)

        # Foot coordinates -> joint angles.
        joint_angles = four_legs_inverse_kinematics(
            state.rotated_foot_locations, config)

        # Joint angles -> servo commands.
        # NOTE(review): "postions" is the upstream HardwareInterface API's
        # own spelling — do not "fix" it here.
        hardware_interface.set_actuator_postions(joint_angles)


def detect():
    """Webcam loop: look for AprilTag id 1 and steer the robot toward it.

    Writes steering (``command.yaw``), speed (``command.horizontal_velocity``)
    and ``state.behavior_state`` for the controller thread. Press 'q' in the
    preview window to quit.
    """
    # For webcam input:
    cap = cv2.VideoCapture(0)
    detector = ApriltagDetector()
    try:
        while cap.isOpened():
            success, image = cap.read()
            if not success:
                print("Ignoring empty camera frame.")
                # If loading a video, use 'break' instead of 'continue'.
                continue
            detector.detect(image)
            if detector.isDetected(1):
                state.behavior_state = BehaviorState.TROT
                # detector.tag[2] presumably holds the tag's (x, y, z)
                # position relative to the camera — TODO confirm units.
                x, y, z = detector.tag[2]
                # Steer based on lateral offset: turn toward the tag once it
                # drifts more than 5 units off-center.
                if abs(x) > 5:
                    command.yaw = -0.4*x/abs(x)
                else:
                    command.yaw = 0

                # Adjust speed based on distance: back up when too close,
                # advance when too far, hold position in the 15-25 band.
                if z < 15:
                    command.horizontal_velocity = np.array([-0.25, 0.0])
                elif z > 25:
                    command.horizontal_velocity = np.array([0.15, 0.0])
                else:
                    command.horizontal_velocity = np.array([0.0, 0.0])
            else:
                # Tag lost: stop the robot. Also zero the yaw command — the
                # original left a stale turn command active here.
                state.behavior_state = BehaviorState.REST
                command.horizontal_velocity = np.array([0.0, 0.0])
                command.yaw = 0

            cv2.imshow("images", image)
            c = cv2.waitKey(4)
            if c == ord('q'):
                break
    finally:
        # Release the camera and close the preview window even if the loop
        # exits via an exception.
        cap.release()
        cv2.destroyAllWindows()


if __name__ == '__main__':
    # Run the gait controller in a background daemon thread so the whole
    # process exits as soon as the camera loop in detect() returns.
    control_thread = threading.Thread(target=main, daemon=True)
    control_thread.start()
    detect()
