#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np
import math
import time
from scipy.spatial.transform import Rotation as R
import scipy.constants as C

from multiprocessing import shared_memory
import cv2
import pyrealsense2 as rs
import apriltag

import rospy
from geometry2.tf2_ros.src import tf2_ros
from geometry_msgs.msg import TransformStamped
from cv_joint_angle.msg import Forearm
from mavros_msgs.msg import ExcavatorInclination

class TargetDetection:
    """Detect forearm AprilTags with a RealSense RGB-D camera and publish the
    forearm angle (and the derived oil-cylinder length) as a ROS topic.

    The constructor blocks: it connects the camera, waits for the first boom
    inclination message, then runs the capture/detect/publish loop until
    rospy shuts down.
    """

    def __init__(self):

        # Pose acquisition & calibration parameters
        self.point_search_range = int(rospy.get_param('~point_search_range', '4'))
        self.accel_calibrate_array_size = 0            # accel samples folded into the running mean
        self.accel_calibrate_array = np.array([0., 0., 0.])  # running mean of accelerometer readings
        self.yaw = 0                                   # camera->body yaw estimate (degrees)
        self.yaw_cal = False                           # True once the yaw estimate has converged
        self.boom_deg = None                           # latest boom inclination; None until first message

        # Debug parameter
        self.debug = rospy.get_param('~debug', 'False') == 'True'

        # ROS topic setup
        rospy.Subscriber('/mavros/excavator_inclination', ExcavatorInclination, self.inclination_callback)
        self.tran_flag = False                         # True once camera->body transform is available in tf
        self.cam_broadcaster = tf2_ros.StaticTransformBroadcaster()
        self.color_cam_to_body_tf = TransformStamped()
        self.start_time = rospy.get_time()
        self.seq = 0
        self.prev_forearm = None                       # previous measured forearm angle, for smoothing
        # tf transform buffer / listener
        self.conversion_buffer = tf2_ros.Buffer(rospy.Time())
        self.conversion_listener = tf2_ros.TransformListener(self.conversion_buffer)
        # Forearm angle publisher
        pub_topic = rospy.get_param('~pub_topic', '/cv_joint_angle/forearm_apriltag')
        self.angle_pub = rospy.Publisher(pub_topic, Forearm, queue_size=1)

        # Camera intrinsics (for the 90-degree-rotated color image)
        self.K = np.array([[622.461673, 0., 225.527207],
                           [0., 620.817214, 418.324731],
                           [0., 0., 1.]], dtype=np.float64)
        self.inv_K = np.linalg.inv(self.K)
        # RealSense pipeline configuration
        pipeline = rs.pipeline()
        config = rs.config()
        config.enable_stream(rs.stream.depth, 848, 480, rs.format.z16, 15)
        config.enable_stream(rs.stream.color, 848, 480, rs.format.bgr8, 15)
        config.enable_stream(rs.stream.accel, rs.format.motion_xyz32f, 63)

        # Block until the camera is connected.
        while True:
            try:
                time.sleep(0.5)
                pipeline.start(config)
                break
            except Exception:
                rospy.logwarn_throttle_identical(60, "Please connect camera.")
        rospy.loginfo_once("Successfully connected camera.")
        align_to = rs.stream.color
        align = rs.align(align_to)

        # Grab one frame pair to size the shared-memory image buffers.
        frames = pipeline.wait_for_frames()
        aligned_frames = align.process(frames)
        aligned_depth_frame = aligned_frames.get_depth_frame()
        color_frame = aligned_frames.get_color_frame()
        color_image = np.asanyarray(color_frame.get_data())
        self.color_image = cv2.rotate(color_image, cv2.ROTATE_90_CLOCKWISE)
        depth_image = np.asanyarray(aligned_depth_frame.get_data())
        self.depth_image = cv2.rotate(depth_image, cv2.ROTATE_90_CLOCKWISE)
        self.shm_color = shared_memory.SharedMemory(name='color_image', create=True, size=self.color_image.nbytes)
        self.shm_depth = shared_memory.SharedMemory(name='depth_image', create=True, size=self.depth_image.nbytes)

        # Block until the first boom inclination message arrives.
        # (Explicit None check: a legitimate 0.0 inclination must not stall here.)
        while self.boom_deg is None:
            time.sleep(0.5)
            rospy.logwarn_throttle_identical(60, "Please receive inclination info.")

        # Forearm AprilTag detector.  refine_pose enables pose refinement; the
        # resulting rotation matrix uses XYZ rotation order by default.
        self.forearm_detector = apriltag.Detector(apriltag.DetectorOptions(
            families="tag36h11", refine_pose=True))

        # Main loop
        try:
            while not rospy.is_shutdown():
                self.tran_flag = self.conversion_buffer.can_transform("camera_color_frame", "body", rospy.Time())
                try:
                    frames = pipeline.wait_for_frames()
                    aligned_frames = align.process(frames)
                    aligned_depth_frame = aligned_frames.get_depth_frame()
                    color_frame = aligned_frames.get_color_frame()
                    accel_frame = frames.first_or_default(rs.stream.accel)

                    # Forearm angle measured this iteration; None when not measured,
                    # so the publish branch below cannot hit an unbound variable.
                    f_angle = None

                    if aligned_depth_frame and color_frame:
                        rospy.loginfo_once("Get image!")
                        # Build the outgoing message header.
                        self.forearm_angle = Forearm()
                        self.forearm_angle.header.stamp = rospy.Time.now()
                        self.seq += 1
                        self.forearm_angle.header.seq = self.seq
                        self.forearm_angle.header.frame_id = "camera_color_frame"

                        # Rotate the images upright and mirror them into shared memory.
                        color_image = np.asanyarray(color_frame.get_data())
                        self.color_image = cv2.rotate(color_image, cv2.ROTATE_90_CLOCKWISE)
                        depth_image = np.asanyarray(aligned_depth_frame.get_data())
                        self.depth_image = cv2.rotate(depth_image, cv2.ROTATE_90_CLOCKWISE)
                        color_array = np.ndarray(self.color_image.shape, dtype=self.color_image.dtype, buffer=self.shm_color.buf)
                        color_array[:] = self.color_image[:]
                        depth_array = np.ndarray(self.depth_image.shape, dtype=self.depth_image.dtype, buffer=self.shm_depth.buf)
                        depth_array[:] = self.depth_image[:]

                        # AprilTag forearm angle, expressed relative to the boom.
                        # Guarded: forearm_predict returns None on failure, and
                        # subtracting from None would abort the iteration and
                        # also skip the calibration steps below.
                        f_angle = self.forearm_predict()
                        if f_angle is not None:
                            f_angle -= self.boom_deg

                    # Collect accelerometer samples until calibration has enough.
                    if self.accel_calibrate_array_size < 80 and not self.tran_flag:
                        self._accecl_collect(accel_frame)
                    # Once yaw has converged and enough samples exist, compute and
                    # broadcast the camera extrinsics used for keypoint transforms.
                    if self.accel_calibrate_array_size >= 80 and not self.tran_flag and self.yaw_cal:
                        self._calibrate()

                    if self.tran_flag:
                        # "is not None" so a valid 0.0 angle is still published.
                        if f_angle is not None:
                            self.forearm_angle.forearm = f_angle
                            # Heavily damp small jumps relative to the previous
                            # measurement (|delta| < 6.5 deg).
                            if self.prev_forearm is not None and abs(self.prev_forearm - f_angle) < 6.5:
                                self.forearm_angle.forearm = self.prev_forearm + (f_angle - self.prev_forearm) * 0.001
                            self.prev_forearm = f_angle
                        else:
                            self.forearm_angle.forearm = 0.
                            rospy.logwarn_throttle_identical(5, "apriltag result wrong, don't publish")
                        self.forearm_angle.cylinder_forearm = self._calc_oil_cylinder_length(self.forearm_angle.forearm)
                        self.angle_pub.publish(self.forearm_angle)
                        rospy.loginfo_throttle_identical(60, "Node is running...")

                except Exception:
                    rospy.logwarn_throttle_identical(60,\
                                                     "Unable to obtain camera image data, check if camera is working.")
        except KeyboardInterrupt:
            rospy.loginfo("Shutting down gracefully...")

        finally:
            # Release shared memory and the camera pipeline on shutdown.
            self.shm_color.close()
            self.shm_color.unlink()
            self.shm_depth.close()
            self.shm_depth.unlink()
            pipeline.stop()

    def forearm_predict(self):
        '''
        Detect forearm AprilTags and compute the forearm attitude angle.

        Workflow:
        1. Convert the current color image to grayscale and run the AprilTag
           detector; warn if nothing is detected after the start-up grace period.
        2. Collect the center and two corner pairs of every tag36h11 tag with
           id 5 or 10 into a keypoint array.
        3. Look up depth for each keypoint and transform the points through the
           camera extrinsics.
        4. While yaw calibration has not converged, update the camera/body yaw
           estimate instead of computing an angle, until consecutive estimates
           agree within 0.1 degrees.
        5. After calibration, compute the forearm roll from the keypoint
           difference vectors.

        Returns:
        - forearm roll angle in degrees (aligned with the IMU convention) on
          success.
        - None while calibrating, when no usable tag is found, or when the tag
          geometry is degenerate.

        Exceptions raised while processing the image are caught and logged.
        '''
        try:
            gray = cv2.cvtColor(self.color_image, cv2.COLOR_BGR2GRAY)
            points = None
            tags = self.forearm_detector.detect(gray)
            if tags == [] and rospy.get_time()-self.start_time > 5:
                rospy.logwarn_throttle_identical(10, "No apriltag detected")
                return None
            else:
                rospy.logdebug_throttle(10, "Have %d apriltag(s) detected", len(tags))
            for tag in tags:
                if tag.tag_family == b'tag36h11' and (tag.tag_id == 5 or tag.tag_id == 10):
                    if points is not None:
                        # Second tag: pair its center with the first tag's center
                        # (cross-tag vector) and append its corner pairs.
                        points[0, 0, :] = tag.center
                        points = np.vstack([points,
                                            np.expand_dims(np.array([tag.corners[0], tag.corners[3]]), axis=0),
                                            np.expand_dims(np.array([tag.corners[1], tag.corners[2]]), axis=0)])
                    else:
                        # First tag: duplicated center plus two corner pairs.
                        points = np.array([np.array([tag.center, tag.center]),
                                           np.array([tag.corners[0], tag.corners[3]]),
                                           np.array([tag.corners[1], tag.corners[2]])])
            if points is None:
                return None
            points_coordinate = self._get_distance(points)
            points_coordinate = self._transform_kp(points_coordinate)
            # With a single tag the first row still holds the duplicated center;
            # drop it since its difference vector would be zero.
            if (points_coordinate[0, 0, :] == points_coordinate[0, 1, :]).all():
                points_coordinate = points_coordinate[1:, :, :]
            vector_diff = points_coordinate[:, 0, :] - points_coordinate[:, 1, :]

            if not self.yaw_cal:
                rospy.loginfo_throttle_identical(20, "Yaw calibrating...")
                if np.any(vector_diff[:, 0] == 0):
                    rospy.logwarn_throttle_identical(10, "Apriltag is wrong")
                    return None
                yaw_list = np.degrees(np.arctan(vector_diff[:, 1] / vector_diff[:, 0]))
                yaw_angle = np.mean(np.sort(yaw_list))
                # Converged once consecutive estimates agree within 0.1 deg.
                if abs(self.yaw - yaw_angle) < 0.1:
                    self.yaw_cal = True
                    return None
                # Exponentially smooth the yaw estimate (seed with the first value).
                self.yaw = 0.8*yaw_angle + 0.2*self.yaw if self.yaw != 0 else yaw_angle
            else:
                if np.any(vector_diff[:, 2] == 0):
                    rospy.logwarn_throttle_identical(10, "Apriltag is wrong")
                    return None
                forearm_roll_list = np.degrees(np.arctan(vector_diff[:, 0] / vector_diff[:, 2]))
                forearm_roll = np.mean(np.sort(forearm_roll_list))
                forearm_roll = forearm_roll-90  # align with the IMU forearm angle convention

                return forearm_roll
        except Exception:
            rospy.logwarn_throttle_identical(60, "Unable to obtain camera image data.")
        return None

    def _accecl_collect(self, accel_frame):
        '''
        Fold one accelerometer frame into the running-mean calibration array.

        Uses the incremental mean update ``mean += (x - mean) / n`` where ``n``
        is the sample count *including* the new sample, so the stored array is
        always the true mean of every sample seen so far.  The first sample
        initializes the array directly.

        @param accel_frame: RealSense accel frame with the current reading;
                            falsy frames are ignored.
        '''
        if accel_frame:
            accel_data = accel_frame.as_motion_frame().get_motion_data()
            xyz = np.array([accel_data.x, accel_data.y, accel_data.z])
            if self.accel_calibrate_array_size > 0:
                # Increment the count first: the divisor must be the new count,
                # otherwise the second sample would fully replace the mean
                # (avg + (x - avg) / 1 == x).
                self.accel_calibrate_array_size += 1
                self.accel_calibrate_array = self.accel_calibrate_array + \
                    (xyz - self.accel_calibrate_array) / self.accel_calibrate_array_size
            else:
                self.accel_calibrate_array = xyz
                self.accel_calibrate_array_size = 1

    def _transform_kp(self, kp):
        '''
        Transform keypoint coordinates through the (color camera -> body)
        rotation matrix.

        @param kp: keypoint array of shape (n, m, 3) holding pixel x, pixel y
                   and depth; 'n' is the number of keypoint groups, 'm' the
                   number of keypoints per group.
        @type kp: np.array

        @return: transformed keypoints, same shape as the input, or None when
                 the input is None.
        @rtype: np.array or None

        Each pixel is back-projected with the inverse intrinsics and scaled by
        its depth, then remapped to the body axis convention; once the
        extrinsics are available (tran_flag) the calibrated rotation is applied
        as well.
        '''
        if kp is not None:
            transform_kp = np.zeros((kp.shape[0], kp.shape[1], 3), dtype=np.float64)
            for row in range(kp.shape[0]):
                for col in range(kp.shape[1]):
                    # Back-project pixel (u, v) to a camera-frame point scaled by depth.
                    point = np.array([[kp[row, col, 0]], [kp[row, col, 1]], [1.]])
                    point = kp[row, col, 2] * np.dot(self.inv_K, point)
                    if self.tran_flag:
                        # Axis permutation followed by the calibrated rotation.
                        point = np.dot(np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0]], dtype=np.float64), point)
                        transform_kp[row, col] = np.dot(point.T, self.R_color_to_body).ravel()
                    else:
                        transform_kp[row, col] = np.dot(
                            np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0]], dtype=np.float64), point).ravel()
            # Axes: x, y, z = forward, right, down
            return transform_kp
        return None

    def _get_distance(self, kp):
        '''
        Append a depth value to every keypoint.

        @param kp: pixel coordinates of the keypoints, shape (n, m, 2);
                   'n' is the number of keypoint groups, 'm' the number of
                   keypoints per group.
        @type kp: np.array

        @return: keypoints with depth appended, shape (n, m, 3).
        @rtype: np.array
        '''
        add_distance_array = np.zeros((kp.shape[0], kp.shape[1], 3), dtype=np.float64)
        for row in range(kp.shape[0]):
            for col in range(kp.shape[1]):
                mid_pos = kp[row, col]
                distance = self._get_point_distance(mid_pos.astype(int))
                add_distance_array[row, col] = np.concatenate((mid_pos, distance))
        return add_distance_array

    def _get_point_distance(self, pixel_coordinates):
        '''
        Estimate the depth at a pixel, in centimeters (raw depth image is in
        millimeters).

        @param pixel_coordinates: pixel coordinates (x, y) of the keypoint.
        @type pixel_coordinates: np.array

        @return: estimated keypoint distance as a one-element array.
        @rtype: np.array

        Searches growing windows around the pixel for non-zero depth values and
        summarizes them (quartiles when at least 4 samples exist).  Returns the
        sentinel value 10000. when no valid depth is found in range.
        '''
        distance_list = np.array([])
        for scope in range(self.point_search_range):
            # Stay inside the image; note depth_image is the rotated frame.
            if (pixel_coordinates[0] + scope < self.depth_image.shape[1] and
                pixel_coordinates[1] + scope < self.depth_image.shape[0]):
                distance_list = self.depth_image[
                    pixel_coordinates[1]-scope:pixel_coordinates[1]+scope+1,
                    pixel_coordinates[0]-scope:pixel_coordinates[0]+scope+1]
                if np.sum(distance_list) != 0:
                    break
        distance_list = distance_list[distance_list != 0]
        if len(distance_list) >= 4:
            # Use quartiles to reject outliers before averaging.
            distance_list = np.quantile(distance_list, [0.25, 0.5, 0.75])
        elif len(distance_list) == 0:
            return np.array([10000.])
        # mm -> cm, quantized via an intermediate micrometre rounding.
        return np.array([np.round(np.mean(distance_list) * 1000) / 10000])

    def _calibrate(self):
        '''
        Compute and broadcast the (color camera -> body) extrinsic rotation.

        Roll and pitch are derived from the averaged gravity vector of the
        accelerometer, corrected by the factory accel->color extrinsics; yaw
        comes from the AprilTag-based estimate.  The rotation is stored as
        ``self.R_color_to_body`` and published as the static tf transform
        "camera_color_frame" -> "body".
        '''
        # Factory extrinsics: accelerometer -> color camera, as XYZ Euler degrees.
        accel_to_color_rotation = R.from_quat([
            0.00121952651534, -0.00375633803196,
            -0.000925257743802, 0.999991774559
        ]).as_euler('XYZ', degrees=True)

        # Clamp to +/- g so the atan arguments stay in a sane range.
        calibrated_accel = np.clip(self.accel_calibrate_array, -C.g, C.g)
        # Roll: deviation from straight down, right side positive.  The
        # ``or float('inf')`` guard maps a zero denominator to atan(0).
        roll_angle = -90 - math.degrees(math.atan(
            calibrated_accel[0] /
            (math.sqrt(calibrated_accel[1]**2 + calibrated_accel[2]**2) or float('inf'))))
        # Pitch: deviation from straight ahead, front side positive.
        pitch_angle = math.degrees(math.atan(
            calibrated_accel[1] /
            (math.sqrt(calibrated_accel[0]**2 + calibrated_accel[2]**2) or float('inf'))))

        roll = roll_angle + accel_to_color_rotation[1]
        pitch = pitch_angle + accel_to_color_rotation[0]

        # Compute and broadcast the (color camera -> body) rotation.
        color_to_body_rotation = R.from_euler('XYZ', [pitch, roll, self.yaw], degrees=True)
        self.R_color_to_body = np.array(color_to_body_rotation.as_matrix(), dtype=np.float64)
        quat = color_to_body_rotation.as_quat()  # compute once instead of four times
        self.color_cam_to_body_tf.header.frame_id = "camera_color_frame"
        self.color_cam_to_body_tf.header.stamp = rospy.Time.now()
        self.color_cam_to_body_tf.child_frame_id = "body"
        self.color_cam_to_body_tf.transform.translation.x = 0.
        self.color_cam_to_body_tf.transform.translation.y = 0.
        self.color_cam_to_body_tf.transform.translation.z = 0.
        self.color_cam_to_body_tf.transform.rotation.x = quat[0]
        self.color_cam_to_body_tf.transform.rotation.y = quat[1]
        self.color_cam_to_body_tf.transform.rotation.z = quat[2]
        self.color_cam_to_body_tf.transform.rotation.w = quat[3]
        self.cam_broadcaster.sendTransform(self.color_cam_to_body_tf)

        rospy.loginfo(f"Calibration successful! \n pitch: {pitch}, roll: {roll}, yaw: {self.yaw}")

    def inclination_callback(self, msg):
        """Store the latest boom inclination (degrees) from MAVROS."""
        self.boom_deg = msg.boom_deg

    def _radians(self, degrees):
        """Convert degrees to radians."""
        return degrees * math.pi / 180.0

    def _wrap_to_pi(self, angle):
        """Wrap an angle in radians to the interval [-pi, pi)."""
        angle = (angle + math.pi) % (2 * math.pi)
        # Python's % already returns a non-negative result for a positive
        # modulus; the guard is kept defensively.
        if angle < 0:
            angle += 2 * math.pi
        return angle - math.pi

    def _calc_oil_cylinder_length(self, forearm_to_boom):
        """Compute the forearm oil-cylinder length from the forearm/boom angle.

        Applies the law of cosines to the fixed linkage triangle D-F-E, where
        the constant angles (degrees) and lengths (millimeters) describe the
        excavator linkage geometry.

        @param forearm_to_boom: forearm angle relative to the boom, in degrees.
        @return: cylinder length in millimeters.
        """
        deg_DFC = 33.0
        deg_QFG = 94.0
        deg_GFE = 51.0
        mm_DF = 236.8
        mm_EF = 63.6

        # Interior angle at F opposite the cylinder, wrapped to [-pi, pi).
        phi = math.pi - self._radians(deg_DFC) - self._radians(deg_QFG) - self._radians(deg_GFE) - self._radians(forearm_to_boom)
        phi = self._wrap_to_pi(phi)
        # Law of cosines: |DE|^2 = |DF|^2 + |EF|^2 - 2 |DF| |EF| cos(phi)
        cylinder_forearm = math.sqrt(mm_DF**2 + mm_EF**2 - 2 * mm_EF * mm_DF * math.cos(phi))

        return cylinder_forearm


def main():
    """Initialise the forearm_apriltag ROS node and run the detector.

    The node's log level follows the ``~debug`` private parameter: DEBUG when
    it equals the string 'True', INFO otherwise.
    """
    is_debug = rospy.get_param('~debug', 'False') == 'True'
    log_level = rospy.DEBUG if is_debug else rospy.INFO
    rospy.init_node('forearm_apriltag', log_level=log_level, anonymous=True)
    cv_joint_angle = TargetDetection()
    rospy.spin()


if __name__ == "__main__":
    main()
