//
// Created by hzj on 24-2-28.
//
#include "multi_camera_cooperation/pnp_target_node_ros.h"

using namespace std;

// Default constructor: intentionally empty — all configuration, subscribers
// and publishers are set up later in init() once a NodeHandle is available.
PnPTargetNodeROS::PnPTargetNodeROS() {}

// Wire the node up: read ROS parameters, load camera/landmark configuration
// from YAML, and register all subscribers/publishers.
// br00/br01: externally-owned TF broadcasters (raw pointers are stored, not
// owned — the caller must keep them alive for the node's lifetime).
void PnPTargetNodeROS::init(ros::NodeHandle &nh, tf::TransformBroadcaster* br00, tf::TransformBroadcaster* br01){

//============== Read ros parameter =====================//
    // std::string uav_config_file;
    int drone_id;   // NOTE(review): read nowhere below — appears unused in this function
    br0 = br00;
    br1 = br01;

    nh.param<std::string>("cam", cam, "camA");
    nh.param<std::string>("servogroup", servogroup, "servogroup12");
    nh.param<int>("landmark_num", landmark_num, 4);
    nh.param<string>("position", position, "platform");
    nh.param<string>("camera_config_file_path", camera_config_file_path, "");
    nh.param<string>("landmark_config_file_path", landmark_config_file_path, "");

//============== Read ros parameter =====================//

    // uav_config = make_shared<ConfigParser>(uav_config_file);
    // Update flip matrix: negate the x and z axes (180-degree rotation about y).
    flip(0, 0) = -1;
    flip(2, 2) = -1;

//============== Read camera intrinsic parameters =====================//
    // Camera config file is resolved as "<path><cam>.yaml", e.g. ".../camA.yaml".
    LoadCameraConfig(camera_config_file_path + cam + ".yaml");
    LoadLandmarkConfig(landmark_config_file_path);

//============================= Initialize ROS topics =============================//
    // Marker pixel detections from the single-camera IR processing node drive the PnP solve.
    sub_marker_pixel = nh.subscribe("/" + cam + "/single_cam_process_ros/ir_mono/marker_pixel", 1, &PnPTargetNodeROS::ir_marker_pixel_cb, this);
    // sub_drone_vio_pose = nh.subscribe("/vio", 1, &PnPTargetNodeROS::drone_vio_pose_cb, this);
    // sub_drone_vicon_pose = nh.subscribe("/mocap", 1, &PnPTargetNodeROS::drone_vicon_pose_cb, this);
    // sub_drone_imu = nh.subscribe("/imu", 1, &PnPTargetNodeROS::drone_imu_cb, this);
    // Extrinsic chain inputs: servogroup->cam, base->servogroup, base->cam.
    sub_T_servogroup_to_cam = nh.subscribe("/T_" + servogroup + "_to_" + cam, 1, &PnPTargetNodeROS::servogroup_to_cam_cb, this);
    sub_T_base_to_servogroup = nh.subscribe("/T_base_to_" + servogroup, 1, &PnPTargetNodeROS::base_to_servogroup_cb, this);
    sub_T_base_to_cam = nh.subscribe("/T_base_to_" + cam, 1, &PnPTargetNodeROS::base_to_cam_cb, this);
    // sub_T_base_to_cam = nh.subscribe("/T_base_to_" + cam + "_cal", 1, &PnPTargetNodeROS::base_to_cam_cb, this);

    // Ground-truth UAV position from Vicon (topic name is rig-specific).
    uav_vicon_pos_sub = nh.subscribe("/csj01/csj01/mocap/pos", 1, &PnPTargetNodeROS::Vicon_uav_pos, this);
    // vicon_frame_sub = nh.subscribe("/zwbframe/zwbframe/mocap/pos", 1, &PnPTargetNodeROS::Vicon_pnp_test_frame, this);


    // Outputs: estimated target pose in the camera frame and in the base frame,
    // plus a flag signalling that an optical (PnP) estimate is available.
    pub_cam_to_estimation = nh.advertise<geometry_msgs::TransformStamped>("/" + cam + "/single_cam_process_ros/ir_mono/T_cam_to_estimation", 1);
    pub_base_to_estimation = nh.advertise<geometry_msgs::TransformStamped>("/" + cam + "/single_cam_process_ros/ir_mono/T_base_to_estimation", 1);
    // pub_drone_vicon_pose = nh.advertise<geometry_msgs::PoseStamped>("vicon/pose_correct", 1);
    pub_opticalReadyFlag = nh.advertise<std_msgs::Bool>("/" + cam + "/single_cam_process_ros/ir_mono/opticalReadyFlag", 1);
    // pub_target_pose_from_img = nh.advertise<geometry_msgs::PoseStamped>("pnp_trt/topic_target_pose_from_img", 1);
    // pub_target_pose_from_img_filter = nh.advertise<geometry_msgs::PoseStamped>("pnp_trt/topic_target_pose_from_img_filter", 1);
    // pub_target_pose_in_base = nh.advertise<geometry_msgs::PoseStamped>("mulcam_pnp/topic_target_pose_in_body", 1);
    // pub_relative_pose_mocap =  nh.advertise<geometry_msgs::PoseStamped>("mulcam_pnp/relative_pose_cam2target_mocap", 1);

    // pub_drone_model = nh.advertise<visualization_msgs::MarkerArray>("/drone_model", 1);
}


// Convert quaternion components (x, y, z, w) into a 3x3 rotation matrix.
// The quaternion is normalized first: Eigen's toRotationMatrix() assumes a
// unit quaternion, and the float inputs (Vicon telemetry) may carry enough
// drift/precision loss to otherwise produce a non-orthonormal matrix.
void PnPTargetNodeROS::toRotationMatrix(float x, float y,float z,float w,Eigen::Matrix3Xd &R){
    Eigen::Quaterniond q_tmp(w, x, y, z);   // Eigen ctor order is (w, x, y, z)
    q_tmp.normalize();                      // guard against non-unit input
    R = q_tmp.toRotationMatrix();
    ROS_INFO("Rotation_matrix");
    std::cout<<R<<std::endl;
}

// Vicon callback for the PnP test frame: computes the UAV position relative
// to the frame marker and caches the frame's rotation (and its inverse).
void PnPTargetNodeROS::Vicon_pnp_test_frame(const geometry_msgs::PoseStamped::ConstPtr &msg){
    const auto &p = msg->pose.position;
    const auto &o = msg->pose.orientation;

    // UAV position relative to the frame, then apply fixed mounting offsets
    // (meters) — presumably hand-measured marker-to-origin offsets; TODO confirm.
    frame_vicon_pos = uav_vicon_pos - Eigen::Vector3d(p.x, p.y, p.z);
    frame_vicon_pos += Eigen::Vector3d(-0.006, -0.0596, 0.074);

    ROS_INFO("Vicon_message x%.3f,y%.3f,z%.3f", p.x, p.y, p.z);
    ROS_INFO("frame_vicon_pos:x%.3f, y%.3f, z%.3f", frame_vicon_pos(0), frame_vicon_pos(1), frame_vicon_pos(2));

    // Cache world->cameraA rotation and its inverse (transpose).
    toRotationMatrix(o.x, o.y, o.z, o.w, world_to_cameraA_R);
    cameraA_to_world_R = world_to_cameraA_R.transpose();
}

// Cache the latest UAV position reported by the Vicon motion-capture system.
void PnPTargetNodeROS::Vicon_uav_pos(const geometry_msgs::PoseStamped::ConstPtr &msg){
    const auto &p = msg->pose.position;
    uav_vicon_pos = Eigen::Vector3d(p.x, p.y, p.z);
    ROS_INFO("uav_vicon_pos:x%.3f, y%.3f, z%.3f", uav_vicon_pos(0), uav_vicon_pos(1), uav_vicon_pos(2));
}


//============================= Initialize ROS topic =============================//


// VIO pose callback. Attitude is zero-referenced identically in the VIO and
// Vicon frames, so only the position needs correcting: when a Vicon initial
// value was captured (flag cleared), the VIO origin is shifted by it so the
// drone position is expressed relative to the Vicon origin.
void PnPTargetNodeROS::drone_vio_pose_cb(const geometry_msgs::PoseStamped::ConstPtr &msg){
    const auto &p = msg->pose.position;
    const auto &o = msg->pose.orientation;

    drone_pose.pos << p.x, p.y, p.z;
    drone_pose.Quat = Eigen::Quaterniond(o.w, o.x, o.y, o.z);

    if(!drone_pose_vicon_first_flag){
        // Vicon initial value available: offset the VIO trajectory by it.
        drone_pose.pos += drone_pose_vicon_init.pos;
    }
    // Otherwise the VIO origin is simply wherever the drone was placed.
}

// void PnPTargetNodeROS::drone_vicon_pose_cb(const geometry_msgs::PoseStamped::ConstPtr &msg){
//     drone_pose_vicon.pos << msg->pose.position.x, msg->pose.position.y, msg->pose.position.z;
//     drone_pose_vicon.Quat = Eigen::Quaterniond(msg->pose.orientation.w, msg->pose.orientation.x, msg->pose.orientation.y, msg->pose.orientation.z);
//     drone_pose_vicon.Quat = Eigen::Quaterniond(drone_pose_vicon.Quat.toRotationMatrix() * uav_config->Vicon_correction);
// }

// void PnPTargetNodeROS::base_vicon_pose_cb(const geometry_msgs::PoseStamped::ConstPtr &msg){
//     base_pose_vicon.pos << msg->pose.position.x, msg->pose.position.y, msg->pose.position.z;
//     base_pose_vicon.Quat = Eigen::Quaterniond(msg->pose.orientation.w, msg->pose.orientation.x, msg->pose.orientation.y, msg->pose.orientation.z);
//     base_pose_vicon.Quat = Eigen::Quaterniond(base_pose_vicon.Quat.toRotationMatrix() * uav_config->Vicon_correction);
// }

// Cache the latest servogroup->camera extrinsic transform and its timestamp.
// NOTE(review): only the top 3x4 of T_servogroup_to_cam is written here — the
// bottom row is assumed to be initialized to (0,0,0,1) elsewhere; confirm.
void PnPTargetNodeROS::servogroup_to_cam_cb(const geometry_msgs::TransformStamped::ConstPtr &msg){
    stamp_servogroup_to_cam = msg->header.stamp;
    const auto &q = msg->transform.rotation;
    const auto &t = msg->transform.translation;
    T_servogroup_to_cam.block<3, 3>(0, 0) = Eigen::Quaterniond(q.w, q.x, q.y, q.z).toRotationMatrix();
    T_servogroup_to_cam.block<3, 1>(0, 3) = Eigen::Vector3d(t.x, t.y, t.z);
}

// Cache the latest base->servogroup extrinsic transform and its timestamp.
void PnPTargetNodeROS::base_to_servogroup_cb(const geometry_msgs::TransformStamped::ConstPtr &msg){
    stamp_base_to_servogroup = msg->header.stamp;
    const auto &q = msg->transform.rotation;
    const auto &t = msg->transform.translation;
    T_base_to_servogroup.block<3, 3>(0, 0) = Eigen::Quaterniond(q.w, q.x, q.y, q.z).toRotationMatrix();
    T_base_to_servogroup.block<3, 1>(0, 3) = Eigen::Vector3d(t.x, t.y, t.z);
}

// Cache the latest base->camera extrinsic transform. Also resets
// servoGoodcount, which gates publishing of the base-frame estimate in
// landmark_pose_solve() to a window right after a fresh extrinsic arrives.
void PnPTargetNodeROS::base_to_cam_cb(const geometry_msgs::TransformStamped::ConstPtr &msg){
    stamp_base_to_cam = msg->header.stamp;
    const auto &q = msg->transform.rotation;
    const auto &t = msg->transform.translation;
    T_base_to_cam.block<3, 3>(0, 0) = Eigen::Quaterniond(q.w, q.x, q.y, q.z).toRotationMatrix();
    T_base_to_cam.block<3, 1>(0, 3) = Eigen::Vector3d(t.x, t.y, t.z);

    servoGoodcount = 0; // fresh extrinsic: restart the publish window

    std::cout << "T_base_to_cam: " << std::endl;
    std::cout << T_base_to_cam << std::endl;
}


// void PnPTargetNodeROS::drone_imu_cb(const sensor_msgs::Imu::ConstPtr &msg){
//     drone_attitude = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z);
//     // if(drone_attitude_init_flag){
//     //   drone_attitude_init = drone_attitude;
//     //   drone_attitude_init_flag = false;
//     //   //只需要把四元数的yaw方向的置为0即可，roll和pitch不需要置为0
//     //   Eigen::Vector3d euler_init = drone_attitude_init.toRotationMatrix().eulerAngles(2,1,0);
//     //   //只把yaw对应四元数取出来，作为yaw角偏置，然后乘到当前四元数上，就可以把当前四元数的yaw置为0
//     //   drone_attitude_init = Eigen::Quaterniond(cos(euler_init[0]/2), 0, 0, sin(euler_init[0]/2));
//     // }
//     // drone_attitude = drone_attitude_init.inverse() * drone_attitude;//只矫正yaw角

// //     ImuData message;
// //     message.timestamp = msg->header.stamp.toSec();
// //     message.q = drone_attitude;
// //     message.am << msg->linear_acceleration.x, msg->linear_acceleration.y, msg->linear_acceleration.z;

// //     filter->agent_self.feed_measurement_imu(message);
// }





//============================= marker_pixel receive =============================//
//============================= marker_pixel receive =============================//
// Receive flattened marker pixel coordinates and run the PnP solve.
// Layout (inferred from the indexing): data[0] is skipped (presumably a
// header/count field — TODO confirm against the publisher), then (x, y)
// pairs follow starting at data[1].
void PnPTargetNodeROS::ir_marker_pixel_cb(const std_msgs::Float64MultiArray::ConstPtr &msg){
    marker_pixels.clear();

    // Bound is `i + 1 < size` (not `i < size`): each iteration reads data[i]
    // AND data[i+1], so the old condition read one element past the end
    // whenever the array length was even.
    for(size_t i = 1; i + 1 < msg->data.size(); i += 2){
        cv::Point2f pixel;
        pixel.x = msg->data[i];
        pixel.y = msg->data[i+1];
        marker_pixels.push_back(pixel);
    }

    // Time the solve for diagnostics.
    auto start = std::chrono::steady_clock::now();
    landmark_pose_solve();
    auto end = std::chrono::steady_clock::now();
    auto duration = std::chrono::duration_cast<std::chrono::microseconds>(end - start);
    std::cout << "[PnP Solve] landmark_pose_solve running time: " << duration.count() << " us" << std::endl;
}
//============================= marker_pixel receive =============================//


// Core estimation step: solve PnP from the current marker pixels, compose the
// frame chain base -> cam -> image -> markers -> drone, sanity-check the
// result, and publish T_cam_to_estimation / T_base_to_estimation plus the
// optical-ready flag.
// NOTE(review): when pnpGoodFlag comes back false, the transform compositions
// below still run using the previous (stale) target_pos_in_img /
// target_q_in_img; only the publishing is gated — confirm this is intended.
void PnPTargetNodeROS::landmark_pose_solve(){
    printf(BOLDREDPURPLE"[PnP Solve] Start to solve pnp pose...\n" RESET);

    // Run the PnP solve on the current pixel detections.
    pnpGoodFlag = pnp_process(marker_pixels, pnpExtrinsicGuessFlag);
    if(!pnpGoodFlag){
        marker_pixels.clear();
    }


    // Pose of the markers in the camera/image frame.
    #ifdef USE_IMU_DIFF
        T_image_to_markers.block<3, 3>(0, 0) = Eigen::Matrix3d::Identity(); // landmark attitude in the camera frame provisionally set to Identity()
    #else
        T_image_to_markers.block<3, 3>(0, 0) = target_q_in_img.toRotationMatrix(); // use the solved landmark attitude in the camera frame
    #endif
        T_image_to_markers.block<3, 1>(0, 3) = target_pos_in_img;

    if(position == "platform"){
        // Compose: camera frame -> image frame -> marker frame -> drone frame,
        // then base frame -> camera frame on the left.
        T_base_to_estimation_temp = T_base_to_cam * T_camera_to_image * T_image_to_markers * T_IRLandmark_to_drone;
        if(servoGoodcount < 100){
            servoGoodcount++;
        }

        //csj0118
        // Plausibility check on the rotation's (2,2) entry. The original
        // threshold 0.7 is disabled: -100 can never trigger for a rotation
        // matrix entry (range [-1, 1]), so this whole recovery branch is
        // effectively dead code at the moment.
        // if(T_base_to_estimation_temp(2, 2) < 0.7){
        if(T_base_to_estimation_temp(2, 2) < -100){//csj0118
            pnpExtrinsicGuessFlag = false;
            cout << "T_base_to_estimation: " << endl;
            for(int i = 0; i < 4; i++){
                cout << T_base_to_estimation_temp.row(i) << endl;
            }
            // Retry the solve seeded from the last good rvec.
            Solving_multiple_problems();
            T_base_to_estimation_temp = T_base_to_cam * T_camera_to_image * T_image_to_markers * T_IRLandmark_to_drone;

            cout << "T_base_to_cam: " << endl;
            for(int i = 0; i < 4; i++){
                cout << T_base_to_estimation_temp.row(i) << endl;
            }
            // if(T_base_to_estimation_temp(2, 2) < 0.7){
            if(T_base_to_estimation_temp(2, 2) < -100){ //csj0118
                // cout << "T_base_to_estimation_temp(2, 2) < 0.7" << endl;
                cout << "T_base_to_estimation_temp(2, 2) < -100" << endl;
                pnpGoodFlag = false;
                marker_pixels.clear();
            }
            // pnpExtrinsicGuessFlag = true;
        }else{
            pnpExtrinsicGuessFlag = true;
        }

        // camera frame -> image frame -> marker frame -> drone frame.
        T_cam_to_estimation = T_camera_to_image * T_image_to_markers * T_IRLandmark_to_drone;
        // T_body_to_drone.block<3,1>(0,3) = T_body_to_drone.block<3,1>(0,3) + T_markers_to_drone.block<3,1>(0,3); //marker position in the drone1 frame
        R_cam_to_estimation = T_cam_to_estimation.block<3, 3>(0, 0);

        // Range gate: reject estimates farther than 40 m from the camera.
        if((T_cam_to_estimation.block<3, 1>(0, 3)).norm() > 40.0) { //csj
            pnpGoodFlag = false;
            marker_pixels.clear();
        }

        T_base_to_estimation = T_base_to_cam * T_cam_to_estimation;
        R_base_to_estimation = T_base_to_estimation.block<3, 3>(0, 0);
    }else if(position == "uav"){
        // Same chain, but uses the static T_base_to_camera loaded from the
        // camera YAML instead of the topic-fed T_base_to_cam.
        T_cam_to_estimation = T_camera_to_image * T_image_to_markers * T_IRLandmark_to_drone;
        // T_body_to_drone.block<3,1>(0,3) = T_body_to_drone.block<3,1>(0,3) + T_markers_to_drone.block<3,1>(0,3); //marker position in the drone1 frame
        R_cam_to_estimation = T_cam_to_estimation.block<3, 3>(0, 0);

        T_base_to_estimation = T_base_to_camera * T_cam_to_estimation;
        R_base_to_estimation = T_base_to_estimation.block<3, 3>(0, 0);
    }

    // Publish the optical-estimate-ready flag.
    msg_opticalReadyFlag.data = pnpGoodFlag;
    pub_opticalReadyFlag.publish(msg_opticalReadyFlag);

    t_cam_to_estimation = EigenVector3dToTFVector3(T_cam_to_estimation.block<3, 1>(0, 3));
    q_cam_to_estimation = EigenQuaterniondToTFQuaternion(Eigen::Quaterniond(R_cam_to_estimation));
    t_base_to_estimation = EigenVector3dToTFVector3(T_base_to_estimation.block<3, 1>(0, 3));
    q_base_to_estimation = EigenQuaterniondToTFQuaternion(Eigen::Quaterniond(R_base_to_estimation));

    // Publish the single-camera PnP estimate T_cam_to_estimation.
    if(pnpGoodFlag){
        printf(GREEN "[PnP Solve] t_cam_to_estimation = %.3f, %.3f, %.3f | q_cam_to_estimation (wxyz) = %.3f, %.3f, %.3f, %.3f\n" RESET,
            t_cam_to_estimation.x(), t_cam_to_estimation.y(), t_cam_to_estimation.z(),
            q_cam_to_estimation.getW(), q_cam_to_estimation.getX(), q_cam_to_estimation.getY(), q_cam_to_estimation.getZ());

        // Remember the last good rvec so Solving_multiple_problems() can re-seed.
        outputRvecRaw_buffer = outputRvecRaw;

        cam_to_estimation.setOrigin(t_cam_to_estimation);
        cam_to_estimation.setRotation(q_cam_to_estimation);
        cam_to_estimation.stamp_ = ros::Time::now();
        cam_to_estimation.frame_id_ = cam;
        cam_to_estimation.child_frame_id_ = "Estimationfrom"+cam;
        tf::transformStampedTFToMsg(cam_to_estimation, msg_T_cam_to_estimation);
        pub_cam_to_estimation.publish(msg_T_cam_to_estimation);
        br0->sendTransform(msg_T_cam_to_estimation);
    }

    // Base-frame estimate is only published for the first 30 solves after a
    // fresh T_base_to_cam message (servoGoodcount is reset in base_to_cam_cb).
    // if(topic_time_check() && pnpGoodFlag){
    if(pnpGoodFlag && (servoGoodcount < 30)){
        base_to_estimation.setOrigin(t_base_to_estimation);
        base_to_estimation.setRotation(q_base_to_estimation);
        base_to_estimation.stamp_ = ros::Time::now();
        base_to_estimation.frame_id_ = "base";
        base_to_estimation.child_frame_id_ = cam + "_estimation";
        tf::transformStampedTFToMsg(base_to_estimation, msg_T_base_to_estimation);
        pub_base_to_estimation.publish(msg_T_base_to_estimation);
        // br1->sendTransform(msg_T_base_to_estimation);
    }


// //======================================= Ground Truth ============================================================//
//     Eigen::Vector3d t_body_to_drone_gt = body_pose_vicon.Quat.inverse() * (drone_pose_vicon.pos - body_pose_vicon.pos);//转换到body坐标系
// //  Eigen::Vector3d t_body_to_drone_gt = drone_neighbour_pose_vicon.pos - drone_pose_vicon.pos;//Vicon坐标系
//     Eigen::Quaterniond q_body_to_drone_gt = body_pose_vicon.Quat.inverse() * drone_pose_vicon.Quat;
// //  Eigen::Vector3d euler_gt = quaternion2euler(q_body_to_drone_gt.x(), q_body_to_drone_gt.y(), q_body_to_drone_gt.z(), q_body_to_drone_gt.w());
//   printf(GREEN "[GT] t_body_to_drone = %.3f, %.3f, %.3f | q_body_to_drone (wxyz) = %.3f, %.3f, %.3f, %.3f\n" RESET,
//          t_body_to_drone_gt[0], t_body_to_drone_gt[1], t_body_to_drone_gt[2],
//          q_body_to_drone_gt.w(), q_body_to_drone_gt.x(), q_body_to_drone_gt.y(), q_body_to_drone_gt.z());
//   geometry_msgs::PoseStamped msg_relative_pose;
//   msg_relative_pose.header.stamp = stamp;
//   msg_relative_pose.pose.position.x = t_body_to_drone_gt[0];
//   msg_relative_pose.pose.position.y = t_body_to_drone_gt[1];
//   msg_relative_pose.pose.position.z = t_body_to_drone_gt[2];
//   msg_relative_pose.pose.orientation.w = q_body_to_drone_gt.w();
//   msg_relative_pose.pose.orientation.x = q_body_to_drone_gt.x();
//   msg_relative_pose.pose.orientation.y = q_body_to_drone_gt.y();
//   msg_relative_pose.pose.orientation.z = q_body_to_drone_gt.z();
//   pub_relative_pose_mocap.publish(msg_relative_pose);
// //======================================= Ground Truth ============================================================//

}

// Sort 4 detected marker pixels (a square layout) into a fixed winding order:
// top-right, bottom-right, bottom-left, top-left (image coordinates: smaller
// y = higher in the image). On success pointsVector is rewritten in that
// order and true is returned; returns false if there are not exactly 4 points.
// Cleanup vs. the previous revision: removed the unused SquareShapeGoodFlag,
// renamed the locals that shadowed the member vectors marker_pixels_up /
// marker_pixels_down (same behavior, far less confusing), and dropped a large
// block of dead commented-out code (parallel-edge check / x-first sort).
bool PnPTargetNodeROS::Square_shape_identity(vector<cv::Point2f> &pointsVector){

    // Reset the member scratch vectors (also used by T_shape_identify).
    marker_pixels_sorted.clear();
    marker_pixels_up.clear();
    marker_pixels_down.clear();

    if(pointsVector.size() != 4)
    {
        return false;
    }

    // Split into top and bottom rows by y, then order each row by x descending.
    sort(pointsVector.begin(), pointsVector.end(), [](const cv::Point2f& pt1, const cv::Point2f& pt2) {
        return pt1.y < pt2.y;
    });

    std::vector<cv::Point2f> top_row(pointsVector.begin(), pointsVector.begin() + 2);
    std::vector<cv::Point2f> bottom_row(pointsVector.begin() + 2, pointsVector.end());

    const auto by_x_desc = [](const cv::Point2f& pt1, const cv::Point2f& pt2) {
        return pt1.x > pt2.x;
    };
    sort(top_row.begin(), top_row.end(), by_x_desc);
    sort(bottom_row.begin(), bottom_row.end(), by_x_desc);

    // Winding: top-right, bottom-right, bottom-left, top-left.
    marker_pixels_sorted.push_back(top_row[0]);
    marker_pixels_sorted.push_back(bottom_row[0]);
    marker_pixels_sorted.push_back(bottom_row[1]);
    marker_pixels_sorted.push_back(top_row[1]);

    pointsVector = marker_pixels_sorted;

    ROS_INFO("we are in Square_shape_identity!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!111");

    return true;
}

// Identify a T-shaped 4-marker layout and sort the points into a canonical
// order: the three collinear points form the bar (center first, then the two
// ends ordered by rotation direction w.r.t. the stem point), and the stem
// point goes last. On failure the previous good ordering from
// marker_pixels_buffer is restored into pointsVector and false is returned.
// Fixes vs. the previous revision: when the collinearity pass produced fewer
// than 3 sorted points, pointsVector[0..2] were indexed out of bounds (UB);
// the distance check is now guarded by the size check. Also removed the
// unused `slope1` array.
bool PnPTargetNodeROS::T_shape_identify(vector<cv::Point2f> &pointsVector){
    marker_pixels_sorted.clear();
    marker_pixels_up.clear();
    marker_pixels_down.clear();
    bool TShapeGoodFlag = true;

    // Classify each point: if it is (anti)collinear with two of the others it
    // belongs to the bar (up), otherwise it is the stem point (down).
    for(int i = 0; i < 4; i++){
        Eigen::Vector2d linkvector0 = subtractPoints(pointsVector[i], pointsVector[(i + 1) % 4]);
        Eigen::Vector2d linkvector1 = subtractPoints(pointsVector[i], pointsVector[(i + 2) % 4]);
        Eigen::Vector2d linkvector2 = subtractPoints(pointsVector[i], pointsVector[(i + 3) % 4]);
        if((abs(vectorAngle(linkvector0, linkvector1, 1)) < 10) || (abs(vectorAngle(linkvector0, linkvector1, 1) - 180) < 10)){
            marker_pixels_up.emplace_back(pointsVector[i]);
        }else if((abs(vectorAngle(linkvector1, linkvector2, 1)) < 10) || (abs(vectorAngle(linkvector1, linkvector2, 1) - 180) < 10)){
            marker_pixels_up.emplace_back(pointsVector[i]);
        }else if((abs(vectorAngle(linkvector2, linkvector0, 1)) < 10) || (abs(vectorAngle(linkvector2, linkvector0, 1) - 180) < 10)){
            marker_pixels_up.emplace_back(pointsVector[i]);
        }else{
            marker_pixels_down.emplace_back(pointsVector[i]);
        }
    }

    // A valid T: exactly 3 bar points and 1 stem point.
    if((marker_pixels_up.size() != 3) || (marker_pixels_down.size() != 1)){
        return false;
    }

    // Find the bar's center point (the one whose vectors to the other two bar
    // points are anti-parallel), then order the ends by rotation direction
    // relative to the stem.
    for(int i = 0; i < 3; i++){
        Eigen::Vector2d linkvector0 = subtractPoints(marker_pixels_up[i], marker_pixels_up[(i + 1) % 3]);
        Eigen::Vector2d linkvector1 = subtractPoints(marker_pixels_up[i], marker_pixels_up[(i + 2) % 3]);

        cout<<vectorAngle(linkvector0, linkvector1, 1) - 180<<endl;
        if(abs(vectorAngle(linkvector0, linkvector1, 1) - 180) < 30){
            marker_pixels_sorted.emplace_back(marker_pixels_up[i]);
            cout << marker_pixels_sorted << endl;
            Eigen::Vector2d linkvector =  subtractPoints(marker_pixels_up[i], marker_pixels_down[0]);
            if(checkRotationDirection(linkvector, linkvector0)){
                marker_pixels_sorted.emplace_back(marker_pixels_up[(i + 1) % 3]);
                marker_pixels_sorted.emplace_back(marker_pixels_up[(i + 2) % 3]);
                marker_pixels_sorted.emplace_back(marker_pixels_down[0]);
            }else{
                marker_pixels_sorted.emplace_back(marker_pixels_up[(i + 2) % 3]);
                marker_pixels_sorted.emplace_back(marker_pixels_up[(i + 1) % 3]);
                marker_pixels_sorted.emplace_back(marker_pixels_down[0]);
            }
        }
    }

    pointsVector = marker_pixels_sorted;

    if((int)pointsVector.size() != landmark_num){
        // Sorting failed (e.g. no center point passed the 30-degree test).
        TShapeGoodFlag = false;
    }else{
        // Symmetry check: the center must be (roughly) equidistant from both
        // bar ends. Guarded by the size check above — previously this indexed
        // pointsVector[0..2] even when the vector was empty.
        Eigen::Vector2d vec10 = subtractPoints(pointsVector[1], pointsVector[0]);
        Eigen::Vector2d vec20 = subtractPoints(pointsVector[2], pointsVector[0]);
        if(abs(vectorNorm2D(vec10) - vectorNorm2D(vec20)) > 30)
            TShapeGoodFlag = false;
    }

    if(TShapeGoodFlag){
        // Remember this ordering so a later bad frame can fall back to it.
        marker_pixels_buffer.clear();
        marker_pixels_buffer =  pointsVector;
        return true;
    }else{
        cout<< "marker_pixels_sort.size():"<<marker_pixels_sorted.size()<<endl;
        // NOTE(review): marker_pixels_buffer may still be empty the first time
        // this path is taken — callers must tolerate an empty pointsVector.
        pointsVector = marker_pixels_buffer;
        return false;
    }
}

// Run OpenCV solvePnP on the given pixel detections against the configured
// 3D landmark layout. Writes the solved pose into outputRvecRaw/outputTvecRaw
// and the derived target_q_in_img / target_pos_in_img members.
// Returns false when the detection count is wrong or the solved depth is
// negative (target behind the camera). useExtrinsicGuess seeds the solver
// with the current outputRvecRaw/outputTvecRaw contents.
bool PnPTargetNodeROS::pnp_process(vector<cv::Point2f> &pointsVector, bool useExtrinsicGuess){

    // Guard: we need exactly one detection per landmark.
    if(pointsVector.size() != landmark_num){
        cout << "pointsVector.size() != landmark_num" << endl;
        return false;
    }

    // Solve landmark pose in the image/camera frame.
    solvePnP(drone_landmarks_cv, pointsVector, cameraMatrix, distCoeffs, outputRvecRaw, outputTvecRaw, useExtrinsicGuess);

    Eigen::Vector3d euler;
    getEulerAngles(outputRvecRaw, euler, target_q_in_img);
    target_pos_in_img << outputTvecRaw.val[0], outputTvecRaw.val[1], outputTvecRaw.val[2]; // translation in the camera frame

    msg_target_pose_from_img.header.stamp = ros::Time::now();

    // Clamp the translation to the plausible workspace, axis by axis.
    const double axis_limit[3] = {20.0, 10.0, 8.0};
    for(int axis = 0; axis < 3; ++axis){
        target_pos_in_img[axis] = min(max(target_pos_in_img[axis], -axis_limit[axis]), axis_limit[axis]);
    }

    // Negative depth means the solution put the target behind the camera.
    if(target_pos_in_img[2] < 0){
        cout << "target_pos_in_img[2] < 0" << endl;
        return false;
    }

    return true;
}

// Recovery path for an implausible PnP solution: re-seed the solver with the
// last known-good rotation vector (saved in landmark_pose_solve when a solve
// passed all checks) and redo the solve with the extrinsic guess enabled,
// then refresh T_image_to_markers from the new result.
void PnPTargetNodeROS::Solving_multiple_problems(){
    // Restore the last good rvec as the solver seed.
    outputRvecRaw = outputRvecRaw_buffer;

    // Repeat the PnP process with useExtrinsicGuess forced on.
    pnpGoodFlag = pnp_process(marker_pixels, true);
    if(!pnpGoodFlag){
        marker_pixels.clear();
    }

    #ifdef USE_IMU_DIFF
        T_image_to_markers.block<3, 3>(0, 0) = Eigen::Matrix3d::Identity(); // landmark attitude provisionally Identity()
    #else
        T_image_to_markers.block<3, 3>(0, 0) = target_q_in_img.toRotationMatrix(); // solved landmark attitude in the camera frame
    #endif
        T_image_to_markers.block<3, 1>(0, 3) = target_pos_in_img;
}

// Freshness check on the extrinsic topics: both base->servogroup and
// servogroup->cam must have been received within the last second, and their
// stamps must agree to within a second of each other.
bool PnPTargetNodeROS::topic_time_check(){
    if((ros::Time::now() - stamp_base_to_servogroup).toSec() > 1.0){
        printf(RED "current time: %.3f, stamp_base_to_servogroup: %.3f\n" RESET, ros::Time::now().toSec(), stamp_base_to_servogroup.toSec());
        return false;
    }
    if((ros::Time::now() - stamp_servogroup_to_cam).toSec() > 1.0){
        printf(RED "current time: %.3f, stamp_servogroup_to_cam: %.3f\n" RESET, ros::Time::now().toSec(), stamp_servogroup_to_cam.toSec());
        return false;
    }
    if(abs((stamp_base_to_servogroup - stamp_servogroup_to_cam).toSec()) > 1.0){
        printf(RED "stamp_base_to_servogroup: %.3f, stamp_servogroup_to_cam: %.3f\n" RESET, stamp_base_to_servogroup.toSec(), stamp_servogroup_to_cam.toSec());
        return false;
    }
    return true;
}

void PnPTargetNodeROS::LoadCameraConfig(const std::string& config_path){
    std::ifstream fin(config_path);
    YAML::Node camera_config = YAML::Load(fin);
    
    auto IR_Camera = camera_config["Camera"];

    // camera intrinsic parameters
    double fx = IR_Camera["fx"].as<double>();
	double fy = IR_Camera["fy"].as<double>();
	double cx = IR_Camera["cx"].as<double>();
	double cy = IR_Camera["cy"].as<double>();
	cameraMatrix = (cv::Mat_<double>(3, 3) << fx, 0, cx, 0, fy, cy, 0, 0, 1);

    auto D = IR_Camera["D"].as<std::vector<double>>();
    for(int i = 0; i < D.size(); i++){
        distCoeffs.emplace_back(D[i]);
    }

    for(int i = 0; i < 4; i++){
        for(int j = 0; j < 4; j++){
            T_camera_to_image(i, j) = IR_Camera["T_cam_image"][i][j].as<double>();
        }
    }
    
    for(int i = 0; i < 4; i++){
        for(int j = 0; j < 4; j++){
            T_base_to_camera(i, j) = IR_Camera["T_base_to_cam"][i][j].as<double>();
        }
    }
}

void PnPTargetNodeROS::LoadLandmarkConfig(const std::string& config_path){
    std::ifstream fin(config_path);
    YAML::Node landmark_config = YAML::Load(fin);

    auto IR_landmark = landmark_config["Landmark"];

    for(int i = 0; i < IR_landmark["number"].as<int>(); i++){
        drone_landmarks_cv.emplace_back(cv::Point3f(IR_landmark["layout"][i][0].as<float>() / 1000, IR_landmark["layout"][i][1].as<float>() / 1000, IR_landmark["layout"][i][2].as<float>() / 1000));
    }

    Eigen::Matrix4d T_uav_to_landmark = Eigen::Matrix4d::Identity();
    for(int i = 0; i < 4; i++){
        for(int j = 0; j < 4; j++){
            T_uav_to_landmark(i, j) = IR_landmark["T_Body_Landmark"][i][j].as<double>();
        }
    }
    T_IRLandmark_to_drone = T_uav_to_landmark.inverse();

}

// Wifi:34206412