//
// Created by hzj on 24-8-16.
//
#include "multi_camera_cooperation/icp_target_node_ros.h"

using namespace std;

// All real setup happens in init(); the constructor has nothing to do.
ICPTargetNodeROS::ICPTargetNodeROS() = default;

// One-time node setup: reads parameters, allocates per-camera state,
// loads calibration files, and wires up all ROS pub/sub and the solve timer.
// Must be called once with a valid NodeHandle before the node can run.
void ICPTargetNodeROS::init(ros::NodeHandle &nh){
//=============== Read camera attribute ==================//
    nh.param<vector<string>>("cams", cams, {"camA"});
    nh.param<vector<string>>("servogroups", servogroups, {"servogroup12"});
    nh.param<string>("pub_topic", pub_topic, "/ICPCoopEstimation");
    nh.param<string>("config_file_path", config_file_path, "");
    nh.param<string>("landmark_config_file_path", landmark_config_file_path, "");

//=============== Read ROS parameter =====================//
    // std::string uav_config_file;
    // uav_config = make_shared<ConfigParser>(uav_config_file);

    // Per-camera state: each vector below is indexed by camera index i.
    for(int i = 0; i < cams.size(); i++){
        //------------------ Initialize image ------------------//
        // NOTE(review): cv::Mat::zeros takes (rows, cols); (640, 480) yields a
        // 640-row x 480-col image -- confirm this matches the camera stream
        // orientation (the buffer is overwritten by ShowImage_cb anyway).
        Images.push_back(cv::Mat::zeros(640, 480, CV_8UC3));
        //------------------ Initialize marker pixel ------------------//
        marker_pixels.push_back(vector<cv::Point2f>());
        isMarkerReady.push_back(false);
        isTServogroup2CamReady.push_back(false);
        isTBase2ServogroupReady.push_back(false);
        isTBase2CamReady.push_back(false);
        //------------------ Initialize camera intrinsic parameters ------------------//
        camera_config_path.push_back(config_file_path + cams[i] + ".yaml");
        cameraMatrixs.push_back(cv::Mat::zeros(3, 3, CV_64F));  
        distCoeffs.push_back({0, 0, 0, 0, 0});
        LoadCameraConfig(camera_config_path[i], i);
        // Extrinsics start as identity until the transform callbacks fire.
        T_servogroup_to_cam.push_back(Eigen::Matrix4d::Identity());
        T_base_to_servogroup.push_back(Eigen::Matrix4d::Identity());
        T_base_to_cam.push_back(Eigen::Matrix4d::Identity());
        //------------------ ROS subscriber ------------------//
        // Camera index i is bound into each callback so one handler serves all cameras.
        // sub_marker_pixel.push_back(nh.subscribe<multi_camera_cooperation::landmark>("/" + cams[i] + "/single_cam_process_ros/ir_mono/marker_pixel_raw", 1, boost::bind(&ICPTargetNodeROS::marker_pixel_cb, this, _1, i)));
        sub_marker_pixel.push_back(nh.subscribe<std_msgs::Float64MultiArray>("/" + cams[i] + "/single_cam_process_ros/ir_mono/marker_pixel", 1, boost::bind(&ICPTargetNodeROS::marker_pixel_cb, this, _1, i)));
        sub_T_servogroup_to_cam.push_back(nh.subscribe<geometry_msgs::TransformStamped>("/T_" + servogroups[i] + "_to_" + cams[i], 1, boost::bind(&ICPTargetNodeROS::T_servogroup_to_cam_cb, this, _1, i)));
        sub_T_base_to_servogroup.push_back(nh.subscribe<geometry_msgs::TransformStamped>("/T_base_to_" + servogroups[i], 1, boost::bind(&ICPTargetNodeROS::T_base_to_servogroup_cb, this, _1, i)));
        sub_T_base_to_cam.push_back(nh.subscribe<geometry_msgs::TransformStamped>("/T_base_to_" + cams[i], 1, boost::bind(&ICPTargetNodeROS::T_base_to_cam_cb, this, _1, i)));
        sub_showimage_cb.push_back(nh.subscribe<sensor_msgs::Image>("/" + cams[i] + "/single_cam_process_ros/ir_mono/origin", 1, boost::bind(&ICPTargetNodeROS::ShowImage_cb, this, _1, i)));
        pub_epipolar_geometry.push_back(nh.advertise<sensor_msgs::Image>("/" + cams[i] + "/single_cam_process_ros/ir_mono/epipolar_geometry", 1));
    }
    // ~33 Hz solve attempt; actual solving is gated by readiness flags.
    timer = nh.createWallTimer(ros::WallDuration(0.03), boost::bind(&ICPTargetNodeROS::solution_start_callback, this, _1));
    pub_target_pose = nh.advertise<geometry_msgs::TransformStamped>(pub_topic, 1);
    pub_rough_pose = nh.advertise<geometry_msgs::TransformStamped>("/ICPCoopRoughEstimation", 1);
    // tf_base2estimate = make_shared<tf2_ros::TransformBroadcaster>();

    // Fixed camera-frame -> image-frame rotation (row-major init of a 4x4).
    T_cam_to_image << 0, 0, 1, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 1;
    cv_ptr_compressed = boost::make_shared<cv_bridge::CvImage>();

//=============== Read landmark parameter =====================//
    // landmark_config_path = config_file_path + "landmark.yaml";
    LoadLandmarkConfig(landmark_config_file_path);

    ROS_INFO("ICP target node initialized, waiting for landmark pixel and transform message...");

}

// Periodic wall-timer callback: triggers one pose solve when at least two
// cameras have fresh marker detections and a new solve has been requested.
void ICPTargetNodeROS::solution_start_callback(const ros::WallTimerEvent&){
    // Count cameras whose marker pixels are currently ready.
    int ready_cams = 0;
    for(const auto ready : isMarkerReady){
        if(ready){
            ready_cams++;
        }
    }

    // Triangulation needs at least two views; LaunchFlag gates re-entry
    // until a new marker message re-arms the solver.
    if(ready_cams > 1 && LaunchFlag){
        landmark_pose_solve();
        LaunchFlag = false;
    }
}

// Cache the latest frame of camera i (converted to BGR8) so the solve step
// can draw epipolar-geometry debug overlays on it.
void ICPTargetNodeROS::ShowImage_cb(const sensor_msgs::Image::ConstPtr &msg, int i){
    auto converted = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    cv_ptr_compressed = converted;
    Images[i] = converted->image;
}

// Callback for the marker-pixel array of camera i.
// Data layout (as consumed here): element 0 is skipped (presumably a count
// or header value -- TODO confirm against the publisher), then pixel
// coordinates follow as (x, y) pairs starting at index 1.
//
// Bug fix: the pair-iteration index used to be declared as `i`, shadowing
// the camera-index parameter, so pixels were pushed into
// marker_pixels[data_index] instead of marker_pixels[camera]. The loop bound
// is also tightened to `k + 1 < size` so the y read can never go past the
// end of the array.
void ICPTargetNodeROS::marker_pixel_cb(const std_msgs::Float64MultiArray::ConstPtr &msg, int i){
    marker_pixels[i].clear();

    for(size_t k = 1; k + 1 < msg->data.size(); k += 2){
        cv::Point2f pixel;
        pixel.x = msg->data[k];
        pixel.y = msg->data[k + 1];
        marker_pixels[i].push_back(pixel);
    }

    // Only arm the solver once this camera's base->camera extrinsic is known;
    // triangulating with the identity placeholder would be meaningless.
    if(isTBase2CamReady[i] == true){
        isMarkerReady[i] = true;
        LaunchFlag = true;
    }
}

// Store the latest servogroup->camera extrinsic for camera i as a 4x4
// homogeneous matrix and mark it as received.
void ICPTargetNodeROS::T_servogroup_to_cam_cb(const geometry_msgs::TransformStamped::ConstPtr &msg, int i){
    const auto &rot = msg->transform.rotation;
    const auto &trans = msg->transform.translation;

    Eigen::Matrix4d T = Eigen::Matrix4d::Identity();
    T.block<3, 3>(0, 0) = Eigen::Quaterniond(rot.w, rot.x, rot.y, rot.z).toRotationMatrix();
    T.block<3, 1>(0, 3) = Eigen::Vector3d(trans.x, trans.y, trans.z);
    T_servogroup_to_cam[i] = T;

    isTServogroup2CamReady[i] = true;
}

// Store the latest base->servogroup extrinsic for index i as a 4x4
// homogeneous matrix and mark it as received.
void ICPTargetNodeROS::T_base_to_servogroup_cb(const geometry_msgs::TransformStamped::ConstPtr &msg, int i){
    const auto &rot = msg->transform.rotation;
    const auto &trans = msg->transform.translation;

    Eigen::Matrix4d T = Eigen::Matrix4d::Identity();
    T.block<3, 3>(0, 0) = Eigen::Quaterniond(rot.w, rot.x, rot.y, rot.z).toRotationMatrix();
    T.block<3, 1>(0, 3) = Eigen::Vector3d(trans.x, trans.y, trans.z);
    T_base_to_servogroup[i] = T;

    isTBase2ServogroupReady[i] = true;
}

// Store the latest base->camera extrinsic for camera i as a 4x4 homogeneous
// matrix; this readiness flag is what gates marker_pixel_cb's solve trigger.
void ICPTargetNodeROS::T_base_to_cam_cb(const geometry_msgs::TransformStamped::ConstPtr &msg, int i){
    const auto &rot = msg->transform.rotation;
    const auto &trans = msg->transform.translation;

    Eigen::Matrix4d T = Eigen::Matrix4d::Identity();
    T.block<3, 3>(0, 0) = Eigen::Quaterniond(rot.w, rot.x, rot.y, rot.z).toRotationMatrix();
    T.block<3, 1>(0, 3) = Eigen::Vector3d(trans.x, trans.y, trans.z);
    T_base_to_cam[i] = T;

    isTBase2CamReady[i] = true;
}

// Triangulate element-wise matched 2D observations from two views into 3D
// points. Transform1/Transform2 are the 3x4 projection matrices of the two
// views; results are appended to points3D.
// Returns false when the two pixel lists cannot be paired one-to-one.
bool ICPTargetNodeROS::triangulation(cv::Mat &Transform1, cv::Mat &Transform2, vector<cv::Point2f> &pointsvectors1, vector<cv::Point2f> &pointsvectors2, vector<cv::Point3f> &points3D){
    // cv::triangulatePoints pairs the two lists element-wise, so the sizes
    // must agree.
    if(pointsvectors1.size() != pointsvectors2.size()){
        return false;
    }

    // Homogeneous 4xN output: one column per triangulated point.
    cv::Mat pts_4d;
    cv::triangulatePoints(Transform1, Transform2, pointsvectors1, pointsvectors2, pts_4d);

    for(int col = 0; col < pts_4d.cols; col++){
        cv::Mat hp = pts_4d.col(col).clone();
        // Dehomogenize by the w component, then keep (x, y, z).
        hp = hp / hp.at<float>(3, 0);
        points3D.push_back(cv::Point3f(hp.at<float>(0, 0), hp.at<float>(1, 0), hp.at<float>(2, 0)));
    }
    return true;
}

// One full pose-estimation cycle: triangulate marker pixels from all camera
// pairs, publish a rough (translation-only) estimate, then refine with
// RANSAC registration and publish the final 6-DoF pose if one was found.
void ICPTargetNodeROS::landmark_pose_solve(){
    printf(BOLDREDPURPLE "[Landmark_Pose_Solve] Landmark_Pose_Solve START!\n" RESET);

    // Time the triangulation stage for diagnostics.
    auto start = std::chrono::steady_clock::now();
    triangulationGoodFlag = triangulation_process();
    auto end = std::chrono::steady_clock::now();
    auto diff = end - start;
    printf(BOLDCYAN "[Triangulation Process] Triangulation Process running time: %.3f ms\n" RESET, chrono::duration<double, milli>(diff).count());

    if(triangulationGoodFlag){
        // Publish the rough estimate: centroid of the merged 3D points with
        // identity orientation.
        msg_T_base_to_estimation.header.stamp = ros::Time::now();
        msg_T_base_to_estimation.header.frame_id = "base";
        msg_T_base_to_estimation.child_frame_id = "rough_estimation";
        msg_T_base_to_estimation.transform.translation.x = merged_RoughPoints.x();
        msg_T_base_to_estimation.transform.translation.y = merged_RoughPoints.y();
        msg_T_base_to_estimation.transform.translation.z = merged_RoughPoints.z();
        msg_T_base_to_estimation.transform.rotation.x = 0.0;
        msg_T_base_to_estimation.transform.rotation.y = 0.0;
        msg_T_base_to_estimation.transform.rotation.z = 0.0;
        msg_T_base_to_estimation.transform.rotation.w = 1.0;
        pub_rough_pose.publish(msg_T_base_to_estimation);

        // RANSAC Registration (sets T_base_to_estimation and publish_flag on
        // success).
        ransac_process();
    
        if(publish_flag){
            // Extract translation and rotation from the refined transform.
            Eigen::Vector3d t_base_to_estimation_eigen(T_base_to_estimation.block<3, 1>(0, 3));
            Eigen::Quaterniond q_base_to_estimation_eigen(T_base_to_estimation.block<3, 3>(0, 0));

            // Publish the refined target pose (reuses the same message
            // object, overwriting the rough-estimate fields above).
            t_base_to_estimation = tf2::Vector3(t_base_to_estimation_eigen.x(), t_base_to_estimation_eigen.y(), t_base_to_estimation_eigen.z());
            q_base_to_estimation = tf2::Quaternion(q_base_to_estimation_eigen.x(), q_base_to_estimation_eigen.y(), q_base_to_estimation_eigen.z(), q_base_to_estimation_eigen.w());
            printf(GREEN "[ICP Target Solve] t_base_to_estimation = %.3f, %.3f, %.3f | q_base_to_estimation (wxyz) = %.3f, %.3f, %.3f, %.3f\n" RESET,
                    t_base_to_estimation.x(), t_base_to_estimation.y(), t_base_to_estimation.z(),
                    q_base_to_estimation.getW(), q_base_to_estimation.getX(), q_base_to_estimation.getY(), q_base_to_estimation.getZ()); 

            msg_T_base_to_estimation.header.stamp = ros::Time::now();
            msg_T_base_to_estimation.header.frame_id = "base";
            msg_T_base_to_estimation.child_frame_id = "estimation";
            msg_T_base_to_estimation.transform.translation.x = t_base_to_estimation.x();
            msg_T_base_to_estimation.transform.translation.y = t_base_to_estimation.y();
            msg_T_base_to_estimation.transform.translation.z = t_base_to_estimation.z();
            msg_T_base_to_estimation.transform.rotation.x = q_base_to_estimation.getX();
            msg_T_base_to_estimation.transform.rotation.y = q_base_to_estimation.getY();
            msg_T_base_to_estimation.transform.rotation.z = q_base_to_estimation.getZ();
            msg_T_base_to_estimation.transform.rotation.w = q_base_to_estimation.getW();

            pub_target_pose.publish(msg_T_base_to_estimation);
            // tf_base2estimate->sendTransform(msg_T_base_to_estimation);

            // Consume the flag so the pose is published at most once per solve.
            publish_flag = false;
        }

        
    }
    std::cout << std::endl;
}

// Build the 3x4 projection matrix of one camera: K * inverse(base->image),
// which maps homogeneous base-frame points into (scaled) pixel coordinates.
cv::Mat ICPTargetNodeROS::Transform_Calculate(Eigen::Matrix4d &T_base2cam, cv::Mat &cameraMatrix){
    // base -> image-frame pose, inverted to get the view matrix.
    const Eigen::Matrix4d view_eigen = (T_base2cam * T_cam_to_image).inverse();

    // Copy the top 3x4 of the Eigen matrix into an OpenCV matrix.
    cv::Mat view(3, 4, CV_64F);
    for(int r = 0; r < 3; r++){
        for(int c = 0; c < 4; c++){
            view.at<double>(r, c) = view_eigen(r, c);
        }
    }

    // 3x3 intrinsics times 3x4 view -> 3x4 projection.
    return cameraMatrix * view;
}

// Greedy clustering of 3D points: starting from each not-yet-consumed seed,
// every later point within `threshold` of the seed is averaged into a single
// representative point. Returns the list of cluster averages.
vector<cv::Point3f> ICPTargetNodeROS::mergePoints(vector<cv::Point3f>& points3D, float threshold){
    std::vector<cv::Point3f> merged;
    std::vector<bool> consumed(points3D.size(), false);

    for (size_t seed = 0; seed < points3D.size(); seed++) {
        if (consumed[seed]) {
            continue;
        }

        cv::Point3f sum(0, 0, 0);
        float members = 0;

        // Absorb every remaining point close to the seed (the seed itself is
        // at distance 0, so it always joins its own cluster for threshold > 0).
        for (size_t cand = seed; cand < points3D.size(); cand++) {
            if (cv::norm(points3D[cand] - points3D[seed]) < threshold) {
                sum += points3D[cand];
                members++;
                consumed[cand] = true;
            }
        }

        // Emit the cluster centroid (guard keeps non-positive thresholds safe).
        if (members > 0) {
            merged.push_back(cv::Point3f(sum.x / members, sum.y / members, sum.z / members));
        }
    }
    return merged;
}

// Load the landmark layout (target 3D points) and the body->landmark
// extrinsic from a YAML file.
// Expected YAML schema (as consumed here):
//   Landmark:
//     number: <int>               # number of landmark points
//     layout: [[x, y, z], ...]    # point coordinates, in millimeters
//     T_Body_Landmark: 4x4 rows   # UAV body -> landmark transform
// NOTE(review): there is no check that the file opened or the keys exist --
// a bad path throws from yaml-cpp. Confirm that is acceptable at startup.
void ICPTargetNodeROS::LoadLandmarkConfig(const std::string& config_path)
{
    ROS_INFO("Load Landmark Config: %s", config_path.c_str());

    std::ifstream fin(config_path);
    YAML::Node landmark_config = YAML::Load(fin);
    
    auto IR_landmark = landmark_config["Landmark"];

    // Landmark coordinates are stored in millimeters; convert to meters.
    targetPoints3D.clear();
    for(int i = 0; i < IR_landmark["number"].as<int>(); i++){
        targetPoints3D.emplace_back(cv::Point3f(IR_landmark["layout"][i][0].as<float>() / 1000, IR_landmark["layout"][i][1].as<float>() / 1000, IR_landmark["layout"][i][2].as<float>() / 1000));
    }

    // Read T_uav_to_landmark row by row, then invert to get landmark -> UAV.
    Eigen::Matrix4d T_uav_to_landmark = Eigen::Matrix4d::Identity();
    for(int i = 0; i < 4; i++){
        for(int j = 0; j < 4; j++){
            T_uav_to_landmark(i, j) = IR_landmark["T_Body_Landmark"][i][j].as<double>();
        }
    }
    T_landmark_to_uav = T_uav_to_landmark.inverse();
}

// Load the intrinsic calibration of camera i from a YAML file.
// Expected schema: Camera: { fx, fy, cx, cy, D: [k1, k2, p1, p2, k3] }.
// Focal lengths and principal point are divided by 1000 to match the scaled
// pixel convention used elsewhere in this node.
void ICPTargetNodeROS::LoadCameraConfig(const std::string& config_path, int i)
{
    ROS_INFO("Load Camera Config: %s", config_path.c_str());

    std::ifstream fin(config_path);
    YAML::Node camera_config = YAML::Load(fin);

    auto IR_Camera = camera_config["Camera"];

    // Intrinsic matrix, scaled by 1/1000.
    const double fx = IR_Camera["fx"].as<double>();
    const double fy = IR_Camera["fy"].as<double>();
    const double cx = IR_Camera["cx"].as<double>();
    const double cy = IR_Camera["cy"].as<double>();
    cameraMatrixs[i] = (cv::Mat_<double>(3, 3) << fx / 1000, 0, cx / 1000,
                                                  0, fy / 1000, cy / 1000,
                                                  0, 0, 1);

    // Distortion coefficients, deep-copied into an owning cv::Mat.
    const auto D = IR_Camera["D"].as<std::vector<double>>();
    distCoeffs[i] = cv::Mat(D).clone();
}

// Triangulate marker pixels from each adjacent camera pair into base-frame
// 3D points, merge near-duplicate points, and compute a window-filtered
// centroid as the rough target position (merged_RoughPoints).
// Returns true only when at least one merged 3D point was produced.
//
// Bug fixes vs. the previous version:
//  - the debug-draw loop indexed pointsvectors2 with pointsvectors1's size,
//    reading out of bounds when the two cameras detected different marker
//    counts; each image now gets its own correctly bounded loop;
//  - when no points were triangulated, the centroid division by
//    points3D_merged.size() == 0 produced NaNs that were then published as
//    the rough pose; we now bail out with false instead.
bool ICPTargetNodeROS::triangulation_process(){
    printf(BOLDCYAN "[Triangulation Process] Triangulation Process Start.\n" RESET);
    bool triangulation_GoodFlag = false;
    points3D.clear();
    points3D_merged.clear();
    RoughPoints.setZero();

    // Pair each camera with its ring neighbor: (0,1), (1,2), ..., (n-1,0).
    for(int i = 0; i < cams.size(); i++){
        int j = (i + 1) % cams.size();

        if(marker_pixels[i].size() == 0 || marker_pixels[j].size() == 0){
            std::cout << "No marker pixel detected in camera " << i << " or " << j << std::endl;
            continue;
        }

        vector<cv::Point3f> points3D_try;
        cv::Mat Transform1 = Transform_Calculate(T_base_to_cam[i], cameraMatrixs[i]);
        cv::Mat Transform2 = Transform_Calculate(T_base_to_cam[j], cameraMatrixs[j]);
        vector<cv::Point2f> pointsvectors1 = marker_pixels[i];
        vector<cv::Point2f> pointsvectors2 = marker_pixels[j];

        // Draw the detected pixels (scaled back to pixel units by *1000) on
        // each camera's debug image -- one loop per image, each with its own
        // bound so differing marker counts cannot read out of range.
        for(int k = 0; k < pointsvectors1.size(); k++){
            cv::circle(Images[i], cv::Point2i((pointsvectors1[k].x) * 1000, (pointsvectors1[k].y) * 1000), 4,  cv::Scalar(255,0,0), 1, cv::LINE_AA);
            cv::putText(Images[i], std::to_string(k), cv::Point2i((pointsvectors1[k].x) * 1000, (pointsvectors1[k].y) * 1000), cv::FONT_HERSHEY_TRIPLEX, 0.65, cv::Scalar(0,255,0), 1, false);
        }
        for(int k = 0; k < pointsvectors2.size(); k++){
            cv::circle(Images[j], cv::Point2i((pointsvectors2[k].x) * 1000, (pointsvectors2[k].y) * 1000), 4,  cv::Scalar(255,0,0), 1, cv::LINE_AA);
            cv::putText(Images[j], std::to_string(k), cv::Point2i((pointsvectors2[k].x) * 1000, (pointsvectors2[k].y) * 1000), cv::FONT_HERSHEY_TRIPLEX, 0.65, cv::Scalar(0,255,0), 1, false);
        }
        pub_epipolar_geometry[i].publish(cv_bridge::CvImage(std_msgs::Header(), "bgr8", Images[i]).toImageMsg());
        pub_epipolar_geometry[j].publish(cv_bridge::CvImage(std_msgs::Header(), "bgr8", Images[j]).toImageMsg());

        // Triangulate this pair and accumulate the resulting 3D points.
        triangulation_GoodFlag = triangulation(Transform1, Transform2, pointsvectors1, pointsvectors2, points3D_try);
        if(triangulation_GoodFlag){
            points3D.insert(points3D.end(), points3D_try.begin(), points3D_try.end());
        }
    }

    // Merge near-duplicate 3D points seen from multiple pairs.
    points3D_merged = mergePoints(points3D, mergePointsThreshold);

    // Nothing triangulated: report failure instead of dividing by zero below.
    if(points3D_merged.empty()){
        std::cout << "Triangulation produced no 3D points." << std::endl;
        return false;
    }

    // Centroid of the merged points, smoothed by a sliding-window filter.
    for(int i = 0; i < points3D_merged.size(); i++){
        RoughPoints += Eigen::Vector3d(points3D_merged[i].x, points3D_merged[i].y, points3D_merged[i].z);
        cout << "points3D_merged[" << i << "]: " << points3D_merged[i] << endl;
    }
    RoughPoints /= points3D_merged.size();
    window_filter(RoughPoints, RoughPointsBuffer, win_size);

    merged_RoughPoints = tf2::Vector3(RoughPoints.x(), RoughPoints.y(), RoughPoints.z());

    return true;
}

// RANSAC-style registration of the merged triangulated points against the
// known landmark layout (targetPoints3D):
//  1. every ordered-index triple (i < j < k) of points3D_merged is tested as
//     a candidate 3-point correspondence via RANSAC_Registration;
//  2. each accepted candidate is solved by least squares and scored as
//     (mean inlier error - inlier count); the lowest score wins;
//  3. the best correspondence set is re-solved, with a one-time swap of two
//     points to recover from a flipped solution, and written into
//     T_base_to_estimation.
// Sets publish_flag when a valid final transform was found.
void ICPTargetNodeROS::ransac_process(){
    printf(BOLDCYAN "[RANSAC_Process] RANSAC Process Start.\n" RESET);

    // Initialize
    distance_threshold = 0.1;
    ransac_inliers.resize(targetPoints3D.size()); 
    Good_Matched_Points.clear(); 
    Good_Matched_Inliers.clear();  
    Final_Matched_Points.clear();
    Final_Matched_Inliers.clear();
    // -1 marks "no match" for each landmark slot.
    for (int i = 0; i < targetPoints3D.size(); i++){
        Final_Matched_Points.emplace_back(cv::Point3f(0, 0, 0));
        Final_Matched_Inliers.emplace_back(-1);
    }

    // Enumerate every unordered triple of merged 3D points as a match seed.
    for(int i = 0; i < points3D_merged.size(); i++){
        for(int j = i + 1; j < points3D_merged.size(); j++){
            for(int k = j + 1; k < points3D_merged.size(); k++){
                vector<cv::Point3f> target_point_vector_temp = {points3D_merged[i], points3D_merged[j], points3D_merged[k]};
                bool RANSAC_GoodFlag = RANSAC_Registration(targetPoints3D, points3D_merged, target_point_vector_temp, ransac_inliers, distance_threshold);
                if(RANSAC_GoodFlag){
                    Good_Matched_Points.emplace_back(target_point_vector_temp);
                    Good_Matched_Inliers.emplace_back(ransac_inliers);
                }
            }
        }
    }

    cout << "Good_Matched_Points.size(): " << Good_Matched_Points.size() << endl;

    // step1: solve the transform by least squares from each 3-point match.
    // step2: score every candidate by its residual error and keep the best.
    Eigen::Matrix4d T_base_to_estimation_best = Eigen::Matrix4d::Identity();
    double min_cost = numeric_limits<double>::max();
    for(int i = 0; i < Good_Matched_Points.size(); i++){
        vector<cv::Point3f> src_points = {};
        vector<cv::Point3f> tgt_points = {};
        vector<cv::Point3f> Matched_points_temp = Good_Matched_Points[i];
        vector<int> Matched_inliers_temp = Good_Matched_Inliers[i];

        // Collect the inlier correspondences of this candidate.
        for(int j = 0; j < Good_Matched_Points[i].size(); j++){
            if(Good_Matched_Inliers[i][j] == 1){
                src_points.emplace_back(Good_Matched_Points[i][j]);
                tgt_points.emplace_back(targetPoints3D[j]);
            }
        }

        cout << "src_points[" << i << "]: " << endl;
        for(int j = 0; j < src_points.size(); j++){
            cout << src_points[j] << endl;
        }
        cout << "tgt_points[" << i << "]: " << endl;
        for(int j = 0; j < tgt_points.size(); j++){
            cout << tgt_points[j] << endl;
        }

        // Least-squares rigid transform from the inlier pairs.
        cv::Mat transform;
        transform = Get_3DTransform_Matrix(src_points, tgt_points);

        for(int j = 0; j < 4; j++){
            for(int k = 0; k < 4; k++){
                T_base_to_estimation_best(j, k) = transform.at<double>(j, k);
            }
        } 

        cout << "T_base_to_estimation_best: " << endl;
        for(int j = 0; j < 4; j++){
            for(int k = 0; k < 4; k++){
                cout << T_base_to_estimation_best(j, k) << " ";
            }
            cout << endl;
        }
        cout << endl;

        // Validate this candidate: project every landmark into the base frame
        // and find its nearest merged point within the threshold.
        double cost = 0;
        int inliers_count = 0;
        for (int j = 0; j < targetPoints3D.size(); j++){
            Eigen::Vector4d target_point(targetPoints3D[j].x, targetPoints3D[j].y, targetPoints3D[j].z, 1);
            // NOTE(review): .inverse() is recomputed for every landmark --
            // could be hoisted out of this loop (doc-only change withheld).
            Eigen::Vector4d transformed_point = T_base_to_estimation_best.inverse() * target_point;
            
            double min_error = numeric_limits<double>::max();
            for (int k = 0; k < points3D_merged.size(); k++){
                double error = (transformed_point - Eigen::Vector4d(points3D_merged[k].x, points3D_merged[k].y, points3D_merged[k].z, 1)).norm();
                if(error < distance_threshold && error < min_error){
                    min_error = error;
                    Matched_points_temp[j] = points3D_merged[k];
                    Matched_inliers_temp[j] = 1;
                }
            }

            if(Matched_inliers_temp[j] == 1){
                cost += min_error;
                inliers_count++;
            }
        }

        // Score: mean inlier error minus inlier count (more inliers = better).
        // NOTE(review): if inliers_count == 0 this divides by zero (cost
        // becomes NaN and the candidate is skipped by the comparison below) --
        // confirm candidates are guaranteed at least one inlier.
        cost = cost / inliers_count - inliers_count;
        if(cost < min_cost && cost < (numeric_limits<double>::max() - 1)){
            min_cost = cost;
            Final_Matched_Points = Matched_points_temp;
            Final_Matched_Inliers = Matched_inliers_temp;
        }
    }

    if(Good_Matched_Points.size() > 0){
        // print ransac result
        for(int i = 0; i < Final_Matched_Points.size(); i++){
            printf(GREEN "[RANSAC] Final_Matched_Points[%d] = %.3f, %.3f, %.3f\n" RESET, i, Final_Matched_Points[i].x, Final_Matched_Points[i].y, Final_Matched_Points[i].z);
        }
        
        // Re-solve using only the final inlier correspondences.
        vector<cv::Point3f> src_points = {};
        vector<cv::Point3f> tgt_points = {};
        for(int j = 0; j < Final_Matched_Points.size(); j++){
            if(Final_Matched_Inliers[j] == 1){
                src_points.emplace_back(Final_Matched_Points[j]);
                tgt_points.emplace_back(targetPoints3D[j]);
            }
        }

        // Flip recovery: if the solved rotation's (2,2) entry indicates an
        // upside-down solution, swap two source points once and re-solve.
        // NOTE(review): assumes src_points has at least 3 elements here --
        // confirm the inlier count cannot drop below 3.
        cv::Mat transform;
        transform = Get_3DTransform_Matrix(src_points, tgt_points);
        transform = transform.inv();
        if(transform.at<double>(2, 2) < 0.75){
            swap(src_points[1], src_points[2]);
        }
        transform = Get_3DTransform_Matrix(src_points, tgt_points);
        transform = transform.inv();
        if(transform.at<double>(2, 2) < 0.75){
            // Still flipped: give up without setting publish_flag.
            return;
        }else{
            for(int j = 0; j < 4; j++){
                for(int k = 0; k < 4; k++){
                    T_base_to_estimation(j, k) = transform.at<double>(j, k);
                }
            } 
        }

        publish_flag = true;

        // std::cout << "T_base_to_estimation: " << std::endl;
        // for(int j = 0; j < 4; j++){
        //     for(int k = 0; k < 4; k++){
        //         std::cout << T_base_to_estimation(j, k) << " ";
        //     }
        //     std::cout << std::endl;
        // }
    }
}

// Convert pixel coordinates to normalized camera-plane coordinates using the
// intrinsics K: x = (u - cx) / fx, y = (v - cy) / fy.
vector<cv::Point2f> ICPTargetNodeROS::pixel2cam(const vector<cv::Point2f> &p, const cv::Mat &K){
    const double fx = K.at<double>(0, 0);
    const double fy = K.at<double>(1, 1);
    const double cx = K.at<double>(0, 2);
    const double cy = K.at<double>(1, 2);

    vector<cv::Point2f> normalized;
    normalized.reserve(p.size());
    for(const auto &pix : p){
        normalized.emplace_back(cv::Point2f((pix.x - cx) / fx, (pix.y - cy) / fy));
    }

    return normalized;
}

// Match marker pixels between two cameras using epipolar geometry.
// For each left pixel, candidates in the right image within
// distance_threshold of its epipolar line are checked symmetrically (the
// left pixel against the right candidate's epipolar line in the left image).
// Surviving pairs are scored by line distance and the final one-to-one
// assignment is solved with the Kuhn-Munkres (Hungarian) algorithm.
// Side effect: draws all left-marker epipolar lines on the right debug image.
// Returns true when at least one pair was matched.
// NOTE(review): distances are multiplied by 1000 before thresholding, which
// suggests pixel coordinates are stored at a /1000 scale -- confirm against
// the marker-pixel publisher.
bool ICPTargetNodeROS::feature_match(int left_cam_idx, int right_cam_idx, vector<cv::Point2f> &matched_marker_pixels_left, vector<cv::Point2f> &matched_marker_pixels_right){
    printf(GREEN "[Feature Match] Left Camera: %d, Right Camera: %d\n" RESET, left_cam_idx, right_cam_idx);
    // draw epipolar line (marker_idx == -1 means "draw all, return nothing")
    epipolar_line_solve(left_cam_idx, right_cam_idx, -1);
    
    // initialize the match_evalution matrix (cost = max() means "no match");
    // this local threshold shadows the member used by ransac_process.
    double distance_threshold = 50;
    vector<vector<double>> match_evalution(marker_pixels[left_cam_idx].size(), vector<double>(marker_pixels[right_cam_idx].size(), numeric_limits<double>::max()));

    for(int i = 0; i < marker_pixels[left_cam_idx].size(); i++){
        cv::Mat line_right = epipolar_line_solve(left_cam_idx, right_cam_idx, i);
        for(int j = 0; j < marker_pixels[right_cam_idx].size(); j++){
            cv::Point2f point_r = marker_pixels[right_cam_idx][j];
            cv::Mat point_right = (cv::Mat_<double>(3, 1) << point_r.x, point_r.y, 1);
            // Point-to-line distance |ax + by + c| / sqrt(a^2 + b^2), x1000.
            double distance = 1000 * abs(line_right.at<double>(0) * point_r.x + line_right.at<double>(1) * point_r.y + line_right.at<double>(2)) / sqrt(pow(line_right.at<double>(0), 2) + pow(line_right.at<double>(1), 2));
            if(distance < distance_threshold){
                // Symmetric check in the other direction before accepting.
                cv::Mat line_left = epipolar_line_solve(right_cam_idx, left_cam_idx, j);
                cv::Point2f point_l = marker_pixels[left_cam_idx][i];
                cv::Mat point_left = (cv::Mat_<double>(3, 1) << point_l.x, point_l.y, 1);
                double check_distance = 1000 * abs(line_left.at<double>(0) * point_l.x + line_left.at<double>(1) * point_l.y + line_left.at<double>(2)) / sqrt(pow(line_left.at<double>(0), 2) + pow(line_left.at<double>(1), 2));
                if(check_distance < distance_threshold){
                    match_evalution[i][j] = distance;
                }
            }else{
                match_evalution[i][j] = numeric_limits<double>::max();
            }
        }
    }

    // find the best match by Kuhn-Munkres algorithm
    cout << "marker_pixels[left_cam_idx].size(): " << marker_pixels[left_cam_idx].size() << " | marker_pixels[right_cam_idx].size(): " << marker_pixels[right_cam_idx].size() << endl;

    linear_sum_assignment(marker_pixels[left_cam_idx], marker_pixels[right_cam_idx], matched_marker_pixels_left, matched_marker_pixels_right, match_evalution, distance_threshold);
    // for(int i = 0; i < matched_marker_pixels_left.size(); i++){
    //     cout << "Matched marker pixel left: " << matched_marker_pixels_left[i] << " | Matched marker pixel right: " << matched_marker_pixels_right[i] << endl;
    // }

    return (matched_marker_pixels_left.size() > 0);
}

// Compute the epipolar line, in the right image, of a left-image marker.
//   marker_idx >= 0 : returns the 3x1 line coefficients (a, b, c) of
//                     a*x + b*y + c = 0 for marker_pixels[left][marker_idx].
//   marker_idx == -1: draws the epipolar lines of ALL left markers onto the
//                     right camera's debug image and returns a zero vector.
// The fundamental matrix is derived from the relative pose of the two image
// frames: E = [t]x * R, F = K_right^-T * E * K_left^-1.
cv::Mat ICPTargetNodeROS::epipolar_line_solve(int left_cam_idx, int right_cam_idx, int marker_idx){
    cv::Mat F, E;
    cv::Mat K_left = cameraMatrixs[left_cam_idx];
    cv::Mat K_right = cameraMatrixs[right_cam_idx];
    // Relative pose of the right image frame expressed in the left image frame.
    Eigen::Matrix4d T_left2right = ((T_base_to_cam[left_cam_idx] * T_cam_to_image).inverse()) * (T_base_to_cam[right_cam_idx] * T_cam_to_image);
    cv::Mat R = (cv::Mat_<double>(3, 3) << T_left2right(0, 0), T_left2right(0, 1), T_left2right(0, 2), T_left2right(1, 0), T_left2right(1, 1), T_left2right(1, 2), T_left2right(2, 0), T_left2right(2, 1), T_left2right(2, 2));
    cv::Mat t = (cv::Mat_<double>(3, 1) << T_left2right(0, 3), T_left2right(1, 3), T_left2right(2, 3));

    // Essential matrix from the skew-symmetric matrix of t times R.
    E = (cv::Mat_<double>(3, 3) << 0, -t.at<double>(2), t.at<double>(1), t.at<double>(2), 0, -t.at<double>(0), -t.at<double>(1), t.at<double>(0), 0) * R;
    F = K_right.inv().t() * E * K_left.inv();
    // cout << "Essential matrix: " << endl << E << endl;
    // cout << "Fundamental matrix: " << endl << F << endl;

    if(marker_idx == -1){
        // Draw-all mode: overlay one epipolar line per left marker.
        for(int i = 0; i < marker_pixels[left_cam_idx].size(); i++){
            cv::Point2f point_l = marker_pixels[left_cam_idx][i];
            cv::Mat point_left = (cv::Mat_<double>(3, 1) << point_l.x, point_l.y, 1);
            // Line in the right image: l' = F^T * p (as a column vector).
            cv::Mat line_right = (point_left.t() * F).t();

            //draw epipolar line
            // Intersect the line with the left/right image borders; the *1000
            // / /1000 factors convert between stored and pixel coordinates.
            cv::Point2f epipolar_point1, epipolar_point2;

            epipolar_point1.x = 0;
            epipolar_point1.y = (-line_right.at<double>(2) / line_right.at<double>(1) * 1000);
            epipolar_point2.x = Images[right_cam_idx].cols;
            epipolar_point2.y = (-(line_right.at<double>(2) + line_right.at<double>(0) * epipolar_point2.x / 1000) / line_right.at<double>(1) * 1000);
            cv::line(Images[right_cam_idx], epipolar_point1, epipolar_point2, cv::Scalar(0, 0, 255), 1);
        }

        return (cv::Mat_<double>(3, 1) << 0, 0, 0);
    }else{
        // Single-marker mode: return the line coefficients for the caller.
        cv::Point2f point_l = marker_pixels[left_cam_idx][marker_idx];
        cv::Mat point_left = (cv::Mat_<double>(3, 1) << point_l.x, point_l.y, 1);
        cv::Mat line_right = (point_left.t() * F).t();

        return line_right;
    }

}

bool ICPTargetNodeROS::RANSAC_Registration(vector<cv::Point3f> &source_point_vector, vector<cv::Point3f> &target_point_vector, vector<cv::Point3f> &target_point_vector_temp, vector<int> &ransac_inliers, double distance_threshold){
    // Match the source points against the three seed target points stored in
    // target_point_vector_temp[0..2] by comparing the pairwise distances of
    // candidate source triples against the seed triangle. On success,
    // target_point_vector_temp is rewritten to hold one target per source
    // point (unmatched slots stay (0,0,0)) and ransac_inliers[i] is 1 for
    // matched source indices, -1 otherwise.
    // Assumes target_point_vector_temp has at least 3 entries and
    // ransac_inliers has at least source_point_vector.size() entries
    // -- TODO(review): confirm with callers.

    // Remaining target candidates: every target point that is not one of the
    // three seeds. (Currently only consumed by matching code that has been
    // disabled; kept for when that path is re-enabled.)
    // BUG FIX: the original condition used `||`, which is true unless a point
    // equals ALL three seeds simultaneously, so the seeds were never filtered.
    // NOTE: cv::Point3f operator!= is exact float comparison; this only works
    // because the seeds are bitwise copies of entries in target_point_vector.
    vector<cv::Point3f> target_point_vector_temp_temp = {};
    for(size_t i = 0; i < target_point_vector.size(); i++){
        if(target_point_vector[i] != target_point_vector_temp[0] && target_point_vector[i] != target_point_vector_temp[1] && target_point_vector[i] != target_point_vector_temp[2]){
            target_point_vector_temp_temp.emplace_back(target_point_vector[i]);
        }
    }
    (void)target_point_vector_temp_temp; // silence unused-variable warning while the extended matching stays disabled

    // Initialize outputs: no matches yet.
    vector<cv::Point3f> matched_point_vector;
    for(size_t i = 0; i < source_point_vector.size(); i++){
        matched_point_vector.emplace_back(cv::Point3f(0.0f, 0.0f, 0.0f));
        ransac_inliers[i] = -1;
    }

    // Pairwise distances of the seed target triangle.
    double distance1 = distanceBetweenPoints(target_point_vector_temp[0], target_point_vector_temp[1]);
    double distance2 = distanceBetweenPoints(target_point_vector_temp[0], target_point_vector_temp[2]);
    double distance3 = distanceBetweenPoints(target_point_vector_temp[1], target_point_vector_temp[2]);

    // Exhaustive search over ordered triples (i, j, k) of distinct source
    // points whose pairwise distances match the seed triangle within
    // distance_threshold.
    // BUG FIX: the original wrote `j != i` / `k != i && k != j` inside the
    // for-loop *conditions*, which TERMINATES the loops at the first index
    // collision instead of skipping it — only triples with i > j > k were
    // ever tested, so matches in the other orderings were silently missed.
    std::vector<int> index = {-1, -1, -1};
    const int n = static_cast<int>(source_point_vector.size());
    for(int i = 0; i < n; i++){
        for(int j = 0; j < n; j++){
            if(j == i) continue;
            for(int k = 0; k < n; k++){
                if(k == i || k == j) continue;
                // The three chosen points must not be (nearly) collinear:
                // reject angles within 5 degrees of 0 or 180.
                Eigen::Vector3d vec1 = subtractPoints(source_point_vector[i], source_point_vector[j]);
                Eigen::Vector3d vec2 = subtractPoints(source_point_vector[i], source_point_vector[k]);
                double angle = vectorAngle(vec1, vec2, 1);
                if(!(abs(angle - 180) < 5 || abs(angle) < 5)){
                    double distance1_temp = distanceBetweenPoints(source_point_vector[i], source_point_vector[j]);
                    double distance2_temp = distanceBetweenPoints(source_point_vector[i], source_point_vector[k]);
                    double distance3_temp = distanceBetweenPoints(source_point_vector[j], source_point_vector[k]);
                    if(abs(distance1_temp - distance1) < distance_threshold && abs(distance2_temp - distance2) < distance_threshold && abs(distance3_temp - distance3) < distance_threshold){
                        // Keep the last matching triple found (original behavior).
                        index[0] = i;
                        index[1] = j;
                        index[2] = k;
                    }
                }
            }
        }
    }

    // Propagate the three seed correspondences on success.
    if(index[0] != -1 && index[1] != -1 && index[2] != -1){
        for(int i = 0; i < 3; i++){
            matched_point_vector[index[i]] = target_point_vector_temp[i];
            ransac_inliers[index[i]] = 1;
        }

        // (An earlier experiment that projected the remaining source points
        // onto the seed-triangle plane to match the leftover targets was
        // disabled here; only the three seed correspondences are returned.)

        target_point_vector_temp = matched_point_vector;

        return true;
    }else{
        return false;
    }
}


cv::Mat ICPTargetNodeROS::Get_3DTransform_Matrix(const std::vector<cv::Point3f>& srcPoints, const std::vector<cv::Point3f>&  dstPoints)
{
    // Rigid 3-D alignment (Kabsch, no scaling): computes the 4x4 homogeneous
    // transform [R|t] minimizing ||dst - (R*src + t)|| in the least-squares
    // sense, via SVD of the cross-covariance of the centered point sets.
    // Requires at least three correspondences of equal count; returns an
    // empty cv::Mat otherwise.
    const int pointCount = static_cast<int>(srcPoints.size());
    if (srcPoints.size() != dstPoints.size() || pointCount < 3)
    {
        return cv::Mat();
    }

    // Centroids of both point sets.
    cv::Point3d srcCentroid(0.0, 0.0, 0.0);
    cv::Point3d dstCentroid(0.0, 0.0, 0.0);
    for (int i = 0; i < pointCount; ++i)
    {
        srcCentroid.x += srcPoints[i].x;
        srcCentroid.y += srcPoints[i].y;
        srcCentroid.z += srcPoints[i].z;

        dstCentroid.x += dstPoints[i].x;
        dstCentroid.y += dstPoints[i].y;
        dstCentroid.z += dstPoints[i].z;
    }
    srcCentroid.x /= pointCount;
    srcCentroid.y /= pointCount;
    srcCentroid.z /= pointCount;
    dstCentroid.x /= pointCount;
    dstCentroid.y /= pointCount;
    dstCentroid.z /= pointCount;

    // Build the 3xN matrices of centered coordinates.
    cv::Mat centeredSrc(3, pointCount, CV_64FC1);
    cv::Mat centeredDst(3, pointCount, CV_64FC1);
    for (int i = 0; i < pointCount; ++i)
    {
        centeredSrc.at<double>(0, i) = srcPoints[i].x - srcCentroid.x;
        centeredSrc.at<double>(1, i) = srcPoints[i].y - srcCentroid.y;
        centeredSrc.at<double>(2, i) = srcPoints[i].z - srcCentroid.z;

        centeredDst.at<double>(0, i) = dstPoints[i].x - dstCentroid.x;
        centeredDst.at<double>(1, i) = dstPoints[i].y - dstCentroid.y;
        centeredDst.at<double>(2, i) = dstPoints[i].z - dstCentroid.z;
    }

    // Cross-covariance H = src_centered * dst_centered^T (3x3).
    cv::Mat covariance = centeredSrc * centeredDst.t();

    // cv::SVDecomp outputs H = U * diag(W) * Vt; note the last output is
    // already V TRANSPOSED.
    cv::Mat singularValues, leftU, rightVt;
    cv::SVDecomp(covariance, singularValues, leftU, rightVt);

    // Reflection guard: scale the last singular direction by det(U*Vt) so the
    // resulting R is a proper rotation (det(R) = +1), never a mirror.
    const double det = cv::determinant(leftU * rightVt);
    cv::Mat correction = (cv::Mat_<double>(3, 3) <<
        1, 0, 0,
        0, 1, 0,
        0, 0, det);

    // R = V * correction * U^T.
    cv::Mat rotation = rightVt.t() * correction * leftU.t();

    // Translation: t = dstCentroid - R * srcCentroid.
    cv::Mat srcCenterVec = (cv::Mat_<double>(3, 1) << srcCentroid.x, srcCentroid.y, srcCentroid.z);
    cv::Mat dstCenterVec = (cv::Mat_<double>(3, 1) << dstCentroid.x, dstCentroid.y, dstCentroid.z);
    cv::Mat translation = dstCenterVec - rotation * srcCenterVec;

    // Assemble the 4x4 homogeneous transform.
    cv::Mat transform = (cv::Mat_<double>(4, 4) <<
        rotation.at<double>(0, 0), rotation.at<double>(0, 1), rotation.at<double>(0, 2), translation.at<double>(0),
        rotation.at<double>(1, 0), rotation.at<double>(1, 1), rotation.at<double>(1, 2), translation.at<double>(1),
        rotation.at<double>(2, 0), rotation.at<double>(2, 1), rotation.at<double>(2, 2), translation.at<double>(2),
        0, 0, 0, 1
        );

    return transform;
}

void ICPTargetNodeROS::window_filter(Eigen::Vector3d &points, vector<Eigen::Vector3d> &window, int window_size){
    // Sliding-window moving average: append the new sample, dropping the
    // oldest entry first whenever the window is already full, then overwrite
    // `points` in place with the mean of all samples currently in the window.
    if(!(window.size() < window_size)){
        window.erase(window.begin()); // evict the oldest sample
    }
    window.emplace_back(points);

    // Average everything currently held in the window.
    Eigen::Vector3d mean = Eigen::Vector3d::Zero();
    for(const auto &sample : window){
        mean += sample;
    }
    mean /= window.size();

    points = mean;
}

int main(int argc, char **argv)
{
    // Entry point: register the node with the ROS master, initialize the ICP
    // cooperation manager, then hand control to the ROS event loop.
    ros::init(argc, argv, "icp_target_node_ros");
    ros::NodeHandle nh("~"); // private namespace so parameters resolve under the node name

    ICPTargetNodeROS icp_manager;
    icp_manager.init(nh);

    // All work happens in subscriber callbacks; spin() blocks until shutdown.
    // NOTE: removed an unused `ros::Rate rate(30)` — the node never called
    // rate.sleep(), and ros::spin() does not use a rate object.
    ros::spin();

    return 0;
}