/*
 * @Description:
 * @Author: sunm
 * @Github: https://github.com/sunmiaozju
 * @LastEditors: sunm
 * @Date: 2019-03-01 11:25:55
 * @LastEditTime: 2019-03-12 10:00:33
 */

#include "lidar_euclidean_cluster.h"


static cv::Mat invRt, invTt;
static bool init_matrix = false;



namespace LidarDetector {
// Constructor: builds the approximate-time synchronizer over the camera image,
// raw lidar cloud and darknet bounding-box topics (all as class members), then
// loads parameters and lookup tables.
LidarClusterDetector::LidarClusterDetector()
    : private_nh("~")
    , processing_now(false),image_sub(nh, "/camera/image_raw", 1),info_sub(nh, "/points_raw", 1),bboxes_sub(nh, "/darknet_ros/bounding_boxes",1),sync(MySyncPolicy(10),image_sub, info_sub,bboxes_sub)
{

    initROS();

    // Parse the comma-separated strings loaded in initROS() into the numeric
    // vectors consumed by segmentByDistance()/clusterCpu().
    splitString(str_range, dis_range);
    splitString(str_seg_distances, seg_distances);

    // Pre-generate a color table for cluster visualization
    // (presumably 255 colors — see generateColors' definition to confirm).
    generateColors(color_table, 255, 100);
}

// Nothing to release explicitly; all members clean up via their own destructors.
LidarClusterDetector::~LidarClusterDetector() = default;

/**
 * @description: Initialize ROS publishers, subscribers and parameters.
 */
void LidarClusterDetector::initROS()
{
    // Legacy, unsynchronized subscriptions kept for reference:
    /*sub_rawimage = nh.subscribe(
        "camera/image_raw", 10, &LidarClusterDetector::ImageCallback, this);
    sub_rawPointCloud = nh.subscribe(
        "points_raw", 10, &LidarClusterDetector::getPointCloud_cb, this);*/
    //darknet_ros_msg::BoundingBoxes::ConstPtr& msg_bboxs
//*****************************************************************************************************************************************
    /*message_filters::Subscriber<sensor_msgs::Image> image_sub(nh, "camera/image_raw", 1);
    message_filters::Subscriber<sensor_msgs::PointCloud2> PointsClouds_sub(nh, "points_raw", 1);
    message_filters::TimeSynchronizer<sensor_msgs::Image, sensor_msgs::PointCloud2> sync(image_sub, PointsClouds_sub, 10);
    sync.registerCallback(boost::bind(&LidarDetector::LidarClusterDetector::getPointCloud_cb,this, _1, _2));*/

//*****************************************************************************************************************************************

    //message_filters::Subscriber<sensor_msgs::Image> image_sub(nh, "camera/image_raw", 1);
    //message_filters::Subscriber<sensor_msgs::PointCloud2> info_sub(nh, "/points_raw", 1);
    //typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::PointCloud2> MySyncPolicy;
    //message_filters::Synchronizer<MySyncPolicy> sync(MySyncPolicy(10), image_sub, info_sub);
    // Hook the member synchronizer (image + cloud + yolo boxes) to the 3-way
    // fusion callback; the subscribers themselves are constructed as members
    // in the constructor's initializer list.
    sync.registerCallback(boost::bind(&LidarClusterDetector::datasfusionCallback,this,_1,_2,_3));

    // Publishers: ground cloud, obstacle cloud, projection debug image, boxes.
    floor_PointCloud = nh.advertise<sensor_msgs::PointCloud2>("floor_cloud", 10);
    no_floor_PointCloud = nh.advertise<sensor_msgs::PointCloud2>("no_floor_cloud", 10);
    project2ImagePublisher = nh.advertise<sensor_msgs::Image>("project2Imag",10);
    //project2ImagePublisher = nh.advertise<sensor_msgs::CompressedImage>("project2Imag",10);
    pub_bounding_boxs_ = nh.advertise<jsk_recognition_msgs::BoundingBoxArray>("/detected_bounding_boxs", 5);

    // Pipeline tuning parameters: clipping, downsampling and ground removal.
    private_nh.param<double>("nearDistance", nearDistance, 1.2);
    private_nh.param<double>("farDistance", farDistance, 15);
    private_nh.param<double>("downsampleLeafSize", leaf_size, 0.05);
    private_nh.param<double>("height_threshhold", height_threshhold, 0.5); // height threshold retuned for the big yellow barrels on the figure-8 track
    private_nh.param<double>("floor_max_height", floor_max_height, 0.3);
    private_nh.param<double>("floor_max_angle", floor_max_angle, 0.2);
    private_nh.param<double>("small_scale", small_scale, 0.5);
    private_nh.param<double>("large_scale", large_scale, 2.0);
    private_nh.param<double>("angle_threshold", angle_threshold, 0.5);
    private_nh.param<double>("radial_divide_angle", radial_divide_angle, 0.5);
    private_nh.param<double>("concentric_divide_distance", concentric_divide_distance, 0.1);
    private_nh.param<double>("min_local_height_threshold", min_local_height_threshold, 0.05);
    private_nh.param<double>("sensor_height", sensor_height, 0.18);
    private_nh.param<double>("local_threshold_slope", local_threshold_slope, 5.0);
    private_nh.param<double>("general_threshold_slope", general_threshold_slope, 3.0);
    private_nh.param<double>("left_right_dis_threshold", left_right_dis_threshold, 6.5);

    // Clustering parameters (per-distance-band tolerances and size limits).
    // NOTE(review): cluster_distance_ defaults to a single element but is
    // indexed for five bands in cluster_by_distance — confirm launch files
    // always supply five values.
    private_nh.param<std::vector<double>>("cluster_distance_", cluster_distance_, {0.1});
    private_nh.param<std::vector<double>>("seg_distance_", seg_distance_, {30.});
    private_nh.param<int>("MIN_CLUSTER_SIZE", MIN_CLUSTER_SIZE, 5);
    private_nh.param<int>("MAX_CLUSTER_SIZE", MAX_CLUSTER_SIZE, 40);

    // Comma-separated band edges / tolerances, parsed by the constructor.
    private_nh.param<std::string>("str_range", str_range, "15,30,45,60");
    private_nh.param<std::string>("str_seg_distances", str_seg_distances, "0.5,1.1,1.6,2.1,2.6");

    private_nh.param<double>("cluster_min_points", cluster_min_points, 10);
    private_nh.param<double>("cluster_max_points", cluster_max_points, 100);

    // Hard-coded calibration below; the commented attempts show an earlier
    // plan to load these from the parameter server:
    //private_nh.param<Eigen::Matrix4f>("RT", RT, [ -5.2615470635116957e-02, -2.5975094048441560e-04,
    //   9.9861481301816002e-01, -3.0947097117909478e-01,
     //  -9.9861452929939554e-01, -7.8373642531026633e-04,
      // -5.2615659545069582e-02, 1.2289111067155559e-01,
     //  7.9631777086769384e-04, -9.9999965914327449e-01,<<
      // -2.1815440229566718e-04, 5.2904412902046738e-01, 0., 0., 0., 1. ]);
    //private_nh.param<Eigen::Matrix3f>("camera_param", camera_param, [ 1.8115084435116496e+03, 0., 7.2603879492812200e+02, 0.,
      // 1.8115537049601367e+03, 5.6456028652283851e+02, 0., 0., 1. ]);
    // Lidar -> camera extrinsics (homogeneous 4x4) and camera intrinsics.
    // NOTE(review): CV_project duplicates these values in its own cv::Mat —
    // keep both copies in sync.
    RT << -5.5585914027477923e-02, -2.9293535892667866e-02,9.9802409535874181e-01, -9.0545436392073131e-01,
    -9.9836633587756829e-01, -1.1607762791972398e-02,-5.5945681060865393e-02, 6.1322111459847888e-02,
    1.3223673775793365e-02, -9.9950345101848970e-01,-2.8600451989454934e-02, 8.4909176344305137e-01,
    0., 0., 0., 1.;
    camera_param << 1807.555297851562, 0., 737.4499029093786,
    0., 1809.189575195312, 578.3378473122066,
    0., 0., 1. ;  // camera intrinsics used for projection

    // Expected camera resolution — presumably matches the driver; confirm.
    image_w = 1440;
    image_h = 1080;

    //init_matrix = false;

}

// 原始图片回调函数，转cv格式
/*void LidarClusterDetector::ImageCallback(const sensor_msgs::Image::ConstPtr& image_msg)
{

    cv_bridge::CvImagePtr cv_image = cv_bridge::toCvCopy(image_msg, "RGB8Packed");   //8888888888888888888888888888888888888888888888888888888888888888888888888888
    cv_raw_image = cv_image->image;

    // 图像去畸变
    // 使用相机内参和畸变系数可以图像去畸变
    //cv::undistort(image, current_frame, camera_instrinsics, distortion_coefficients);

    //static image_transport::ImageTransport it(nh);
    //static image_transport::Publisher pub_image = it.advertise("identified_image", 1);
    //static sensor_msgs::ImagePtr msg;
    //msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", current_frame).toImageMsg();
    //pub_image.publish(msg);

    //image_frame_id = image_msg->header.frame_id;
    //image_size.height = current_frame.rows;
    //image_size.width = current_frame.cols;
}
*/

/**
 * @description: Image callback — convert the ROS image to an OpenCV BGR
 *               matrix and remember its header for later republishing.
 */
void LidarClusterDetector::ImageCallback(const sensor_msgs::Image::ConstPtr& image_msg)
{
    const cv_bridge::CvImagePtr bridged = cv_bridge::toCvCopy(image_msg, "bgr8");
    cv_raw_image = bridged->image;
    img_header = image_msg->header;
}

void LidarClusterDetector::CV_project(std::vector<Detected_Obj>& global_obj_list,cv::Mat& raw_image)
{
    std::vector<cv::Point3f> point_data;
    std::vector<cv::Point2f> projectedPoints;

    for (size_t i = 0; i < global_obj_list.size(); i++)
    {
        point_data.push_back(cv::Point3f(global_obj_list[i].centroid_.x,global_obj_list[i].centroid_.y,global_obj_list[i].centroid_.z));
        ROS_INFO("this is x");
        ROS_INFO("%f",global_obj_list[i].centroid_.x);
        ROS_INFO("this is y");
        ROS_INFO("%f",global_obj_list[i].centroid_.y);
        ROS_INFO("this is z");
        ROS_INFO("%f",global_obj_list[i].centroid_.z);
    }
    if(point_data.size()==0)
    {
      std::cout<<"there is no cluster centrals in data_fusion!"<<std::endl;
    }
    else
    {
      //相机的内参矩阵
      cv::Mat distCoeffs(5, 1, cv::DataType<double>::type);   // Distortion vector
      distCoeffs.at<double>(0) = -0.1055567994013133;
      distCoeffs.at<double>(1) = 0.4845675774971356;
      distCoeffs.at<double>(2) = 0.0009905036821113906;
      distCoeffs.at<double>(3) = -0.0009068024758048277;
      distCoeffs.at<double>(4) = 0;

      cv::Mat cameraMatrix(3, 3, cv::DataType<double>::type);
      //float tempMatrix[3][3] = { { 3522.3, 0, 0 }, { 0, 3538.4, 0 }, { 1968.9,1375.4,1.0 } };
      float tempMatrix[3][3] = { { 1807.555297851562, 0, 737.4499029093786 }, { 0, 1809.189575195312, 578.3378473122066 }, { 0, 0, 1.0 } };

      //cv::Mat image;
      //cv::undistort(raw_image, image, cameraMatrix, distCoeffs); //去除畸变矫正


      for (int i = 0; i < 3; i++)
      {
          for (int j = 0; j < 3; j++)
          {
              cameraMatrix.at<double>(i, j) = tempMatrix[i][j];
          }
      }

      //ROS_INFO("%f",tempMatrix[1][2]);
      //标定出的外参矩阵
      cv::Mat rvec(3, 3, cv::DataType<double>::type);

      rvec.at<double>(0,0)=-5.5585914027477923e-02;
      rvec.at<double>(0,1)=-2.9293535892667866e-02;
      rvec.at<double>(0,2)=9.9802409535874181e-01;
      rvec.at<double>(1,0)=-9.9836633587756829e-01;
      rvec.at<double>(1,1)=-1.1607762791972398e-02;
      rvec.at<double>(1,2)=-5.5945681060865393e-02;
      rvec.at<double>(2,0)=1.3223673775793365e-02;
      rvec.at<double>(2,1)=-9.9950345101848970e-01;
      rvec.at<double>(2,2)=-2.8600451989454934e-02;

      cv::Mat tvec(3, 1, cv::DataType<double>::type);
      tvec.at<double>(0,0)=-9.0545436392073131e-01;
      tvec.at<double>(1,0)=6.1322111459847888e-02;
      tvec.at<double>(2,0)=8.4909176344305137e-01;


      cv::projectPoints(point_data, rvec, tvec, cameraMatrix, distCoeffs, projectedPoints);


      for (size_t i = 0; i<projectedPoints.size(); i++)
      {
          cv::Point2f p = projectedPoints[i];

          if(1!=0)
          //if (p.y<1080 && p.x<1440)
          {

            //cv::circle(raw_image, cv::Point2f(p.x,p.y), 30, (0,0,255), -1);
            ROS_INFO("this is picture x");
            ROS_INFO("%f",p.x);
            ROS_INFO("this is picture y");
            ROS_INFO("%f",p.y);
           }
      }
      sensor_msgs::ImagePtr msg = cv_bridge::CvImage(img_header, "bgr8", raw_image).toImageMsg();
      project2ImagePublisher.publish(*msg);

    }

}


/**
 * @description: Project cluster centroids toward the image using the RT and
 *               camera_param members, draw a red marker per centroid and
 *               republish the annotated frame.
 * @param global_obj_list clusters whose centroid_ is in the lidar frame
 * @param raw_image       camera frame that markers are drawn onto
 */
void LidarClusterDetector::project2image(std::vector<Detected_Obj>& global_obj_list, cv::Mat& raw_image)
{
    // lidar->image = camera_param * (top 3 rows of the lidar->camera RT)
    Eigen::Matrix<float, 3, 4> T_lidar2cam_top3_local, T_lidar2image_local;
    T_lidar2cam_top3_local = RT.topRows(3);
    T_lidar2image_local = camera_param * T_lidar2cam_top3_local;

    Eigen::Vector4f raw_point;
    Eigen::Vector3f trans_point;

    for (size_t i = 0; i < global_obj_list.size(); i++)
    {
        raw_point(0, 0) = global_obj_list[i].centroid_.x;
        raw_point(1, 0) = global_obj_list[i].centroid_.y;
        raw_point(2, 0) = global_obj_list[i].centroid_.z;
        raw_point(3, 0) = 1;

        // Full projection (intrinsics applied) — pixel coordinates.
        trans_point = T_lidar2image_local * raw_point;
        x_cluster_central_image = static_cast<int>(trans_point(0, 0) / trans_point(2, 0));
        y_cluster_central_image = static_cast<int>(trans_point(1, 0) / trans_point(2, 0));

        // Extrinsics-only transform (camera frame — NO intrinsics applied).
        // NOTE(review): the circle below is drawn from tf_point, i.e. camera
        // coordinates rather than pixels — presumably a debugging experiment;
        // confirm whether x/y_cluster_central_image were intended instead.
        Eigen::Vector4f tf_point;
        tf_point = RT * raw_point;
        //if(x_cluster_central_image<0 || x_cluster_central_image>(raw_image.cols-1) || y_cluster_central_image<0 || y_cluster_central_image>(raw_image.rows-1))continue;

        // Debug traces of image size, calibration and centroid coordinates.
        ROS_INFO("%d",raw_image.cols-1);
        ROS_INFO("%d",raw_image.rows-1);
        ROS_INFO("%f",RT(0,2));
        ROS_INFO("this is x");
        ROS_INFO("%f",global_obj_list[i].centroid_.x);
        ROS_INFO("this is y");
        ROS_INFO("%f",global_obj_list[i].centroid_.y);
        ROS_INFO("this is z");
        ROS_INFO("%f",global_obj_list[i].centroid_.z);
        /* Planned yolo bounding-box fusion (blue = class 0, red otherwise):
        for (size_t b = 0; b < bboxes.bounding_boxes.size(); b++)
        {
            if (x_cluster_central_image > bboxes.bounding_boxes[b].xmin && x_cluster_central_image < bboxes.bounding_boxes[b].xmax
            && y_cluster_central_image > bboxes.bounding_boxes[b].ymin && y_cluster_central_image < bboxes.bounding_boxes[b].ymax)
            {
                if(bboxes.bounding_boxes[b].id == 0) // 0:blue 1 :orange
                {
                    cv::circle(raw_image, cv::Point2f(x_cluster_central_image, y_cluster_central_image), 30, cv::Scalar(255,0,0), -1);
                }
                else{
                    cv::circle(raw_image, cv::Point2f(x_cluster_central_image, y_cluster_central_image), 30, cv::Scalar(0,0,255), -1);
                }
            }
        }*/
        ROS_INFO("this is picture x");
        ROS_INFO("%d",static_cast<int>(tf_point(0,0)/tf_point(2,0)));
        ROS_INFO("this is picture y");
        ROS_INFO("%d",static_cast<int>(tf_point(1,0)/tf_point(2,0)));
        // BUGFIX: the colour was written as the parenthesised expression
        // (0,0,255); the comma operator collapses that to cv::Scalar(255),
        // i.e. blue, not the intended red. Use an explicit cv::Scalar.
        cv::circle(raw_image, cv::Point2f(static_cast<int>(tf_point(0,0)/tf_point(2,0)),static_cast<int>(tf_point(1,0)/tf_point(2,0))), 30, cv::Scalar(0, 0, 255), -1);  // radius 30, BGR red, filled
    }
    sensor_msgs::ImagePtr msg = cv_bridge::CvImage(img_header, "bgr8", raw_image).toImageMsg();
    project2ImagePublisher.publish(*msg);
}



/**
 * @description: Callback for the raw point-cloud topic.
 */
/**
 * @description: Raw point-cloud callback: clip -> downsample -> ray-based
 *               ground removal -> distance-banded euclidean clustering, then
 *               publish the ground / obstacle clouds and cluster boxes.
 */
void LidarClusterDetector::getPointCloud_cb(const sensor_msgs::PointCloud2ConstPtr& msg_rawPointCloud)
{
    // NOTE(review): processing_now is a plain bool used as a re-entrancy
    // guard; this is only safe with a single-threaded spinner — confirm.
    if (!processing_now) {
        processing_now = true;
        start_time = std::chrono::system_clock::now();

        msg_header = msg_rawPointCloud->header;

        // Only the clouds the pipeline actually uses are allocated (the
        // removed_floor / only_floor / don clouds were never touched).
        pcl::PointCloud<pcl::PointXYZ>::Ptr raw_sensor_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr clipped_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr downsample_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr ray_no_floor_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr ray_only_floor_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);

        pcl::fromROSMsg(*msg_rawPointCloud, *raw_sensor_cloud_ptr);

        // 1) Clip by height / near / far / lateral distance.
        clipCloud(raw_sensor_cloud_ptr, clipped_cloud_ptr, height_threshhold, nearDistance, farDistance, left_right_dis_threshold);

        // 2) Voxel downsample.
        downsampleCloud(clipped_cloud_ptr, downsample_cloud_ptr, leaf_size);

        // 3) Ray-based ground segmentation.
        removeFloorRayFiltered(downsample_cloud_ptr, ray_only_floor_cloud_ptr, ray_no_floor_cloud_ptr, sensor_height,
            local_threshold_slope, general_threshold_slope);

        // 4) Distance-banded euclidean clustering on the obstacle cloud.
        std::vector<Detected_Obj> global_obj_list;
        cluster_by_distance(ray_no_floor_cloud_ptr, global_obj_list);

        // Collect one jsk BoundingBox per cluster.
        jsk_recognition_msgs::BoundingBoxArray bbox_array;
        for (size_t i = 0; i < global_obj_list.size(); i++)
        {
            bbox_array.boxes.push_back(global_obj_list[i].bounding_box_);
        }
        bbox_array.header = msg_header;

        pubPointCloud(floor_PointCloud, ray_only_floor_cloud_ptr);
        pubPointCloud(no_floor_PointCloud, ray_no_floor_cloud_ptr);
        pub_bounding_boxs_.publish(bbox_array);

        //project2image(global_obj_list, cv_raw_image, RT, camera_param,bboxes);
        processing_now = false;
    }
}
                                                               //sensor_msgs::Image   CompressedImage
//*************************************************************Callback for the time-synchronized subscribed topics*************************************************************
/**
 * @description: Time-synchronized fusion callback (camera image + raw lidar
 *               cloud + darknet boxes). Runs the lidar pipeline (clip ->
 *               downsample -> ground removal -> clustering), publishes the
 *               segmented clouds and boxes, then fuses the clusters with the
 *               image and the yolo bounding boxes.
 */
void LidarClusterDetector::datasfusionCallback(const sensor_msgs::Image::ConstPtr& image_msg,const sensor_msgs::PointCloud2ConstPtr& msg_rawPointCloud,const darknet_ros_msg::BoundingBoxesConstPtr& msg_bboxs)
{
    // Debug traces: message stamps vs. wall clock, to watch sync latency.
    ROS_INFO("THERE IS datasfusion");
    ROS_INFO("point pose: x: %d :::::: %d",msg_rawPointCloud->header.stamp.sec,msg_rawPointCloud->header.stamp.nsec);
    ROS_INFO("Image pose: x: %d :::::: %d",image_msg->header.stamp.sec,image_msg->header.stamp.nsec);
    ROS_INFO("Image pose: x: %f ::::::",ros::Time::now().toSec());

    // Convert the synchronized camera frame to an OpenCV BGR image.
    cv_bridge::CvImagePtr cv_image_ptr = cv_bridge::toCvCopy(image_msg, "bgr8");
    cv_raw_image = cv_image_ptr->image;

    // NOTE(review): processing_now is a plain bool used as a re-entrancy
    // guard; this is only safe with a single-threaded spinner — confirm.
    if (!processing_now) {
        processing_now = true;
        start_time = std::chrono::system_clock::now();

        msg_header = msg_rawPointCloud->header;

        // Only the clouds the pipeline actually uses are allocated (the
        // removed_floor / only_floor / don clouds were never touched).
        pcl::PointCloud<pcl::PointXYZ>::Ptr raw_sensor_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr clipped_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr downsample_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr ray_no_floor_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr ray_only_floor_cloud_ptr(
            new pcl::PointCloud<pcl::PointXYZ>);

        pcl::fromROSMsg(*msg_rawPointCloud, *raw_sensor_cloud_ptr);

        // 1) Clip by height / near / far / lateral distance.
        clipCloud(raw_sensor_cloud_ptr, clipped_cloud_ptr, height_threshhold, nearDistance, farDistance, left_right_dis_threshold);

        // 2) Voxel downsample.
        downsampleCloud(clipped_cloud_ptr, downsample_cloud_ptr, leaf_size);

        // 3) Ray-based ground segmentation.
        removeFloorRayFiltered(downsample_cloud_ptr, ray_only_floor_cloud_ptr, ray_no_floor_cloud_ptr, sensor_height,
            local_threshold_slope, general_threshold_slope);

        // 4) Distance-banded euclidean clustering on the obstacle cloud.
        std::vector<Detected_Obj> global_obj_list;
        cluster_by_distance(ray_no_floor_cloud_ptr, global_obj_list);

        // Collect one jsk BoundingBox per cluster.
        jsk_recognition_msgs::BoundingBoxArray bbox_array;
        for (size_t i = 0; i < global_obj_list.size(); i++)
        {
            bbox_array.boxes.push_back(global_obj_list[i].bounding_box_);
        }
        bbox_array.header = msg_header;

        pubPointCloud(floor_PointCloud, ray_only_floor_cloud_ptr);
        pubPointCloud(no_floor_PointCloud, ray_no_floor_cloud_ptr);
        pub_bounding_boxs_.publish(bbox_array);

        // 5) Fuse clusters with the image and yolo boxes.
        //CV_project(global_obj_list,cv_raw_image);
        //project2image(global_obj_list, cv_raw_image);
        pointcloud2_to_image(global_obj_list, cv_raw_image,msg_bboxs);

        processing_now = false;
    }
}

//******************************cluster_centrals_project2image*****************************************//
/*void LidarClusterDetector::f_project2image(std::vector<Detected_Obj>& global_obj_list, cv_bridge::CvImagePtr raw_image_ptr, Eigen::Matrix4f RT, Eigen::Matrix3f camera_param)
{

    Eigen::Matrix<float, 3, 4> T_lidar2cam_top3_local, T_lidar2image_local;//lida2image=T_lidar2cam*(T_cam02cam2)*T_cam2image
    T_lidar2cam_top3_local = RT.topRows(3);
    T_lidar2image_local = camera_param * T_lidar2cam_top3_local;

    Eigen::Vector4f raw_point;
    Eigen::Vector3f trans_point;

    for (int i = 0; i < global_obj_list.size(); i++)
    {
        raw_point(0, 0) = global_obj_list[i].centroid_.x;
        raw_point(1, 0) = global_obj_list[i].centroid_.y;
        raw_point(2, 0) = global_obj_list[i].centroid_.z;
        raw_point(3, 0) = 1;
        trans_point = T_lidar2image_local * raw_point;
        x_cluster_central_image = (int)(trans_point(0, 0) / trans_point(2, 0));
        y_cluster_central_image = (int)(trans_point(1, 0) / trans_point(2, 0));
        if(x_cluster_central_image<0 || x_cluster_central_image>(raw_image_ptr->image.cols-1) || y_cluster_central_image<0 || y_cluster_central_image>(raw_image_ptr->image.rows-1))continue;


        //yolo bboxes comprais

        cv::circle(raw_image_ptr->image, cv::Point2f(x_cluster_central_image, y_cluster_central_image), 30, (255,0,0), -1);  //radiu:30;blue,green,red
        //cv::imshow("view",raw_image);
        //cv::destroyWindow("view");

        //static sensor_msgs::ImagePtr msg;
        //msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", raw_image).toImageMsg();

        project2ImagePublisher.publish(raw_image_ptr->toImageMsg());

    }
}
*/


// ****************************adamu3********Cluster**********adamu3******************************************


//******************************Cluster the cloud and compute obstacle centroids and bounding boxes*****************************************//


/**
 * @description: Run euclidean clustering on one distance band of the cloud
 *               (clustered in the XY plane only) and append one Detected_Obj
 *               per cluster: centroid + axis-aligned bounding box.
 * @param in_pc                   cloud of this distance band
 * @param in_max_cluster_distance euclidean clustering tolerance for the band
 * @param obj_list                output list the detections are appended to
 */
void LidarClusterDetector::cluster_segment(pcl::PointCloud<pcl::PointXYZ>::Ptr in_pc,
                                    double in_max_cluster_distance, std::vector<Detected_Obj> &obj_list)   //, double arr[][2]
{
    // Cluster in 2D: copy the input and flatten every point onto z = 0.
    pcl::PointCloud<pcl::PointXYZ>::Ptr flat_cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::copyPointCloud(*in_pc, *flat_cloud);
    for (size_t k = 0; k < flat_cloud->points.size(); k++)
        flat_cloud->points[k].z = 0;

    // KdTree over the flattened cloud; only seeded when it is non-empty.
    pcl::search::KdTree<pcl::PointXYZ>::Ptr kd_tree(new pcl::search::KdTree<pcl::PointXYZ>);
    if (flat_cloud->points.size() > 0)
        kd_tree->setInputCloud(flat_cloud);

    std::vector<pcl::PointIndices> cluster_indices;

    pcl::EuclideanClusterExtraction<pcl::PointXYZ> extractor;
    extractor.setInputCloud(flat_cloud);
    extractor.setClusterTolerance(in_max_cluster_distance);
    extractor.setMinClusterSize(MIN_CLUSTER_SIZE);
    extractor.setMaxClusterSize(MAX_CLUSTER_SIZE);
    extractor.setSearchMethod(kd_tree);
    extractor.extract(cluster_indices);

    // Build one detection per extracted cluster.
    for (size_t c = 0; c < cluster_indices.size(); c++)
    {
        Detected_Obj obj_info;

        // Running extents for the axis-aligned box.
        float lo_x = std::numeric_limits<float>::max();
        float lo_y = std::numeric_limits<float>::max();
        float lo_z = std::numeric_limits<float>::max();
        float hi_x = -std::numeric_limits<float>::max();
        float hi_y = -std::numeric_limits<float>::max();
        float hi_z = -std::numeric_limits<float>::max();

        for (auto idx = cluster_indices[c].indices.begin(); idx != cluster_indices[c].indices.end(); ++idx)
        {
            // Accumulate the full-3D point (from the unflattened input cloud)
            // into the centroid sum and fold it into the extents.
            const pcl::PointXYZ& pt = in_pc->points[*idx];

            obj_info.centroid_.x += pt.x;
            obj_info.centroid_.y += pt.y;
            obj_info.centroid_.z += pt.z;

            lo_x = (pt.x < lo_x) ? pt.x : lo_x;
            lo_y = (pt.y < lo_y) ? pt.y : lo_y;
            lo_z = (pt.z < lo_z) ? pt.z : lo_z;
            hi_x = (pt.x > hi_x) ? pt.x : hi_x;
            hi_y = (pt.y > hi_y) ? pt.y : hi_y;
            hi_z = (pt.z > hi_z) ? pt.z : hi_z;
        }

        obj_info.min_point_.x = lo_x;
        obj_info.min_point_.y = lo_y;
        obj_info.min_point_.z = lo_z;
        obj_info.max_point_.x = hi_x;
        obj_info.max_point_.y = hi_y;
        obj_info.max_point_.z = hi_z;

        // Average the accumulated sums into the centroid.
        const size_t n_points = cluster_indices[c].indices.size();
        if (n_points > 0)
        {
            obj_info.centroid_.x /= n_points;
            obj_info.centroid_.y /= n_points;
            obj_info.centroid_.z /= n_points;
        }

        // Box dimensions and center from the min/max corners.
        double span_x = obj_info.max_point_.x - obj_info.min_point_.x;
        double span_y = obj_info.max_point_.y - obj_info.min_point_.y;
        double span_z = obj_info.max_point_.z - obj_info.min_point_.z;

        obj_info.bounding_box_.header = msg_header;

        obj_info.bounding_box_.pose.position.x = obj_info.min_point_.x + span_x / 2;
        obj_info.bounding_box_.pose.position.y = obj_info.min_point_.y + span_y / 2;
        obj_info.bounding_box_.pose.position.z = obj_info.min_point_.z + span_z / 2;

        obj_info.bounding_box_.dimensions.x = (span_x < 0) ? -1 * span_x : span_x;
        obj_info.bounding_box_.dimensions.y = (span_y < 0) ? -1 * span_y : span_y;
        obj_info.bounding_box_.dimensions.z = (span_z < 0) ? -1 * span_z : span_z;

        obj_list.push_back(obj_info);
    }
}

/**
 * @description: Split the cloud into distance bands and cluster each band
 *               with its own tolerance, so that farther (sparser) points are
 *               still grouped correctly.
 * @param in_pc    obstacle cloud (ground already removed)
 * @param obj_list output: one Detected_Obj per cluster across all bands
 */
void LidarClusterDetector::cluster_by_distance(pcl::PointCloud<pcl::PointXYZ>::Ptr in_pc, std::vector<Detected_Obj> &obj_list)
{
    //cluster the pointcloud according to the distance of the points using different thresholds (not only one for the entire pc)
    //in this way, the points farther in the pc will also be clustered

    //0 => 0-15m d=0.5
    //1 => 15-30 d=1
    //2 => 30-45 d=1.6
    //3 => 45-60 d=2.1
    //4 => >60   d=2.6

    std::vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> segment_pc_array(5);

    for (size_t i = 0; i < segment_pc_array.size(); i++)
    {
        pcl::PointCloud<pcl::PointXYZ>::Ptr tmp(new pcl::PointCloud<pcl::PointXYZ>);
        segment_pc_array[i] = tmp;
    }

    for (size_t i = 0; i < in_pc->points.size(); i++)
    {
        pcl::PointXYZ current_point;
        current_point.x = in_pc->points[i].x;
        current_point.y = in_pc->points[i].y;
        current_point.z = in_pc->points[i].z;

        float origin_distance = sqrt(pow(current_point.x, 2) + pow(current_point.y, 2));

        // Ignore points farther than 120 m.
        if (origin_distance >= 120)
        {
            continue;
        }

        // Only the first band is currently in use; the banded dispatch below
        // was disabled during tuning.
        if (origin_distance < seg_distance_[0])
        {
            segment_pc_array[0]->points.push_back(current_point);
        }
        // else if (origin_distance < seg_distance_[1])
        // {
        //     segment_pc_array[1]->points.push_back(current_point);
        // }
        // else if (origin_distance < seg_distance_[2])
        // {
        //     segment_pc_array[2]->points.push_back(current_point);
        // }
        // else if (origin_distance < seg_distance_[3])
        // {
        //     segment_pc_array[3]->points.push_back(current_point);
        // }
        // else
        // {
        //     segment_pc_array[4]->points.push_back(current_point);
        // }
    }

    // BUGFIX: cluster_distance_ defaults to a single element ({0.1}) while
    // there are five bands; indexing cluster_distance_[i] for i > 0 read out
    // of bounds. Clamp to the last configured tolerance instead.
    if (cluster_distance_.empty())
    {
        return;
    }
    for (size_t i = 0; i < segment_pc_array.size(); i++)
    {
        const size_t tol_idx = (i < cluster_distance_.size()) ? i : cluster_distance_.size() - 1;
        cluster_segment(segment_pc_array[i], cluster_distance_[tol_idx], obj_list);
    }
}


// ****************************adamu3******Cluster************adamu3******************************************




/**
 * @description: Distance-based point-cloud clustering.
 */
/**
 * @description: Distance-banded clustering entry point for the Cluster-based
 *               pipeline: split the cloud into five radial bands and run
 *               clusterCpu on each band with its own tolerance.
 * @param in_cloud_ptr input cloud to segment and cluster
 */
void LidarClusterDetector::segmentByDistance(const pcl::PointCloud<pcl::PointXYZ>::Ptr in_cloud_ptr)
{
    // Larger clustering tolerance for farther, sparser points:
    // 0 => 0-15m d=0.5 | 1 => 15-30 d=1 | 2 => 30-45 d=1.6
    // 3 => 45-60 d=2.1 | 4 => >60 d=2.6
    std::vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> cloud_segments_array(5);
    for (size_t i = 0; i < cloud_segments_array.size(); i++) {
        pcl::PointCloud<pcl::PointXYZ>::Ptr tmp(new pcl::PointCloud<pcl::PointXYZ>);
        cloud_segments_array[i] = tmp;
    }

    // Defensive: the band edges / tolerances come from the user-configured
    // strings str_range / str_seg_distances; bail out rather than index past
    // the end of the parsed vectors (previously undefined behavior).
    if (dis_range.size() < 4 || seg_distances.size() < cloud_segments_array.size()) {
        ROS_WARN("segmentByDistance: str_range/str_seg_distances provide too few values");
        return;
    }

    for (size_t i = 0; i < in_cloud_ptr->points.size(); i++) {
        pcl::PointXYZ p;
        p.x = in_cloud_ptr->points[i].x;
        p.y = in_cloud_ptr->points[i].y;
        p.z = in_cloud_ptr->points[i].z;

        // Band selection by planar distance from the sensor origin.
        float origin_dis = sqrt(pow(p.x, 2) + pow(p.y, 2));

        if (origin_dis < dis_range[0]) {
            cloud_segments_array[0]->points.push_back(p);
        } else if (origin_dis < dis_range[1]) {
            cloud_segments_array[1]->points.push_back(p);
        } else if (origin_dis < dis_range[2]) {
            cloud_segments_array[2]->points.push_back(p);
        } else if (origin_dis < dis_range[3]) {
            cloud_segments_array[3]->points.push_back(p);
        } else {
            cloud_segments_array[4]->points.push_back(p);
        }
    }

    std::vector<ClusterPtr> clusters;
    for (size_t i = 0; i < cloud_segments_array.size(); i++) {
        clusterCpu(cloud_segments_array[i], clusters, cluster_centroids, seg_distances[i]);
    }
    // (Removed: a pcl::ExtractIndices was constructed and fed the input cloud
    // here but never used for any extraction — dead code.)
}

/**
 * @description: Post-process the clustering result and store per-cluster information in Cluster objects.
 */
/**
 * @description: Post-process one distance band: flatten to the XY plane, run
 *               euclidean clustering, and wrap each cluster into a Cluster
 *               object plus its center point.
 * @param in_cloud          input cloud of this distance band
 * @param clusters          output: one ClusterPtr per extracted cluster
 * @param cluster_centroids output: each cluster's central point
 * @param max_cluster_dis   euclidean clustering tolerance for this band
 */
void LidarClusterDetector::clusterCpu(const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_cloud,
    std::vector<ClusterPtr>& clusters, std::vector<pcl::PointXYZ>& cluster_centroids,const double& max_cluster_dis)
{
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_2d(new pcl::PointCloud<pcl::PointXYZ>);

    pcl::copyPointCloud(*in_cloud, *cloud_2d);

    // Flatten to z = 0 so clustering happens in the XY plane only.
    for (size_t i = 0; i < cloud_2d->points.size(); i++) {
        cloud_2d->points[i].z = 0;
    }

    // NOTE(review): unlike cluster_segment(), this seeds the tree even when
    // the band cloud is empty — confirm PCL tolerates the empty input here.
    tree->setInputCloud(cloud_2d);
    std::vector<pcl::PointIndices> cluster_indices;

    pcl::EuclideanClusterExtraction<pcl::PointXYZ> euc;
    // setClusterTolerance(). If you take a very small value, it can happen that
    // an actual object can be seen as multiple clusters. On the other hand, if
    // you set the value too high, it could happen, that multiple objects are
    // seen as one cluster. So our recommendation is to just test and try out
    // which value suits your dataset.
    euc.setClusterTolerance(max_cluster_dis);
    euc.setMinClusterSize(cluster_min_points);
    euc.setMaxClusterSize(cluster_max_points);
    euc.setSearchMethod(tree);
    euc.setInputCloud(cloud_2d);
    euc.extract(cluster_indices);

    for (size_t j = 0; j < cluster_indices.size(); j++) {
        // NOTE(review): if ClusterPtr is a smart-pointer typedef, one_cluster
        // is default-constructed (null) and the '->' below dereferences a null
        // pointer — likely crash. It should be allocated before use; confirm
        // against ClusterPtr's definition in the header.
        ClusterPtr one_cluster;
        one_cluster->setCloud(in_cloud, color_table, cluster_indices[j].indices, j);
        clusters.push_back(one_cluster);
        cluster_centroids.push_back(one_cluster->central_point);
    }
}

/**
 * @description: Ground segmentation based on ray-ground filtering.
 */
void LidarClusterDetector::removeFloorRayFiltered(const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_cloud,
    pcl::PointCloud<pcl::PointXYZ>::Ptr& out_only_ground_cloud,
    pcl::PointCloud<pcl::PointXYZ>::Ptr& out_no_ground_cloud,
    const double& sensor_height, const double& local_max_slope, const double& general_max_slope)
{
    pcl::PointIndices only_ground_indices;
    out_only_ground_cloud->points.clear();
    out_no_ground_cloud->points.clear();

    std::vector<PointCloudXYZRT> radial_divided_cloud;

    convertXYZ2XYZRT(in_cloud, radial_divided_cloud);

#pragma omp for
    for (size_t i = 0; i < radial_divided_cloud.size(); i++) {
        float prev_radius = 0.0;
        float prev_height = -sensor_height;
        bool prev_ground = false;
        bool current_ground = false;

        for (size_t j = 0; j < radial_divided_cloud[i].size(); j++) {
            float local_twoPoints_dis = radial_divided_cloud[i][j].radius - prev_radius;
            float local_height_threshold = tan(local_max_slope * M_PI / 180.) * local_twoPoints_dis;
            float general_height_threshold = tan(general_max_slope * M_PI / 180.) * radial_divided_cloud[i][j].radius;
            float current_height = radial_divided_cloud[i][j].point.z;

            if (radial_divided_cloud[i][j].radius > concentric_divide_distance && local_height_threshold < min_local_height_threshold) {
                local_height_threshold = min_local_height_threshold;
            }

            if (current_height <= (prev_height + local_height_threshold) && current_height >= (prev_height - local_height_threshold)) {
                if (!prev_ground) {
                    if (current_height <= (-sensor_height + general_height_threshold) && current_height >= (-sensor_height - general_height_threshold)) {
                        current_ground = true;
                    } else {
                        current_ground = false;
                    }
                } else {
                    current_ground = true;
                }
            } else {
                current_ground = false;
            }

            if (current_ground) {
                only_ground_indices.indices.push_back(radial_divided_cloud[i][j].original_index);
                prev_ground = true;
            } else {
                prev_ground = false;
            }
            prev_radius = radial_divided_cloud[i][j].radius;
            prev_height = radial_divided_cloud[i][j].point.z;
        }
    }

    pcl::ExtractIndices<pcl::PointXYZ> extractor;
    extractor.setInputCloud(in_cloud);
    extractor.setIndices(boost::make_shared<pcl::PointIndices>(only_ground_indices));

    extractor.setNegative(false); //true removes the indices, false leaves only the indices
    extractor.filter(*out_only_ground_cloud);

    extractor.setNegative(true); //true removes the indices, false leaves only the indices
    extractor.filter(*out_no_ground_cloud);
}

/**
 * @description: Convert the raw cloud into a polar/radial layout: each point
 * is annotated with (radius, theta) and bucketed into one of
 * ceil(360 / radial_divide_angle) angular slices; each slice is finally
 * sorted by increasing radius so callers can walk rays outward.
 * @param in_cloud                  input cloud
 * @param out_radial_divided_cloud  output: one radius-sorted bucket per slice
 */
void LidarClusterDetector::convertXYZ2XYZRT(const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_cloud,
    std::vector<PointCloudXYZRT>& out_radial_divided_cloud)
{
    out_radial_divided_cloud.clear();
    double radial_divide_num = ceil(360
        / radial_divide_angle);
    out_radial_divided_cloud.resize(radial_divide_num);

    for (size_t i = 0; i < in_cloud->points.size(); i++) {
        PointXYZRT p;
        float radius = (float)sqrt(in_cloud->points[i].x * in_cloud->points[i].x + in_cloud->points[i].y * in_cloud->points[i].y);
        float thera = (float)atan2(in_cloud->points[i].y, in_cloud->points[i].x) * 180 / M_PI;

        // normalize the angle to [0, 360)
        if (thera < 0)
            thera += 360;

        size_t radial_div = (size_t)floor(thera / radial_divide_angle);
        size_t concentric_div = (size_t)floor(radius / concentric_divide_distance);

        // FIX: with float rounding, a tiny negative atan2 result plus 360 can
        // come out as exactly 360.0f, which would index one slot past the end
        // of the bucket vector; wrap such values back into range.
        if (radial_div >= out_radial_divided_cloud.size()) {
            radial_div = 0;
        }

        p.point = in_cloud->points[i];
        p.radius = radius;
        p.theta = thera;
        p.radial_div = radial_div;
        p.concentric_div = concentric_div;
        p.original_index = i;

        out_radial_divided_cloud[radial_div].push_back(p);
    }

    // NOTE(review): the bare "#pragma omp for" that used to precede this loop
    // is a no-op outside an "omp parallel" region and was removed.
    for (size_t j = 0; j < out_radial_divided_cloud.size(); j++) {
        std::sort(out_radial_divided_cloud[j].begin(), out_radial_divided_cloud[j].end(),
            [](const PointXYZRT& a, const PointXYZRT& b) { return a.radius < b.radius; });
    }
}

/**
 * @description: Unstructured cloud segmentation via Difference of Normals
 * (DoN): normals are estimated at a small and a large support radius, and
 * points whose normals barely differ between the two scales (locally
 * plane-like points) are filtered out.
 * Reference: http://pointclouds.org/documentation/tutorials/don_segmentation.php
 * @param in_cloud  input cloud
 * @param out_cloud output: points whose DoN magnitude exceeds angle_threshold
 */
void LidarClusterDetector::differenceOfNormalsSegmentation(const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_cloud,
    pcl::PointCloud<pcl::PointXYZ>::Ptr& out_cloud)
{
    pcl::search::Search<pcl::PointXYZ>::Ptr tree;
    // Organized data (depth images) would use an OrganizedNeighbor search;
    // lidar clouds are unorganized, so a KdTree is used instead.
    tree.reset(new pcl::search::KdTree<pcl::PointXYZ>(false));
    tree->setInputCloud(in_cloud);

    // (renamed from the original misspelling "normal_eatimation")
    pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::PointNormal> normal_estimation;
    // pcl::gpu::NormalEstimation<pcl::PointXYZ, pcl::PointNormal> normal_estimation;

    normal_estimation.setInputCloud(in_cloud);
    normal_estimation.setSearchMethod(tree);

    // Setting the viewpoint is very important so that normals estimated at
    // different scales share a consistent orientation.
    normal_estimation.setViewPoint(std::numeric_limits<float>::max(),
        std::numeric_limits<float>::max(), std::numeric_limits<float>::max());

    pcl::PointCloud<pcl::PointNormal>::Ptr normal_small_scale(new pcl::PointCloud<pcl::PointNormal>);
    pcl::PointCloud<pcl::PointNormal>::Ptr normal_large_scale(new pcl::PointCloud<pcl::PointNormal>);
    // calculate normals with the small support radius
    normal_estimation.setRadiusSearch(small_scale);
    normal_estimation.compute(*normal_small_scale);
    // calculate normals with the large support radius
    normal_estimation.setRadiusSearch(large_scale);
    normal_estimation.compute(*normal_large_scale);

    // output cloud for the DoN results, initialised from the input geometry
    pcl::PointCloud<pcl::PointNormal>::Ptr diff_normal_cloud(new pcl::PointCloud<pcl::PointNormal>);
    pcl::copyPointCloud<pcl::PointXYZ, pcl::PointNormal>(*in_cloud, *diff_normal_cloud);

    // DifferenceOfNormalsEstimation template args: input point type,
    // estimated-normal type, and output vector-field type.
    pcl::DifferenceOfNormalsEstimation<pcl::PointXYZ, pcl::PointNormal, pcl::PointNormal> diff_normal_estimator;
    diff_normal_estimator.setInputCloud(in_cloud);
    diff_normal_estimator.setNormalScaleSmall(normal_small_scale);
    diff_normal_estimator.setNormalScaleLarge(normal_large_scale);

    // FIX: the original ignored initCompute()'s return value; per the PCL DoN
    // tutorial it must be checked before calling computeFeature().
    if (!diff_normal_estimator.initCompute()) {
        printf("%s\n", "[lidar_euclidean_cluster_node]: could not initialize DoN feature computation");
        return;
    }

    diff_normal_estimator.computeFeature(*diff_normal_cloud);

    // Build the filter condition: keep points whose DoN result (stored in the
    // "curvature" field) is greater than (GT) angle_threshold.
    pcl::ConditionOr<pcl::PointNormal>::Ptr range_cond(new pcl::ConditionOr<pcl::PointNormal>);
    range_cond->addComparison(pcl::FieldComparison<pcl::PointNormal>::ConstPtr(
        new pcl::FieldComparison<pcl::PointNormal>("curvature", pcl::ComparisonOps::GT, angle_threshold)));

    // Build and apply the conditional-removal filter
    pcl::ConditionalRemoval<pcl::PointNormal> cond_removal;
    cond_removal.setCondition(range_cond);
    cond_removal.setInputCloud(diff_normal_cloud);

    pcl::PointCloud<pcl::PointNormal>::Ptr diff_normal_filtered_cloud(new pcl::PointCloud<pcl::PointNormal>);

    cond_removal.filter(*diff_normal_filtered_cloud);

    pcl::copyPointCloud<pcl::PointNormal, pcl::PointXYZ>(*diff_normal_filtered_cloud, *out_cloud);
}

/**
 * @description: Ground removal via RANSAC plane fitting: segment the dominant
 * plane whose normal lies within floor_max_angle of the z axis, then split
 * the cloud into non-ground and ground-only parts.
 * @param in_cloud         input cloud
 * @param out_cloud        output: everything except the fitted plane
 * @param only_floor_cloud output: the fitted (ground) plane points
 * @param max_height       RANSAC inlier distance threshold to the plane [m]
 * @param floor_max_angle  max deviation of the plane normal from z (setEpsAngle)
 */
void LidarClusterDetector::removeFloor(const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_cloud,
    pcl::PointCloud<pcl::PointXYZ>::Ptr& out_cloud,
    pcl::PointCloud<pcl::PointXYZ>::Ptr& only_floor_cloud,
    const double& max_height, const double& floor_max_angle)
{
    pcl::SACSegmentation<pcl::PointXYZ> seg;
    pcl::PointIndices::Ptr indexs(new pcl::PointIndices);
    pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
    // fit a plane constrained to be perpendicular to a given axis
    seg.setModelType(pcl::SACMODEL_PERPENDICULAR_PLANE);
    // random sample consensus
    seg.setMethodType(pcl::SAC_RANSAC);
    // cap the number of RANSAC iterations
    seg.setMaxIterations(100);
    // the constraint axis (z up)
    seg.setAxis(Eigen::Vector3f(0, 0, 1));
    // max angular deviation of the plane normal from that axis
    seg.setEpsAngle(floor_max_angle);
    // max point-to-model distance for a point to count as an inlier
    seg.setDistanceThreshold(max_height);
    seg.setOptimizeCoefficients(true);
    seg.setInputCloud(in_cloud);
    seg.segment(*indexs, *coefficients);

    if (indexs->indices.empty()) {
        // FIX: corrected message typo ("could't" -> "couldn't")
        printf("%s\n", "[lidar_euclidean_cluster_node]: couldn't seg floor");
    }

    pcl::ExtractIndices<pcl::PointXYZ> extract;
    extract.setInputCloud(in_cloud);
    extract.setIndices(indexs);
    extract.setNegative(true); // true removes the plane inliers
    extract.filter(*out_cloud);

    extract.setNegative(false); // false keeps only the plane inliers
    extract.filter(*only_floor_cloud);
}

/**
 * @description: Clip the cloud: drop points that are too high, behind the
 * vehicle, outside the lateral corridor, or too close to / too far from the
 * lidar centre.
 * @param in_cloud       input cloud
 * @param out_cloud      output: surviving points
 * @param height         max accepted z [m]
 * @param near_dis       min accepted planar distance from the sensor [m]
 * @param far_dis        max accepted planar distance from the sensor [m]
 * @param left_right_dis max accepted |y| [m]
 */
void LidarClusterDetector::clipCloud(const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_cloud,
    pcl::PointCloud<pcl::PointXYZ>::Ptr& out_cloud,
    const double& height, const double& near_dis, const double& far_dis,
    const double& left_right_dis)
{
    pcl::ExtractIndices<pcl::PointXYZ> extractor;
    extractor.setInputCloud(in_cloud);
    pcl::PointIndices indices;

    // NOTE(review): a bare "#pragma omp for" used to precede this loop. It is
    // a no-op outside an "omp parallel" region, and inside one it would both
    // race on the shared push_back and break the sequential else-if chain
    // (the `dis` assignment inside the condition) — so it has been removed.
    for (size_t i = 0; i < in_cloud->points.size(); i++) {
        double dis;
        // collect the indices of points to remove
        // (lidar frame: x forward, y left, z up)
        if (in_cloud->points[i].z > height) {
            indices.indices.push_back(i);
        } else if (in_cloud->points[i].x < 0 || in_cloud->points[i].y > left_right_dis || in_cloud->points[i].y < -left_right_dis) {
            indices.indices.push_back(i);
        } else if ((dis = sqrt(pow(in_cloud->points[i].x, 2) + pow(in_cloud->points[i].y, 2))) < near_dis || dis > far_dis) {
            indices.indices.push_back(i);
        }
    }
    extractor.setIndices(boost::make_shared<pcl::PointIndices>(indices));
    extractor.setNegative(true); // true removes the collected indices
    extractor.filter(*out_cloud);
}

/**
 * @description: Downsample a cloud with a voxel-grid filter: one
 * representative point is kept per cubic voxel of edge length leaf_size.
 */
void LidarClusterDetector::downsampleCloud(const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_cloud,
    pcl::PointCloud<pcl::PointXYZ>::Ptr& out_cloud,
    const double& leaf_size)
{
    pcl::VoxelGrid<pcl::PointXYZ> grid_filter;
    grid_filter.setLeafSize(leaf_size, leaf_size, leaf_size);
    grid_filter.setInputCloud(in_cloud);
    grid_filter.filter(*out_cloud);
}

/**
 * @description: Convert a PCL cloud to a ROS message, stamp it with the
 * header of the cloud currently being processed, and publish it.
 */
void LidarClusterDetector::pubPointCloud(
    const ros::Publisher& publisher,
    const pcl::PointCloud<pcl::PointXYZ>::Ptr& in_pointcloud)
{
    sensor_msgs::PointCloud2 out_msg;
    pcl::toROSMsg(*in_pointcloud, out_msg);
    // reuse the header of the input cloud so timestamps/frames line up
    out_msg.header = msg_header;
    publisher.publish(out_msg);
}

/**
 * @description: Split a comma-separated string of numbers (e.g. "1.5,2,3")
 * and append each parsed value to out_array.
 * FIX: empty tokens (a leading comma or ",,") are now skipped, since feeding
 * an empty string to std::stod would throw std::invalid_argument.
 */
void LidarClusterDetector::splitString(const std::string& in_string, std::vector<double>& out_array)
{
    std::string tmp;
    std::istringstream in(in_string);
    while (std::getline(in, tmp, ',')) {
        if (!tmp.empty()) {
            out_array.push_back(stod(tmp));
        }
    }
}

//*****************************************************************************************************


// Clear the file-static init_matrix flag so that the next call to
// initMatrix() recomputes the cached inverse extrinsic (invRt / invTt).
void LidarClusterDetector::resetMatrix()
{
  init_matrix = false;
}

/**
 * @description: Cache the inverse of the 4x4 camera extrinsic transform.
 * invRt is set to the 3x3 rotation block R of cameraExtrinsicMat, and invTt
 * to (-R^T * t)^T, where t is the extrinsic's translation column. The cached
 * values are consumed in pointcloud2_to_image via the row-vector form
 * point_j = invTt_j + sum_i centroid_i * invRt(i, j).
 * NOTE(review): this presumes the extrinsic maps camera -> lidar so that its
 * inverse maps lidar -> camera — confirm against the calibration convention.
 * Sets the file-static init_matrix flag so the work is done only once.
 * FIX: removed two leftover debug ROS_INFO prints of invTt.
 */
void LidarClusterDetector::initMatrix(const cv::Mat& cameraExtrinsicMat)
{
  invRt = cameraExtrinsicMat(cv::Rect(0, 0, 3, 3));
  cv::Mat invT = -invRt.t() * (cameraExtrinsicMat(cv::Rect(3, 0, 1, 3)));
  invTt = invT.t();
  init_matrix = true;
}


/**
 * @description: Least-squares polynomial fit x = k0 + k1*y + ... + kn*y^n
 * over the given image points (x is modelled as a function of y, matching the
 * image-space centreline use in pointcloud2_to_image).
 * @param in_point sample points; .y is the independent variable, .x the value
 * @param n        polynomial degree
 * @return (n+1) x 1 coefficient matrix K, lowest-order coefficient first
 */
cv::Mat LidarClusterDetector::polyfit(std::vector<cv::Point>& in_point, int n)
{
    int size = in_point.size();
    // number of unknown coefficients
    int x_num = n + 1;
    // build the Vandermonde matrix U (powers of y) and observation vector Y
    cv::Mat mat_u(size, x_num, CV_64F);
    cv::Mat mat_y(size, 1, CV_64F);

    for (int i = 0; i < mat_u.rows; ++i)
        for (int j = 0; j < mat_u.cols; ++j)
        {
            mat_u.at<double>(i, j) = pow(in_point[i].y, j);
        }

    for (int i = 0; i < mat_y.rows; ++i)
    {
        mat_y.at<double>(i, 0) = in_point[i].x;
    }

    // FIX: the original solved the normal equations (U^T U)^-1 U^T Y with a
    // plain LU inverse; cv::Mat::inv() silently returns a zero matrix when
    // U^T U is singular, and squaring the condition number is numerically
    // poor. cv::solve with DECOMP_SVD computes the least-squares /
    // minimum-norm solution directly and robustly (identical result when the
    // system is well-posed).
    cv::Mat mat_k(x_num, 1, CV_64F);
    cv::solve(mat_u, mat_y, mat_k, cv::DECOMP_SVD);
    return mat_k;
}





/**
 * @description: Project the centroids of the lidar clusters into the camera
 * image, match them against darknet_ros bounding boxes to classify traffic
 * cones by colour (bbox id 0 = blue; every other id is drawn/collected as
 * orange), build the image-space centreline between the two cone rows, fit a
 * polynomial through it, and publish the annotated image.
 * @param global_obj_list clustered obstacles; only their centroid_ is read
 * @param raw_image camera frame; drawn on in place (cv::Mat headers share
 *                  pixel data, so the const reference does not prevent drawing)
 * @param msg_bboxs darknet_ros detections for this frame
 */
void LidarClusterDetector::pointcloud2_to_image(std::vector<Detected_Obj>& global_obj_list,const cv::Mat& raw_image,const darknet_ros_msg::BoundingBoxesConstPtr& msg_bboxs)
{

    ROS_INFO("THIS IS pointcloud2_to_image");
    // Hard-coded camera calibration (extrinsic, intrinsic, distortion).
    // NOTE(review): presumably exported from a calibration yaml — confirm
    // these values match the camera actually in use before reusing this node.
    cv::Mat CameraExtrinsicMat(4, 4, cv::DataType<double>::type); //cameraExtrinsicMat
    cv::Mat CameraMat(3, 3, cv::DataType<double>::type);//cameraMat
    cv::Mat DistCoeff(1, 5, cv::DataType<double>::type);//distCoeff

    /* previous extrinsic values, kept for reference:
       -4.1663491306720202e-02, -2.8709286353907904e-02,9.9871914488968538e-01, -1.1370553874523417e+00,
       -9.9913164024351186e-01, 1.5422263627479227e-03,-4.1636366341887776e-02, -5.3279678670307432e-02,
       -3.4490063018455253e-04, -9.9958661376335756e-01,-2.8748610899658189e-02, 7.9774389207494045e-01,
       0., 0., 0., 1.*/
    CameraExtrinsicMat.at<double>(0,0)=-8.8398628964871495e-02;
    CameraExtrinsicMat.at<double>(0,1)=-1.1186960430763260e-01;
    CameraExtrinsicMat.at<double>(0,2)= 9.8978324598327294e-01;
    CameraExtrinsicMat.at<double>(0,3)=-1.1831286974932009e+00;

    CameraExtrinsicMat.at<double>(1,0)=-9.9348849285208440e-01;
    CameraExtrinsicMat.at<double>(1,1)=-6.1800812555905971e-02;
    CameraExtrinsicMat.at<double>(1,2)=-9.5714545069821799e-02;
    CameraExtrinsicMat.at<double>(1,3)= 1.7310770036434223e-01;


    CameraExtrinsicMat.at<double>(2,0)= 7.1876957139434705e-02;
    CameraExtrinsicMat.at<double>(2,1)=-9.9179929985833470e-01;
    CameraExtrinsicMat.at<double>(2,2)=-1.0567805748069170e-01;
    CameraExtrinsicMat.at<double>(2,3)= 7.8200447181965571e-01;

    CameraExtrinsicMat.at<double>(3,0)= 0.;
    CameraExtrinsicMat.at<double>(3,1)= 0.;
    CameraExtrinsicMat.at<double>(3,2)= 0.;
    CameraExtrinsicMat.at<double>(3,3)= 1.;


    // camera intrinsics: fx, fy on the diagonal; cx, cy in the last column
    CameraMat.at<double>(0,0)= 1.6664880590809096e+03;
    CameraMat.at<double>(0,1)= 0.;
    CameraMat.at<double>(0,2)= 6.7757875740659529e+02;

    CameraMat.at<double>(1,0)= 0.;
    CameraMat.at<double>(1,1)= 1.6707258783129939e+03;
    CameraMat.at<double>(1,2)= 4.6993646243537444e+02;

    CameraMat.at<double>(2,0)= 0.;
    CameraMat.at<double>(2,1)= 0.;
    CameraMat.at<double>(2,2)= 1.;


    // distortion coefficients; indices 0,1,4 are used below as radial terms
    // and 2,3 as tangential terms (matches OpenCV's plumb-bob layout)
    DistCoeff.at<double>(0,0)= -2.1892654126526639e-01;
    DistCoeff.at<double>(0,1)=  8.2638265822900758e-01;
    DistCoeff.at<double>(0,2)= -1.2517843138148176e-02;
    DistCoeff.at<double>(0,3)= -9.0618693923175120e-03;
    DistCoeff.at<double>(0,4)= -1.6050762891199606e+00;


    // compute and cache the inverse extrinsic (invRt / invTt) only once
    if (!init_matrix)
    {
    initMatrix(CameraExtrinsicMat);
    }

    cv::Mat point(1, 3, CV_64F); // one centroid transformed into camera coordinates
    cv::Point2d imagepoint;      // the same centroid in pixel coordinates


    std::vector<PicturePoint> blue_cone_points;   // projected blue-cone pixels
    std::vector<PicturePoint> orange_cone_points; // projected orange-cone pixels
    std::vector<cv::Point> points;                // centreline sample points

    for (size_t p = 0; p < global_obj_list.size(); p++)
    {
        // transform the cluster centroid into camera coordinates using the
        // cached inverse extrinsic: point_j = invTt_j + sum_i c_i * invRt(i,j)
        for (int i = 0; i < 3; i++)
        {
            point.at<double>(i) = invTt.at<double>(i);
            point.at<double>(i) += double(global_obj_list[p].centroid_.x )* invRt.at<double>(0, i);
            point.at<double>(i) += double(global_obj_list[p].centroid_.y )* invRt.at<double>(1, i);
            point.at<double>(i) += double(global_obj_list[p].centroid_.z )* invRt.at<double>(2, i);

        }

        // perspective divide onto the normalized image plane
        // NOTE(review): no guard against point z <= 0 (centroid behind the
        // camera) — presumably upstream clipping prevents that; confirm.
        double tmpx = point.at<double>(0) / point.at<double>(2);
        double tmpy = point.at<double>(1) / point.at<double>(2);

        // apply lens distortion: radial terms (DistCoeff 0,1,4) ...
        double r2 = tmpx * tmpx + tmpy * tmpy;
        double tmpdist =
            1 + DistCoeff.at<double>(0) * r2 + DistCoeff.at<double>(1) * r2 * r2 + DistCoeff.at<double>(4) * r2 * r2 * r2;

        // ... plus tangential terms (DistCoeff 2,3), then the intrinsics
        imagepoint.x =
            tmpx * tmpdist + 2 * DistCoeff.at<double>(2) * tmpx * tmpy + DistCoeff.at<double>(3) * (r2 + 2 * tmpx * tmpx);
        imagepoint.y =
            tmpy * tmpdist + DistCoeff.at<double>(2) * (r2 + 2 * tmpy * tmpy) + 2 * DistCoeff.at<double>(3) * tmpx * tmpy;
        imagepoint.x = CameraMat.at<double>(0, 0) * imagepoint.x + CameraMat.at<double>(0, 2);
        imagepoint.y = CameraMat.at<double>(1, 1) * imagepoint.y + CameraMat.at<double>(1, 2);

        // round to the nearest pixel
        int px = int(imagepoint.x + 0.5);
        int py = int(imagepoint.y + 0.5);


        //ROS_INFO("this is rows");
        //ROS_INFO("%d",raw_image.rows);
        //ROS_INFO("this is cols");
        //ROS_INFO("%d",raw_image.cols);


        // match the projected pixel against every detection, with a 20 px
        // margin around each bounding box; a match both draws a marker and
        // records the pixel in the per-colour list
        for (size_t b = 0; b < msg_bboxs->bounding_boxes.size(); b++)
        {
            if (px > msg_bboxs->bounding_boxes[b].xmin-20 && px < msg_bboxs->bounding_boxes[b].xmax+20
            && py > msg_bboxs->bounding_boxes[b].ymin-20 && py < msg_bboxs->bounding_boxes[b].ymax+20)
            {
                if(msg_bboxs->bounding_boxes[b].id == 0) // 0:blue 1 :orange 2 :small_yellow  3:big_yellow
                {
                    // blue marker (cv::Scalar is BGR), radius 20, filled
                    cv::circle(raw_image, cv::Point2f(px, py), 20, cv::Scalar(255,0,0), -1);

                    PicturePoint P;
                    P.x = px;
                    P.y = py;
                    blue_cone_points.push_back(P);

                }
                else{
                    // every non-blue id is drawn red and treated as "orange"
                    cv::circle(raw_image, cv::Point2f(px, py), 20, cv::Scalar(0,0,255), -1);
                    PicturePoint P;
                    P.x = px;
                    P.y = py;
                    orange_cone_points.push_back(P);


                }
            }


        }


        // image size observed during bring-up:
        //rows 1080
        //cols 1440


    }


    // guard: a centreline needs at least one cone of each colour,
    // otherwise the size()-1 arithmetic below would crash/underflow
    if(blue_cone_points.size()>0 && orange_cone_points.size()>0)
    {

        // sort the blue points by image y, ascending (larger y = nearer to
        // the vehicle, smaller y = farther away) — hand-rolled exchange sort
        cv::Point temp_blue;
        for(size_t x = 0;x<blue_cone_points.size()-1;x++)
        {
            for(size_t y = x+1;y<blue_cone_points.size();y++)
            {
                if(blue_cone_points[x].y<blue_cone_points[y].y)
                {
                    continue;
                }
                else
                {
                    temp_blue.x = blue_cone_points[x].x;
                    temp_blue.y = blue_cone_points[x].y;
                    blue_cone_points[x].x = blue_cone_points[y].x;
                    blue_cone_points[x].y = blue_cone_points[y].y;
                    blue_cone_points[y].x = temp_blue.x;
                    blue_cone_points[y].y = temp_blue.y;
                }
            }
        }

        // same ascending-y sort for the orange points
        cv::Point temp_oragne;
        for(size_t x = 0;x<orange_cone_points.size()-1;x++)
        {
            for(size_t y = x+1;y<orange_cone_points.size();y++)
            {
                if(orange_cone_points[x].y<orange_cone_points[y].y)
                {
                    continue;
                }
                else
                {
                    temp_oragne.x = orange_cone_points[x].x;
                    temp_oragne.y = orange_cone_points[x].y;
                    orange_cone_points[x].x = orange_cone_points[y].x;
                    orange_cone_points[x].y = orange_cone_points[y].y;
                    orange_cone_points[y].x = temp_oragne.x;
                    orange_cone_points[y].y = temp_oragne.y;
                }
            }
        }

        // pair blue and orange cones rank-by-rank (both lists sorted by y)
        // and take the pixel midpoint as a centreline sample; when one side
        // runs out, keep pairing against that side's last (farthest) cone

        if (blue_cone_points.size()>orange_cone_points.size())
        {

            for(size_t m = 0;m < blue_cone_points.size();m++)
            {
                cv::Point central_point;
                if(m <= orange_cone_points.size()-1){
                    central_point.x = (int)(blue_cone_points[m].x + orange_cone_points[m].x)/2 ;
                    central_point.y = (int)(blue_cone_points[m].y + orange_cone_points[m].y)/2 ;
                    points.push_back(central_point);
                }
                else{
                    central_point.x = (int)(blue_cone_points[m].x + orange_cone_points[orange_cone_points.size()-1].x)/2;
                    central_point.y = (int)(blue_cone_points[m].y + orange_cone_points[orange_cone_points.size()-1].y)/2;
                    points.push_back(central_point);
                }


            }

        }
        else
        {
            for(size_t n = 0;n < orange_cone_points.size();n++)
            {
                cv::Point central_point;
                if(n <= blue_cone_points.size()-1){
                    central_point.x = (int)(blue_cone_points[n].x + orange_cone_points[n].x)/2 ;
                    central_point.y = (int)(blue_cone_points[n].y + orange_cone_points[n].y)/2 ;
                    points.push_back(central_point);
                }
                else{
                    central_point.x = (int)(orange_cone_points[n].x + blue_cone_points[blue_cone_points.size()-1].x)/2;
                    central_point.y = (int)(orange_cone_points[n].y + blue_cone_points[blue_cone_points.size()-1].y)/2;
                    points.push_back(central_point);
                }


            }

        }

        // sort the centreline samples by y as well (same exchange sort)
        cv::Point temp;
        for(size_t x = 0;x<points.size()-1;x++)
        {
            for(size_t y = x+1;y<points.size();y++)
            {
                if(points[x].y<points[y].y)
                {
                    continue;
                }
                else
                {
                    temp.x = points[x].x;
                    temp.y = points[x].y;
                    points[x].x = points[y].x;
                    points[x].y = points[y].y;
                    points[y].x = temp.x;
                    points[y].y = temp.y;
                }
            }
        }


        // fit a degree-(N-1) polynomial through the N centreline samples
        // NOTE(review): the degree grows with the sample count (exact
        // interpolation), which becomes numerically ill-conditioned for many
        // points — a fixed low degree is worth considering; verify on track.
        int n = points.size()-1;

        cv::Mat mat_k = polyfit(points, n);

        //Mat out(150, 500, CV_8UC3,Scalar::all(0));

        // draw the fitted curve (disabled)
        /*for (int i = points[0].y; i < points[boost::size(points)-1].y; ++i)
        {
            cv::Point2d ipt;
            ipt.y = i;
            ipt.x = 0;
            for (int j = 0; j < n + 1; ++j)
            {
                ipt.x += mat_k.at<double>(j, 0)*pow(i,j);
            }
            circle(raw_image, ipt, 10, cv::Scalar(0, 255, 0), CV_FILLED, CV_AA);
        }

        // draw the original sample points
        for (int i = 0; i < boost::size(points); ++i)
        {
            cv::Point ipt = points[i];
            circle(raw_image, ipt, 30, cv::Scalar(0, 255, 0), CV_FILLED, CV_AA);
        }*/

    }
    // publish the annotated frame, stamped with the incoming image header
    sensor_msgs::ImagePtr msg = cv_bridge::CvImage(img_header, "bgr8", raw_image).toImageMsg();
    project2ImagePublisher.publish(*msg);


}//pointcloud2_to_image

} // namespace LidarDetector









