#include "include/data_fusion.h"

// DataFusion 实现
// DataFusion implementation
// Constructor: defers all setup (config load, calibration, canvases) to Init().
DataFusion::DataFusion()
{
    // Config cfg_; 
    Init();
}


void DataFusion::Init()
{
    cfg_ = gscm.getConfig(); 
    initFusionParams();
    
    #if 1
        image_ = cv::Mat::zeros(480,640, 16);
        image1_ = cv::Mat::zeros(600,600, 16);
        image2_ = cv::Mat::zeros(600,600, 16);
    #endif
}

// Build the combined rotation R = Rz * Ry * Rx from Euler angles in radians:
// rx about X, ry about Y, rz about Z (applied in X, then Y, then Z order).
cv::Mat DataFusion::rotationMatrix(double rx, double ry, double rz)
{
    // Evaluate each trig term once.
    const double cx = cos(rx), sx = sin(rx);
    const double cy = cos(ry), sy = sin(ry);
    const double cz = cos(rz), sz = sin(rz);

    // Elementary rotation about the X axis.
    cv::Mat rot_x = (cv::Mat_<double>(3, 3) <<
        1,  0,   0,
        0, cx, -sx,
        0, sx,  cx);

    // Elementary rotation about the Y axis.
    cv::Mat rot_y = (cv::Mat_<double>(3, 3) <<
         cy, 0, sy,
          0, 1,  0,
        -sy, 0, cy);

    // Elementary rotation about the Z axis.
    cv::Mat rot_z = (cv::Mat_<double>(3, 3) <<
        cz, -sz, 0,
        sz,  cz, 0,
         0,   0, 1);

    return rot_z * rot_y * rot_x;
}

// Derive the lidar->camera extrinsics (R_combined_, t_combined_) and the
// camera intrinsic matrix K_ from the configured calibration parameters.
void DataFusion::initFusionParams()
{
    double PER_Radian = CV_PI / 180.0;  // degrees -> radians
    double rx = PER_Radian * cfg_.cam_inttri.rx;  // -105 degrees in radians
    double ry = PER_Radian * cfg_.cam_inttri.ry;
    double rz = PER_Radian * cfg_.cam_inttri.rz;  // -90 degrees in radians
    cv::Mat R_cam = rotationMatrix(rx, ry, rz);

    // Vertical offset between the lidar origin and the robot reference point.
    // 0.485, 0.0 0.0825
    float lidar_2_rlm = 0.25 - 0.5 * 0.225;

    // Camera position in the lidar frame; z measured relative to lidar_2_rlm.
    cv::Mat t_cam = (cv::Mat_<double>(3, 1) << cfg_.cam_inttri.x_cam, cfg_.cam_inttri.y_cam, lidar_2_rlm - cfg_.cam_inttri.z_cam);

    double fx = cfg_.cam_inttri.fx, fy = cfg_.cam_inttri.fy;
    // BUG FIX: cy previously read cfg_.cam_inttri.fx (copy-paste error),
    // which corrupted the vertical principal point used for projection.
    double cx = cfg_.cam_inttri.cx, cy = cfg_.cam_inttri.cy;
    cv::Mat K = (cv::Mat_<double>(3, 3) << fx, 0, cx,
                                   0, fy, cy,
                                   0, 0, 1);

    // Store the inverse rotation so update() can map lidar -> camera frame:
    // cam_point = R_combined_ * (lidar_point - t_combined_).
    R_combined_ = R_cam.t();
    t_combined_ = t_cam;
    K_ = K;
}

// Thin out a dense polyline of lidar points: a point is kept only when its
// planar (x, y) SQUARED distance from the previously kept point exceeds
// `distance` (note: the threshold is compared against the squared distance,
// matching how DOWN_SAMPLE_DIS is used elsewhere). The first and last points
// are always kept; inputs of two points or fewer are returned unchanged.
std::vector<cv::Point3d> DataFusion::downSamplePoints(std::vector<cv::Point3d> dense_pts,  float distance)
{
    const size_t n = dense_pts.size();
    if (n <= 2)
    {
        return dense_pts;
    }

    std::vector<cv::Point3d> sparse;
    cv::Point3d anchor = dense_pts.front();
    sparse.push_back(anchor);

    // Walk interior points; the final point is appended unconditionally below.
    for (size_t k = 1; k + 1 < n; ++k)
    {
        const cv::Point3d& cand = dense_pts[k];
        const float dx = cand.x - anchor.x;
        const float dy = cand.y - anchor.y;
        if (dx * dx + dy * dy > distance)
        {
            sparse.push_back(cand);
            anchor = cand;
        }
    }

    sparse.push_back(dense_pts.back());
    return sparse;
}

// Convert every AI detection in the message into the internal BBox struct.
// Camera-estimated obstacle points (cam_obs) are copied with z forced to 0
// since the camera only estimates a ground-plane position.
std::vector<BBox>  DataFusion::AiMetaBboxRect2Bbox(const msg::AiMetadata::SharedPtr ai_msg)
{
    std::vector<BBox> bboxes;
    bboxes.reserve(ai_msg->det.bboxes.size());
    for (size_t i = 0; i < ai_msg->det.bboxes.size(); i++)
    {
        // Take references — the original copied the bbox and its point list
        // on every iteration.
        const auto& ai_box = ai_msg->det.bboxes[i];
        const auto& cam_xyz = ai_box.cam_obs;
        float prob = ai_box.prob;

        std::vector<cv::Point3d> cv_obs;
        cv_obs.reserve(cam_xyz.size());
        for (size_t j = 0; j < cam_xyz.size(); j++)
        {
            cv_obs.push_back(cv::Point3d(cam_xyz[j].x, cam_xyz[j].y, 0));
        }

        BBox box = {ai_box.class_id, prob, ai_box.x1, ai_box.y1, ai_box.x2, ai_box.y2, ai_box.cam_obs_len, cv_obs};
        bboxes.push_back(std::move(box));
    }
    return bboxes;
}

// Fuse one time-matched lidar frame with one AI (camera) detection frame.
//  1. Classify every lidar return into a LidarStatus bucket: under-ground,
//     behind the robot, inside/outside the camera view, inside a detection
//     box, or hidden by the segmentation mask.
//  2. Emit FusionData records: mode 1 = lidar points matched to an AI box,
//     mode 2 = camera-only obstacle estimate (z forced to 0),
//     mode 3 = down-sampled lidar-only points.
//  3. Publish the FusionCamdar message; optionally save debug images and a
//     CSV dump depending on config flags.
void DataFusion::update(const msg::ObstacleData::SharedPtr lidar_msg, const msg::AiMetadata::SharedPtr ai_msg)
{
    if(lidar_msg == nullptr || ai_msg == nullptr)
    {
        LOG_E("ERRROR BADLY =====================");
        return ;
    }
    
    Timera t1;
    // prepration:
    auto lidar_timestamp = lidar_msg->time_stamp;
    auto ai_timestamp = ai_msg->time_stamp;  
    auto mask_w = ai_msg->det.mask_w;
    auto mask_h = ai_msg->det.mask_h;
    auto mask = ai_msg->det.mask;

    // A zero timestamp means no time-close sensor pair was found upstream.
    if(lidar_timestamp == 0 || ai_timestamp == 0)
    {
        LOG_E("UnFind time-closed sensor data ");
        return ;
    }

    // uint32_t sum = std::accumulate(mask.begin(), mask.end(), 0u);
    // std::cout << "maks sum=========================: " << sum << std::endl;

    auto boxes = AiMetaBboxRect2Bbox(ai_msg);

    if(cfg_.debug_info.debug_camdar)
    // if(DEBUG_INFO_CAMDAR)
    if(cfg_.debug_info.debug_camdar) // NOTE(review): condition duplicated — the outer if already checks this flag
    {
        std::cout << "========================================\n";
        std::cout << "lidar-ai fusion start ----->\n";
        std::cout << "START FUSION: " << std::endl;
        std::cout << "AI    time_stamp: " << ai_timestamp << std::endl;
        std::cout << "LIDAR time_stamp: " <<  lidar_timestamp << std::endl;
        std::cout << "========================================\n";
    }

    auto fuse_pose = lidar_msg->fuse_pose;
    // case1: fusion with bbox, filter un-det lidar point

    int len = boxes.size();
    int lidar_len = lidar_msg->obs.size();

    std::vector<cv::Point3d> point3dAlls; // all lidar point sets

    // Per-lidar-point status (LidarStatus) and per-box accumulation buffers.
    std::vector<int> pointStatus(lidar_len, 0);
    std::vector<int> pointNumInBox(len);
    std::vector<std::vector<cv::Point3d>> point3dInBoxes(len);
    std::vector<std::vector<cv::Point2d>> hit_box_pts(len);    // u-v

    // save un-detect point3d and u-v
    std::vector<cv::Point3d> point3dLRFView;   // left-right-front
    std::vector<cv::Point3d> point3dRearView;  // rear 
    std::vector<cv::Point3d> point3dInView;    // insight
    std::vector<cv::Point2d> pixel2dOutImg;    // out-uv
    std::vector<cv::Point3d> point3dUnderGrnd; // under-ground
    // Classification pass over every lidar return.
    for(size_t index = 0; index < lidar_msg->obs.size(); index=index+1)
    {   
        auto p = lidar_msg->obs[index];
        float radius2 = p.x * p.x + p.y * p.y;

        // Skip returns that hit the robot's own body (isPointInCar returns
        // false when the point is inside the footprint).
        if(isPointInCar(p.x, p.y, p.z) == false)
        {
            continue;
        }

        // Ground-rejection height threshold; currently the same constant for
        // both near (< 1.5 m) and far points — the distance-scaled variant is
        // kept commented out below.
        float OBS_ATTITUDE_Z = 0;
        if(radius2 > 1.5 * 1.5)
        {
            // OBS_ATTITUDE_Z = 0.15 + -0.25;
            // OBS_ATTITUDE_Z = LOWER_AIMS_ATTITUDE + GRND_2_LIDAR_ATTITUDE;
            OBS_ATTITUDE_Z = cfg_.car_config.lower_attitude + cfg_.car_config.lidar_attitude;
        }
        else
        {
            // 0m -- 0.5  
            // 1.5m --- 1
            // OBS_ATTITUDE_Z = LOWER_AIMS_ATTITUDE * std::abs((std::sqrt(radius2) - 0.5))  + GRND_2_LIDAR_ATTITUDE;
            // OBS_ATTITUDE_Z = LOWER_AIMS_ATTITUDE + GRND_2_LIDAR_ATTITUDE;
            OBS_ATTITUDE_Z = cfg_.car_config.lower_attitude + cfg_.car_config.lidar_attitude;


        }
        // Below the ground threshold: treated as ground clutter.
        if(p.z <= OBS_ATTITUDE_Z)
        {
            
            point3dUnderGrnd.push_back(cv::Point3d(p.x, p.y, p.z));
            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
            pointStatus[index] = LidarStatus::UNDER_GRND;
            continue;
        }
        // Behind the robot: camera cannot see it, keep as lidar-only.
        if(p.x <= cfg_.car_config.car_behind)
        // if(p.x <= 0.4)
        {
            point3dRearView.push_back(cv::Point3d(p.x, p.y, p.z));
            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
            pointStatus[index] = LidarStatus::REAR_RLM;
            continue;
        }
        
        // if(p.z <= GRND_2_LIDAR_ATTITUDE + LOWER_AIMS_ATTITUDE)

        cv::Mat lidar_point = (cv::Mat_<double>(3, 1) << p.x, p.y, p.z);
        // Transform the lidar point into the camera coordinate frame.
        cv::Mat cam_point = R_combined_ * (lidar_point - t_combined_);

        // Project the camera-frame point onto the image plane.
        cv::Mat image_point_homogeneous = K_* cam_point;

        // Perspective divide; a zero depth maps to (0, 0) as a sentinel.
        cv::Point2d pres;
        if (image_point_homogeneous.at<double>(2, 0) == 0) 
        {
            pres =  cv::Point2d(0, 0);
        } 
        else 
        {
            pres = cv::Point2d(image_point_homogeneous.at<double>(0, 0) / image_point_homogeneous.at<double>(2, 0), 
                       image_point_homogeneous.at<double>(1, 0) / image_point_homogeneous.at<double>(2, 0));
        }

        int u = static_cast<int>(pres.x * 1); 
        int v = static_cast<int>(pres.y * 1);

        //TODO: 
        //grass mask
        //remove lidar-project-to-img's lidar on grass mask
        // if(0 <= u && u < image_.cols && 0 <= v && v < image_.rows) 
        // NOTE(review): v is tested against 640 - lower_pixel although the
        // camera image (image_) is 480 rows — confirm this is intended.
        if(0 <= u && u < 640 && 0 + cfg_.cam_inttri.upper_pixel  <= v && v < 640 - cfg_.cam_inttri.lower_pixel ) 
        // if(0 <= u && u < image_.cols && 0 + UPPER_PIXEL  <= v && v < image_.rows - LOWER_PIXEL ) 
        {

            if(boxes.size() == 0)
            {
                pointStatus[index] = LidarStatus::INNER_CAM;; // status: no detction outer-box (NOTE(review): stray double ';')
                point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
                point3dInView.push_back(cv::Point3d(p.x, p.y, p.z));
            }
            else
            {
                // Test the projected pixel against every detection box.
                // NOTE(review): the break below is commented out, so a point
                // inside several overlapping boxes is accumulated into each
                // of them (and pushed into point3dAlls more than once).
                for(size_t i = 0; i < boxes.size(); i++)
                {
                    if(isPointInBox(boxes[i], u, v)) 
                    {
                        // u,v 
                        // if(isPoint3dInMask(boxes[i], u, v))
                        if(isPoint3dInMaskAll(mask, boxes[i], mask_w, mask_h, u, v))
                        {   
                            pointStatus[index] = LidarStatus::HIDEN_CAM; // status:4 remove lidar point
                            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
                            // point3dInView.push_back(cv::Point3d(p.x, p.y, p.z));
                        }
                        else
                        {
                            pointStatus[index] = LidarStatus::INNER_BOX; // status:1 inner-box
                            pointNumInBox[i] += 1;
                            point3dInBoxes[i].push_back(cv::Point3d(p.x, p.y, p.z));
                            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
                            hit_box_pts[i].push_back(cv::Point2d(u, v));
                        }

                        // break;
                    }
                    else // no box accepted this point
                    {
                        if(pointStatus[index] == LidarStatus::DEFAULT)
                        {
                            if(i == boxes.size() - 1) // reached the last box
                            {   
                                pointStatus[index] = LidarStatus::INNER_CAM; // status:2 outer-box
                                point3dInView.push_back(cv::Point3d(p.x, p.y, p.z));
                                point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
                            }
                        }
                    }
                }
            }
        }
        else
        {
            pointStatus[index] = LidarStatus::OUTER_CAM; // status:2 left-right-front-view
            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
            point3dLRFView.push_back(cv::Point3d(p.x, p.y, p.z));
        }
    }

    // Post-processing: assemble the outgoing FusionCamdar message.
    auto camdar_msg = std::make_shared<msg::FusionCamdar>();
    auto msg = std::make_shared<msg::AiMetadata>();

    msg::FusionData fusion_data;
    std::vector<std::vector<cv::Point3d>>  fusion_camdar_pts;
    std::vector<std::vector<cv::Point3d>>  fusion_camera_pts;
    std::vector<cv::Point3d> outer_pts;

    camdar_msg->time_stamp = ai_timestamp;
    camdar_msg->fuse_pose = fuse_pose;
    int fusion_data_num = 0;

    // std::vector<cv::Point3d> point3dLRFView;   // left-right-front
    // std::vector<cv::Point3d> point3dRearView;  // rear 
    // std::vector<cv::Point3d> point3dInView;    // insight
    // std::vector<cv::Point2d> pixel2dOutImg;    // out-uv
    // std::vector<cv::Point3d> point3dUnderGrnd; // under-ground
    // std::cout << "point3dLRFView.size(): " << point3dLRFView.size() << std::endl;
    // std::cout << "point3dRearView.size(): " << point3dRearView.size() << std::endl;
    // std::cout << "point3dInView.size(): " << point3dInView.size() << std::endl;
    // std::cout << "pixel2dOutImg.size(): " << pixel2dOutImg.size() << std::endl;
    // std::cout << "point3dUnderGrnd.size(): " << point3dUnderGrnd.size() << std::endl;

    // Per-box output: enough lidar hits -> mode 1 (camera + lidar);
    // otherwise fall back to the camera's own estimate -> mode 2.
    # if 1
    for(size_t i = 0; i < boxes.size(); i++)
    {
        // if (pointNumInBox[i] > POINT_HIT_BOXNUM)
        if (pointNumInBox[i] > 10)
        {
            // down-sample matched lidar xyz
            auto fusion_pts = downSamplePoints(point3dInBoxes[i], DOWN_SAMPLE_DIS);
            fusion_camdar_pts.push_back(fusion_pts);
            // auto type = boxes[i].class_id;
            fusion_data_num += 1;

            msg::FusionData fusion_data;
            fusion_data.class_id = boxes[i].class_id;
            fusion_data.fusion_mode = 1;
            fusion_data.point_num = fusion_pts.size();

            for(size_t j = 0; j < fusion_pts.size(); j++)
            {
                msg::ObstaclePoint obsp;
                obsp.x = fusion_pts[j].x;
                obsp.y = fusion_pts[j].y;
                obsp.z = fusion_pts[j].z;
                fusion_data.points.push_back(obsp);
            }
            camdar_msg->fusion_data.push_back(fusion_data);
        }
        else
        {
            if(boxes[i].class_id != 1)   // 1: brush
            {
                //un-known-class
                fusion_camera_pts.push_back(boxes[i].cam_xyz);
                fusion_data_num += 1;
                msg::FusionData fusion_data;
                fusion_data.class_id = boxes[i].class_id;
                fusion_data.fusion_mode = 2;
                fusion_data.point_num =  boxes[i].len;

                // NOTE(review): when len == 0 a record with point_num == 0 has
                // already been counted/pushed conceptually? No — continue skips
                // the push, but fusion_data_num was already incremented.
                if(boxes[i].len == 0) continue;
                for(int j = 0; j < boxes[i].len; j++)
                {
                    msg::ObstaclePoint obsp;
                    obsp.x = boxes[i].cam_xyz[j].x;
                    obsp.y = boxes[i].cam_xyz[j].y;
                    obsp.z = 0;
                    fusion_data.points.push_back(obsp);
                }
                camdar_msg->fusion_data.push_back(fusion_data);
            }
            // post point  in box
            #if 0
            {
                // down-sample matched lidar xyz
                auto fusion_pts = downSamplePoints(point3dInBoxes[i], DOWN_SAMPLE_DIS);
                fusion_camdar_pts.push_back(fusion_pts);
                // auto type = boxes[i].class_id;
                fusion_data_num += 1;

                msg::FusionData fusion_data;
                fusion_data.class_id = 254;
                fusion_data.fusion_mode = 3;
                fusion_data.point_num = fusion_pts.size();

                for(size_t j = 0; j < fusion_pts.size(); j++)
                {
                    msg::ObstaclePoint obsp;
                    obsp.x = fusion_pts[j].x;
                    obsp.y = fusion_pts[j].y;
                    obsp.z = fusion_pts[j].z;
                    fusion_data.points.push_back(obsp);
                }
                camdar_msg->fusion_data.push_back(fusion_data);

            }
            #endif

        }
    }
    #endif

    // Collect lidar-only points (outside camera / rear / in-view-unboxed),
    // down-sampling against the previously kept point (squared distance
    // compared to DOWN_SAMPLE_DIS, matching downSamplePoints()).
    bool bPoint = true;
    for(auto i = 0; i < lidar_len; i++)
    {
        if(
        pointStatus[i] == LidarStatus::OUTER_CAM
        // || pointStatus[i] == LidarStatus::INNER_CAM 
        || pointStatus[i] == LidarStatus::REAR_RLM 
        || pointStatus[i] == LidarStatus::INNER_CAM
        
        ) 
        {
            if(bPoint)
            {
                auto p = lidar_msg->obs[i];
                bPoint = false;
                outer_pts.push_back(cv::Point3d(p.x, p.y, p.z));
            }
            else
            {
                auto pre_p = outer_pts.back();
                auto cur_p = lidar_msg->obs[i];
                float distance = (pre_p.x - cur_p.x) * (pre_p.x - cur_p.x) + (pre_p.y - cur_p.y) * (pre_p.y - cur_p.y);
                if(distance > DOWN_SAMPLE_DIS)
                {
                    outer_pts.push_back(cv::Point3d(cur_p.x,  cur_p.y, cur_p.z));
                }  
            }
        }
    }

    // Each lidar-only point becomes its own single-point mode-3 record.
    for(auto i = 0u; i < outer_pts.size(); i++)
    {
        fusion_data_num += 1;
        msg::FusionData fusion_data;
        fusion_data.class_id = 254;    // only lidar
        fusion_data.fusion_mode = 3;
        fusion_data.point_num = 1;
        msg::ObstaclePoint  obsp;
        obsp.x = outer_pts[i].x; 
        obsp.y = outer_pts[i].y;
        obsp.z = outer_pts[i].z;
        fusion_data.points.push_back(obsp);

        camdar_msg->fusion_data.push_back(fusion_data);
    }

    camdar_msg->fusion_num = fusion_data_num;

    // camdar_data_pub_->publish(*camdar_msg);
    gdsp.publishCamdarData(camdar_msg);


    // Optional debug image dumps.
    // if(bSave)
    if(cfg_.save_flag.save)
    {
        drawFusionMetaData(ai_msg, lidar_msg, boxes);
    }

    // if(bSave)
    if(cfg_.save_flag.save)

    {
        drawFusionPubData(point3dRearView, point3dInView, point3dLRFView, outer_pts, 
            fusion_camdar_pts, fusion_camera_pts, point3dInBoxes,
            ai_msg, lidar_msg);

    }


    // // send_data:A
    // if(DEBUG_INFO_CAMDAR)
    if(cfg_.debug_info.debug_camdar)
    {
        struct timeval time1;
        gettimeofday(&time1, NULL);
        long long cur_time = time1.tv_sec * 1000 + time1.tv_usec / 1000;
        std::cout << "========================================\n";
        std::cout << "TIME DIFFERENCE ----->\n";
        std::cout << "AI    time_stamp: " <<  ai_timestamp << std::endl;
        std::cout << "LIDAE time_stamp: " <<  lidar_timestamp << std::endl;
        std::cout << "end   time_stamp: " <<   cur_time               << std::endl;
        std::cout << "time dif        : " <<  cur_time - ai_timestamp << std::endl;
        std::cout << "========================================\n"; 
    }

    // Optional CSV dump of the published message.
    // if(SAVE_CAMDAR)
    if(cfg_.save_sensor.save_camdar)
    {
        write_camdar_2_csv(camdar_msg);
        camdar_index_ += 1;
    }

    t1.out("camdar fusion: ");

}



// Lidar-only fallback fusion (no AI detections available): classifies each
// lidar return (under-ground / rear / inside or outside the nominal camera
// view), down-samples the surviving points and publishes them all as
// single-point fusion_mode 3 records (class_id 254 = lidar-only).
void DataFusion::update(const msg::ObstacleData::SharedPtr lidar_msg)
{
   if(lidar_msg == nullptr )
    {
        LOG_E("ERRROR BADLY =====================");
        return ;
    }
    
    Timera t1;
    // prepration:
    auto lidar_timestamp = lidar_msg->time_stamp;
    // NOTE(review): unconditional debug print — consider gating on debug_camdar.
    std::cout << lidar_timestamp << std::endl;

    if(lidar_timestamp == 0)
    {
        LOG_E("UnFind lidar sensor data ");
        return ;
    }

    if(cfg_.debug_info.debug_camdar)
    // if(DEBUG_INFO_CAMDAR)
    {
        std::cout << "========================================\n";
        std::cout << "lidar fusion start ----->\n";
        std::cout << "START FUSION: " << std::endl;
        std::cout << "LIDAR time_stamp: " <<  lidar_timestamp << std::endl;
        std::cout << "========================================\n";
    }

    auto fuse_pose = lidar_msg->fuse_pose;

    // No detections in this path: a single dummy "box" slot keeps the buffer
    // shapes compatible with drawFusionPubData().
    int len = 1;
    int lidar_len = lidar_msg->obs.size();
    
    std::vector<cv::Point3d> point3dAlls; // all lidar point sets

    std::vector<int> pointStatus(lidar_len, 0);
    std::vector<int> pointNumInBox(len);
    std::vector<std::vector<cv::Point3d>> point3dInBoxes(len);
    std::vector<std::vector<cv::Point2d>> hit_box_pts(len);    // u-v

    // save un-detect point3d and u-v
    std::vector<cv::Point3d> point3dLRFView;   // left-right-front
    std::vector<cv::Point3d> point3dRearView;  // rear 
    std::vector<cv::Point3d> point3dInView;    // insight
    std::vector<cv::Point2d> pixel2dOutImg;    // out-uv
    std::vector<cv::Point3d> point3dUnderGrnd; // under-ground
    // Classification pass over every lidar return (mirrors the camera+lidar
    // overload, minus the detection-box logic).
    for(size_t index = 0; index < lidar_msg->obs.size(); index=index+1)
    {   

        auto p = lidar_msg->obs[index];
        float radius2 = p.x * p.x + p.y * p.y;
        float OBS_ATTITUDE_Z = 0;
        // Skip returns on the robot's own body.
        if(isPointInCar(p.x, p.y, p.z) == false)
        {
            continue;
        }
        // Ground-rejection threshold; currently the same constant either side
        // of the 1.5 m radius split.
        if(radius2 > 1.5 * 1.5)
        {
            OBS_ATTITUDE_Z = cfg_.car_config.lower_attitude + cfg_.car_config.lidar_attitude;
            // OBS_ATTITUDE_Z = LOWER_AIMS_ATTITUDE + GRND_2_LIDAR_ATTITUDE;
        }
        else
        {
            // 0m -- 0.5  
            // 1.5m --- 1
            // OBS_ATTITUDE_Z = LOWER_AIMS_ATTITUDE * std::abs((std::sqrt(radius2) - 0.5))  + GRND_2_LIDAR_ATTITUDE;
            // OBS_ATTITUDE_Z = LOWER_AIMS_ATTITUDE + GRND_2_LIDAR_ATTITUDE;
            OBS_ATTITUDE_Z = cfg_.car_config.lower_attitude + cfg_.car_config.lidar_attitude;

        }
        if(p.z <= OBS_ATTITUDE_Z)
        {
            point3dUnderGrnd.push_back(cv::Point3d(p.x, p.y, p.z));
            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
            pointStatus[index] = LidarStatus::UNDER_GRND;
            continue;
        }
        // if(p.x <= CAR_BEHIND)
        if(p.x <= cfg_.car_config.car_behind)

        {
            point3dRearView.push_back(cv::Point3d(p.x, p.y, p.z));
            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
            pointStatus[index] = LidarStatus::REAR_RLM;
            // NOTE(review): unlike the camera+lidar overload the continue is
            // commented out, so rear points fall through to projection and
            // their status/point3dAlls entries are duplicated — confirm intent.
            // continue;
        }
        


        cv::Mat lidar_point = (cv::Mat_<double>(3, 1) << p.x, p.y, p.z);
        // Transform the lidar point into the camera coordinate frame.
        cv::Mat cam_point = R_combined_ * (lidar_point - t_combined_);

        // Project the camera-frame point onto the image plane.
        cv::Mat image_point_homogeneous = K_* cam_point;

        // Perspective divide; a zero depth maps to (0, 0) as a sentinel.
        cv::Point2d pres;
        if (image_point_homogeneous.at<double>(2, 0) == 0) 
        {
            pres =  cv::Point2d(0, 0);
        } 
        else 
        {
            pres = cv::Point2d(image_point_homogeneous.at<double>(0, 0) / image_point_homogeneous.at<double>(2, 0), 
                       image_point_homogeneous.at<double>(1, 0) / image_point_homogeneous.at<double>(2, 0));
        }

        int u = static_cast<int>(pres.x * 1); 
        int v = static_cast<int>(pres.y * 1);
        if(0 <= u && u < 640 && 0 + cfg_.cam_inttri.upper_pixel  <= v && v < 640 - cfg_.cam_inttri.lower_pixel ) 
        // if(0 <= u && u < image_.cols && 0 + UPPER_PIXEL  <= v && v < image_.rows - LOWER_PIXEL ) 
        {
            pointStatus[index] = LidarStatus::INNER_CAM;; // status: no detction outer-box (NOTE(review): stray double ';')
            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
            point3dInView.push_back(cv::Point3d(p.x, p.y, p.z));
            
        }
        else
        {
            pointStatus[index] = LidarStatus::OUTER_CAM; // status:2 left-right-front-view
            point3dAlls.push_back(cv::Point3d(p.x, p.y, p.z));
            point3dLRFView.push_back(cv::Point3d(p.x, p.y, p.z));
        }
    }

    // Post-processing: assemble the outgoing FusionCamdar message.
    auto camdar_msg = std::make_shared<msg::FusionCamdar>();
    auto msg = std::make_shared<msg::AiMetadata>();

    msg::FusionData fusion_data;
    std::vector<std::vector<cv::Point3d>>  fusion_camdar_pts;
    std::vector<std::vector<cv::Point3d>>  fusion_camera_pts;
    std::vector<cv::Point3d> outer_pts;

    camdar_msg->time_stamp = lidar_timestamp;
    camdar_msg->fuse_pose = fuse_pose;
    int fusion_data_num = 0;

    // Down-sample all kept points against the previously kept one (squared
    // distance compared to DOWN_SAMPLE_DIS, matching downSamplePoints()).
    bool bPoint = true;
    for(auto i = 0; i < lidar_len; i++)
    {
        if(
        pointStatus[i] == LidarStatus::OUTER_CAM
        || pointStatus[i] == LidarStatus::INNER_CAM 
        || pointStatus[i] == LidarStatus::REAR_RLM 
        ) 
        {
            
            if(bPoint)
            {
                auto p = lidar_msg->obs[i];
                bPoint = false;
                outer_pts.push_back(cv::Point3d(p.x, p.y, p.z));
            }
            else
            {
                auto pre_p = outer_pts.back();
                auto cur_p = lidar_msg->obs[i];
                float distance = (pre_p.x - cur_p.x) * (pre_p.x - cur_p.x) + (pre_p.y - cur_p.y) * (pre_p.y - cur_p.y);
                if(distance > DOWN_SAMPLE_DIS)
                {
                    outer_pts.push_back(cv::Point3d(cur_p.x,  cur_p.y, cur_p.z));
                }  
            }
        }
    }
    // Each surviving point becomes its own single-point mode-3 record.
    for(auto i = 0u; i < outer_pts.size(); i++)
    {
        fusion_data_num += 1;
        msg::FusionData fusion_data;
        fusion_data.class_id = 254;    // only lidar
        fusion_data.fusion_mode = 3;
        fusion_data.point_num = 1;
        msg::ObstaclePoint  obsp;
        obsp.x = outer_pts[i].x; 
        obsp.y = outer_pts[i].y;
        obsp.z = outer_pts[i].z;
        fusion_data.points.push_back(obsp);

        camdar_msg->fusion_data.push_back(fusion_data);
    }

    camdar_msg->fusion_num = fusion_data_num;

    // camdar_data_pub_->publish(*camdar_msg);
    gdsp.publishCamdarData(camdar_msg);

    // Optional debug image dumps.
    // if(bSave)
    if(cfg_.save_flag.save)
    {
        drawFusionMetaData(lidar_msg);
    }

    // if(bSave)
    if(cfg_.save_flag.save)
    {
        drawFusionPubData(point3dRearView, point3dInView, point3dLRFView, outer_pts, 
            fusion_camdar_pts, fusion_camera_pts, point3dInBoxes,
            lidar_msg);

    }


    // send_data:A
    // if(DEBUG_INFO_CAMDAR)
    if(cfg_.debug_info.debug_camdar)
    {
        struct timeval time1;
        gettimeofday(&time1, NULL);
        long long cur_time = time1.tv_sec * 1000 + time1.tv_usec / 1000;
        std::cout << "========================================\n";
        std::cout << "TIME DIFFERENCE ----->\n";
        std::cout << "LIDAE time_stamp: " <<  lidar_timestamp << std::endl;
        std::cout << "end   time_stamp: " <<   cur_time               << std::endl;
        std::cout << "time dif        : " <<  cur_time - lidar_timestamp << std::endl;
        std::cout << "========================================\n"; 
    }

    // Optional CSV dump of the published message.
    // if(SAVE_CAMDAR)
    if(cfg_.save_sensor.save_camdar)
    {
        write_camdar_2_csv(camdar_msg);
        camdar_index_ += 1;
    }

    t1.out("camdar fusion: ");  
}


// Returns FALSE when the (x, y) position lies inside the robot's own
// footprint (so callers can skip self-hits), TRUE otherwise.
// NOTE(review): the name reads inverted relative to the return value;
// renaming would break callers, so it is documented instead.
// Footprint in the lidar frame: x forward in (BOTTOM_CAR, TOP_CAR),
// y lateral in (LEFT_CAR, RIGHT_CAR). The z coordinate is not used.
bool DataFusion::isPointInCar(float px, float py, float pz)
{
    //car  666*472*294 513
    constexpr float LEFT_CAR = -0.30;
    constexpr float RIGHT_CAR = 0.30;
    constexpr float TOP_CAR = 0.60;
    constexpr float BOTTOM_CAR = -0.20;
    // 0.830525 0.0284423 0.0247233
    (void)pz;  // height plays no part in the footprint test

    const bool inside = (py > LEFT_CAR && py < RIGHT_CAR) &&
                        (px > BOTTOM_CAR && px < TOP_CAR);
    return !inside;
}


// Returns true when pixel (px, py) — expressed in 640x640 projection
// coordinates — lands on a zero-valued cell of the (mask_w x mask_h)
// segmentation mask, i.e. the lidar point should be hidden. Only evaluated
// for class-0 detections; every other class returns false.
bool DataFusion::isPoint3dInMaskAll(const std::vector<uint8_t>& mask, const BBox& bbox, int mask_w, int mask_h, int px, int py)
{
    if(bbox.class_id != 0 ) return false;

    // Guard against an empty or undersized mask: the original indexed
    // mask[a_xy] unconditionally, which divides by zero when mask_w/h are 0
    // and reads out of bounds when the mask buffer is short.
    if(mask_w <= 0 || mask_h <= 0 ||
       mask.size() < static_cast<size_t>(mask_w) * static_cast<size_t>(mask_h))
    {
        return false;
    }

    // Scale from the 640x640 projection image down to mask resolution.
    float ration_x = 640.0 / mask_w;
    float ration_y = 640.0 / mask_h;

    int r_x = std::floor((px) / ration_x); 
    int r_y = std::floor((py) / ration_y);

    // Callers pass v values up to 640 - lower_pixel, so r_y can land past the
    // last mask row; treat anything outside the mask as "not masked".
    if(r_x < 0 || r_x >= mask_w || r_y < 0 || r_y >= mask_h)
    {
        return false;
    }

    int a_xy = r_y * mask_w + r_x;
    int value = static_cast<int>(mask[a_xy]);
    return value == 0;
}


// Debug dump for the lidar-only path: renders the raw lidar scan onto a
// top-down canvas (100 px per metre, robot at (300, 300), x up, y left) and
// writes both the untouched camera-sized image and the canvas to disk.
void DataFusion::drawFusionMetaData(const msg::ObstacleData::SharedPtr lidar_msg)
{
        cv::Mat image_draw = image_.clone(); // ai-img
        cv::Mat image_draw1 = image1_.clone(); // lidar-meta + ai-xyz

        // Canvas landmarks: robot origin, robot footprint box, 3 m extent box.
        const int cx = 300;
        const int cy = 300;
        cv::circle(image_draw1, cv::Point(cx, cy), 2, COLOR_RED, -1);
        cv::rectangle(image_draw1, cv::Point(280, 250), cv::Point(320, 310), COLOR_GRAY, 2);
        cv::rectangle(image_draw1, cv::Point(150, 150), cv::Point(450, 450), COLOR_GREEN, 1);

        // Plot every lidar return that falls within +/- 3 m of the robot.
        for (const auto& point : lidar_msg->obs)
        {
            if (point.x < 3 && point.x > -3 && point.y < 3 && point.y > -3)
            {
                const int dx = int(300.0 - 100.0 * point.y);
                const int dy = int(300.0 - 100.0 * point.x);
                circle(image_draw1, cv::Point(dx, dy), 2, COLOR_WHITE, -1);
            }
        }

        // File names: zero-padded frame index plus the lidar timestamp.
        std::ostringstream oss;
        oss << "darw_image" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << " " << std::to_string(lidar_msg->time_stamp) << ".jpg";
        std::ostringstream oss1;
        oss1 << "darw_lidar" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << "_" << std::to_string(lidar_msg->time_stamp) << "_" << std::to_string(lidar_msg->time_stamp) << ".jpg";

        save_img_no_ += 1;
        cv::imwrite(oss.str(), image_draw);
        cv::imwrite(oss1.str(), image_draw1);
}


// Debug dump for the camera+lidar path: draws AI bounding boxes on the camera
// image, and lidar returns plus camera-estimated obstacle points on a
// top-down canvas (100 px per metre, robot at (300, 300), x up, y left),
// then saves both images to disk.
void DataFusion::drawFusionMetaData(const msg::AiMetadata::SharedPtr ai_msg, 
    const msg::ObstacleData::SharedPtr lidar_msg, const std::vector<BBox>& boxes)
{
        cv::Mat cam_canvas = image_.clone();    // ai-img
        cv::Mat world_canvas = image1_.clone(); // lidar-meta + ai-xyz

        // Canvas landmarks: robot origin, robot footprint box, 3 m extent box.
        const int cx = 300;
        const int cy = 300;
        cv::circle(world_canvas, cv::Point(cx, cy), 2, COLOR_RED, -1);
        cv::rectangle(world_canvas, cv::Point(280, 250), cv::Point(320, 310), COLOR_GRAY, 2);
        cv::rectangle(world_canvas, cv::Point(150, 150), cv::Point(450, 450), COLOR_GREEN, 1);

        // Detection rectangles with class ids on the camera image.
        for (const auto& bbox : boxes)
        {
            cv::rectangle(cam_canvas, cv::Point(bbox.x1, bbox.y1), cv::Point(bbox.x2, bbox.y2), COLOR_YELLOW, 2);
            cv::putText(cam_canvas, std::to_string(bbox.class_id), cv::Point(bbox.x1, bbox.y1 - 10), cv::FONT_HERSHEY_SIMPLEX, 0.9, COLOR_YELLOW, 2);
        }

        // Lidar returns within +/- 3 m of the robot.
        for (const auto& point : lidar_msg->obs)
        {
            if (point.x < 3 && point.x > -3 && point.y < 3 && point.y > -3)
            {
                const int dx = int(300.0 - 100.0 * point.y);
                const int dy = int(300.0 - 100.0 * point.x);
                circle(world_canvas, cv::Point(dx, dy), 2, COLOR_WHITE, -1);
            }
        }

        // Camera-estimated obstacle points (0 < x < 1.5 m, |y| < 1.5 m).
        for (const auto& bbox : boxes)
        {
            for (int camindex = 0; camindex < bbox.len; camindex++)
            {
                const auto& point = bbox.cam_xyz[camindex];
                if (point.x < 1.5 && point.x > 0 && point.y < 1.5 && point.y > -1.5)
                {
                    const int dx = int(300.0 - 100.0 * point.y);
                    const int dy = int(300.0 - 100.0 * point.x);
                    circle(world_canvas, cv::Point(dx, dy), 2, COLOR_YELLOW, -1);
                }
            }
        }

        // File names: zero-padded frame index plus sensor timestamps.
        std::ostringstream oss;
        oss << "darw_image" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << " " << std::to_string(ai_msg->time_stamp) << ".jpg";
        std::ostringstream oss1;
        oss1 << "darw_lidar" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << "_" << std::to_string(ai_msg->time_stamp) << "_" << std::to_string(lidar_msg->time_stamp) << ".jpg";

        save_img_no_ += 1;
        cv::imwrite(oss.str(), cam_canvas);
        cv::imwrite(oss1.str(), world_canvas);
}


void DataFusion::drawFusionPubData(std::vector<cv::Point3d> point3dRearView,
         std::vector<cv::Point3d> point3dInView,
         std::vector<cv::Point3d> point3dLRFView,
         std::vector<cv::Point3d> outer_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camdar_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camera_pts,
         std::vector<std::vector<cv::Point3d>> point3dInBoxes,
         const msg::AiMetadata::SharedPtr ai_msg, const msg::ObstacleData::SharedPtr lidar_msg)
{
    // Render two top-down debug canvases and save them as JPEGs:
    //   - a "complex" image with every dense point set (when cfg_.save_flag.dense)
    //   - a "simplify" image with down-sampled point sets (when cfg_.save_flag.simpify)
    // Filenames embed save_img_no_ plus the ai/lidar timestamps.
    cv::Mat dense_canvas  = image2_.clone();
    cv::Mat simple_canvas = image2_.clone();

    // Shared background: ego origin dot, ego footprint box, 3 m ROI border
    // (canvas centre (300,300) is the ego origin — see drawFusionData2World).
    const int origin_x = 300, origin_y = 300;
    auto drawBackground = [&](cv::Mat& canvas) {
        cv::circle(canvas, cv::Point(origin_x, origin_y), 2, COLOR_RED, -1);
        cv::rectangle(canvas, cv::Point(280, 250), cv::Point(320, 310), COLOR_GRAY, 2);
        cv::rectangle(canvas, cv::Point(150, 150), cv::Point(450, 450), COLOR_GREEN, 1);
    };
    drawBackground(dense_canvas);
    drawBackground(simple_canvas);

    if (cfg_.save_flag.dense)
    {
        // Rear points (out of camera view): blue.
        drawFusionData2World(point3dRearView, dense_canvas, COLOR_BLUE);
        // In-view points outside any detection box: white.
        drawFusionData2World(point3dInView, dense_canvas, COLOR_WHITE);
        // Left / right / front side-view points: green.
        drawFusionData2World(point3dLRFView, dense_canvas, COLOR_GREEN);

        // Camera-estimated point sets: yellow.
        for (const auto& cam_pts : fusion_camera_pts)
            drawFusionData2World(cam_pts, dense_canvas, COLOR_YELLOW);

        // Points falling inside detection boxes: red.
        for (const auto& box_pts : point3dInBoxes)
            drawFusionData2World(box_pts, dense_canvas, COLOR_RED);
    }

    if (cfg_.save_flag.simpify)
    {
        // Same categories on the simplified canvas, with the three raw clouds
        // thinned by downSamplePoints first.
        drawFusionData2World(downSamplePoints(point3dRearView, DOWN_SAMPLE_DIS), simple_canvas, COLOR_BLUE);
        drawFusionData2World(downSamplePoints(point3dInView, DOWN_SAMPLE_DIS), simple_canvas, COLOR_WHITE);
        drawFusionData2World(downSamplePoints(point3dLRFView, DOWN_SAMPLE_DIS), simple_canvas, COLOR_GREEN);

        for (const auto& cam_pts : fusion_camera_pts)
            drawFusionData2World(cam_pts, simple_canvas, COLOR_YELLOW);

        for (const auto& camdar_pts : fusion_camdar_pts)
            drawFusionData2World(camdar_pts, simple_canvas, COLOR_RED);
    }

    // Build the output filenames (frame counter zero-padded to 9 digits,
    // followed by ai and lidar timestamps).
    std::ostringstream complex_name;
    complex_name << "darw_complex" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << "_" << std::to_string(ai_msg->time_stamp) << "_" << std::to_string(lidar_msg->time_stamp) << ".jpg";
    std::ostringstream simple_name;
    simple_name << "darw_simplify" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << "_" << std::to_string(ai_msg->time_stamp) << "_" << std::to_string(lidar_msg->time_stamp) << ".jpg";

    cv::imwrite(complex_name.str(), dense_canvas);
    cv::imwrite(simple_name.str(), simple_canvas);
}



void DataFusion::drawFusionPubData(std::vector<cv::Point3d> point3dRearView,
         std::vector<cv::Point3d> point3dInView,
         std::vector<cv::Point3d> point3dLRFView,
         std::vector<cv::Point3d> outer_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camdar_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camera_pts,
         std::vector<std::vector<cv::Point3d>> point3dInBoxes,
         const msg::ObstacleData::SharedPtr lidar_msg)
{
    // Lidar-only overload: identical rendering to the ai_msg+lidar_msg
    // version, but the filenames carry the lidar timestamp twice since no
    // ai timestamp is available here.
    cv::Mat dense_canvas  = image2_.clone();
    cv::Mat simple_canvas = image2_.clone();

    // Shared background: ego origin dot, ego footprint box, 3 m ROI border.
    const int origin_x = 300, origin_y = 300;
    auto drawBackground = [&](cv::Mat& canvas) {
        cv::circle(canvas, cv::Point(origin_x, origin_y), 2, COLOR_RED, -1);
        cv::rectangle(canvas, cv::Point(280, 250), cv::Point(320, 310), COLOR_GRAY, 2);
        cv::rectangle(canvas, cv::Point(150, 150), cv::Point(450, 450), COLOR_GREEN, 1);
    };
    drawBackground(dense_canvas);
    drawBackground(simple_canvas);

    if (cfg_.save_flag.dense)
    {
        // Rear points (out of camera view): blue.
        drawFusionData2World(point3dRearView, dense_canvas, COLOR_BLUE);
        // In-view points outside any detection box: white.
        drawFusionData2World(point3dInView, dense_canvas, COLOR_WHITE);
        // Left / right / front side-view points: green.
        drawFusionData2World(point3dLRFView, dense_canvas, COLOR_GREEN);

        // Camera-estimated point sets: yellow.
        for (const auto& cam_pts : fusion_camera_pts)
            drawFusionData2World(cam_pts, dense_canvas, COLOR_YELLOW);

        // Points falling inside detection boxes: red.
        for (const auto& box_pts : point3dInBoxes)
            drawFusionData2World(box_pts, dense_canvas, COLOR_RED);
    }

    if (cfg_.save_flag.simpify)
    {
        // Same categories on the simplified canvas, with the three raw clouds
        // thinned by downSamplePoints first.
        drawFusionData2World(downSamplePoints(point3dRearView, DOWN_SAMPLE_DIS), simple_canvas, COLOR_BLUE);
        drawFusionData2World(downSamplePoints(point3dInView, DOWN_SAMPLE_DIS), simple_canvas, COLOR_WHITE);
        drawFusionData2World(downSamplePoints(point3dLRFView, DOWN_SAMPLE_DIS), simple_canvas, COLOR_GREEN);

        for (const auto& cam_pts : fusion_camera_pts)
            drawFusionData2World(cam_pts, simple_canvas, COLOR_YELLOW);

        for (const auto& camdar_pts : fusion_camdar_pts)
            drawFusionData2World(camdar_pts, simple_canvas, COLOR_RED);
    }

    // Build the output filenames (frame counter zero-padded to 9 digits,
    // lidar timestamp written twice in place of the missing ai timestamp).
    std::ostringstream complex_name;
    complex_name << "darw_complex" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << "_" << std::to_string(lidar_msg->time_stamp) << "_" << std::to_string(lidar_msg->time_stamp) << ".jpg";
    std::ostringstream simple_name;
    simple_name << "darw_simplify" << "_" << std::setw(9) << std::setfill('0') << save_img_no_ << "_" << std::to_string(lidar_msg->time_stamp) << "_" << std::to_string(lidar_msg->time_stamp) << ".jpg";

    cv::imwrite(complex_name.str(), dense_canvas);
    cv::imwrite(simple_name.str(), simple_canvas);
}

void DataFusion::drawFusionData2World(std::vector<cv::Point3d> ori_draw_pts, cv::Mat& img_draw, cv::Scalar COLOR_CUR)
{
    // Draw a set of 3D points (vehicle frame, metres) as filled dots on the
    // top-down debug canvas.
    //
    // Pixel mapping used throughout this file: canvas centre (300,300) is the
    // ego origin, 100 px per metre, with +x mapped to "up" and +y to "left"
    // on the image. Points outside the +/-3 m window are skipped (they would
    // land outside the 600x600 canvas).
    //
    // Fix: the original made a second full copy of the input vector
    // (`auto draw_pts = ori_draw_pts;`) before iterating; iterate the
    // parameter directly by const reference instead.
    for (const auto& point : ori_draw_pts)
    {
        if (point.x < 3 && point.x > -3 && point.y < 3 && point.y > -3)
        {
            const int dx = int(300.0 - 100.0 * point.y);
            const int dy = int(300.0 - 100.0 * point.x);
            circle(img_draw, cv::Point(dx, dy), 2, COLOR_CUR, -1);
        }
    }
}

void DataFusion::write_camdar_2_csv(const msg::FusionCamdar::SharedPtr camdar_msg)
{
    // Append one camdar fusion frame to the debug log file (newline-separated
    // key/value text, despite the "csv" name): frame header, then one record
    // per fused obstacle followed by its point list.
    std::ofstream file(SAVE_CAMDAR_CSV, std::ios::app);
    if (!file.is_open()) {
        std::cerr << "无法打开文件: " << SAVE_CAMDAR_CSV << std::endl;
        // Fix: the original only logged the failure and then kept writing to
        // the dead stream; bail out early instead.
        return;
    }
    file << "index: " << camdar_index_ << "\n";
    file << "time_stamp: " << camdar_msg->time_stamp << "\n";
    file << "fusion_num: " << camdar_msg->fusion_num << "\n";
    // Iterate by const reference — the original copied the whole fusion_data
    // container and each record's points array per iteration.
    for (const auto& record : camdar_msg->fusion_data)
    {
        file << "point_num: " << record.point_num << "\n";
        file << "class_id: " << record.class_id << "\n";
        file << "fusion_mode: " << record.fusion_mode << "\n";
        file << "points:\n";
        // point_num entries are assumed valid in record.points — TODO confirm
        // the producer guarantees point_num <= points capacity.
        for (int j = 0; j < record.point_num; j++)
        {
            file << record.points[j].x << " " << record.points[j].y << " " << record.points[j].z << "\n";
        }
    }
    // No explicit close(): std::ofstream flushes and closes on destruction.
}
