#ifndef DATA_FUSION_H
#define DATA_FUSION_H

#include <iostream>
#include <memory>
#include <string>
#include <map>
#include <vector>
#include "msg_topic.h"
#include "utils.h"
#include "global_status_manager.h"
#include "global_data_subpub.h"
#include <opencv2/opencv.hpp>
#include <sys/time.h>
#include <sstream>
#include <iomanip>
#include <string>

// Drawing colors for debug visualization. Components are in OpenCV's default
// BGR channel order, i.e. cv::Scalar(Blue, Green, Red).
// NOTE(review): in C++17 these could be `inline const cv::Scalar` constants
// instead of macros, but other TUs may rely on the preprocessor names.
#define COLOR_RED cv::Scalar(0, 0, 255)
#define COLOR_GREEN cv::Scalar(0, 255, 0)
#define COLOR_BLUE cv::Scalar(255, 0, 0)
#define COLOR_YELLOW cv::Scalar(0, 255, 255)
#define COLOR_WHITE cv::Scalar(255, 255, 255)
#define COLOR_GRAY cv::Scalar(128, 128, 128)


// One 2D detection box (from AI metadata, see AiMetaBboxRect2Bbox) plus the
// 3D points associated with it during fusion.
// Invariant implied by isPointInBox: x1 <= x2 and y1 <= y2 (x1/y1 is the
// min corner, x2/y2 the max corner, in pixel coordinates).
struct BBox {
    int class_id;  // detector class index
    float prob;    // detection confidence
    int x1;        // min-corner x (pixels)
    int y1;        // min-corner y (pixels)
    int x2;        // max-corner x (pixels)
    int y2;        // max-corner y (pixels)
    int len;       // NOTE(review): meaning not visible here -- presumably a count/length; confirm against the producer in the .cpp
    std::vector<cv::Point3d> cam_xyz;  // associated 3D points; presumably in the camera frame -- TODO confirm
    // int mask_rw;
    // int mask_rh;
    // std::vector<uint8_t> mask_roi;
};
// DataFusion class
    // Classification assigned to a lidar point during camera/lidar fusion.
    // Kept as a plain (unscoped) enum: converting to `enum class` would break
    // existing unscoped uses of these enumerators elsewhere in the project.
    // Per-value meanings are inferred from the names -- confirm in the .cpp.
    enum LidarStatus {
        DEFAULT = 0,    // not yet classified
        INNER_BOX = 1,  // presumably: inside an AI detection box
        INNER_CAM = 2,  // presumably: inside the camera field of view
        OUTER_CAM = 3,  // presumably: outside the camera field of view
        HIDEN_CAM = 4,  // presumably: hidden/occluded from the camera (typo for HIDDEN; renaming would break callers)
        REAR_RLM  = 5,  // NOTE(review): meaning unclear from this header -- confirm ("rear" something?)
        UNDER_GRND = 6  // presumably: below ground level

    };
// Fuses lidar obstacle data with camera AI detections.
//
// Declarations only -- the implementations live in the corresponding .cpp.
// Responsibilities (as evidenced by the members/methods below): project lidar
// points against AI bounding boxes, downsample point sets, draw debug
// visualizations, and dump CSV logs of AI / lidar / fused ("camdar") data.
class DataFusion {
public:
    // Constructor
    DataFusion();    
    void Init();
    // Fuse one lidar frame with its matching AI metadata frame.
    void update(const msg::ObstacleData::SharedPtr lidar_msg, const msg::AiMetadata::SharedPtr ai_msg);
    // Lidar-only overload, for when no AI metadata is available.
    void update(const msg::ObstacleData::SharedPtr lidar_msg);


private:

    Config cfg_; 
    cv::Mat R_combined_;  // combined rotation; presumably lidar -> camera extrinsic -- TODO confirm in initFusionParams
    cv::Mat t_combined_;  // combined translation for the same transform
    cv::Mat K_;           // presumably the camera intrinsic matrix -- TODO confirm
    cv::Mat image_;       // working/debug images
    cv::Mat image1_;
    cv::Mat image2_;
    float DOWN_SAMPLE_DIS = 0.20f;  // default spacing passed to downSamplePoints (units presumably meters)
    int save_img_no_ = 0;           // running index for saved debug images
    int save_lidar_no_ = 0;         // running index for saved lidar dumps
    int camdar_index_ = 0;          // running index for fused-output records

    // Output paths for debug CSV dumps.
    std::string SAVE_AI_CSV = "/data/save_ai.csv";
    std::string SAVE_LIDAR_CSV = "/data/save_lidar.csv";
    std::string SAVE_CAMDAR_CSV = "/data/save_camdar.csv";
    // std::string SAVE_APP_TXT = "/data/dcrobo/params/app2camdar.txt";
    std::string SAVE_APP_TXT = "/kdrm210_main/modules/camdar/config/app2camdar.txt";

    // Load/derive R_combined_, t_combined_, K_ (presumably from cfg_ / SAVE_APP_TXT).
    void initFusionParams();
    // Build a rotation matrix from the three angles rx, ry, rz
    // (Euler-angle composition; axis order/convention defined in the .cpp).
    cv::Mat rotationMatrix(double rx, double ry, double rz);
    // Convert the AI metadata's box rectangles into BBox structs.
    std::vector<BBox>  AiMetaBboxRect2Bbox(const msg::AiMetadata::SharedPtr ai_msg);
    // Thin out dense_pts so neighboring kept points are ~`distance` apart.
    std::vector<cv::Point3d> downSamplePoints(std::vector<cv::Point3d> dense_pts,  float distance);
    // True if the 3D point lies within the vehicle's own body region.
    bool isPointInCar(float px, float py, float pz);
    // True if pixel (px, py) lies inside bbox; bounds are inclusive.
    bool isPointInBox(const BBox& bbox, int px, int py) {
        return (px >= bbox.x1 && px <= bbox.x2 && py >= bbox.y1 && py <= bbox.y2);}
    bool isPoint3dInMask(const BBox& bbox, int px, int py);
    // Mask test against a full-resolution mask buffer of size mask_w x mask_h.
    bool isPoint3dInMaskAll(const std::vector<uint8_t>& mask, const BBox& bbox, int mask_w, int mask_h, int px, int py);


    // visual

    // Draw lidar + AI boxes onto the debug image(s).
    void drawFusionMetaData(const msg::AiMetadata::SharedPtr ai_msg, 
            const msg::ObstacleData::SharedPtr lidar_msg, 
            const std::vector<BBox>& boxes);


    // Lidar-only overload of the debug drawing.
    void drawFusionMetaData(const msg::ObstacleData::SharedPtr lidar_msg);

    // Draw the categorized point sets that make up the published fusion
    // result (rear view / in-view / LRF view / outer points, plus per-box
    // camdar and camera point groups).
    void drawFusionPubData(std::vector<cv::Point3d> point3dRearView,
         std::vector<cv::Point3d> point3dInView,
         std::vector<cv::Point3d> point3dLRFView,
         std::vector<cv::Point3d> outer_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camdar_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camera_pts,
         std::vector<std::vector<cv::Point3d>> point3dInBoxes,
         const msg::AiMetadata::SharedPtr ai_msg, const msg::ObstacleData::SharedPtr lidar_msg);

    // Lidar-only overload of the published-data drawing.
    void drawFusionPubData(std::vector<cv::Point3d> point3dRearView,
         std::vector<cv::Point3d> point3dInView,
         std::vector<cv::Point3d> point3dLRFView,
         std::vector<cv::Point3d> outer_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camdar_pts,
         std::vector<std::vector<cv::Point3d>>  fusion_camera_pts,
         std::vector<std::vector<cv::Point3d>> point3dInBoxes,
         const msg::ObstacleData::SharedPtr lidar_msg);

    // Render one point set onto img_draw in the given color
    // (top-down / world-coordinate view, judging by the name -- confirm).
    void drawFusionData2World(std::vector<cv::Point3d> ori_draw_pts, cv::Mat& img_draw, cv::Scalar COLOR_CUR);

    // Append one fused-output message to SAVE_CAMDAR_CSV.
    void write_camdar_2_csv(const msg::FusionCamdar::SharedPtr camdar_msg);


};

#endif // DATA_FUSION_H
