#include <ros/ros.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <cv_bridge/cv_bridge.h>
#include <pcl/point_cloud.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/point_types.h>
#include <opencv2/opencv.hpp>

#include <string>

using namespace std;
using namespace cv;
using namespace pcl;

// Global state shared between the image callback and the point-cloud callback.
// NOTE(review): both callbacks are serviced by the same spin thread here, so
// no locking is used; if a multi-threaded spinner were introduced, image_ and
// rgb_cloud would need synchronization.
Mat extrinsicMat; // Camera extrinsic matrix (lidar -> camera), loaded from the calibration file
Mat cameraMat;    // Camera intrinsic matrix (3x3)
Mat distCoeff;    // Lens distortion coefficients
Mat image_;        // Latest camera frame (BGR), written by imageCallback, read by cloudCallback
ros::Publisher cloud_pub; // Publishes the colorized point cloud
ros::Publisher image_pub; // Publishes the fused projection image (advertised but never used in this file)
pcl::PointCloud<pcl::PointXYZRGB>::Ptr rgb_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);

// 读取标定数据
void readCalibrationData(const string& filename) {
    cv::FileStorage fs(filename, cv::FileStorage::READ);
    if (!fs.isOpened()) {
        ROS_ERROR("Failed to open calibration file: %s", filename.c_str());
        return;
    }
    fs["CameraExtrinsicMat"] >> extrinsicMat;
    fs["CameraMat"] >> cameraMat;
    fs["DistCoeff"] >> distCoeff;
    fs.release();
}

// 投影函数
void projection(const pcl::PointCloud<pcl::PointXYZ>::Ptr& ccloud, cv::Mat& img) {
    rgb_cloud->clear(); // 清空点云数据
    vector<cv::Point3f> points3d;
    points3d.reserve(ccloud->size());
    cv::Point3f point;

    // 遍历点云中的每个点
    for (int i = 0; i < ccloud->size(); i++) {
        point.x = ccloud->points[i].y;
        point.y = ccloud->points[i].x;
        point.z = ccloud->points[i].z;
        // 只处理z正方向的点（假设z正方向为摄像头视锥方向）
        if (point.y > 0) { 
            points3d.push_back(point);
        }
    }

    vector<cv::Point2f> projectedPoints;
    cv::projectPoints(points3d, extrinsicMat(cv::Range(0, 3), cv::Range(0, 3)), extrinsicMat(cv::Range(0, 3), cv::Range(3, 4)), cameraMat, distCoeff, projectedPoints);
    pcl::PointXYZRGB point_rgb;
    for (int i = 0; i < projectedPoints.size(); i++) {
        cv::Point2f p = projectedPoints[i];
        point_rgb.x = points3d[i].x;
        point_rgb.y = points3d[i].y;
        point_rgb.z = points3d[i].z;

        // 检查投影点是否在图像边界内
        if (p.y < img.rows && p.y >= 0 && p.x < img.cols && p.x >= 0) {
            const Vec3b& color = img.at<Vec3b>(p);
            point_rgb.b = color[0];
            point_rgb.g = color[1];
            point_rgb.r = color[2];
            rgb_cloud->points.push_back(point_rgb); // 只添加在图像边界内的点
        }
    }
}

// Callback: receive a lidar cloud, colorize it with the latest camera frame
// and publish the result on /point_cloud_colored.
void cloudCallback(const sensor_msgs::PointCloud2ConstPtr& cloud_msg) {
    // Until the first camera frame arrives there is nothing to color with;
    // running projection() on an empty image would be wasted work at best.
    if (image_.empty()) {
        ROS_WARN_THROTTLE(5.0, "No camera image received yet; skipping cloud");
        return;
    }

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::fromROSMsg(*cloud_msg, *cloud);

    // Fill the global rgb_cloud from this cloud + the latest image.
    projection(cloud, image_);

    sensor_msgs::PointCloud2 ros_cloud;
    pcl::toROSMsg(*rgb_cloud, ros_cloud);
    // Propagate the lidar timestamp so TF/rviz can time-align the output.
    ros_cloud.header.stamp = cloud_msg->header.stamp;
    ros_cloud.header.frame_id = "camera_link";
    cloud_pub.publish(ros_cloud);

    rgb_cloud->points.clear(); // reset for the next callback
}

// Callback: cache the latest camera frame as a BGR cv::Mat.
// cv_bridge::toCvCopy throws cv_bridge::Exception on unsupported encodings;
// an uncaught throw from a callback would terminate the whole node, so it is
// caught and logged instead (the previous frame stays in use).
void imageCallback(const sensor_msgs::ImageConstPtr& image_msg) {
    try {
        cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(image_msg, "bgr8");
        image_ = cv_ptr->image;
    } catch (const cv_bridge::Exception& e) {
        ROS_ERROR("cv_bridge conversion to bgr8 failed: %s", e.what());
    }
}

int main(int argc, char** argv) {
    ros::init(argc, argv, "slambot_control_vis");
    ros::NodeHandle nh;

    readCalibrationData("/root/class22_ws/src/slambot_control_vis/config/calibration-2-result-nodata"); // 正确的标定文件路径

    cloud_pub = nh.advertise<sensor_msgs::PointCloud2>("/point_cloud_colored", 1);
    image_pub = nh.advertise<sensor_msgs::Image>("/projection_result", 1);

    ros::Subscriber cloud_sub = nh.subscribe<sensor_msgs::PointCloud2>("/rslidar_points", 1, cloudCallback);
    ros::Subscriber image_sub = nh.subscribe<sensor_msgs::Image>("/camera/color/image_raw", 1, imageCallback);

    ros::Rate loop_rate(10);

    while (ros::ok()) {
        ros::spinOnce();
        loop_rate.sleep();
    }

    return 0;
}
