#ifndef __ROS_UTILITY_HPP__
#define __ROS_UTILITY_HPP__
#include <ros/ros.h>
#include <rosbag/bag.h>
#include <ros/callback_queue.h>
#include <sensor_msgs/PointCloud2.h>
#include <pcl_conversions/pcl_conversions.h>
#include <cv_bridge/cv_bridge.h>
#include <opencv2/opencv.hpp>
#include <tf2_ros/buffer.h>
#include <tf2_ros/transform_listener.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CompressedImage.h>
#include <geometry_msgs/TransformStamped.h>
#include <nav_msgs/Odometry.h>

// TimeSyncronizer headers
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/exact_time.h>
#include <message_filters/sync_policies/approximate_time.h>

// C++ standard library (std::abs(double) overload, std::max_element, ...)
#include <algorithm>
#include <cmath>

#include "common_tools.h"
#include "utility.hpp"
#define QUEUE_SIZE 1000000
#define WAIT_CYCLE 1000

// Convert an odometry pose (position + orientation quaternion) into a
// 4x4 homogeneous transform matrix.
Eigen::Matrix4f Odom2matrix(const nav_msgs::Odometry &msg)
{
    const auto &p = msg.pose.pose.position;
    const auto &q = msg.pose.pose.orientation;
    Eigen::Affine3f pose = Eigen::Translation3f(p.x, p.y, p.z) *
                           Eigen::Quaternionf(q.w, q.x, q.y, q.z);
    return pose.matrix();
}

// Convert a geometry_msgs TransformStamped (translation + rotation quaternion)
// into a 4x4 homogeneous transform matrix.
Eigen::Matrix4f Tf2matrix(const geometry_msgs::TransformStamped &msg)
{
    const auto &t = msg.transform.translation;
    const auto &r = msg.transform.rotation;
    Eigen::Affine3f pose = Eigen::Translation3f(t.x, t.y, t.z) *
                           Eigen::Quaternionf(r.w, r.x, r.y, r.z);
    return pose.matrix();
}

// Repackage an odometry message as a TransformStamped carrying the same
// header, child frame, and pose — suitable for feeding a tf2 buffer.
geometry_msgs::TransformStamped Odom2TfStamped(const nav_msgs::Odometry &msgOdom)
{
    geometry_msgs::TransformStamped msgTf;
    msgTf.header = msgOdom.header;
    msgTf.child_frame_id = msgOdom.child_frame_id;

    const auto &pos = msgOdom.pose.pose.position;
    msgTf.transform.translation.x = pos.x;
    msgTf.transform.translation.y = pos.y;
    msgTf.transform.translation.z = pos.z;
    msgTf.transform.rotation = msgOdom.pose.pose.orientation;

    return msgTf;
}

// Append one trajectory line to `outfile`: "time x y z qx qy qz qw".
// Formatting (fixed precision) is expected to have been set on the stream.
void SaveOdom(const nav_msgs::OdometryConstPtr &msg, std::ofstream &outfile)
{
    const auto &pos = msg->pose.pose.position;
    const auto &ori = msg->pose.pose.orientation;
    outfile << msg->header.stamp.toSec()
            << " " << pos.x << " " << pos.y << " " << pos.z
            << " " << ori.x << " " << ori.y << " " << ori.z << " " << ori.w
            << std::endl;
}

// Subscribes to a single odometry topic and streams every message to a
// trajectory text file (one "time x y z qx qy qz qw" line per message).
class OdomSaver
{
public:
    std::ofstream trajFile;   // trajectory output stream, fixed 6-decimal precision
    ros::Subscriber odomSub;  // subscription feeding OdomCallback

    OdomSaver(std::shared_ptr<ros::NodeHandle> nh, const std::string &odomTopic, const std::string &odomPath)
    {
        trajFile.open(odomPath, std::ios::out);
        trajFile << std::fixed << std::setprecision(6);
        odomSub = nh->subscribe<nav_msgs::Odometry>(
            odomTopic, QUEUE_SIZE, &OdomSaver::OdomCallback, this,
            ros::TransportHints().tcpNoDelay());
    }

    ~OdomSaver()
    {
        trajFile.close();
    }

    // Invoked once per odometry message; appends one trajectory line.
    void OdomCallback(const nav_msgs::OdometryConstPtr &msg)
    {
        ROS_INFO("In %s OdomCallback!", msg->child_frame_id.c_str());
        SaveOdom(msg, trajFile);
    }
};

// Feeds nav_msgs::Odometry from the given topics into a tf2 buffer so that
// poses can later be looked up by timestamp (tf2 interpolates between
// samples). Callbacks are serviced by a dedicated AsyncSpinner thread on a
// private callback queue, so the buffer keeps filling even while the main
// thread is busy processing.
class OdomTfBuffer
{
protected:
    ros::NodeHandle nh;               // private-namespace handle bound to our own queue
    ros::CallbackQueue callbackQueue; // queue drained exclusively by `spinner`
    ros::AsyncSpinner spinner;        // single background thread for odometry callbacks

    tf2_ros::Buffer tfBuffer;         // transform history, `cacheTime` seconds deep
    std::unique_ptr<tf2_ros::TransformListener> tfListener; // only created when useListener is true
    std::vector<ros::Subscriber> odomSubs; // one subscriber per odometry topic

public:
    // Outcome of GetTransform(); lets callers distinguish "retry later"
    // from "this timestamp will never be resolvable".
    enum class ReturnCode : uint8_t {
        OK = 0,      // transform found and written to the output parameter
        NOT_YET = 1, // lookup failed (frames not yet connected / data pending)
        EXTRA = 2,   // timestamp outside the buffered range (extrapolation)
    };

    OdomTfBuffer(const std::vector<std::string> &odomTopics, double cacheTime = 1000.0, bool useListener = false)
        : nh("~"), spinner(1, &callbackQueue), tfBuffer(ros::Duration(cacheTime))
    {
        // Route all subscriptions of this handle to our private queue so the
        // AsyncSpinner (and nothing else) services them.
        nh.setCallbackQueue(&callbackQueue);
        for (const auto &odomTopic : odomTopics)
        {
            odomSubs.emplace_back(nh.subscribe<nav_msgs::Odometry>(odomTopic, QUEUE_SIZE, &OdomTfBuffer::OdomCallback, this, ros::TransportHints().tcpNoDelay()));
        }
        if (useListener)
        {
            // Optionally also ingest transforms broadcast on /tf into the same buffer.
            tfListener = std::make_unique<tf2_ros::TransformListener>(tfBuffer, true);
        }
        spinner.start();
    }
    ~OdomTfBuffer()
    {
        spinner.stop();
    }

    // Runs on the spinner thread: converts each odometry sample into a
    // stamped transform and stores it under authority "odometry".
    void OdomCallback(const nav_msgs::Odometry::ConstPtr &msgOdom)
    {
        tfBuffer.setTransform(Odom2TfStamped(*msgOdom), "odometry");
    }

    // Look up sourceFrame -> targetFrame at `timestamp` (Time(0) = latest).
    // On OK, `transform` holds the 4x4 matrix; otherwise it is untouched.
    // ExtrapolationException must be caught first: it derives from
    // TransformException, and we map the two to different return codes.
    ReturnCode GetTransform(const std::string &sourceFrame, const std::string &targetFrame, Eigen::Matrix4f &transform, ros::Time timestamp = ros::Time(0))
    {
        geometry_msgs::TransformStamped transformStamped;
        try
        {
            transformStamped = tfBuffer.lookupTransform(targetFrame, sourceFrame, timestamp);
        }
        catch (tf2:: ExtrapolationException &ex)
        {
            ROS_WARN("%s", ex.what());
            return ReturnCode::EXTRA;
        }
        catch (tf2::TransformException &ex)
        {
            ROS_WARN("%s", ex.what());
            return ReturnCode::NOT_YET;
        }
        transform = Tf2matrix(transformStamped);
        return ReturnCode::OK;
    }
};

class CameraSubscriberBase : public ImageProcessor
{
protected:
    ros::Subscriber imageSub;
    ros::Subscriber compressedImageSub;

public:
    CameraSubscriberBase(std::shared_ptr<ros::NodeHandle> nh, const CameraParam &camParam, const std::string &imageTopic, const std::string &outputDir, bool compressed = false)
        : ImageProcessor(camParam, outputDir)
    {
        std::cout << "Constructing CameraSubscriberBase with camParam:" << std::endl
                  << this->camParam << std::endl;

        if (compressed)
        {
            compressedImageSub = nh->subscribe<sensor_msgs::CompressedImage>(imageTopic, QUEUE_SIZE,
                                                                             &CameraSubscriberBase::CompressedImageCallbackWrapper, this,
                                                                             ros::TransportHints().tcpNoDelay());
        }
        else
        {
            imageSub = nh->subscribe<sensor_msgs::Image>(imageTopic, QUEUE_SIZE,
                                                         &CameraSubscriberBase::ImageCallbackWrapper, this,
                                                         ros::TransportHints().tcpNoDelay());
        }
    }

    virtual void ImageCallback(double timestamp, cv::Mat image) = 0;

    void ImageCallbackWrapper(const sensor_msgs::ImageConstPtr &msg)
    {
        ROS_INFO("In cam%s ImageCallback!", camParam.name.c_str());
        if (undistortMap.empty())
        {
            ROS_ERROR("Cam %s image parameters have not been set!", camParam.name.c_str());
            return;
        }
        cv_bridge::CvImagePtr cvPtr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
        double timestamp = msg->header.stamp.toSec();
        cv::Mat undistortImg = UndistortImage(cvPtr->image);
        ImageCallback(timestamp, undistortImg);
    }

    void CompressedImageCallbackWrapper(const sensor_msgs::CompressedImage::ConstPtr &msg)
    {
        ROS_INFO("In cam%s CompressedImageCallback!", camParam.name.c_str());
        if (undistortMap.empty())
        {
            ROS_ERROR("Cam %s image parameters have not been set!", camParam.name.c_str());
            return;
        }
        cv_bridge::CvImagePtr cvPtr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
        double timestamp = msg->header.stamp.toSec();
        cv::Mat undistortImg = UndistortImage(cvPtr->image);
        ImageCallback(timestamp, undistortImg);
    }
};

// Concrete subscriber that simply writes every undistorted frame to disk.
class CameraSaver : public CameraSubscriberBase
{
public:
    CameraSaver(std::shared_ptr<ros::NodeHandle> nh, const CameraParam &camParam, const std::string &imageTopic, const std::string &outputDir, bool compressed = false)
        : CameraSubscriberBase(nh, camParam, imageTopic, outputDir, compressed)
    {
    }

    void ImageCallback(double timestamp, cv::Mat image) override
    {
        // File naming/layout is handled by ImageProcessor::SaveImage.
        SaveImage(image, imageDir, timestamp, camParam.name);
    }
};

// Saves each undistorted frame to disk AND republishes it into a rosbag
// under a new topic name.
class CameraBagSaver : public CameraSubscriberBase
{
protected:
    std::shared_ptr<rosbag::Bag> bag;  // open output bag, shared with the caller
    std::string imageTopicOut;         // topic the undistorted frames are written under
    bool received;                     // true once at least one frame was handled
public:
    /// `imageTopicInOut` = (input topic to subscribe, output topic in the bag).
    CameraBagSaver(std::shared_ptr<ros::NodeHandle> nh, const std::shared_ptr<rosbag::Bag> bagPtr,
        const CameraParam &camParam, const std::pair<std::string, std::string> &imageTopicInOut,
        const std::string &outputDir, bool compressed = false)
        : CameraSubscriberBase(nh, camParam, imageTopicInOut.first, outputDir, compressed),
        // Initializer list matches declaration order (bag, imageTopicOut,
        // received) — the original listed them out of order (-Wreorder).
        bag(bagPtr), imageTopicOut(imageTopicInOut.second), received(false) {}

    void ImageCallback(double timestamp, cv::Mat image) override
    {
        // Persist to disk first, then wrap the frame back into a ROS image
        // message and append it to the bag.
        SaveImage(image, imageDir, timestamp, camParam.name);
        auto camName = "cam" + camParam.name;
        cv_bridge::CvImage cvImage;
        cvImage.header.stamp = ros::Time(timestamp);
        cvImage.header.frame_id = camName;
        cvImage.encoding = "bgr8";
        cvImage.image = image;

        sensor_msgs::ImagePtr imageMsg = cvImage.toImageMsg();
        bag->write(imageTopicOut, imageMsg->header.stamp, *imageMsg);
        received = true;
    }

    /// True once ImageCallback has fired since construction / the last reset.
    bool Received()
    {
        return received;
    }

    void ResetReceived()
    {
        received = false;
    }
};

class CameraBuffer : public CameraSubscriberBase
{
protected:
    std::shared_ptr<std::deque<std::tuple<double, cv::Mat>>> bufferPtr;

public:
    CameraBuffer(std::shared_ptr<ros::NodeHandle> nh, const CameraParam &camParam, const std::string &imageTopic, const std::string &outputDir, bool compressed = false)
        : CameraSubscriberBase(nh, camParam, imageTopic, outputDir, compressed)
    {
        bufferPtr = std::make_shared<std::deque<std::tuple<double, cv::Mat>>>();
    }

    void ImageCallback(double timestamp, cv::Mat image) override
    {
        SaveImage(image, imageDir, timestamp, camParam.name);

        bufferPtr->emplace_back(timestamp, image);
    }

    std::shared_ptr<std::deque<std::tuple<double, cv::Mat>>> GetBufferPtr()
    {
        return bufferPtr;
    }
};

// CameraSaver that additionally records the camera's odometry topic into a
// per-camera trajectory file.
class CameraOdomSaver : public CameraSaver
{
private:
    std::unique_ptr<OdomSaver> odomSaverPtr; // writes the camera trajectory file

public:
    CameraOdomSaver(std::shared_ptr<ros::NodeHandle> nh, const CameraParam &camParam, const std::string &odomTopic, const std::string &imageTopic, const std::string &outputDir, bool compressed = false)
        : CameraSaver(nh, camParam, imageTopic, outputDir, compressed)
    {
        auto camTrajDir = outputDir + "/cam_traj/";
        CommonTools::IfNotExistThenCreate(camTrajDir);

        // Write into the cam_traj directory just created. The original built
        // the path from outputDir directly, leaving camTrajDir unused and
        // diverging from the layout LidarImageRendering uses.
        std::string cameraTrajPath = camTrajDir + "cam_" + camParam.name + "_traj.txt";
        odomSaverPtr = std::make_unique<OdomSaver>(nh, odomTopic, cameraTrajPath);
    }
};

// Base class that exact-time-synchronizes an odometry topic with a point
// cloud topic and hands each matched pair to the derived class's
// SyncedCallback(). Also owns the lidar trajectory output file.
class LidarOdomSyncerBase
{
protected:
    std::string pcdDir;          // directory for per-scan PCD output
    std::ofstream lidarTrajFile; // trajectory file, fixed 6-decimal precision
    std::shared_ptr<message_filters::Subscriber<nav_msgs::Odometry>> filterSubOdom;
    std::shared_ptr<message_filters::Subscriber<sensor_msgs::PointCloud2>> filterSubCloud;
    // ExactTime: only messages with identical header stamps are paired.
    typedef message_filters::sync_policies::ExactTime<nav_msgs::Odometry, sensor_msgs::PointCloud2> SyncPolicy;
    std::shared_ptr<message_filters::Synchronizer<SyncPolicy>> odomCloudSync;

public:
    LidarOdomSyncerBase(std::shared_ptr<ros::NodeHandle> nh, const std::string &odomTopic, const std::string &pointcloudTopic, const std::string &outputDir)
        : pcdDir(outputDir + "/lidar_scan/")
    {
        CommonTools::IfNotExistThenCreate(pcdDir);
        lidarTrajFile.open(outputDir + "/imu_traj.txt", std::ios::out);
        lidarTrajFile << std::fixed << std::setprecision(6);
        filterSubOdom = std::make_shared<message_filters::Subscriber<nav_msgs::Odometry>>(*nh, odomTopic, QUEUE_SIZE, ros::TransportHints().tcpNoDelay());
        filterSubCloud = std::make_shared<message_filters::Subscriber<sensor_msgs::PointCloud2>>(*nh, pointcloudTopic, QUEUE_SIZE, ros::TransportHints().tcpNoDelay());
        odomCloudSync = std::make_shared<message_filters::Synchronizer<SyncPolicy>>(SyncPolicy(QUEUE_SIZE), *filterSubOdom, *filterSubCloud); // QUEUE_SIZE is the message queue length
        odomCloudSync->registerCallback(boost::bind(&LidarOdomSyncerBase::SyncedCallbackWrapper, this, _1, _2));
    }

    virtual ~LidarOdomSyncerBase()
    {
        lidarTrajFile.close();
    }

    // Invoked once per time-matched (odometry, cloud) pair.
    virtual void SyncedCallback(const nav_msgs::Odometry::ConstPtr &msgOdom, const sensor_msgs::PointCloud2::ConstPtr &msgCloud) = 0;

    // Non-virtual target for boost::bind; forwards to the virtual callback.
    void SyncedCallbackWrapper(const nav_msgs::Odometry::ConstPtr &msgOdom, const sensor_msgs::PointCloud2::ConstPtr &msgCloud)
    {
        SyncedCallback(msgOdom, msgCloud);
    }
};

// Concrete syncer that logs the odometry line and dumps each matched scan
// to a PCD file alongside the pose derived from the odometry message.
class LidarOdomSaver : public LidarOdomSyncerBase
{
public:
    LidarOdomSaver(std::shared_ptr<ros::NodeHandle> nh, const std::string &odomTopic, const std::string &pointcloudTopic, const std::string &outputDir)
        : LidarOdomSyncerBase(nh, odomTopic, pointcloudTopic, outputDir) {}

    void SyncedCallback(const nav_msgs::Odometry::ConstPtr &msgOdom, const sensor_msgs::PointCloud2::ConstPtr &msgCloud) override
    {
        ROS_INFO("In SyncedCallback!");
        SaveOdom(msgOdom, lidarTrajFile);

        pcl::PointCloud<PointType>::Ptr cloud(new pcl::PointCloud<PointType>);
        pcl::fromROSMsg(*msgCloud, *cloud);
        const Eigen::Matrix4f pose = Odom2matrix(*msgOdom);
        SavePCD(pcdDir, msgCloud->header.stamp.toSec(), cloud, pose);
    }
};

class LidarOdomBuffer : public LidarOdomSyncerBase
{
public:
    std::shared_ptr<std::deque<std::tuple<double, Eigen::Matrix4f, pcl::PointCloud<PointType>::Ptr>>> bufferPtr;

    LidarOdomBuffer(std::shared_ptr<ros::NodeHandle> nh, const std::string &odomTopic, const std::string &pointcloudTopic, const std::string &outputDir)
        : LidarOdomSyncerBase(nh, odomTopic, pointcloudTopic, outputDir)
    {
        bufferPtr = std::make_shared<std::deque<std::tuple<double, Eigen::Matrix4f, pcl::PointCloud<PointType>::Ptr>>>();
    }

    void SyncedCallback(const nav_msgs::Odometry::ConstPtr &msgOdom, const sensor_msgs::PointCloud2::ConstPtr &msgCloud) override
    {
        ROS_INFO("In SyncedCallback!");
        SaveOdom(msgOdom, lidarTrajFile);

        Eigen::Matrix4f transform = Odom2matrix(*msgOdom);
        pcl::PointCloud<PointType>::Ptr laserCloud(new pcl::PointCloud<PointType>);
        pcl::fromROSMsg(*msgCloud, *laserCloud);
        bufferPtr->emplace_back(msgOdom->header.stamp.toSec(), transform, laserCloud);
    }

    std::shared_ptr<std::deque<std::tuple<double, Eigen::Matrix4f, pcl::PointCloud<PointType>::Ptr>>> GetBufferPtr()
    {
        return bufferPtr;
    }
};

// Pulls time-matched lidar scans and camera frames from their buffers,
// colors each scan with the images, saves the colored PCD, and writes
// per-camera trajectory files.
class LidarImageRendering
{
protected:
    std::string imageDir;                     // undistorted image output directory
    std::string pcdDir;                       // colored scan output directory
    LidarCameraParam lidarCamParam;           // calibration: intrinsics + lidar->camera extrinsics
    std::vector<CameraBuffer> cameraBuffers;  // one buffering subscriber per camera topic
    LidarOdomBuffer lidarOdomBuffer;          // synchronized (odom, cloud) buffer
    std::vector<std::ofstream> camTrajFiles;  // per-camera trajectory output files

public:
    LidarImageRendering(std::shared_ptr<ros::NodeHandle> nh, const std::string &odomTopic, const std::string &pointcloudTopic, const std::vector<std::string> &imageTopics, const LidarCameraParam &lidarCamParam, const std::string &outputDir)
        // Initializer list follows member declaration order (avoids -Wreorder;
        // the original listed lidarOdomBuffer first).
        : imageDir(outputDir + "/images/"), pcdDir(outputDir + "/lidar_scan/"), lidarCamParam(lidarCamParam), lidarOdomBuffer(nh, odomTopic, pointcloudTopic, outputDir)
    {
        CommonTools::IfNotExistThenCreate(imageDir);
        std::cout << "Constructing LidarImageRendering with lidarCamParam:" << std::endl
                  << this->lidarCamParam << std::endl;

        auto camTrajDir = outputDir + "/cam_traj/";
        CommonTools::IfNotExistThenCreate(camTrajDir);
        for (size_t i = 0; i < imageTopics.size(); ++i)
        {
            std::string cameraTrajPath = camTrajDir + "cam_" + lidarCamParam.camParams[i].name + "_traj.txt";
            camTrajFiles.emplace_back(cameraTrajPath, std::ios::out);
            camTrajFiles.back() << std::fixed << std::setprecision(6);
            cameraBuffers.emplace_back(nh, lidarCamParam.camParams[i], imageTopics[i], outputDir, true);
        }
    }

    // Main processing loop: spin, then drain the buffers whenever a scan and
    // at least two frames per camera are available.
    void Run()
    {
        auto pointsBufferPtr = lidarOdomBuffer.GetBufferPtr();
        std::vector<std::shared_ptr<std::deque<std::tuple<double, cv::Mat>>>> imageBufferPtrs;
        std::for_each(cameraBuffers.begin(), cameraBuffers.end(),
                      [&imageBufferPtrs](CameraBuffer &cameraBuffer)
                      {
                          imageBufferPtrs.emplace_back(cameraBuffer.GetBufferPtr());
                      });

        ros::Rate rate(100);
        while (ros::ok())
        {
            ros::spinOnce();
            rate.sleep();
            // Need >1 frame per camera so DiscardExpiredFrames can compare
            // each camera's two oldest timestamps against the scan time.
            while (!pointsBufferPtr->empty() && std::all_of(imageBufferPtrs.begin(), imageBufferPtrs.end(),
                                                            [](const std::shared_ptr<std::deque<std::tuple<double, cv::Mat>>> &imageBufferPtr)
                                                            {
                                                                return imageBufferPtr->size() > 1;
                                                            }))
            {
                if (DiscardExpiredFrames(pointsBufferPtr, imageBufferPtrs))
                    continue;
                std::vector<cv::Mat> images;
                for (auto &imageBufferPtr : imageBufferPtrs)
                {
                    images.emplace_back(std::get<1>(imageBufferPtr->front()));
                }

                auto coloredPointCloud = SemanticColorizer().RenderPoints(lidarCamParam, std::get<2>(pointsBufferPtr->front()), images);

                SavePCD(pcdDir, std::get<0>(pointsBufferPtr->front()), coloredPointCloud, std::get<1>(pointsBufferPtr->front()));

                // BUG FIX: capture the pose of the scan just rendered BEFORE
                // popping it. The original popped first and then read
                // front(), pairing images with the NEXT scan's pose — and
                // dereferencing an empty deque when this was the last entry.
                const Eigen::Matrix4f Tlidar2world = std::get<1>(pointsBufferPtr->front());
                pointsBufferPtr->pop_front();
                for (size_t i = 0; i < imageBufferPtrs.size(); ++i)
                {
                    Eigen::Matrix4f Tcam2world = Tlidar2world * lidarCamParam.Tlidar2cam[i].inverse();
                    Eigen::Vector3f translation = Tcam2world.block<3, 1>(0, 3);
                    Eigen::Quaternionf orientation(Tcam2world.block<3, 3>(0, 0));
                    camTrajFiles[i] << std::get<0>(imageBufferPtrs[i]->front()) << " "
                                    << translation(0) << " "
                                    << translation(1) << " "
                                    << translation(2) << " "
                                    << orientation.x() << " "
                                    << orientation.y() << " "
                                    << orientation.z() << " "
                                    << orientation.w() << std::endl;
                    imageBufferPtrs[i]->pop_front();
                }
            }
        }
    }

    // Drop out-of-order or badly mis-timed frames/scans from the buffer
    // fronts. Returns true if anything was discarded (caller should
    // re-evaluate buffer state before processing).
    bool DiscardExpiredFrames(std::shared_ptr<std::deque<std::tuple<double, Eigen::Matrix4f, pcl::PointCloud<PointType>::Ptr>>> pointsBufferPtr, std::vector<std::shared_ptr<std::deque<std::tuple<double, cv::Mat>>>> &imageBufferPtrs)
    {
        double lidarTime = std::get<0>(pointsBufferPtr->front());
        // Pass 1: drop frames that arrived out of timestamp order.
        for (auto &imageBufferPtr : imageBufferPtrs)
        {
            double cameraTime0 = std::get<0>(imageBufferPtr->at(0));
            double cameraTime1 = std::get<0>(imageBufferPtr->at(1));
            if (cameraTime0 > cameraTime1)
            {
                imageBufferPtr->pop_front();
                return true;
            }
        }
        // Pass 2: drop the front frame if the following frame is closer to
        // the scan time (std::abs — the unqualified abs risked the int overload).
        std::vector<double> timeDiffs;
        for (auto &imageBufferPtr : imageBufferPtrs)
        {
            double cameraTime0 = std::get<0>(imageBufferPtr->at(0));
            double cameraTime1 = std::get<0>(imageBufferPtr->at(1));
            double timeDiff0 = std::abs(lidarTime - cameraTime0);
            double timeDiff1 = std::abs(lidarTime - cameraTime1);
            if (timeDiff0 > timeDiff1)
            {
                imageBufferPtr->pop_front();
                return true;
            }
            timeDiffs.emplace_back(timeDiff0);
        }

        // Pass 3: drop the scan if the best frame match is still >50 ms away.
        if (std::any_of(timeDiffs.begin(), timeDiffs.end(), [](double timeDiff)
                        { return timeDiff > 0.05; }))
        {
            pointsBufferPtr->pop_front();
            return true;
        }

        return false;
    }
};

// Shared output/processing machinery for the lidar+images pipelines:
// owns the trajectory files, per-camera image processors, and the
// semantic colorizer, and implements the common Process() step.
class LidarImgsBase {
protected:
    std::string pcdDir;                         // colored scan output directory
    LidarCameraParam lidarCamParam;             // calibration: intrinsics + lidar->camera extrinsics
    std::ofstream lidarTrajFile;                // lidar trajectory file (imu_traj.txt)
    std::vector<std::ofstream> camTrajFiles;    // one trajectory file per camera
    std::vector<ImageProcessor> imageProcessors; // per-camera undistortion + saving
    std::string semanticDir;                    // semantic mask dir; cleared (empty) if absent
    SemanticColorizer semanticColorizer;        // colors points from images (+ optional masks)
public:
    LidarImgsBase(const LidarCameraParam &lidarCamParam,
                  const std::string &outputDir)
        : pcdDir(outputDir + "/slam/odometry/lidar_scan/"),
          semanticDir(outputDir + "/semantic/semantic/"),
          lidarCamParam(lidarCamParam)
    {
        std::string odometryDir = outputDir + "/slam/odometry/";
        // An empty semanticDir later disables mask lookup in Process().
        if (!CommonTools::IfFileExist(semanticDir)) {
            semanticDir.clear();
        }
        CommonTools::IfNotExistThenCreate(pcdDir);
        std::cout << "Constructing LidarImgsBase with lidarCamParam:" << std::endl
                  << this->lidarCamParam << std::endl;

        lidarTrajFile.open(odometryDir + "/imu_traj.txt", std::ios::out);
        lidarTrajFile << std::fixed << std::setprecision(6);

        auto camTrajDir = odometryDir + "/cam_traj/";
        CommonTools::IfNotExistThenCreate(camTrajDir);
        for (size_t i = 0; i < lidarCamParam.camParams.size(); ++i)
        {
            std::string cameraTrajPath = camTrajDir + "cam_" + lidarCamParam.camParams[i].name + "_traj.txt";
            camTrajFiles.emplace_back(cameraTrajPath, std::ios::out);
            camTrajFiles.back() << std::fixed << std::setprecision(6);
            imageProcessors.emplace_back(lidarCamParam.camParams[i], odometryDir);
        }
    }

    // Configure class filtering on the colorizer from the category map next
    // to the semantic dir.
    // NOTE(review): if semanticDir was cleared above, this reads
    // "/../category_map.json" — presumably callers only invoke SetFilter when
    // semantics exist; confirm.
    void SetFilter(const std::vector<std::string> &filteredClasses, int radius = 0) {
        semanticColorizer.SetFilter(semanticDir + "/../category_map.json", filteredClasses, radius);
    }

    // One pipeline step: per camera, interpolate the lidar->cam extrinsic for
    // the image timestamp, log the camera pose, undistort + save the image;
    // then colorize the scan (with semantic masks when available) and save
    // the colored PCD plus the lidar pose line.
    // NOTE(review): SaveOdom(double, Matrix4f, ofstream) and Time2String are
    // overloads defined outside this file (utility.hpp, presumably).
    void Process(const pcl::PointCloud<PointType>::Ptr& laserCloud, const std::vector<cv::Mat>& images, Eigen::Matrix4f Tlidar2world, std::vector<Eigen::Matrix4f> Tcam2world, double lidarTime, const std::vector<double>& imgTime) {
        std::vector<cv::Mat> undistortedImages;
        LidarCameraParam lidarCamParamInterpolated(lidarCamParam);
        for (size_t i = 0; i < lidarCamParamInterpolated.camParams.size(); ++i) {
            // Effective extrinsic at the image's timestamp: cam_T_world * world_T_lidar.
            lidarCamParamInterpolated.Tlidar2cam[i] = Tcam2world[i].inverse() * Tlidar2world;
            SaveOdom(imgTime[i], Tcam2world[i], camTrajFiles[i]);
            undistortedImages.emplace_back(imageProcessors[i].UndistortImage(images[i]));
            imageProcessors[i].Save(undistortedImages[i], imgTime[i]);
        }

        std::optional<std::vector<cv::Mat>> masks = std::nullopt;
        if (!semanticDir.empty()) {
            // Load the pre-computed semantic mask matching each undistorted image.
            std::vector<cv::Mat> maskImgs(imgTime.size());
            for (size_t i = 0; i < imgTime.size(); ++i) {
                auto maskImgPath = semanticDir + "/cam" + lidarCamParamInterpolated.camParams[i].name + "_" + Time2String(imgTime[i]) + "_undistorted.png";
                maskImgs[i] = cv::imread(maskImgPath, cv::IMREAD_UNCHANGED);
            }
            masks = maskImgs;
        }
        auto coloredPointCloud = semanticColorizer(lidarCamParamInterpolated, laserCloud, undistortedImages, masks);
        SavePCD(pcdDir, lidarTime, coloredPointCloud, Tlidar2world);
        SaveOdom(lidarTime, Tlidar2world, lidarTrajFile);
    }
};


// Base for pipelines that approximate-time-synchronize odometry, a point
// cloud, and N image topics; derived classes wire up the Synchronizer and
// forward matched tuples to SyncedCallback().
class LidarOdomImgsSyncerBase: public LidarImgsBase
{
protected:
    std::shared_ptr<message_filters::Subscriber<nav_msgs::Odometry>> filterSubOdom;
    std::shared_ptr<message_filters::Subscriber<sensor_msgs::PointCloud2>> filterSubCloud;
    std::vector<std::shared_ptr<message_filters::Subscriber<sensor_msgs::Image>>> filterSubImgs; // one per image topic

public:
    // Cycles since the last synced callback; -1 until the first one fires.
    int waitPeriod = -1;
    LidarOdomImgsSyncerBase(std::shared_ptr<ros::NodeHandle> nh, const std::string &odomTopic, const std::string &pointcloudTopic, const std::vector<std::string> &imageTopics, const LidarCameraParam &lidarCamParam, const std::string &outputDir)
    : LidarImgsBase(lidarCamParam, outputDir)
    {
        filterSubOdom = std::make_shared<message_filters::Subscriber<nav_msgs::Odometry>>(*nh, odomTopic, QUEUE_SIZE, ros::TransportHints().tcpNoDelay());
        filterSubCloud = std::make_shared<message_filters::Subscriber<sensor_msgs::PointCloud2>>(*nh, pointcloudTopic, QUEUE_SIZE, ros::TransportHints().tcpNoDelay());
        for (const auto& imgTopic : imageTopics) {
            auto imgSub = std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>(*nh, imgTopic, QUEUE_SIZE, ros::TransportHints().tcpNoDelay());
            filterSubImgs.push_back(imgSub);
        }
    }

    // Handle one synchronized (odom, cloud, images...) tuple: check pairwise
    // timestamp spread, decode the images, derive per-camera world poses from
    // the odometry, and run the shared Process() step.
    void SyncedCallback(const nav_msgs::Odometry::ConstPtr &msgOdom,
                        const sensor_msgs::PointCloud2::ConstPtr &msgCloud,
                        const std::vector<sensor_msgs::Image::ConstPtr> &msgImgs)
    {
        waitPeriod = 0;
        double lidarTime = msgCloud->header.stamp.toSec();
        double odomTime = msgOdom->header.stamp.toSec();
        std::vector<double> imgTime;
        // All pairwise |dt| between lidar, odom, and every image.
        // std::abs: the original's unqualified abs could bind to ::abs(int)
        // and silently truncate sub-second differences to 0.
        std::vector<double> timeDiff{std::abs(lidarTime - odomTime)};
        std::vector<cv::Mat> images;
        auto Tlidar2world = Odom2matrix(*msgOdom);
        std::vector<Eigen::Matrix4f> Tcam2world;
        for (size_t i = 0; i < msgImgs.size(); ++i)
        {
            imgTime.emplace_back(msgImgs[i]->header.stamp.toSec());
            timeDiff.emplace_back(std::abs(lidarTime - imgTime[i]));
            for (size_t j = 0; j < i; ++j)
            {
                timeDiff.emplace_back(std::abs(imgTime[i] - imgTime[j]));
            }
            cv_bridge::CvImagePtr cvPtr = cv_bridge::toCvCopy(msgImgs[i], sensor_msgs::image_encodings::BGR8);
            images.emplace_back(cvPtr->image);
            // Camera pose at (approximately) the lidar time; per-image
            // refinement happens inside Process().
            Tcam2world.emplace_back(Tlidar2world * lidarCamParam.Tlidar2cam[i].inverse());
        }

        // timeDiff always holds at least the lidar/odom difference.
        auto maxIt = std::max_element(timeDiff.begin(), timeDiff.end());
        if (*maxIt > 0.08)
        {
            ROS_WARN("Time diff is too large: %f, index: %ld", *maxIt, std::distance(timeDiff.begin(), maxIt));
        }
        else {
            ROS_INFO("Max time diff: %f, index: %ld", *maxIt, std::distance(timeDiff.begin(), maxIt));
        }
        pcl::PointCloud<PointType>::Ptr laserCloud(new pcl::PointCloud<PointType>);
        pcl::fromROSMsg(*msgCloud, *laserCloud);

        Process(laserCloud, images, Tlidar2world, Tcam2world, lidarTime, imgTime);
    }
};

// Concrete syncer for exactly two cameras: odometry + cloud + two images.
class LidarOdomTwoImgsSyncer : public LidarOdomImgsSyncerBase
{
protected:
    using SyncPolicy = message_filters::sync_policies::ApproximateTime<nav_msgs::Odometry, sensor_msgs::PointCloud2, sensor_msgs::Image, sensor_msgs::Image>;
    std::shared_ptr<message_filters::Synchronizer<SyncPolicy>> odomCloudImgSync;

public:
    /// Throws std::runtime_error unless both the topic list and the
    /// calibration provide at least two cameras.
    LidarOdomTwoImgsSyncer(std::shared_ptr<ros::NodeHandle> nh, const std::string &odomTopic, const std::string &pointcloudTopic, const std::vector<std::string> &imageTopics, const LidarCameraParam &lidarCamParam, const std::string &outputDir)
    : LidarOdomImgsSyncerBase(nh, odomTopic, pointcloudTopic, imageTopics, lidarCamParam, outputDir)
    {
        // BUG FIX: both requirements are mandatory (filterSubImgs[1] and
        // Tlidar2cam[1] are accessed below), so reject when EITHER is short.
        // The original used &&, letting e.g. a one-topic config crash later.
        if (imageTopics.size() < 2 || lidarCamParam.camParams.size() < 2)
            throw std::runtime_error("LidarOdomTwoImgsSyncer needs two image topics and two camera parameters.");

        SyncPolicy syncPolicy(QUEUE_SIZE);
        // Slot 1 = point cloud: tell the policy clouds arrive at least 50 ms apart.
        syncPolicy.setInterMessageLowerBound(1, ros::Duration(0.05));
        odomCloudImgSync = std::make_shared<message_filters::Synchronizer<SyncPolicy>>(std::move(syncPolicy), *filterSubOdom, *filterSubCloud, *filterSubImgs[0], *filterSubImgs[1]);
        odomCloudImgSync->registerCallback(boost::bind(&LidarOdomTwoImgsSyncer::SyncedCallbackWrapper, this, _1, _2, _3, _4));
    }

    // Adapter: pack the two image pointers into the vector form the base expects.
    void SyncedCallbackWrapper(const nav_msgs::Odometry::ConstPtr &msgOdom,
                               const sensor_msgs::PointCloud2::ConstPtr &msgCloud,
                               const sensor_msgs::Image::ConstPtr &msgImg0,
                               const sensor_msgs::Image::ConstPtr &msgImg1)
    {
        std::vector<sensor_msgs::Image::ConstPtr> msgImgs{msgImg0, msgImg1};
        SyncedCallback(msgOdom, msgCloud, msgImgs);
    }
};

// Synchronizes a point cloud with N images, then resolves poses from an
// OdomTfBuffer (tf2 interpolation) rather than a synced odometry message.
// Matched tuples queue up in SyncedCallback(); Run() drains the queue once
// transforms become available.
class LidarImgsSyncerBase : public LidarImgsBase
{
protected:
    std::unique_ptr<OdomTfBuffer> odomTfBufferPtr; // background-filled lidar-pose buffer
    std::deque<std::tuple<sensor_msgs::PointCloud2::ConstPtr, std::vector<sensor_msgs::Image::ConstPtr>>> msgsQueue; // pending matched tuples
    std::shared_ptr<message_filters::Subscriber<sensor_msgs::PointCloud2>> filterSubCloud;
    std::vector<std::shared_ptr<message_filters::Subscriber<sensor_msgs::Image>>> filterSubImgs;
public:
    LidarImgsSyncerBase(std::shared_ptr<ros::NodeHandle> nh,
                        const std::string &odomTopic,
                        const std::string &pointcloudTopic,
                        const std::vector<std::string> &imageTopics,
                        const LidarCameraParam &lidarCamParam,
                        const std::string &outputDir)
        : LidarImgsBase(lidarCamParam, outputDir),
          odomTfBufferPtr(new OdomTfBuffer({odomTopic}))
    {
        filterSubCloud = std::make_shared<message_filters::Subscriber<sensor_msgs::PointCloud2>>(*nh, pointcloudTopic, QUEUE_SIZE, ros::TransportHints().tcpNoDelay());
        for (const auto &imgTopic : imageTopics)
        {
            filterSubImgs.emplace_back(
                std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>
                (*nh, imgTopic, QUEUE_SIZE, ros::TransportHints().tcpNoDelay()));
        }
    }

    // Queue one matched (cloud, images) tuple, logging the worst pairwise
    // timestamp spread. std::abs replaces the original's unqualified abs,
    // which could bind to ::abs(int) and truncate sub-second differences.
    void SyncedCallback(const sensor_msgs::PointCloud2::ConstPtr &msgCloud,
                        const std::vector<sensor_msgs::Image::ConstPtr> &msgImgs)
    {
        double lidarTime = msgCloud->header.stamp.toSec();
        std::vector<double> imgTime;
        std::vector<double> timeDiff;
        for (size_t i = 0; i < msgImgs.size(); ++i)
        {
            imgTime.emplace_back(msgImgs[i]->header.stamp.toSec());
            timeDiff.emplace_back(std::abs(lidarTime - imgTime[i]));
            for (size_t j = 0; j < i; ++j)
            {
                timeDiff.emplace_back(std::abs(imgTime[i] - imgTime[j]));
            }
        }

        // Guard: with zero images timeDiff is empty and dereferencing
        // max_element's end iterator (as the original did) is UB.
        if (!timeDiff.empty())
        {
            auto maxIt = std::max_element(timeDiff.begin(), timeDiff.end());
            if (*maxIt > 0.08)
            {
                ROS_WARN("Time diff is too large: %f, index: %ld", *maxIt, std::distance(timeDiff.begin(), maxIt));
            }
            else {
                ROS_INFO("Max time diff: %f, index: %ld", *maxIt, std::distance(timeDiff.begin(), maxIt));
            }
        }
        msgsQueue.emplace_back(msgCloud, msgImgs);
    }

    // Try to process one tuple. Returns true when the tuple is consumed
    // (processed, or dropped because its pose can never be resolved), false
    // when the pose isn't available yet and the caller should retry.
    bool ProcessMsgs(const sensor_msgs::PointCloud2::ConstPtr &msgCloud,
                     const std::vector<sensor_msgs::Image::ConstPtr> &msgImgs)
    {
        Eigen::Matrix4f Tlidar2world;
        switch (odomTfBufferPtr->GetTransform("lidar", "map", Tlidar2world, msgCloud->header.stamp))
        {
            case OdomTfBuffer::ReturnCode::NOT_YET:
                return false;  // pose may still arrive — keep the tuple queued
            case OdomTfBuffer::ReturnCode::EXTRA:
                return true;   // outside the buffer range — drop the tuple
            default:
                break;
        }
        double lidarTime = msgCloud->header.stamp.toSec();

        std::vector<double> imgTime;
        // Lidar pose interpolated to each image's timestamp.
        std::vector<Eigen::Matrix4f> Tlidar2worldInterpolated(msgImgs.size());
        for (size_t i = 0; i < msgImgs.size(); ++i)
        {
            switch (odomTfBufferPtr->GetTransform("lidar", "map", Tlidar2worldInterpolated[i], msgImgs[i]->header.stamp))
            {
                case OdomTfBuffer::ReturnCode::NOT_YET:
                    return false;
                case OdomTfBuffer::ReturnCode::EXTRA:
                    return true;
                default:
                    break;
            }
            imgTime.emplace_back(msgImgs[i]->header.stamp.toSec());
        }

        std::vector<cv::Mat> images;
        std::vector<Eigen::Matrix4f> Tcam2world;
        for (size_t i = 0; i < msgImgs.size(); ++i)
        {
            // (The original also computed an unused local `imgTime` here that
            // shadowed the vector above — removed.)
            Tcam2world.emplace_back(Tlidar2worldInterpolated[i] * lidarCamParam.Tlidar2cam[i].inverse());

            cv_bridge::CvImagePtr cvPtr = cv_bridge::toCvCopy(msgImgs[i], sensor_msgs::image_encodings::BGR8);
            images.emplace_back(std::move(cvPtr->image));
        }

        pcl::PointCloud<PointType>::Ptr laserCloud(new pcl::PointCloud<PointType>);
        pcl::fromROSMsg(*msgCloud, *laserCloud);

        Process(laserCloud, images, Tlidar2world, Tcam2world, lidarTime, imgTime);
        return true;
    }

    // Drain loop: spin, process the queue front, and shut down after
    // WAIT_CYCLE idle cycles once at least one tuple has been seen.
    void Run()
    {
        int waitPeriod = -1;
        ros::Rate rate(100);
        while (ros::ok())
        {
            rate.sleep();
            ros::spinOnce();
            if (!msgsQueue.empty())
            {
                waitPeriod = 0;
                auto &msgCloud = std::get<0>(msgsQueue.front());
                auto &msgImgs = std::get<1>(msgsQueue.front());
                if (ProcessMsgs(msgCloud, msgImgs))
                    msgsQueue.pop_front();
            }
            else {
                if (waitPeriod >= 0 && ++waitPeriod > WAIT_CYCLE) {
                    ROS_WARN("No messages received for %d cycles, shutting down.", WAIT_CYCLE);
                    return;
                }
            }
        }
    }
};

// Concrete tf-based syncer for exactly two cameras: cloud + two images.
class LidarTwoImgsSyncer : public LidarImgsSyncerBase
{
protected:
    using SyncPolicy = message_filters::sync_policies::ApproximateTime<sensor_msgs::PointCloud2,
                                                                       sensor_msgs::Image,
                                                                       sensor_msgs::Image>;
    std::shared_ptr<message_filters::Synchronizer<SyncPolicy>> cloudImgSync;

public:
    /// Throws std::runtime_error unless both the topic list and the
    /// calibration provide at least two cameras.
    LidarTwoImgsSyncer(std::shared_ptr<ros::NodeHandle> nh,
                       const std::string &odomTopic,
                       const std::string &pointcloudTopic,
                       const std::vector<std::string> &imageTopics,
                       const LidarCameraParam &lidarCamParam,
                       const std::string &outputDir)
        : LidarImgsSyncerBase(nh, odomTopic, pointcloudTopic, imageTopics, lidarCamParam, outputDir)
    {
        // BUG FIX: both requirements are mandatory (filterSubImgs[1] is
        // dereferenced below), so reject when EITHER is short. The original
        // used && and only threw when both were deficient.
        if (imageTopics.size() < 2 || lidarCamParam.camParams.size() < 2)
            throw std::runtime_error("LidarTwoImgsSyncer needs two image topics and two camera parameters.");

        SyncPolicy syncPolicy(QUEUE_SIZE);
        // Slot 0 = point cloud: clouds arrive at least 50 ms apart.
        syncPolicy.setInterMessageLowerBound(0, ros::Duration(0.05));
        cloudImgSync = std::make_shared<message_filters::Synchronizer<SyncPolicy>>
                       (std::move(syncPolicy), *filterSubCloud,
                       *filterSubImgs[0], *filterSubImgs[1]);
        cloudImgSync->registerCallback(
            boost::bind(&LidarTwoImgsSyncer::SyncedCallbackWrapper, 
                        this, _1, _2, _3));
    }

    // Adapter: pack the two image pointers into the vector form the base expects.
    void SyncedCallbackWrapper(const sensor_msgs::PointCloud2::ConstPtr &msgCloud, const sensor_msgs::Image::ConstPtr &msgImg0, const sensor_msgs::Image::ConstPtr &msgImg1)
    {
        std::vector<sensor_msgs::Image::ConstPtr> msgImgs{msgImg0, msgImg1};
        SyncedCallback(msgCloud, msgImgs);
    }
};

#endif // __ROS_UTILITY_HPP__