#include <gtsam/geometry/Pose3.h>
#include <gtsam/geometry/Rot3.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/navigation/CombinedImuFactor.h>
#include <gtsam/navigation/GPSFactor.h>
#include <gtsam/navigation/ImuFactor.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/Marginals.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/PriorFactor.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/ndt.h>
#include <pcl/visualization/pcl_visualizer.h> // visualization module (used only by the commented-out debug viewer)

#include "lio_sam/msg/cloud_info.hpp"
#include "lio_sam/srv/save_map.hpp"
#include "lio_sam/srv/relocalization.hpp"

#include "utility.hpp"

using namespace gtsam;

using symbol_shorthand::B;  // Bias  (ax,ay,az,gx,gy,gz)
using symbol_shorthand::G;  // GPS pose
using symbol_shorthand::V;  // Vel   (xdot,ydot,zdot)
using symbol_shorthand::X;  // Pose3 (x,y,z,r,p,y)

/*
 * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw] intensity is
 * time stamp)
 */
// 6-DoF pose stored as a PCL point type: position (x,y,z) + orientation
// (roll,pitch,yaw). Per the file-level comment, `intensity` doubles as a
// time stamp / key-frame index — TODO confirm against the writers.
struct PointXYZIRPYT {
    PCL_ADD_POINT4D     // adds float x, y, z plus SSE-alignment padding
    PCL_ADD_INTENSITY;  // preferred way of adding a XYZ+padding
    float  roll;        // orientation, radians
    float  pitch;
    float  yaw;
    double time;        // acquisition time of the key frame, seconds
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW  // make sure our new allocators are aligned
} EIGEN_ALIGN16;                     // enforce SSE padding for correct memory alignment

POINT_CLOUD_REGISTER_POINT_STRUCT(PointXYZIRPYT,
                                  (float, x, x)(float, y, y)(float, z, z)(float, intensity, intensity)(
                                      float, roll, roll)(float, pitch, pitch)(float, yaw, yaw)(double, time, time))

typedef PointXYZIRPYT PointTypePose;

/* Forward declaration plus a process-wide handle to the node instance. */
class mapOptimization;
std::shared_ptr<mapOptimization> gp_map_opt;  // global mapOptimization object

// Score a cloud-to-map match as the root-mean-square of nearest-neighbor
// distances from each point of `source` to the cloud indexed by `kd_tree`.
// NOTE: `kd_tree` must already be built over `target`; the `target` argument
// is kept only for interface compatibility and is not used directly.
// Returns 0 when `source` is empty or no neighbor is found.
float ComputeRmse(const pcl::KdTreeFLANN<PointType>::Ptr kd_tree, const pcl::PointCloud<PointType>::Ptr& source,
                  const pcl::PointCloud<PointType>::Ptr& target) {
    (void) target;  // indexed by kd_tree already; see note above
    float              sum_squared = 0.0f;
    int                count       = 0;
    std::vector<int>   points_idx;
    std::vector<float> points_squared_distance;
    for (const auto& p : source->points) {
        // nearestKSearch reports the SQUARED distance to the closest neighbor.
        if (kd_tree->nearestKSearch(p, 1, points_idx, points_squared_distance) > 0) {
            sum_squared += points_squared_distance[0];
            count++;
        }
    }

    // BUGFIX: true RMSE is sqrt(mean of squared distances); the previous code
    // averaged plain distances (mean absolute error) despite the name.
    return count > 0 ? std::sqrt(sum_squared / count) : 0.0f;
}

// class mapOptimization
class mapOptimization : public ParamServer {
public:
    // gtsam
    NonlinearFactorGraph gtsam_graph_;         // 因子图
    Values               initial_estimate_;    // 因子图变量初始值
    Values               optimized_estimate_;  // 优化器当前优化结果
    ISAM2*               p_isam_;              // 非线性优化器
    // 当前优化结果的位姿方差。该方差在GPS因子中用到，如果该方差较小，则说明优化结果较好，即使打开GPS开关也不会将GPS因子加入因子图。
    Values          isam_current_estimate_;
    Eigen::MatrixXd pose_covariance_;

    rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr p_pub_laser_cloud_surround_;
    rclcpp::Publisher<nav_msgs::msg::Odometry>::SharedPtr       p_pub_laser_odometry_global_;
    rclcpp::Publisher<nav_msgs::msg::Odometry>::SharedPtr       p_pub_laser_odometry_incremental_;
    rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr p_pub_key_poses_;
    rclcpp::Publisher<nav_msgs::msg::Path>::SharedPtr           p_pub_path_;

    rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr        p_pub_history_key_frames_;
    rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr        p_pub_icp_key_frame_;
    rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr        p_pub_recent_key_frames_;
    rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr        p_pub_recent_key_frame_;
    rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr        p_pub_cloud_registered_raw_;
    rclcpp::Publisher<visualization_msgs::msg::MarkerArray>::SharedPtr p_pub_loop_constraint_edge_;

    rclcpp::Service<lio_sam::srv::SaveMap>::SharedPtr                 p_srv_save_map_;
    rclcpp::Subscription<lio_sam::msg::CloudInfo>::SharedPtr          p_sub_cloud_;
    rclcpp::Subscription<nav_msgs::msg::Odometry>::SharedPtr          p_sub_gps_;
    rclcpp::Subscription<std_msgs::msg::Float64MultiArray>::SharedPtr p_sub_loop_;

    // 重定位接口
    rclcpp::Service<lio_sam::srv::Relocalization>::SharedPtr p_srv_relocation_; // relocation

    std::deque<nav_msgs::msg::Odometry> gps_queue_;
    lio_sam::msg::CloudInfo             cloud_info_;

    // 历史所有关键帧的角点集合（降采样）
    vector<pcl::PointCloud<PointType>::Ptr> p_corner_cloud_key_frames_vec_;
    // 历史所有关键帧的平面点集合（降采样）
    vector<pcl::PointCloud<PointType>::Ptr> p_surf_cloud_key_frames_vec_;

    pcl::PointCloud<PointType>::Ptr p_cloud_key_poses_3d_;  // 历史关键帧位姿（位置）
    // 历史关键帧位姿
    pcl::PointCloud<PointTypePose>::Ptr p_cloud_key_poses_6d_;
    pcl::PointCloud<PointType>::Ptr     p_copy_cloud_key_poses_3d_;
    pcl::PointCloud<PointTypePose>::Ptr p_copy_cloud_key_poses_6d_;

    // 当前激光帧角点集合
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_corner_last_;  // corner feature set from odoOptimization
    // 当前激光帧平面点集合
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_surf_last_;  // surf feature set from odoOptimization
    // 当前激光帧角点集合，降采样，DS: DownSize
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_corner_last_ds_;  // downsampled corner feature set from
                                                                    // odoOptimization
    // 当前激光帧平面点集合，降采样
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_surf_laset_ds_;  // downsampled surf feature set from
                                                                   // odoOptimization

    // 当前帧与局部map匹配上了的角点、平面点，加入同一集合；后面是对应点的参数
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_ori_;
    pcl::PointCloud<PointType>::Ptr p_coff_sel_;

    // 当前帧与局部map匹配上了的角点、参数、标记
    std::vector<PointType> laser_cloud_ori_corner_vec_;  // corner point holder for parallel computation
    std::vector<PointType> coeff_sel_corner_vec_;
    std::vector<bool>      laser_cloud_ori_corner_flag_;

    // 当前帧与局部map匹配上了的平面点、参数、标记
    std::vector<PointType> laser_cloud_ori_surf_vec_;  // surf point holder for parallel computation
    std::vector<PointType> coeff_sel_surf_vec_;
    std::vector<bool>      laser_cloud_ori_surf_flag_;

    map<int, pair<pcl::PointCloud<PointType>, pcl::PointCloud<PointType>>> laser_cloud_map_container_;

    pcl::PointCloud<PointType>::Ptr p_laser_cloud_corner_from_map_;     // 局部map的角点集合
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_surf_from_map_;       // 局部map的平面点集合
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_corner_from_map_ds_;  // 局部map的角点集合，降采样
    pcl::PointCloud<PointType>::Ptr p_laser_cloud_surf_from_map_ds_;    // 局部map的平面点集合，降采样

    // 局部关键帧构建的map点云，对应kdtree，用于scan-to-map找相邻点
    pcl::KdTreeFLANN<PointType>::Ptr p_kdtree_corner_from_map_;
    pcl::KdTreeFLANN<PointType>::Ptr p_kdtree_surf_from_map_;

    pcl::KdTreeFLANN<PointType>::Ptr p_kdtree_surrounding_key_poses_;
    pcl::KdTreeFLANN<PointType>::Ptr p_kdtree_history_key_poses_;

    // 降采样
    pcl::VoxelGrid<PointType> down_size_filter_corner_;  // 角点点云降采样器
    pcl::VoxelGrid<PointType> down_size_filter_surf_;    // 平面点点云降采样器
    pcl::VoxelGrid<PointType> down_size_filter_icp_;     // 做回环检测时使用ICP时的点云降采样器
    // 构建局部地图时都挑选的关键帧做降采样
    pcl::VoxelGrid<PointType> down_size_filter_surrounding_key_poses_;  // for surrounding key poses of
                                                                        // scan-to-map optimization

    rclcpp::Time time_laser_info_stamp_;  // 当前雷达帧的时间戳
    double       time_laser_info_cur_;    // 当前雷达帧的时间戳，秒

    /**
     * 注意注意注意！！这是一个非常重要的变量，transformTobeMapped_[6]缓存的是当前帧
     * 的`最新`位姿x,y,z,roll,pitch,yaw。无论在哪个环节，对位姿的更新都会被缓存到这个
     * 变量供给下一个环节使用！！
     */
    float tramsform_tobe_mapped_[6];

    std::mutex mtx_;            // 点云信息回调函数锁
    std::mutex mtx_loop_info_;  // 回环检测线程锁

    // 标识点云匹配的结果是否较差，当isDegenerate为true的时候，标识本次的点云匹配结果较差，
    // 会在雷达里程计的协方差中置位，在imuPreintegration中会根据这个标志位选择因子图的噪声模型
    bool                       is_degenerate_ = false;
    Eigen::Matrix<float, 6, 6> mat_p_;

    int laser_cloud_corner_from_map_ds_num_ = 0;  // 降采样后局部map角点数量
    int laser_cloud_surf_from_map_ds_num_   = 0;  // 降采样后局部map平面点数量
    int laser_cloud_corner_last_ds_num_     = 0;  // 降采样后当前激光帧角点数量
    int laser_cloud_surf_laset_ds_num_      = 0;  // 降采样后当前激光帧面点数量

    // 当新的回环节点出现或者GPS信息被加入校正位置，这个变量被置为true，
    // 因子图优化器会执行多次更新，然后将所有的历史帧位置都更新一遍
    bool a_loop_is_closed_ = false;
    // 回环的索引字典，从当前帧到回环节点的索引
    map<int, int>                                   loop_index_container_;  // from new to old
    vector<pair<int, int>>                          loop_index_queue_;      // 所有回环配对关系
    vector<gtsam::Pose3>                            loop_pose_queue_;       // 所有回环的姿态配对关系
    vector<gtsam::noiseModel::Diagonal::shared_ptr> loop_noise_queue_;      // 每个回环因子的噪声模型
    deque<std_msgs::msg::Float64MultiArray>         loop_info_vec_;

    nav_msgs::msg::Path global_path_;  // 全局关键帧轨迹

    // 缓存雷达帧位姿用来做点云变换
    Eigen::Affine3f trans_point_associate_to_map_;  // 当前帧位姿
    // incrementalOdometryAffineFront在每次点云进来时缓存上一次的位姿
    Eigen::Affine3f incremental_odometry_affine_front_;  // 前一帧位姿
    // incrementalOdometryAffineBack是当前帧点云优化后的最终位姿，
    // incrementalOdometryAffineBack与Front可以算出一个增量，应用到上一次的雷达里程计
    // 计算出当前的雷达里程计。这一步似乎有点多余
    Eigen::Affine3f incremental_odometry_affine_back_;

    std::unique_ptr<tf2_ros::TransformBroadcaster> p_broadcaster_;  // TF发布器

    // Node constructor: configures iSAM2, all publishers/subscriptions/services
    // and the down-sampling filters, then allocates the working buffers.
    mapOptimization(const rclcpp::NodeOptions& options) : ParamServer("lio_sam_mapOptimization", options) {
        // iSAM2 incremental optimizer.
        // NOTE(review): p_isam_ is a raw owning pointer that is never deleted;
        // tolerable for a process-lifetime node, but a smart-pointer member
        // would be cleaner.
        ISAM2Params parameters;
        parameters.relinearizeThreshold = 0.1;
        parameters.relinearizeSkip      = 1;
        p_isam_                         = new ISAM2(parameters);

        // Outputs: trajectory, global map, odometry (smoothed + incremental), path, TF.
        p_pub_key_poses_             = create_publisher<sensor_msgs::msg::PointCloud2>("lio_sam/mapping/trajectory", 1);
        p_pub_laser_cloud_surround_  = create_publisher<sensor_msgs::msg::PointCloud2>("lio_sam/mapping/map_global", 1);
        p_pub_laser_odometry_global_ = create_publisher<nav_msgs::msg::Odometry>("lio_sam/mapping/odometry", qos);
        p_pub_laser_odometry_incremental_ =
            create_publisher<nav_msgs::msg::Odometry>("lio_sam/mapping/odometry_incremental", qos);
        p_pub_path_    = create_publisher<nav_msgs::msg::Path>("lio_sam/mapping/path", 1);
        p_broadcaster_ = std::make_unique<tf2_ros::TransformBroadcaster>(this);

        // Inputs: feature cloud, GPS odometry and external loop-closure hints.
        p_sub_cloud_ = create_subscription<lio_sam::msg::CloudInfo>(
            "lio_sam/feature/cloud_info", qos,
            std::bind(&mapOptimization::laserCloudInfoHandler, this, std::placeholders::_1));
        p_sub_gps_ = create_subscription<nav_msgs::msg::Odometry>(
            gps_topic_, 200, std::bind(&mapOptimization::gpsHandler, this, std::placeholders::_1));
        p_sub_loop_ = create_subscription<std_msgs::msg::Float64MultiArray>(
            "lio_loop/loop_closure_detection", qos,
            std::bind(&mapOptimization::loopInfoHandler, this, std::placeholders::_1));

        // Service: dump the current map to PCD files.
        auto saveMapService = [this](const std::shared_ptr<rmw_request_id_t>               request_header,
                                     const std::shared_ptr<lio_sam::srv::SaveMap::Request> req,
                                     std::shared_ptr<lio_sam::srv::SaveMap::Response>      res) -> void {
            (void) request_header;
            // BUGFIX: std::getenv may return nullptr (HOME unset); guard
            // against UB from concatenating a null pointer.
            const char* home     = std::getenv("HOME");
            std::string home_dir = (home != nullptr) ? home : "";
            std::string save_map_directory;
            if (req->destination.empty())
                save_map_directory = home_dir + save_pcd_directory_;
            else
                save_map_directory = home_dir + req->destination;

            res->success = SaveMap(save_map_directory, req->resolution);
        };

        // Service: re-localize from a caller-supplied [x,y,z,roll,pitch,yaw] guess.
        auto relocationService = [this](const std::shared_ptr<rmw_request_id_t>                      request_header,
                                        const std::shared_ptr<lio_sam::srv::Relocalization::Request> req,
                                        std::shared_ptr<lio_sam::srv::Relocalization::Response>      res) -> void {
            (void) request_header;
            std::vector<float> init_pose_6d{req->init_pose_6d[0], req->init_pose_6d[1], req->init_pose_6d[2],
                                            req->init_pose_6d[3], req->init_pose_6d[4], req->init_pose_6d[5]};
            res->success = relocation(init_pose_6d);
        };

        p_srv_save_map_   = create_service<lio_sam::srv::SaveMap>("lio_sam/save_map", saveMapService);
        p_srv_relocation_ = create_service<lio_sam::srv::Relocalization>("lio_sam/relocalization", relocationService);

        // Loop-closure visualization publishers.
        p_pub_history_key_frames_ =
            create_publisher<sensor_msgs::msg::PointCloud2>("lio_sam/mapping/icp_loop_closure_history_cloud", 1);
        // BUGFIX: previously published to the SAME topic as
        // p_pub_history_key_frames_; the ICP-corrected cloud gets its own topic
        // (matches upstream LIO-SAM naming).
        p_pub_icp_key_frame_ =
            create_publisher<sensor_msgs::msg::PointCloud2>("lio_sam/mapping/icp_loop_closure_corrected_cloud", 1);
        p_pub_loop_constraint_edge_ =
            create_publisher<visualization_msgs::msg::MarkerArray>("/lio_sam/mapping/loop_closure_constraints", 1);

        p_pub_recent_key_frames_ = create_publisher<sensor_msgs::msg::PointCloud2>("lio_sam/mapping/map_local", 1);
        p_pub_recent_key_frame_ =
            create_publisher<sensor_msgs::msg::PointCloud2>("lio_sam/mapping/cloud_registered", 1);
        p_pub_cloud_registered_raw_ =
            create_publisher<sensor_msgs::msg::PointCloud2>("lio_sam/mapping/cloud_registered_raw", 1);

        // Down-sampling filters, leaf sizes from the parameter server.
        down_size_filter_corner_.setLeafSize(mapping_corner_leaf_size_, mapping_corner_leaf_size_,
                                             mapping_corner_leaf_size_);
        down_size_filter_surf_.setLeafSize(mapping_surf_leaf_size_, mapping_surf_leaf_size_, mapping_surf_leaf_size_);
        down_size_filter_icp_.setLeafSize(mapping_surf_leaf_size_, mapping_surf_leaf_size_, mapping_surf_leaf_size_);
        down_size_filter_surrounding_key_poses_.setLeafSize(
            surrounding_key_frame_density_, surrounding_key_frame_density_,
            surrounding_key_frame_density_);  // for surrounding key poses of scan-to-map optimization

        allocateMemory();
    }

    void allocateMemory() {
        p_cloud_key_poses_3d_.reset(new pcl::PointCloud<PointType>());
        p_cloud_key_poses_6d_.reset(new pcl::PointCloud<PointTypePose>());
        p_copy_cloud_key_poses_3d_.reset(new pcl::PointCloud<PointType>());
        p_copy_cloud_key_poses_6d_.reset(new pcl::PointCloud<PointTypePose>());

        p_kdtree_surrounding_key_poses_.reset(new pcl::KdTreeFLANN<PointType>());
        p_kdtree_history_key_poses_.reset(new pcl::KdTreeFLANN<PointType>());

        p_laser_cloud_corner_last_.reset(new pcl::PointCloud<PointType>());     // corner feature set from
                                                                                // odoOptimization
        p_laser_cloud_surf_last_.reset(new pcl::PointCloud<PointType>());       // surf feature set from
                                                                                // odoOptimization
        p_laser_cloud_corner_last_ds_.reset(new pcl::PointCloud<PointType>());  // downsampled corner featuer set
                                                                                // from odoOptimization
        p_laser_cloud_surf_laset_ds_.reset(new pcl::PointCloud<PointType>());   // downsampled surf featuer set from
                                                                                // odoOptimization

        p_laser_cloud_ori_.reset(new pcl::PointCloud<PointType>());
        p_coff_sel_.reset(new pcl::PointCloud<PointType>());

        laser_cloud_ori_corner_vec_.resize(N_SCAN * Horizon_SCAN);
        coeff_sel_corner_vec_.resize(N_SCAN * Horizon_SCAN);
        laser_cloud_ori_corner_flag_.resize(N_SCAN * Horizon_SCAN);
        laser_cloud_ori_surf_vec_.resize(N_SCAN * Horizon_SCAN);
        coeff_sel_surf_vec_.resize(N_SCAN * Horizon_SCAN);
        laser_cloud_ori_surf_flag_.resize(N_SCAN * Horizon_SCAN);

        std::fill(laser_cloud_ori_corner_flag_.begin(), laser_cloud_ori_corner_flag_.end(), false);
        std::fill(laser_cloud_ori_surf_flag_.begin(), laser_cloud_ori_surf_flag_.end(), false);

        p_laser_cloud_corner_from_map_.reset(new pcl::PointCloud<PointType>());
        p_laser_cloud_surf_from_map_.reset(new pcl::PointCloud<PointType>());
        p_laser_cloud_corner_from_map_ds_.reset(new pcl::PointCloud<PointType>());
        p_laser_cloud_surf_from_map_ds_.reset(new pcl::PointCloud<PointType>());

        p_kdtree_corner_from_map_.reset(new pcl::KdTreeFLANN<PointType>());
        p_kdtree_surf_from_map_.reset(new pcl::KdTreeFLANN<PointType>());

        for (int i = 0; i < 6; ++i) {
            tramsform_tobe_mapped_[i] = 0;
        }

        mat_p_.setZero();
    }

    // 保存地图
    bool SaveMap(std::string directory, float resolution) {
        cout << "****************************************************" << endl;
        cout << "Saving map to pcd files ..." << endl;
        cout << "Save directory:" << directory << std::endl;
        // create directory and remove old files;
        int unused = system((std::string("exec rm -r ") + directory).c_str());
        unused |= system((std::string("mkdir -p ") + directory).c_str());
        unused |= system((std::string("mkdir ") + directory + "corner").c_str());
        unused |= system((std::string("mkdir ") + directory + "surf").c_str());

        // save key frame transformations
        int ret = pcl::io::savePCDFileASCII(directory + "/trajectory.pcd", *p_cloud_key_poses_3d_);
        ret |= pcl::io::savePCDFileASCII(directory + "/transformations.pcd", *p_cloud_key_poses_6d_);

        // extract global point cloud map
        pcl::PointCloud<PointType>::Ptr globalCornerCloud(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr globalSurfCloud(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr globalMapCloud(new pcl::PointCloud<PointType>());
        for (int i = 0; i < (int) p_cloud_key_poses_3d_->size(); i++) {
            *globalCornerCloud +=
                *transformPointCloud(p_corner_cloud_key_frames_vec_[i], &p_cloud_key_poses_6d_->points[i]);
            *globalSurfCloud +=
                *transformPointCloud(p_surf_cloud_key_frames_vec_[i], &p_cloud_key_poses_6d_->points[i]);
            ret |= pcl::io::savePCDFileASCII(directory + "/corner/corner" + std::to_string(i) + ".pcd",
                                             *p_corner_cloud_key_frames_vec_[i]);
            ret |= pcl::io::savePCDFileASCII(directory + "/surf/surf" + std::to_string(i) + ".pcd",
                                             *p_surf_cloud_key_frames_vec_[i]);
            std::cout << "\r" << std::flush << "Processing feature cloud " << i << " of "
                      << p_cloud_key_poses_6d_->size() << " ...";
        }

        // down-sample
        pcl::PointCloud<PointType>::Ptr globalCornerCloudDS(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr globalSurfCloudDS(new pcl::PointCloud<PointType>());
        if (std::fabs(resolution) > std::numeric_limits<float>::epsilon()) {
            cout << "\n\nSave resolution: " << resolution << endl;
            // down-sample and save corner cloud
            down_size_filter_corner_.setInputCloud(globalCornerCloud);
            down_size_filter_corner_.setLeafSize(resolution, resolution, resolution);
            down_size_filter_corner_.filter(*globalCornerCloudDS);
            ret |= pcl::io::savePCDFileBinary(directory + "/corner_map.pcd", *globalCornerCloudDS);
            // down-sample and save surf cloud
            down_size_filter_surf_.setInputCloud(globalSurfCloud);
            down_size_filter_surf_.setLeafSize(resolution, resolution, resolution);
            down_size_filter_surf_.filter(*globalSurfCloudDS);
            ret |= pcl::io::savePCDFileBinary(directory + "/surf_map.pcd", *globalSurfCloudDS);
        } else {
            // save corner cloud
            ret |= pcl::io::savePCDFileBinary(directory + "/corner_map.pcd", *globalCornerCloud);
            // save surf cloud
            ret |= pcl::io::savePCDFileBinary(directory + "/Surf_map.pcd", *globalSurfCloud);
        }

        // save global point cloud map
        *globalMapCloud += *globalCornerCloud;
        *globalMapCloud += *globalSurfCloud;
        pcl::io::savePCDFileBinary(directory + "/GlobalMap.pcd", *globalMapCloud);
        cout << "****************************************************" << endl;
        cout << "Saving map to pcd files completed\n" << endl;

        bool save_succ = (ret == 0 && unused == 0);
        return save_succ;
    }

    // Load a previously saved map from the fixed path ($HOME + save_pcd_directory_)
    // and replay the key-frame odometry chain into iSAM2. Existing state is
    // reset first. Returns false when the pose files are missing.
    // TODO: this fixed load path may differ from the directory a SaveMap
    // request wrote to; per-frame clouds that fail to load are left empty.
    bool LoadMap() {
        // BUGFIX: std::getenv may return nullptr (HOME unset); guard against
        // UB from concatenating a null pointer.
        const char* home               = std::getenv("HOME");
        std::string save_map_directory = (home != nullptr ? std::string(home) : std::string()) + save_pcd_directory_;
        std::cout << "****************************************************" << std::endl;
        std::cout << "start load pcd files..." << std::endl;
        std::cout << "load pcd files directory: " << save_map_directory << std::endl;

        std::lock_guard<std::mutex> lock1(mtx_);
        std::lock_guard<std::mutex> lock2(mtx_loop_info_);
        allocateMemory();  // reset all working state before loading

        // Key-frame positions (3D). Path separator added for consistency with
        // SaveMap, which writes "<dir>/trajectory.pcd".
        int ret = pcl::io::loadPCDFile<PointType>(save_map_directory + "/trajectory.pcd", *p_cloud_key_poses_3d_);
        if (ret == -1) {
            std::cerr << "no trajectory.pcd files:" << save_map_directory + "/trajectory.pcd" << std::endl;
            return false;
        }

        // Key-frame poses (6D).
        ret = pcl::io::loadPCDFile<PointTypePose>(save_map_directory + "/transformations.pcd", *p_cloud_key_poses_6d_);
        if (ret == -1) {
            std::cerr << "no transformations.pcd files:" << save_map_directory + "/transformations.pcd" << std::endl;
            return false;
        }

        // Per-key-frame corner and surf feature clouds.
        p_corner_cloud_key_frames_vec_.clear();
        p_surf_cloud_key_frames_vec_.clear();
        for (int i = 0; i < (int) p_cloud_key_poses_3d_->points.size(); i++) {
            // corner
            p_corner_cloud_key_frames_vec_.emplace_back(new pcl::PointCloud<PointType>());
            std::string corner_file = save_map_directory + "/corner/corner" + std::to_string(i) + ".pcd";
            if (pcl::io::loadPCDFile<PointType>(corner_file, *p_corner_cloud_key_frames_vec_[i]) == -1) {
                // Keep going: the frame simply stays empty.
                std::cerr << "failed to load " << corner_file << std::endl;
            }

            // surf
            p_surf_cloud_key_frames_vec_.emplace_back(new pcl::PointCloud<PointType>());
            std::string surf_file = save_map_directory + "/surf/surf" + std::to_string(i) + ".pcd";
            if (pcl::io::loadPCDFile<PointType>(surf_file, *p_surf_cloud_key_frames_vec_[i]) == -1) {
                std::cerr << "failed to load " << surf_file << std::endl;
            }
        }

        // Replay the stored trajectory into the factor graph: a prior anchors
        // the first key frame, between-factors chain the rest.
        noiseModel::Diagonal::shared_ptr odometry_noise =
            noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
        for (int i = 0; i < (int) p_cloud_key_poses_3d_->points.size(); i++) {
            if (i == 0) {
                gtsam_graph_.add(
                    PriorFactor<Pose3>(0, pclPointTogtsamPose3(p_cloud_key_poses_6d_->points[0]), odometry_noise));
                initial_estimate_.insert(0, pclPointTogtsamPose3(p_cloud_key_poses_6d_->points[0]));
            } else {
                gtsam::Pose3 pose_from = pclPointTogtsamPose3(p_cloud_key_poses_6d_->points[i - 1]);
                gtsam::Pose3 pose_to   = pclPointTogtsamPose3(p_cloud_key_poses_6d_->points[i]);
                gtsam_graph_.add(BetweenFactor<Pose3>(i - 1, i, pose_from.between(pose_to), odometry_noise));
                // BUGFIX: node i is initialized with its OWN pose; the previous
                // code inserted pose_from (the previous key frame's pose).
                initial_estimate_.insert(i, pose_to);
            }

            p_isam_->update(gtsam_graph_, initial_estimate_);
            gtsam_graph_.resize(0);
            initial_estimate_.clear();
        }

        std::cout << "load cloud_key_frame size:" << p_cloud_key_poses_3d_->points.size() << std::endl;
        std::cout << "****************************************************" << std::endl;
        std::cout << "load pcd files completed\n" << std::endl;

        return true;
    }

    // 重定位函数
    bool relocation(const std::vector<float>& init_pose_6d_f) {
        pcl::PointCloud<PointType>::Ptr laser_cloud_in(new pcl::PointCloud<PointType>());
        pcl::fromROSMsg(cloud_info_.cloud_deskewed, *laser_cloud_in);

        // empty cloud
        if (laser_cloud_in->points.empty()) {
            std::cout << "empty input cloud!" << std::endl;
            return false;
        }

        // empty keyframe
        if (p_cloud_key_poses_3d_->points.empty()) {
            std::cout << " no p_cloud_key_poses_3d!" << std::endl;
            return false;
        }

        // 填充global_map
        pcl::PointCloud<PointType>::Ptr global_map_cloud(new pcl::PointCloud<PointType>());
        for (int i = 0; i < (int) p_cloud_key_poses_3d_->size(); i++) {
            *global_map_cloud +=
                *transformPointCloud(p_corner_cloud_key_frames_vec_[i], &p_cloud_key_poses_6d_->points[i]);
            *global_map_cloud +=
                *transformPointCloud(p_surf_cloud_key_frames_vec_[i], &p_cloud_key_poses_6d_->points[i]);
        }

        // 输入点云降采样
        pcl::VoxelGrid<PointType>       ds_filter;
        pcl::PointCloud<PointType>::Ptr laser_cloud_in_ds(new pcl::PointCloud<PointType>());
        ds_filter.setInputCloud(laser_cloud_in);
        ds_filter.setLeafSize(2.0, 2.0, 2.0);
        ds_filter.filter(*laser_cloud_in_ds);

        // 构建global_map的kd-tree
        pcl::KdTreeFLANN<PointType>::Ptr kd_tree(new pcl::KdTreeFLANN<PointType>());
        kd_tree->setInputCloud(global_map_cloud);

        // 窗口搜索最优位姿:x y yaw
        float           min_score = 100;
        Eigen::Affine3f best_init_pose_6d;
        for (float delta_x = -0.5; delta_x < 0.5; delta_x += 0.25) {
            for (float delta_y = -0.5; delta_y < 0.5; delta_y += 0.25) {
                for (float delta_yaw = -M_PI; delta_yaw < M_PI; delta_yaw += 0.2) {
                    Eigen::Affine3f init_pose_6d = pcl::getTransformation(
                        init_pose_6d_f[0] + delta_x, init_pose_6d_f[1] + delta_y, init_pose_6d_f[2], init_pose_6d_f[3],
                        init_pose_6d_f[4], init_pose_6d_f[5] + delta_yaw);
                    pcl::PointCloud<PointType>::Ptr rotation_cloud(new pcl::PointCloud<PointType>());
                    pcl::transformPointCloud(*laser_cloud_in_ds, *rotation_cloud, init_pose_6d.matrix());
                    float score = ComputeRmse(kd_tree, rotation_cloud, global_map_cloud);
                    if (score < min_score) {
                        min_score         = score;
                        best_init_pose_6d = pcl::getTransformation(
                            init_pose_6d_f[0] + delta_x, init_pose_6d_f[1] + delta_y, init_pose_6d_f[2],
                            init_pose_6d_f[3], init_pose_6d_f[4], init_pose_6d_f[5] + delta_yaw);
                        // std::cout << "x:" << init_pose_6d_f[0] + delta_x << " y:" << init_pose_6d_f[1] + delta_y
                        //           << " yaw:" << init_pose_6d_f[5] + delta_yaw << " score:" << score << std::endl;
                    }
                }
            }
        }

        // ndt init
        pcl::NormalDistributionsTransform<PointType, PointType> ndt;
        ndt.setTransformationEpsilon(1e-6);
        ndt.setStepSize(0.01);
        ndt.setResolution(1);
        ndt.setInputSource(laser_cloud_in);
        ndt.setInputTarget(global_map_cloud);
        pcl::PointCloud<PointType>::Ptr ndt_result(new pcl::PointCloud<PointType>());
        ndt.align(*ndt_result, best_init_pose_6d.matrix());

        // print ndt result
        Eigen::Affine3f    ndt_transformation(ndt.getFinalTransformation());
        std::vector<float> ndt_result_f(6);
        pcl::getTranslationAndEulerAngles(ndt_transformation, ndt_result_f[0], ndt_result_f[1], ndt_result_f[2],
                                          ndt_result_f[3], ndt_result_f[4], ndt_result_f[5]);

        std::cout << "the ndt score in initializing process is: " << ndt.getFitnessScore() << std::endl;
        std::cout << "ndt_x:" << ndt_result_f[0] << " ndt_y:" << ndt_result_f[1] << " ndt_z:" << ndt_result_f[2]
                  << " ndt_roll:" << ndt_result_f[3] << " ndt_pitch:" << ndt_result_f[4]
                  << " ndt_yaw:" << ndt_result_f[5] << std::endl;
        // print ndt result

        // icp init
        pcl::IterativeClosestPoint<PointType, PointType> icp;
        icp.setMaxCorrespondenceDistance(40);
        icp.setTransformationEpsilon(1e-6);
        icp.setEuclideanFitnessEpsilon(0.01);
        icp.setRANSACIterations(0);
        icp.setInputSource(laser_cloud_in);
        icp.setInputTarget(global_map_cloud);
        pcl::PointCloud<PointType>::Ptr icp_result(new pcl::PointCloud<PointType>());
        icp.align(*ndt_result, ndt.getFinalTransformation());

        // update pose
        Eigen::Affine3f icp_transformation(icp.getFinalTransformation());
        pcl::getTranslationAndEulerAngles(icp_transformation, tramsform_tobe_mapped_[3], tramsform_tobe_mapped_[4],
                                          tramsform_tobe_mapped_[5], tramsform_tobe_mapped_[0],
                                          tramsform_tobe_mapped_[1], tramsform_tobe_mapped_[2]);

        // add relocalization pose PriorFactor
        noiseModel::Diagonal::shared_ptr correction_noise =
            noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
        gtsam_graph_.add(PriorFactor<Pose3>(p_cloud_key_poses_3d_->points.size(),
                                            trans2gtsamPose(tramsform_tobe_mapped_), correction_noise));
        initial_estimate_.insert(p_cloud_key_poses_3d_->points.size(), trans2gtsamPose(tramsform_tobe_mapped_));
        p_isam_->update(gtsam_graph_, initial_estimate_);
        gtsam_graph_.resize(0);
        initial_estimate_.clear();

        // 把重定位信息加入到关键帧
        // add 6d pose
        PointTypePose pose_6d = trans2PointTypePose(tramsform_tobe_mapped_);
        pose_6d.time          = time_laser_info_cur_;
        p_cloud_key_poses_6d_->emplace_back(pose_6d);

        // add 3d pose
        PointType pose_3d;
        pose_3d.x         = pose_6d.x;
        pose_3d.y         = pose_6d.y;
        pose_3d.z         = pose_6d.z;
        pose_3d.intensity = p_cloud_key_poses_3d_->points.size();
        p_cloud_key_poses_3d_->emplace_back(pose_3d);

        // add corner and surf
        p_corner_cloud_key_frames_vec_.emplace_back(new pcl::PointCloud<PointType>());
        pcl::fromROSMsg(cloud_info_.cloud_corner, *p_corner_cloud_key_frames_vec_.back());
        p_surf_cloud_key_frames_vec_.emplace_back(new pcl::PointCloud<PointType>());
        pcl::fromROSMsg(cloud_info_.cloud_surface, *p_surf_cloud_key_frames_vec_.back());

        // print icp result
        std::cout << "icp_x:" << tramsform_tobe_mapped_[3] << " icp_y:" << tramsform_tobe_mapped_[4]
                  << " icp_z:" << tramsform_tobe_mapped_[5] << " icp_roll:" << tramsform_tobe_mapped_[0]
                  << " icp_pitch:" << tramsform_tobe_mapped_[1] << " icp_yaw:" << tramsform_tobe_mapped_[2]
                  << std::endl;
        std::cout << "the icp score in initializing process is: " << icp.getFitnessScore() << std::endl;
        // print icp result

        return true;
    }

    /**
     * Callback for the current laser-frame cloud info published by featureExtraction.
     * Pipeline per accepted frame:
     * 1. Initialize the current frame pose
     *    1) First frame: initialize rotation from the raw-IMU RPY
     *    2) Later frames: apply the IMU-odometry increment between the two frames
     *       to the previous laser pose to get the current initial pose
     * 2. Extract nearby corner/surface key frames and fuse them into the local map
     * 3. Downsample the current frame's corner and surface clouds
     * 4. Scan-to-map optimization of the current pose (iterative, with IMU
     *    roll/pitch fusion and z constraint)
     * 5. If motion since the last key frame is large enough, add a key frame and
     *    run factor-graph optimization (odometry / GPS / loop factors)
     * 6. Correct all historical key-frame poses from the optimized graph
     * 7. Publish laser odometry
     * 8. Publish odometry, clouds, and the trajectory
     */
    void laserCloudInfoHandler(const lio_sam::msg::CloudInfo::SharedPtr msgIn) {
        // Take the lock BEFORE touching shared members: cloud_info_ and the
        // feature-cloud buffers are also read by the relocalization and
        // loop-closure code paths under mtx_, so writing them unlocked was a
        // data race.
        std::lock_guard<std::mutex> lock(mtx_);

        // extract time stamp
        time_laser_info_stamp_ = msgIn->header.stamp;
        time_laser_info_cur_   = stamp2Sec(msgIn->header.stamp);

        // extract info and feature cloud
        cloud_info_ = *msgIn;
        pcl::fromROSMsg(msgIn->cloud_corner, *p_laser_cloud_corner_last_);
        pcl::fromROSMsg(msgIn->cloud_surface, *p_laser_cloud_surf_last_);

        // Rate-limit the mapping pipeline to one run per mapping_process_interval_.
        static double timeLastProcessing = -1;
        if (time_laser_info_cur_ - timeLastProcessing >= mapping_process_interval_) {
            timeLastProcessing = time_laser_info_cur_;

            // Current-frame pose prior: first frame from raw-IMU RPY, later
            // frames from the IMU-odometry increment applied to the previous pose.
            updateInitialGuess();

            // Build the local map from spatially/temporally nearby key frames
            // (downsampled), fusing their corner and surface points.
            extractSurroundingKeyFrames();

            // Downsample the current frame's corner and surface clouds.
            downsampleCurrentScan();

            // Scan-to-map pose optimization.
            scan2MapOptimization();

            // Key-frame selection and factor-graph optimization.
            saveKeyFramesAndFactor();

            // Propagate the optimized poses to all historical key frames and
            // refresh the odometry trajectory.
            correctPoses();

            // Publish laser odometry.
            publishOdometry();

            // Publish odometry, clouds, trajectory.
            publishFrames();
        }
    }

    // Buffers every incoming GPS odometry message; the queue is consumed later
    // when GPS factors are added to the graph.
    void gpsHandler(const nav_msgs::msg::Odometry::SharedPtr gpsMsg) {
        gps_queue_.emplace_back(*gpsMsg);
    }

    /**
     * Transforms a single point into the map frame with the cached
     * trans_point_associate_to_map_ transform; intensity passes through.
     */
    void pointAssociateToMap(PointType const* const pi, PointType* const po) {
        const auto& T = trans_point_associate_to_map_;
        const float x = pi->x;
        const float y = pi->y;
        const float z = pi->z;
        po->x         = T(0, 0) * x + T(0, 1) * y + T(0, 2) * z + T(0, 3);
        po->y         = T(1, 0) * x + T(1, 1) * y + T(1, 2) * z + T(1, 3);
        po->z         = T(2, 0) * x + T(2, 1) * y + T(2, 2) * z + T(2, 3);
        po->intensity = pi->intensity;
    }

    /**
     * Returns a copy of cloudIn with every point moved by the 6-DoF pose in
     * transformIn; intensity is copied unchanged. Parallelized with OpenMP.
     */
    pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn,
                                                        PointTypePose*                  transformIn) {
        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());

        const int numPoints = cloudIn->size();
        cloudOut->resize(numPoints);

        const Eigen::Affine3f tf = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z,
                                                          transformIn->roll, transformIn->pitch, transformIn->yaw);

#pragma omp parallel for num_threads(number_of_cores_)
        for (int i = 0; i < numPoints; ++i) {
            const auto& src = cloudIn->points[i];
            auto&       dst = cloudOut->points[i];
            dst.x           = tf(0, 0) * src.x + tf(0, 1) * src.y + tf(0, 2) * src.z + tf(0, 3);
            dst.y           = tf(1, 0) * src.x + tf(1, 1) * src.y + tf(1, 2) * src.z + tf(1, 3);
            dst.z           = tf(2, 0) * src.x + tf(2, 1) * src.y + tf(2, 2) * src.z + tf(2, 3);
            dst.intensity   = src.intensity;
        }
        return cloudOut;
    }

    // Converts a 6-DoF key-frame pose point into a gtsam::Pose3
    // (rotation from roll/pitch/yaw, translation from x/y/z).
    gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint) {
        const gtsam::Rot3 rot =
            gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw));
        const gtsam::Point3 trans(double(thisPoint.x), double(thisPoint.y), double(thisPoint.z));
        return gtsam::Pose3(rot, trans);
    }

    // Converts the [roll, pitch, yaw, x, y, z] transform array into a gtsam::Pose3.
    gtsam::Pose3 trans2gtsamPose(float transformIn[]) {
        const gtsam::Rot3   rot = gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]);
        const gtsam::Point3 trans(transformIn[3], transformIn[4], transformIn[5]);
        return gtsam::Pose3(rot, trans);
    }

    // Builds an Eigen::Affine3f from a 6-DoF key-frame pose point.
    Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint) {
        const Eigen::Affine3f pose = pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, thisPoint.roll,
                                                            thisPoint.pitch, thisPoint.yaw);
        return pose;
    }

    // Builds an Eigen::Affine3f from the [roll, pitch, yaw, x, y, z] array
    // (note the array stores rotation first, translation last).
    Eigen::Affine3f trans2Affine3f(float transformIn[]) {
        const Eigen::Affine3f pose = pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5],
                                                            transformIn[0], transformIn[1], transformIn[2]);
        return pose;
    }

    // Packs the [roll, pitch, yaw, x, y, z] array into a PointTypePose
    // (translation in fields x/y/z, rotation in roll/pitch/yaw).
    PointTypePose trans2PointTypePose(float transformIn[]) {
        PointTypePose pose;
        pose.roll  = transformIn[0];
        pose.pitch = transformIn[1];
        pose.yaw   = transformIn[2];
        pose.x     = transformIn[3];
        pose.y     = transformIn[4];
        pose.z     = transformIn[5];
        return pose;
    }

    /**
     * Global-map visualization thread: publishes the surrounding global map at
     * 0.2 Hz until ROS shuts down, then optionally saves the map as PCD files.
     */
    void visualizeGlobalMapThread() {
        rclcpp::Rate rate(0.2);
        while (rclcpp::ok()) {
            rate.sleep();
            publishGlobalMap();
        }

        // Reached only after rclcpp::ok() turns false (shutdown).
        if (save_pcd_ == false)
            return;

        // std::getenv may return nullptr (e.g. HOME unset in the environment);
        // the previous code then evaluated `nullptr + std::string`, which is UB.
        const char* home = std::getenv("HOME");
        if (home == nullptr)
            return;

        std::string save_map_directory = std::string(home) + save_pcd_directory_;
        SaveMap(save_map_directory, 0.1);
    }

    /**
     * Publishes a downsampled global map built from the key frames within
     * global_map_visualization_search_radius_ of the latest key-frame pose.
     * No-op when nobody subscribes or no key frame exists yet.
     */
    void publishGlobalMap() {
        if (p_pub_laser_cloud_surround_->get_subscription_count() == 0)
            return;

        if (p_cloud_key_poses_3d_->points.empty() == true)
            return;

        pcl::KdTreeFLANN<PointType>::Ptr kdtreeGlobalMap(new pcl::KdTreeFLANN<PointType>());
        pcl::PointCloud<PointType>::Ptr  globalMapKeyPoses(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr  globalMapKeyPosesDS(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr  globalMapKeyFrames(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr  globalMapKeyFramesDS(new pcl::PointCloud<PointType>());

        // kd-tree to find near key frames to visualize
        std::vector<int>   pointSearchIndGlobalMap;
        std::vector<float> pointSearchSqDisGlobalMap;
        // search near key frames to visualize; a scoped lock_guard replaces the
        // previous manual lock()/unlock() pair so the mutex is always released,
        // even if the kd-tree calls throw
        {
            std::lock_guard<std::mutex> lock(mtx_);
            kdtreeGlobalMap->setInputCloud(p_cloud_key_poses_3d_);
            kdtreeGlobalMap->radiusSearch(p_cloud_key_poses_3d_->back(), global_map_visualization_search_radius_,
                                          pointSearchIndGlobalMap, pointSearchSqDisGlobalMap, 0);
        }

        for (int i = 0; i < (int) pointSearchIndGlobalMap.size(); ++i)
            globalMapKeyPoses->push_back(p_cloud_key_poses_3d_->points[pointSearchIndGlobalMap[i]]);
        // downsample near selected key frames
        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyPoses;  // for global map visualization
        downSizeFilterGlobalMapKeyPoses.setLeafSize(
            global_map_visualization_pose_density_, global_map_visualization_pose_density_,
            global_map_visualization_pose_density_);  // for global map visualization
        downSizeFilterGlobalMapKeyPoses.setInputCloud(globalMapKeyPoses);
        downSizeFilterGlobalMapKeyPoses.filter(*globalMapKeyPosesDS);
        // after voxel filtering the intensity (= key-frame index) may be
        // interpolated; snap it back to the nearest real key frame's index
        for (auto& pt : globalMapKeyPosesDS->points) {
            kdtreeGlobalMap->nearestKSearch(pt, 1, pointSearchIndGlobalMap, pointSearchSqDisGlobalMap);
            pt.intensity = p_cloud_key_poses_3d_->points[pointSearchIndGlobalMap[0]].intensity;
        }

        // extract visualized and downsampled key frames
        for (int i = 0; i < (int) globalMapKeyPosesDS->size(); ++i) {
            if (pointDistance(globalMapKeyPosesDS->points[i], p_cloud_key_poses_3d_->back()) >
                global_map_visualization_search_radius_)
                continue;
            int thisKeyInd = (int) globalMapKeyPosesDS->points[i].intensity;
            *globalMapKeyFrames += *transformPointCloud(p_corner_cloud_key_frames_vec_[thisKeyInd],
                                                        &p_cloud_key_poses_6d_->points[thisKeyInd]);
            *globalMapKeyFrames += *transformPointCloud(p_surf_cloud_key_frames_vec_[thisKeyInd],
                                                        &p_cloud_key_poses_6d_->points[thisKeyInd]);
        }
        // downsample visualized points
        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyFrames;  // for global map visualization
        downSizeFilterGlobalMapKeyFrames.setLeafSize(
            global_map_visualization_leaf_size_, global_map_visualization_leaf_size_,
            global_map_visualization_leaf_size_);  // for global map visualization
        downSizeFilterGlobalMapKeyFrames.setInputCloud(globalMapKeyFrames);
        downSizeFilterGlobalMapKeyFrames.filter(*globalMapKeyFramesDS);
        publishCloud(p_pub_laser_cloud_surround_, globalMapKeyFramesDS, time_laser_info_stamp_, odometry_frame_);
    }

    /**
     * Loop-closure thread.
     * 1. Loop-closure scan-to-map with ICP pose refinement:
     *    1) among historical key frames, find the set closest to the current
     *       key frame and pick one far enough in time as the loop candidate
     *    2) extract and downsample the current key frame's features, and the
     *       features of frames around the candidate
     *    3) run ICP scan-to-map; the refined pose yields the loop-factor data
     *       consumed by the factor-graph optimization
     * 2. Publish the loop edges for RViz.
     */
    void loopClosureThread() {
        if (!loop_closure_enable_flag_)
            return;

        rclcpp::Rate rate(loop_closure_frequency_);
        while (rclcpp::ok()) {
            rate.sleep();
            performLoopClosure();
            visualizeLoopClosure();
        }
    }

    // Buffers externally detected loop pairs (data = [cur_time, prev_time]),
    // keeping only the five most recent entries.
    void loopInfoHandler(const std_msgs::msg::Float64MultiArray::SharedPtr loopMsg) {
        std::lock_guard<std::mutex> lock(mtx_loop_info_);
        if (loopMsg->data.size() == 2) {
            loop_info_vec_.push_back(*loopMsg);
            while (loop_info_vec_.size() > 5) {
                loop_info_vec_.pop_front();
            }
        }
    }

    /**
     * Loop-closure detection and constraint construction.
     * 1. Return immediately if the key-frame queue is empty.
     * 2. Copy the 3D/6D key-frame poses under the lock to avoid interference
     *    from the mapping thread.
     * 3. Use the latest key frame as the current frame; return if it already
     *    appears in a loop relation (relations are cached in a global map).
     * 4. Build a kd-tree over the key-frame 3D positions, query around the
     *    current position, and pick the candidate farthest away in time.
     * 5. Transform the current frame into the map frame and downsample it.
     * 6. Transform the frames around the candidate into the map frame, merge
     *    and downsample them into a local map.
     * 7. Run ICP to align the current frame against the local map; apply the
     *    correction to the current-frame pose.
     * 8. Compute the relative pose between the corrected current frame and the
     *    candidate — this becomes the loop factor; the ICP fitness score is
     *    used as the factor's noise model.
     * 9. Append the loop indices, relative pose, and noise model to the global
     *    queues consumed by the factor-graph optimization.
     */
    void performLoopClosure() {
        // No key frames yet — nothing to close.
        if (p_cloud_key_poses_3d_->points.empty() == true)
            return;

        // Snapshot the key-frame poses under the lock so the mapping thread
        // can keep appending while we work on the copies.
        mtx_.lock();
        *p_copy_cloud_key_poses_3d_ = *p_cloud_key_poses_3d_;
        *p_copy_cloud_key_poses_6d_ = *p_cloud_key_poses_6d_;
        mtx_.unlock();

        // find keys
        // Use the latest key frame as the current frame; bail out if it is
        // already in a loop relation, or if the candidate found is too close in
        // time. Loop relations are cached in a global map container.
        // The distance-based detector builds a kd-tree over key-frame positions
        // and picks the spatially nearest candidate that is far enough in time.
        int loopKeyCur;
        int loopKeyPre;
        if (detectLoopClosureExternal(&loopKeyCur, &loopKeyPre) == false)
            // Search historical key frames closest to the current one and pick
            // a frame far enough in time as the loop candidate.
            if (detectLoopClosureDistance(&loopKeyCur, &loopKeyPre) == false)
                return;

        // extract cloud
        pcl::PointCloud<PointType>::Ptr cureKeyframeCloud(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr prevKeyframeCloud(new pcl::PointCloud<PointType>());
        {
            // Transform the current frame into the map frame and downsample.
            // Note the third argument is 0: no neighboring frames are merged in.
            loopFindNearKeyframes(cureKeyframeCloud, loopKeyCur, 0);
            // Transform the frames around the candidate into the map frame,
            // merge and downsample them into a local map.
            loopFindNearKeyframes(prevKeyframeCloud, loopKeyPre, history_key_frame_search_num_);
            if (cureKeyframeCloud->size() < 300 || prevKeyframeCloud->size() < 1000)
                return;
            if (p_pub_history_key_frames_->get_subscription_count() != 0)
                publishCloud(p_pub_history_key_frames_, prevKeyframeCloud, time_laser_info_stamp_, odometry_frame_);
        }

        // ICP Settings
        // Match the current frame against the local map; the resulting
        // correction is applied to the current-frame pose below.
        static pcl::IterativeClosestPoint<PointType, PointType> icp;
        icp.setMaxCorrespondenceDistance(history_key_frame_search_radius_ * 2);
        icp.setMaximumIterations(100);
        icp.setTransformationEpsilon(1e-6);
        icp.setEuclideanFitnessEpsilon(1e-6);
        icp.setRANSACIterations(0);

        // Align clouds
        // scan-to-map ICP alignment
        icp.setInputSource(cureKeyframeCloud);
        icp.setInputTarget(prevKeyframeCloud);
        pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());
        icp.align(*unused_result);

        // Reject the loop if ICP did not converge or the fit is too poor.
        if (icp.hasConverged() == false || icp.getFitnessScore() > history_key_frame_fitness_score_)
            return;

        // publish corrected cloud
        // Relative pose between the corrected current frame and the candidate
        // becomes the loop factor; the ICP score serves as the noise model.
        if (p_pub_icp_key_frame_->get_subscription_count() != 0) {
            pcl::PointCloud<PointType>::Ptr closed_cloud(new pcl::PointCloud<PointType>());
            pcl::transformPointCloud(*cureKeyframeCloud, *closed_cloud, icp.getFinalTransformation());
            publishCloud(p_pub_icp_key_frame_, closed_cloud, time_laser_info_stamp_, odometry_frame_);
        }

        // Get pose transformation
        // Pose transform between the current key frame and the loop key frame
        // produced by the ICP refinement.
        float           x, y, z, roll, pitch, yaw;
        Eigen::Affine3f correctionLidarFrame;
        correctionLidarFrame = icp.getFinalTransformation();
        // transform from world origin to wrong pose
        // Current-frame pose before loop correction.
        Eigen::Affine3f tWrong = pclPointToAffine3f(p_copy_cloud_key_poses_6d_->points[loopKeyCur]);
        // transform from world origin to corrected pose
        // Current-frame pose after loop correction.
        Eigen::Affine3f tCorrect =
            correctionLidarFrame * tWrong;  // pre-multiplying -> successive rotation about a fixed frame
        pcl::getTranslationAndEulerAngles(tCorrect, x, y, z, roll, pitch, yaw);
        gtsam::Pose3 poseFrom = Pose3(Rot3::RzRyRx(roll, pitch, yaw), Point3(x, y, z));
        // Pose of the loop candidate key frame.
        gtsam::Pose3  poseTo = pclPointTogtsamPose3(p_copy_cloud_key_poses_6d_->points[loopKeyPre]);
        gtsam::Vector Vector6(6);
        float         noiseScore = icp.getFitnessScore();
        Vector6 << noiseScore, noiseScore, noiseScore, noiseScore, noiseScore, noiseScore;
        noiseModel::Diagonal::shared_ptr constraintNoise = noiseModel::Diagonal::Variances(Vector6);

        // Add pose constraint
        // Push the loop indices, relative pose, and noise model to the global
        // queues consumed by the factor-graph optimization.
        mtx_.lock();
        loop_index_queue_.push_back(make_pair(loopKeyCur, loopKeyPre));
        loop_pose_queue_.push_back(poseFrom.between(poseTo));
        loop_noise_queue_.push_back(constraintNoise);
        mtx_.unlock();

        // add loop constriant
        loop_index_container_[loopKeyCur] = loopKeyPre;
    }

    /**
     * @brief Finds a loop-candidate pair by spatial proximity.
     * 1. The newest key frame is the current frame; return false if it already
     *    participates in a loop (relations are cached in a global container).
     * 2. Build a kd-tree over the key-frame 3D positions, query around the
     *    current position, and pick the first candidate far enough in time.
     *
     * @param latestID  out: index of the current frame (newest key frame)
     * @param closestID out: index of the matched (older) key frame
     * @return true when a valid pair was found
     */
    bool detectLoopClosureDistance(int* latestID, int* closestID) {
        const int loopKeyCur = p_copy_cloud_key_poses_3d_->size() - 1;

        // check loop constraint added before
        // Skip if the newest key frame is already part of a loop relation.
        if (loop_index_container_.count(loopKeyCur) != 0)
            return false;

        // find the closest history key frame
        // kd-tree radius search around the newest key-frame position.
        std::vector<int>   candidateIdx;
        std::vector<float> candidateSqDist;
        p_kdtree_history_key_poses_->setInputCloud(p_copy_cloud_key_poses_3d_);
        p_kdtree_history_key_poses_->radiusSearch(p_copy_cloud_key_poses_3d_->back(), history_key_frame_search_radius_,
                                                  candidateIdx, candidateSqDist, 0);

        // Require the spatially close frame to be old enough in time, which
        // also excludes the immediately preceding key frames.
        int loopKeyPre = -1;
        for (const int id : candidateIdx) {
            if (abs(p_copy_cloud_key_poses_6d_->points[id].time - time_laser_info_cur_) >
                history_key_frame_search_time_off_) {
                loopKeyPre = id;
                break;
            }
        }

        // No candidate satisfied both the spatial and temporal constraints.
        if (loopKeyPre == -1 || loopKeyPre == loopKeyCur)
            return false;

        *latestID  = loopKeyCur;
        *closestID = loopKeyPre;

        return true;
    }

    /**
     * Finds a loop pair from externally supplied timestamps buffered in
     * loop_info_vec_ (fed by loopInfoHandler).
     * @param latestID  out: key index of the current frame
     * @param closestID out: key index of the matched (older) frame
     * @return true when a valid, not-yet-used pair was resolved
     */
    bool detectLoopClosureExternal(int* latestID, int* closestID) {
        // this function is not used yet, please ignore it
        int loopKeyCur = -1;
        int loopKeyPre = -1;

        std::lock_guard<std::mutex> lock(mtx_loop_info_);
        if (loop_info_vec_.empty())
            return false;

        // data = [current_frame_time, previous_frame_time]
        double loopTimeCur = loop_info_vec_.front().data[0];
        double loopTimePre = loop_info_vec_.front().data[1];
        loop_info_vec_.pop_front();

        // Reject pairs that are too close in time to be a real loop.
        if (abs(loopTimeCur - loopTimePre) < history_key_frame_search_time_off_)
            return false;

        int cloudSize = p_copy_cloud_key_poses_6d_->size();
        if (cloudSize < 2)
            return false;

        // latest key
        // Walk backwards to the oldest key frame at/after loopTimeCur; the
        // intensity field stores the key-frame index.
        loopKeyCur = cloudSize - 1;
        for (int i = cloudSize - 1; i >= 0; --i) {
            if (p_copy_cloud_key_poses_6d_->points[i].time >= loopTimeCur)
                loopKeyCur = round(p_copy_cloud_key_poses_6d_->points[i].intensity);
            else
                break;
        }

        // previous key
        // Walk forwards to the newest key frame at/before loopTimePre.
        loopKeyPre = 0;
        for (int i = 0; i < cloudSize; ++i) {
            if (p_copy_cloud_key_poses_6d_->points[i].time <= loopTimePre)
                loopKeyPre = round(p_copy_cloud_key_poses_6d_->points[i].intensity);
            else
                break;
        }

        if (loopKeyCur == loopKeyPre)
            return false;

        // Skip if this current frame is already part of a loop relation.
        auto it = loop_index_container_.find(loopKeyCur);
        if (it != loop_index_container_.end())
            return false;

        *latestID  = loopKeyCur;
        *closestID = loopKeyPre;

        return true;
    }

    /**
     * @brief Builds a local map around key frame `key` from the corner and
     *        surface clouds of the surrounding key frames.
     * @param nearKeyframes out: the assembled (and downsampled) local map
     * @param key index of the center key frame
     * @param searchNum number of key frames taken on each side of `key`
     */
    void loopFindNearKeyframes(pcl::PointCloud<PointType>::Ptr& nearKeyframes, const int& key, const int& searchNum) {
        nearKeyframes->clear();

        const int numKeyFrames = p_copy_cloud_key_poses_6d_->size();
        for (int offset = -searchNum; offset <= searchNum; ++offset) {
            const int idx = key + offset;
            if (idx < 0 || idx >= numKeyFrames)
                continue;
            // Transform each neighbor's features into the map frame and merge.
            PointTypePose* pose = &p_copy_cloud_key_poses_6d_->points[idx];
            *nearKeyframes += *transformPointCloud(p_corner_cloud_key_frames_vec_[idx], pose);
            *nearKeyframes += *transformPointCloud(p_surf_cloud_key_frames_vec_[idx], pose);
        }

        if (nearKeyframes->empty())
            return;

        // Downsample the merged cloud in place.
        pcl::PointCloud<PointType>::Ptr filtered(new pcl::PointCloud<PointType>());
        down_size_filter_icp_.setInputCloud(nearKeyframes);
        down_size_filter_icp_.filter(*filtered);
        *nearKeyframes = *filtered;
    }

    /**
     * 可视化回环关系，主要是根据回环关系的构建Rivz可以直接显示的MarkerArray
     */
    void visualizeLoopClosure() {
        if (loop_index_container_.empty())
            return;

        visualization_msgs::msg::MarkerArray markerArray;
        // loop nodes
        visualization_msgs::msg::Marker markerNode;
        markerNode.header.frame_id    = odometry_frame_;
        markerNode.header.stamp       = time_laser_info_stamp_;
        markerNode.action             = visualization_msgs::msg::Marker::ADD;
        markerNode.type               = visualization_msgs::msg::Marker::SPHERE_LIST;
        markerNode.ns                 = "loop_nodes";
        markerNode.id                 = 0;
        markerNode.pose.orientation.w = 1;
        markerNode.scale.x            = 0.3;
        markerNode.scale.y            = 0.3;
        markerNode.scale.z            = 0.3;
        markerNode.color.r            = 0;
        markerNode.color.g            = 0.8;
        markerNode.color.b            = 1;
        markerNode.color.a            = 1;
        // loop edges
        visualization_msgs::msg::Marker markerEdge;
        markerEdge.header.frame_id    = odometry_frame_;
        markerEdge.header.stamp       = time_laser_info_stamp_;
        markerEdge.action             = visualization_msgs::msg::Marker::ADD;
        markerEdge.type               = visualization_msgs::msg::Marker::LINE_LIST;
        markerEdge.ns                 = "loop_edges";
        markerEdge.id                 = 1;
        markerEdge.pose.orientation.w = 1;
        markerEdge.scale.x            = 0.1;
        markerEdge.color.r            = 0.9;
        markerEdge.color.g            = 0.9;
        markerEdge.color.b            = 0;
        markerEdge.color.a            = 1;

        for (auto it = loop_index_container_.begin(); it != loop_index_container_.end(); ++it) {
            int                       key_cur = it->first;
            int                       key_pre = it->second;
            geometry_msgs::msg::Point p;
            p.x = p_copy_cloud_key_poses_6d_->points[key_cur].x;
            p.y = p_copy_cloud_key_poses_6d_->points[key_cur].y;
            p.z = p_copy_cloud_key_poses_6d_->points[key_cur].z;
            markerNode.points.push_back(p);
            markerEdge.points.push_back(p);
            p.x = p_copy_cloud_key_poses_6d_->points[key_pre].x;
            p.y = p_copy_cloud_key_poses_6d_->points[key_pre].y;
            p.z = p_copy_cloud_key_poses_6d_->points[key_pre].z;
            markerNode.points.push_back(p);
            markerEdge.points.push_back(p);
        }

        markerArray.markers.push_back(markerNode);
        markerArray.markers.push_back(markerEdge);
        p_pub_loop_constraint_edge_->publish(markerArray);
    }

    /**
     * Initializes the current-frame pose estimate.
     * 1. First frame: rotation from the raw-IMU RPY (yaw optionally zeroed).
     * 2. Later frames: apply the IMU-odometry increment between the two frames
     *    to the previous laser pose; fall back to an IMU-rotation-only
     *    increment when no IMU odometry is available.
     */
    void updateInitialGuess() {
        // save current transformation before any processing
        // Pose of the previous frame (the lidar pose; abbreviated "pose" below).
        incremental_odometry_affine_front_ = trans2Affine3f(tramsform_tobe_mapped_);

        // Previous initial attitude (from raw IMU data), used to estimate the
        // first frame's rotation.
        static Eigen::Affine3f lastImuTransformation = Eigen::Affine3f::Identity();
        // initialization RPY
        if (p_cloud_key_poses_3d_->points.empty()) {
            tramsform_tobe_mapped_[0] = cloud_info_.imu_roll_init;
            tramsform_tobe_mapped_[1] = cloud_info_.imu_pitch_init;
            tramsform_tobe_mapped_[2] = cloud_info_.imu_yaw_init;

            // Optionally ignore the IMU's magnetic heading for yaw.
            if (!use_imu_heading_initialization_)
                tramsform_tobe_mapped_[2] = 0;

            lastImuTransformation =
                pcl::getTransformation(0, 0, 0, cloud_info_.imu_roll_init, cloud_info_.imu_pitch_init,
                                       cloud_info_.imu_yaw_init);  // save imu before return;
            return;
        }

        // use imu pre-integration estimation for pose guess
        // Compute the relative transform between the IMU odometry of this frame
        // and the previous frame, apply it to the previous pose, and store the
        // result in transformTobeMapped.
        static bool            lastImuPreTransAvailable = false;
        static Eigen::Affine3f lastImuPreTransformation;
        // odom_available and imu_available are set in imageProjection.cpp:
        // imu_available means raw IMU samples exist within 0.01 s of the scan's
        // start/end; since the IMU usually runs much faster than the lidar,
        // this is normally true. odom_available likewise reflects whether IMU
        // odometry poses bracket the scan; both are normally available.
        if (cloud_info_.odom_available == true) {
            // cloudInfo comes from lio_sam/feature/cloud_info published by
            // featureExtraction.cpp; its initial_guess_* fields originate from
            // deskew/cloud_info published by ImageProjection.cpp, whose
            // odometryHandler subscribes to odometry/imu_incremental from
            // imuPreintegration.cpp. That topic's xyz is the IMU pose built on
            // top of the previous lidar frame — an absolute-on-previous-frame
            // pose, NOT the raw preintegration increment.
            // This is the current frame's initial pose estimate (from IMU
            // odometry), used below to compute the incremental transform.
            Eigen::Affine3f transBack = pcl::getTransformation(
                cloud_info_.initial_guess_x, cloud_info_.initial_guess_y, cloud_info_.initial_guess_z,
                cloud_info_.initial_guess_roll, cloud_info_.initial_guess_pitch, cloud_info_.initial_guess_yaw);
            if (lastImuPreTransAvailable == false) {
                // First time here: lastImuPreTransAvailable is a static flag
                // that starts false and flips to true permanently, so this
                // branch only seeds lastImuPreTransformation once.
                lastImuPreTransformation = transBack;
                lastImuPreTransAvailable = true;
            } else {
                // Incremental transform of this frame w.r.t. the previous one,
                // obtained from IMU odometry: lastImuPreTransformation is the
                // IMU pose at the previous lidar time, transBack at the current
                // one. Only inverse() * transBack yields the true increment —
                // never treat the imu_incremental poses themselves as the
                // inter-scan increment.
                Eigen::Affine3f transIncre = lastImuPreTransformation.inverse() * transBack;
                Eigen::Affine3f transTobe  = trans2Affine3f(tramsform_tobe_mapped_);
                Eigen::Affine3f transFinal = transTobe * transIncre;
                pcl::getTranslationAndEulerAngles(transFinal, tramsform_tobe_mapped_[3], tramsform_tobe_mapped_[4],
                                                  tramsform_tobe_mapped_[5], tramsform_tobe_mapped_[0],
                                                  tramsform_tobe_mapped_[1], tramsform_tobe_mapped_[2]);

                lastImuPreTransformation = transBack;

                lastImuTransformation =
                    pcl::getTransformation(0, 0, 0, cloud_info_.imu_roll_init, cloud_info_.imu_pitch_init,
                                           cloud_info_.imu_yaw_init);  // save imu before return;
                return;
            }
        }

        // use imu incremental estimation for pose guess (only rotation)
        if (cloud_info_.imu_available == true) {
            Eigen::Affine3f transBack  = pcl::getTransformation(0, 0, 0, cloud_info_.imu_roll_init,
                                                                cloud_info_.imu_pitch_init, cloud_info_.imu_yaw_init);
            Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;

            Eigen::Affine3f transTobe  = trans2Affine3f(tramsform_tobe_mapped_);
            Eigen::Affine3f transFinal = transTobe * transIncre;
            pcl::getTranslationAndEulerAngles(transFinal, tramsform_tobe_mapped_[3], tramsform_tobe_mapped_[4],
                                              tramsform_tobe_mapped_[5], tramsform_tobe_mapped_[0],
                                              tramsform_tobe_mapped_[1], tramsform_tobe_mapped_[2]);

            lastImuTransformation =
                pcl::getTransformation(0, 0, 0, cloud_info_.imu_roll_init, cloud_info_.imu_pitch_init,
                                       cloud_info_.imu_yaw_init);  // save imu before return;
            return;
        }
    }

    // Collects the most recent key-frame poses (walking backwards from the
    // newest until the configured window is filled) and builds the local map
    // from them.
    void extractForLoopClosure() {
        pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());
        const int                       numPoses = p_cloud_key_poses_3d_->size();
        for (int idx = numPoses - 1; idx >= 0; --idx) {
            if ((int) cloudToExtract->size() > surrounding_key_frame_size_)
                break;
            cloudToExtract->push_back(p_cloud_key_poses_3d_->points[idx]);
        }

        extractCloud(cloudToExtract);
    }

    /**
     * Builds the local map from key frames near the current pose.
     * 1. Build a kd-tree over all key-frame 3D positions.
     * 2. Radius-search around the current pose for nearby key frames.
     * 3. Downsample the found poses so they are not packed too densely.
     * 4. Also add the temporally most recent key frames (last 10 s).
     * 5. Each selected key frame's clouds are transformed to the odom frame and
     *    fused into the local corner/surface maps (extractCloud; cached in a
     *    map container to avoid recomputation).
     */
    void extractNearby() {
        pcl::PointCloud<PointType>::Ptr surroundingKeyPoses(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr surroundingKeyPosesDS(new pcl::PointCloud<PointType>());
        std::vector<int>                pointSearchInd;
        std::vector<float>              pointSearchSqDis;

        // extract all the nearby key poses and downsample them
        // kd-tree input: the full set of historical key-frame poses.
        p_kdtree_surrounding_key_poses_->setInputCloud(p_cloud_key_poses_3d_);  // create kd-tree
        // Fixed-radius neighbor search around the current pose:
        // surrounding_key_frame_search_radius_ is the radius (configured),
        // pointSearchInd receives the neighbor indices and pointSearchSqDis
        // their squared distances to the query point.
        PointType cur_pose_point;
        cur_pose_point.x = tramsform_tobe_mapped_[3];
        cur_pose_point.y = tramsform_tobe_mapped_[4];
        cur_pose_point.z = tramsform_tobe_mapped_[5];
        p_kdtree_surrounding_key_poses_->radiusSearch(cur_pose_point, (double) surrounding_key_frame_search_radius_,
                                                      pointSearchInd, pointSearchSqDis);
        for (int i = 0; i < (int) pointSearchInd.size(); ++i) {
            int id = pointSearchInd[i];
            // Collect the nearby key-frame poses.
            surroundingKeyPoses->push_back(p_cloud_key_poses_3d_->points[id]);
        }

        // Downsample the collected poses into surroundingKeyPosesDS.
        down_size_filter_surrounding_key_poses_.setInputCloud(surroundingKeyPoses);
        down_size_filter_surrounding_key_poses_.filter(*surroundingKeyPosesDS);
        // intensity actually stores the key-frame index; voxel filtering may
        // have interpolated it into a non-integer value, so snap each filtered
        // pose's intensity back to the index of its nearest real key frame.
        for (auto& pt : surroundingKeyPosesDS->points) {
            p_kdtree_surrounding_key_poses_->nearestKSearch(pt, 1, pointSearchInd, pointSearchSqDis);
            pt.intensity = p_cloud_key_poses_3d_->points[pointSearchInd[0]].intensity;
        }

        // also extract some latest key frames in case the robot rotates in one
        // position
        int numPoses = p_cloud_key_poses_3d_->size();
        // Additionally append key frames from the last 10 s to
        // surroundingKeyPosesDS (on top of the downsampled spatial neighbors).
        for (int i = numPoses - 1; i >= 0; --i) {
            if (time_laser_info_cur_ - p_cloud_key_poses_6d_->points[i].time < 10.0)
                surroundingKeyPosesDS->push_back(p_cloud_key_poses_3d_->points[i]);
            else
                break;
        }

        // Fuse the selected key frames' corner and surface clouds into the
        // local map.
        extractCloud(surroundingKeyPosesDS);
    }

    /**
     * Fuse the corner/surface clouds of the selected key frames (transformed
     * into the odom/map frame) into the local map, then downsample both maps.
     * A cache (laser_cloud_map_container_) avoids re-transforming clouds that
     * were already converted by an earlier call.
     */
    void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract) {
        p_laser_cloud_corner_from_map_->clear();
        p_laser_cloud_surf_from_map_->clear();
        SimpleTime timer;
        const int  numToExtract = (int) cloudToExtract->size();
        for (int i = 0; i < numToExtract; ++i) {
            // Drop frames farther than the search radius from the latest key pose.
            if (pointDistance(cloudToExtract->points[i], p_cloud_key_poses_3d_->back()) >
                surrounding_key_frame_search_radius_)
                continue;

            const int keyIndex = (int) cloudToExtract->points[i].intensity;  // key-frame index
            auto      cached   = laser_cloud_map_container_.find(keyIndex);
            if (cached != laser_cloud_map_container_.end()) {
                // Already transformed on a previous call: reuse the cached clouds.
                *p_laser_cloud_corner_from_map_ += cached->second.first;
                *p_laser_cloud_surf_from_map_ += cached->second.second;
            } else {
                // Transform this key frame's corner/surface clouds into the map
                // frame using its 6-DoF pose, accumulate them, and cache the result.
                pcl::PointCloud<PointType> cornerInMap = *transformPointCloud(
                    p_corner_cloud_key_frames_vec_[keyIndex], &p_cloud_key_poses_6d_->points[keyIndex]);
                pcl::PointCloud<PointType> surfInMap = *transformPointCloud(
                    p_surf_cloud_key_frames_vec_[keyIndex], &p_cloud_key_poses_6d_->points[keyIndex]);
                *p_laser_cloud_corner_from_map_ += cornerInMap;
                *p_laser_cloud_surf_from_map_ += surfInMap;
                laser_cloud_map_container_[keyIndex] = make_pair(cornerInMap, surfInMap);
            }
        }

        // Downsample the fused local corner map.
        down_size_filter_corner_.setInputCloud(p_laser_cloud_corner_from_map_);
        down_size_filter_corner_.filter(*p_laser_cloud_corner_from_map_ds_);
        laser_cloud_corner_from_map_ds_num_ = p_laser_cloud_corner_from_map_ds_->size();
        // Downsample the fused local surface map.
        down_size_filter_surf_.setInputCloud(p_laser_cloud_surf_from_map_);
        down_size_filter_surf_.filter(*p_laser_cloud_surf_from_map_ds_);
        laser_cloud_surf_from_map_ds_num_ = p_laser_cloud_surf_from_map_ds_->size();

        // Bound the cache so memory does not grow without limit.
        if (laser_cloud_map_container_.size() > 1000)
            laser_cloud_map_container_.clear();
    }

    // Build the local map around the current pose. No-op until the first key
    // frame exists. (An alternative loop-closure-specific extraction path used
    // to be selected by loop_closure_enable_flag_ and is currently disabled.)
    void extractSurroundingKeyFrames() {
        if (!p_cloud_key_poses_3d_->points.empty())
            extractNearby();
    }

    /**
     * Downsample the current scan's surface and corner feature clouds and
     * cache the resulting point counts for the matching step.
     */
    void downsampleCurrentScan() {
        // Surface features of the current scan.
        p_laser_cloud_surf_laset_ds_->clear();
        down_size_filter_surf_.setInputCloud(p_laser_cloud_surf_last_);
        down_size_filter_surf_.filter(*p_laser_cloud_surf_laset_ds_);
        laser_cloud_surf_laset_ds_num_ = p_laser_cloud_surf_laset_ds_->size();

        // Corner features of the current scan.
        p_laser_cloud_corner_last_ds_->clear();
        down_size_filter_corner_.setInputCloud(p_laser_cloud_corner_last_);
        down_size_filter_corner_.filter(*p_laser_cloud_corner_last_ds_);
        laser_cloud_corner_last_ds_num_ = p_laser_cloud_corner_last_ds_->size();
    }

    // Cache the current pose estimate as an affine transform; it is used by
    // pointAssociateToMap() to move scan points into the map frame.
    void updatePointAssociateToMap() {
        trans_point_associate_to_map_ = trans2Affine3f(tramsform_tobe_mapped_);
    }

    /**
     * Match each downsampled corner point of the current scan against an edge
     * line fitted from the local corner map, and compute a distance-weighted
     * point-to-line residual for the Gauss-Newton step.
     *
     * For every corner point:
     *  1. Transform it into the map frame with the current pose estimate.
     *  2. Find its 5 nearest neighbors in the local corner map; require the
     *     farthest to be within 1 m.
     *  3. Fit a line via eigen-decomposition of the neighbors' covariance
     *     (PCA); accept only if the largest eigenvalue dominates the second
     *     (> 3x), i.e. the neighbors really form a line.
     *  4. Compute the point-to-line distance and direction, weight both with a
     *     simple robust kernel s = 1 - 0.9*|d|, and keep the match if s > 0.1.
     */
    void cornerOptimization() {
        // Cache tramsform_tobe_mapped_ as the lidar-to-map transform used by
        // pointAssociateToMap() below.
        updatePointAssociateToMap();

#pragma omp parallel for num_threads(number_of_cores_)  // parallelize across corner points
        for (int i = 0; i < laser_cloud_corner_last_ds_num_; i++) {
            // pointOri: corner point in the lidar frame; pointSel: same point in the map frame.
            // coeff: distance-weighted point-to-line direction (residual in intensity).
            PointType          pointOri, pointSel, coeff;
            std::vector<int>   pointSearchInd;
            std::vector<float> pointSearchSqDis;

            pointOri = p_laser_cloud_corner_last_ds_->points[i];
            pointAssociateToMap(&pointOri, &pointSel);  // transform pointOri (lidar frame) into pointSel (map frame)
            // Find the 5 nearest neighbors of pointSel in the local corner map
            // (the kd-tree was set up beforehand in scan2MapOptimization).
            // pointSearchInd: neighbor indices in the map cloud.
            // pointSearchSqDis: squared distances to the query point.
            p_kdtree_corner_from_map_->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);

            cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));  // covariance matrix of the 5 neighbors
            cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));  // its eigenvalues
            cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));  // its eigenvectors

            // Accept the neighborhood only if even the farthest (5th) neighbor
            // is within 1 m; otherwise skip this corner point.
            if (pointSearchSqDis[4] < 1.0) {
                // (cx, cy, cz): centroid of the 5 neighbors.
                float cx = 0, cy = 0, cz = 0;
                for (int j = 0; j < 5; j++) {
                    cx += p_laser_cloud_corner_from_map_ds_->points[pointSearchInd[j]].x;
                    cy += p_laser_cloud_corner_from_map_ds_->points[pointSearchInd[j]].y;
                    cz += p_laser_cloud_corner_from_map_ds_->points[pointSearchInd[j]].z;
                }
                cx /= 5;
                cy /= 5;
                cz /= 5;
                // Build the covariance matrix matA1 = sum([ax,ay,az]^T [ax,ay,az]) / 5
                // of the neighbors about their centroid.
                float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;
                for (int j = 0; j < 5; j++) {
                    float ax = p_laser_cloud_corner_from_map_ds_->points[pointSearchInd[j]].x - cx;
                    float ay = p_laser_cloud_corner_from_map_ds_->points[pointSearchInd[j]].y - cy;
                    float az = p_laser_cloud_corner_from_map_ds_->points[pointSearchInd[j]].z - cz;

                    a11 += ax * ax;
                    a12 += ax * ay;
                    a13 += ax * az;
                    a22 += ay * ay;
                    a23 += ay * az;
                    a33 += az * az;
                }
                a11 /= 5;
                a12 /= 5;
                a13 /= 5;
                a22 /= 5;
                a23 /= 5;
                a33 /= 5;

                matA1.at<float>(0, 0) = a11;
                matA1.at<float>(0, 1) = a12;
                matA1.at<float>(0, 2) = a13;
                matA1.at<float>(1, 0) = a12;
                matA1.at<float>(1, 1) = a22;
                matA1.at<float>(1, 2) = a23;
                matA1.at<float>(2, 0) = a13;
                matA1.at<float>(2, 1) = a23;
                matA1.at<float>(2, 2) = a33;

                // Eigen-decomposition: the eigenvector of the largest eigenvalue
                // is the principal direction of the 5 neighbors (as in PCA).
                cv::eigen(matA1, matD1, matV1);

                // If the largest eigenvalue clearly dominates the second one
                // (one large, two small), the neighbors form a line and this
                // corner point can be matched against it; everything below
                // computes the point-to-line distance and its direction, with
                // the weight s finally deciding whether the match is kept.
                if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1)) {
                    // (x0, y0, z0): the query point, off the line.
                    float x0 = pointSel.x;
                    float y0 = pointSel.y;
                    float z0 = pointSel.z;
                    // Row 0 of matV1 is the line direction; (cx, cy, cz) is the
                    // centroid. p1 = (x1,y1,z1) and p2 = (x2,y2,z2) are two
                    // points on the line, 0.1 m on either side of the centroid
                    // (0.2 m apart).
                    float x1 = cx + 0.1 * matV1.at<float>(0, 0);
                    float y1 = cy + 0.1 * matV1.at<float>(0, 1);
                    float z1 = cz + 0.1 * matV1.at<float>(0, 2);
                    float x2 = cx - 0.1 * matV1.at<float>(0, 0);
                    float y2 = cy - 0.1 * matV1.at<float>(0, 1);
                    float z2 = cz - 0.1 * matV1.at<float>(0, 2);

                    // a012 = |(p0 - p1) x (p0 - p2)|: the magnitude of the cross
                    // product, i.e. the area of the parallelogram spanned by the
                    // two difference vectors (= twice the triangle p0-p1-p2
                    // area, since |a x b| = |a||b| sin(theta)).
                    float a012 = sqrt(((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) *
                                          ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) +
                                      ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) *
                                          ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) +
                                      ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)) *
                                          ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)));

                    // l12 = |p1 - p2|: length of the triangle's base (0.2 m nominal).
                    float l12 = sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2));

                    // (la, lb, lc): unit vector along the perpendicular from p0
                    // to the line, obtained from the second cross product
                    // (p1 - p2) x ((p0 - p1) x (p0 - p2)), normalized by
                    // a012 * l12 so that ||(la, lb, lc)|| = 1.
                    float la = ((y1 - y2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) +
                                (z1 - z2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))) /
                               a012 / l12;

                    float lb = -((x1 - x2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) -
                                 (z1 - z2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) /
                               a012 / l12;

                    float lc = -((x1 - x2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) +
                                 (y1 - y2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) /
                               a012 / l12;

                    // ld2: point-to-line distance = triangle height
                    // = (2 * area) / base = a012 / l12.
                    float ld2 = a012 / l12;

                    // Simple robust kernel: the larger the distance, the smaller
                    // the weight s.
                    float s = 1 - 0.9 * fabs(ld2);

                    // Weight the direction vector by s (this effectively weights
                    // the Jacobian row for this point).
                    coeff.x = s * la;
                    coeff.y = s * lb;
                    coeff.z = s * lc;
                    // intensity carries the weighted residual:
                    // s * ld2 = (1 - 0.9*ld2) * ld2 = ld2 - 0.9*ld2^2.
                    coeff.intensity = s * ld2;

                    // Empirical acceptance threshold: s > 0.1 is equivalent to
                    // ld2 < 1 m; only sufficiently close points are adopted as
                    // optimization constraints.
                    if (s > 0.1) {
                        laser_cloud_ori_corner_vec_[i]  = pointOri;
                        coeff_sel_corner_vec_[i]        = coeff;
                        laser_cloud_ori_corner_flag_[i] = true;
                    }
                }
            }
        }
    }

    /**
     * Match each downsampled surface point of the current scan against a plane
     * fitted from the local surface map:
     * 1. Transform the point into the map frame with the current pose, find
     *    its 5 nearest map neighbors (farthest within 1 m), and least-squares
     *    fit a plane through them; the match is valid only if all 5 neighbors
     *    lie close to the fitted plane.
     * 2. Compute the weighted point-to-plane distance and unit normal and
     *    store them as this point's residual coefficients.
     */
    void surfOptimization() {
        updatePointAssociateToMap();

#pragma omp parallel for num_threads(number_of_cores_)
        // Iterate over the current scan's surface points.
        for (int i = 0; i < laser_cloud_surf_laset_ds_num_; i++) {
            PointType          pointOri, pointSel, coeff;
            std::vector<int>   pointSearchInd;
            std::vector<float> pointSearchSqDis;

            // Surface point (still in the lidar frame).
            pointOri = p_laser_cloud_surf_laset_ds_->points[i];
            pointAssociateToMap(&pointOri, &pointSel);  // transform into the map frame with the current pose
            p_kdtree_surf_from_map_->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);

            // Solve the over-determined plane equation Ax + By + Cz + 1 = 0
            // through the 5 neighbors: matA0 holds their coordinates, matB0 is
            // the right-hand side (all -1), matX0 receives the coefficients
            // (A, B, C).
            Eigen::Matrix<float, 5, 3> matA0;
            Eigen::Matrix<float, 5, 1> matB0;
            Eigen::Vector3f            matX0;

            matA0.setZero();
            matB0.fill(-1);
            matX0.setZero();

            // Accept the neighborhood only if the farthest neighbor is within 1 m.
            if (pointSearchSqDis[4] < 1.0) {
                for (int j = 0; j < 5; j++) {
                    matA0(j, 0) = p_laser_cloud_surf_from_map_ds_->points[pointSearchInd[j]].x;
                    matA0(j, 1) = p_laser_cloud_surf_from_map_ds_->points[pointSearchInd[j]].y;
                    matA0(j, 2) = p_laser_cloud_surf_from_map_ds_->points[pointSearchInd[j]].z;
                }

                // Least-squares solve A * X = B via column-pivoting QR, giving
                // the (unnormalized) plane normal for AX + BY + CZ + 1 = 0.
                matX0 = matA0.colPivHouseholderQr().solve(matB0);

                // Plane ax + by + cz + d = 0 with the fitted coefficients and d = 1.
                float pa = matX0(0, 0);
                float pb = matX0(1, 0);
                float pc = matX0(2, 0);
                float pd = 1;

                // Normalize (pa, pb, pc, pd) so the normal is a unit vector.
                float ps = sqrt(pa * pa + pb * pb + pc * pc);
                pa /= ps;
                pb /= ps;
                pc /= ps;
                pd /= ps;

                // Plane-quality check: with a unit normal, |ax + by + cz + d|
                // equals the point-to-plane distance. If any of the 5 neighbors
                // is more than 0.2 m from the fitted plane, the points are too
                // scattered to define a plane and this match is rejected.
                bool planeValid = true;
                for (int j = 0; j < 5; j++) {
                    if (fabs(pa * p_laser_cloud_surf_from_map_ds_->points[pointSearchInd[j]].x +
                             pb * p_laser_cloud_surf_from_map_ds_->points[pointSearchInd[j]].y +
                             pc * p_laser_cloud_surf_from_map_ds_->points[pointSearchInd[j]].z + pd) > 0.2) {
                        planeValid = false;
                        break;
                    }
                }

                // With a valid plane, compute the current point's distance to it.
                if (planeValid) {
                    // Signed point-to-plane distance of pointSel (unit normal,
                    // so no division by the normal's magnitude is needed).
                    float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;

                    // Robust weight: larger distance -> smaller s. The double
                    // sqrt of the point's range is inherited from the original
                    // LOAM code (it down-weights residuals of nearby points
                    // less aggressively than a single sqrt would).
                    float s =
                        1 - 0.9 * fabs(pd2) /
                                sqrt(sqrt(pointOri.x * pointOri.x + pointOri.y * pointOri.y + pointOri.z * pointOri.z));

                    // Weighted unit plane normal (direction of the point-to-plane
                    // perpendicular) and weighted residual.
                    coeff.x         = s * pa;
                    coeff.y         = s * pb;
                    coeff.z         = s * pc;
                    coeff.intensity = s * pd2;

                    if (s > 0.1) {
                        // Close enough to the plane: add this surface point and
                        // its coefficients to the match set for the
                        // Gauss-Newton step.
                        laser_cloud_ori_surf_vec_[i]  = pointOri;
                        coeff_sel_surf_vec_[i]        = coeff;
                        laser_cloud_ori_surf_flag_[i] = true;
                    }
                }
            }
        }
    }

    /**
     * Gather all corner and surface points that were successfully matched
     * (flag set by cornerOptimization / surfOptimization) into the shared
     * optimization containers, then reset the flags for the next iteration.
     */
    void combineOptimizationCoeffs() {
        // Matched corner points and their weighted direction/residual coeffs.
        for (int idx = 0; idx < laser_cloud_corner_last_ds_num_; ++idx) {
            if (!laser_cloud_ori_corner_flag_[idx])
                continue;
            p_laser_cloud_ori_->push_back(laser_cloud_ori_corner_vec_[idx]);
            p_coff_sel_->push_back(coeff_sel_corner_vec_[idx]);
        }
        // Matched surface points and their coeffs.
        for (int idx = 0; idx < laser_cloud_surf_laset_ds_num_; ++idx) {
            if (!laser_cloud_ori_surf_flag_[idx])
                continue;
            p_laser_cloud_ori_->push_back(laser_cloud_ori_surf_vec_[idx]);
            p_coff_sel_->push_back(coeff_sel_surf_vec_[idx]);
        }
        // Clear all flags so the next optimization iteration starts fresh.
        std::fill(laser_cloud_ori_corner_flag_.begin(), laser_cloud_ori_corner_flag_.end(), false);
        std::fill(laser_cloud_ori_surf_flag_.begin(), laser_cloud_ori_surf_flag_.end(), false);
    }

    /**
     * @brief One Gauss-Newton iteration of the scan-to-map pose optimization
     * (despite the name, this is Gauss-Newton, not the LM of the LOAM paper).
     * The objective is the sum of weighted point-to-line / point-to-plane
     * distances; the state is the pose in tramsform_tobe_mapped_ expressed as
     * Euler angles plus translation. Inherited from loam_velodyne, the math is
     * done in a permuted "camera" frame (see axis mapping below); the
     * permutation is undone when the Jacobian rows are written. For reference
     * see LeGO-LOAM:
     * https://github.com/RobustFieldAutonomyLab/LeGO-LOAM/blob/896a7a95a8bc510b76819d4cc48707e344bad621/LeGO-LOAM/src/mapOptmization.cpp#L1229
     *
     * @param iterCount iteration index (called from scan2MapOptimization with
     *                  up to 30 iterations); on iteration 0 the approximate
     *                  Hessian is additionally checked for degeneracy.
     * @return true when the update step is small enough to declare convergence.
     */
    bool LMOptimization(int iterCount) {
        // Axis permutation between the lidar frame and the "camera" frame used
        // by the original loam_velodyne derivation:
        //   camera x = lidar y,    camera y = lidar z,    camera z = lidar x
        //   camera roll  = lidar pitch
        //   camera pitch = lidar yaw
        //   camera yaw   = lidar roll

        // lidar -> camera
        // Precompute sin/cos of the three Euler angles; used in the analytic
        // derivatives of the rotation matrix below.
        float srx = sin(tramsform_tobe_mapped_[1]);
        float crx = cos(tramsform_tobe_mapped_[1]);
        float sry = sin(tramsform_tobe_mapped_[2]);
        float cry = cos(tramsform_tobe_mapped_[2]);
        float srz = sin(tramsform_tobe_mapped_[0]);
        float crz = cos(tramsform_tobe_mapped_[0]);

        // p_laser_cloud_ori_ holds the corner and surface points matched by
        // cornerOptimization / surfOptimization; skip the iteration if there
        // are too few constraints.
        int laserCloudSelNum = p_laser_cloud_ori_->size();
        if (laserCloudSelNum < 50) {
            return false;
        }

        // matA is the Jacobian J (one row per matched point, 6 pose columns).
        cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));
        cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));
        cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));

        // matB is the (negated) residual vector, i.e. the weighted distances.
        cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));
        cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));

        // matX is the Gauss-Newton update step.
        cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));
        // NOTE(review): mat_p_ is declared *locally* and zero-initialized on
        // every call, but it is only computed when iterCount == 0. In upstream
        // LIO-SAM the equivalent matP is a class member that persists across
        // iterations; here, on iterations > 0 with is_degenerate_ set, matX is
        // multiplied by this all-zero matrix, zeroing the update (and making
        // the convergence test pass immediately). Verify whether mat_p_ was
        // meant to be a member.
        cv::Mat mat_p_(6, 6, CV_32F, cv::Scalar::all(0));

        PointType pointOri, coeff;

        // Build one Jacobian row and one residual entry per matched point.
        for (int i = 0; i < laserCloudSelNum; i++) {
            // lidar -> camera: permute the point coordinates.
            pointOri.x = p_laser_cloud_ori_->points[i].y;
            pointOri.y = p_laser_cloud_ori_->points[i].z;
            pointOri.z = p_laser_cloud_ori_->points[i].x;
            // lidar -> camera: permute the weighted direction vector as well.
            coeff.x         = p_coff_sel_->points[i].y;
            coeff.y         = p_coff_sel_->points[i].z;
            coeff.z         = p_coff_sel_->points[i].x;
            coeff.intensity = p_coff_sel_->points[i].intensity;
            // in camera
            // Analytic derivative of the distance w.r.t. each rotation angle:
            // chain rule of (dR/dangle * point) dotted with the residual's
            // direction vector (coeff). Derivations:
            // https://wykxwyc.github.io/2019/08/01/The-Math-Formula-in-LeGO-LOAM/
            // (note: ex/ey are swapped in the R0-R5 formulas at that link) and
            // https://blog.csdn.net/weixin_37835423/article/details/111587379#commentBox
            float arx =
                (crx * sry * srz * pointOri.x + crx * crz * sry * pointOri.y - srx * sry * pointOri.z) * coeff.x +
                (-srx * srz * pointOri.x - crz * srx * pointOri.y - crx * pointOri.z) * coeff.y +
                (crx * cry * srz * pointOri.x + crx * cry * crz * pointOri.y - cry * srx * pointOri.z) * coeff.z;
            // ary: derivative of the objective w.r.t. pitch.
            float ary = ((cry * srx * srz - crz * sry) * pointOri.x + (sry * srz + cry * crz * srx) * pointOri.y +
                         crx * cry * pointOri.z) *
                            coeff.x +
                        ((-cry * crz - srx * sry * srz) * pointOri.x + (cry * srz - crz * srx * sry) * pointOri.y -
                         crx * sry * pointOri.z) *
                            coeff.z;
            // arz: derivative of the objective w.r.t. yaw.
            float arz =
                ((crz * srx * sry - cry * srz) * pointOri.x + (-cry * crz - srx * sry * srz) * pointOri.y) * coeff.x +
                (crx * crz * pointOri.x - crx * srz * pointOri.y) * coeff.y +
                ((sry * srz + cry * crz * srx) * pointOri.x + (crz * sry - cry * srx * srz) * pointOri.y) * coeff.z;
            // camera -> lidar: write the row back in lidar angle order.
            matA.at<float>(i, 0) = arz;
            matA.at<float>(i, 1) = arx;
            matA.at<float>(i, 2) = ary;
            matA.at<float>(i, 3) = coeff.z;  // d(distance)/d(tx) = normal x component
            matA.at<float>(i, 4) = coeff.x;  // d(distance)/d(ty) = normal y component
            matA.at<float>(i, 5) = coeff.y;  // d(distance)/d(tz) = normal z component
                                             // matB stores the negated distance: J^T J dx = -J^T f(x)
            matB.at<float>(i, 0) = -coeff.intensity;
        }

        cv::transpose(matA, matAt);
        matAtA = matAt * matA;
        matAtB = matAt * matB;
        // Gauss-Newton normal equations: J^T J * dx = -J^T f(x); the minus sign
        // was folded into matB above. Solve matAtA * matX = matAtB via QR.
        cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);

        /**
         * Degeneracy handling (on the first iteration only): the approximate
         * Hessian H := J^T J must be well-conditioned for the step to be
         * trustworthy. The eigenvalues of H are inspected; eigenvectors of
         * small eigenvalues (degenerate directions) are zeroed in matV2, and
         * mat_p_ = matV.inv() * matV2 later projects the computed step so its
         * component along degenerate directions is discarded.
         */
        if (iterCount == 0) {
            cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));
            cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));
            cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));
            // Eigen-decomposition of H: matE holds the eigenvalues, matV the
            // eigenvectors. Degeneracy depends only on the constraint
            // directions (A), not on the residuals (b): a small eigenvalue
            // means a small perturbation of the constraints moves the solution
            // a lot in that direction.
            cv::eigen(matAtA, matE, matV);
            matV.copyTo(matV2);

            is_degenerate_ = false;
            // Eigenvalue threshold of 100: directions with smaller eigenvalues
            // are considered degenerate. A degenerate system may still have a
            // solution; the strategy here is to discard the solution's
            // component along the degenerate directions and keep only the
            // well-constrained components.
            float eignThre[6] = {100, 100, 100, 100, 100, 100};
            for (int i = 5; i >= 0; i--) {
                if (matE.at<float>(0, i) < eignThre[i]) {
                    for (int j = 0; j < 6; j++) {
                        matV2.at<float>(i, j) = 0;
                    }
                    is_degenerate_ = true;
                } else {
                    break;
                }
            }
            mat_p_ = matV.inv() * matV2;
        }

        // If the Hessian was found degenerate, project the step through mat_p_
        // so degenerate directions receive no update (see NOTE above regarding
        // iterations > 0).
        if (is_degenerate_) {
            cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
            matX.copyTo(matX2);
            matX = mat_p_ * matX2;
        }

        // Apply the update: x = x + delta_x.
        tramsform_tobe_mapped_[0] += matX.at<float>(0, 0);
        tramsform_tobe_mapped_[1] += matX.at<float>(1, 0);
        tramsform_tobe_mapped_[2] += matX.at<float>(2, 0);
        tramsform_tobe_mapped_[3] += matX.at<float>(3, 0);
        tramsform_tobe_mapped_[4] += matX.at<float>(4, 0);
        tramsform_tobe_mapped_[5] += matX.at<float>(5, 0);

        // Rotation step magnitude in degrees.
        float deltaR = sqrt(pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) + pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) +
                            pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));
        // Translation step magnitude in centimeters.
        float deltaT = sqrt(pow(matX.at<float>(3, 0) * 100, 2) + pow(matX.at<float>(4, 0) * 100, 2) +
                            pow(matX.at<float>(5, 0) * 100, 2));

        // Declare convergence once both step magnitudes fall below threshold.
        if (deltaR < 0.05 && deltaT < 0.05) {
            return true;  // converged
        }
        return false;  // keep optimizing
    }

    /**
     * Scan-to-map registration of the current frame:
     * 1. Require enough corner and surface features; otherwise warn and skip.
     * 2. Run up to 30 Gauss-Newton iterations, each consisting of:
     *    a) point-to-line matching of corner features (cornerOptimization),
     *    b) point-to-plane matching of surface features (surfOptimization),
     *    c) merging the matched points (combineOptimizationCoeffs),
     *    d) one pose update step (LMOptimization), stopping early on
     *       convergence. The result is stored in tramsform_tobe_mapped_.
     * 3. Fuse IMU roll/pitch with the optimized pose and constrain z
     *    (transformUpdate).
     */
    void scan2MapOptimization() {
        // Nothing to register against until the first key frame exists.
        if (p_cloud_key_poses_3d_->points.empty())
            return;

        const bool enoughFeatures = laser_cloud_corner_last_ds_num_ > edge_feature_min_valid_num_ &&
                                    laser_cloud_surf_laset_ds_num_ > surf_feature_min_valid_num_;
        if (!enoughFeatures) {
            RCLCPP_WARN(get_logger(), "Not enough features! Only %d edge and %d planar features available.",
                        laser_cloud_corner_last_ds_num_, laser_cloud_surf_laset_ds_num_);
            return;
        }

        // kd-trees over the downsampled local maps, used by the matchers.
        p_kdtree_corner_from_map_->setInputCloud(p_laser_cloud_corner_from_map_ds_);
        p_kdtree_surf_from_map_->setInputCloud(p_laser_cloud_surf_from_map_ds_);

        SimpleTime timer;
        // Iterate (up to 30 times) until LMOptimization reports convergence.
        for (int iterCount = 0; iterCount < 30; iterCount++) {
            p_laser_cloud_ori_->clear();
            p_coff_sel_->clear();

            cornerOptimization();         // point-to-line residuals for corners
            surfOptimization();           // point-to-plane residuals for surfaces
            combineOptimizationCoeffs();  // collect all matched constraints

            // One Gauss-Newton step on tramsform_tobe_mapped_.
            if (LMOptimization(iterCount) == true)
                break;
        }

        // Blend IMU roll/pitch into the optimized pose and constrain z.
        transformUpdate();
    }

    /**
     * 使用IMU的原始输出roll、pitch与当前估计的roll、pitch加权融合
     * 注意这里只对roll、pitch加权融合。同时有一个权重控制IMU的比重（默认0.01）
     */
    void transformUpdate() {
        if (cloud_info_.imu_available == true) {
            // 俯仰角小于1.4
            if (std::abs(cloud_info_.imu_pitch_init) < 1.4) {
                double          imuWeight = imu_rpy_weight_;
                tf2::Quaternion imuQuaternion;
                tf2::Quaternion transformQuaternion;
                double          rollMid, pitchMid, yawMid;

                // slerp roll
                // roll角求加权均值，用scan-to-map优化得到的位姿与imu原始RPY数据，进行加权平均
                transformQuaternion.setRPY(tramsform_tobe_mapped_[0], 0, 0);
                imuQuaternion.setRPY(cloud_info_.imu_roll_init, 0, 0);
                tf2::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
                tramsform_tobe_mapped_[0] = rollMid;

                // slerp pitch
                // pitch角求加权均值，用scan-to-map优化得到的位姿与imu原始RPY数据，进行加权平均
                transformQuaternion.setRPY(0, tramsform_tobe_mapped_[1], 0);
                imuQuaternion.setRPY(0, cloud_info_.imu_pitch_init, 0);
                tf2::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
                tramsform_tobe_mapped_[1] = pitchMid;
            }
        }

        // 更新当前帧位姿的roll, pitch, z坐标；因为是小车，roll、pitch是相对稳定的，
        // 不会有很大变动，一定程度上可以信赖imu的数据，z是进行高度约束
        tramsform_tobe_mapped_[0] = constraintTransformation(tramsform_tobe_mapped_[0], rotation_tollerance_);
        tramsform_tobe_mapped_[1] = constraintTransformation(tramsform_tobe_mapped_[1], rotation_tollerance_);
        tramsform_tobe_mapped_[5] = constraintTransformation(tramsform_tobe_mapped_[5], z_tollerance_);

        incremental_odometry_affine_back_ = trans2Affine3f(tramsform_tobe_mapped_);  // 当前帧位姿
    }

    // Clamp `value` into the symmetric interval [-limit, limit].
    float constraintTransformation(float value, float limit) {
        if (value > limit)
            return limit;
        return (value < -limit) ? -limit : value;
    }

    /**
     * 1. 是否将当前帧选择为关键帧。
     * 当距离不够且角度不够时，不会将当前帧选择为关键帧。
     * 对于非关键帧的点云帧，只做点云匹配校准里程计。对于关键帧，则会加入因子图进行优化
     */
    bool saveFrame() {
        if (p_cloud_key_poses_3d_->points.empty())
            return true;

        if (sensor == SensorType::LIVOX || sensor == SensorType::LIVOX_MID360) {
            if (time_laser_info_cur_ - p_cloud_key_poses_6d_->back().time > 1.0)
                return true;
        }

        Eigen::Affine3f transStart = pclPointToAffine3f(p_cloud_key_poses_6d_->back());
        Eigen::Affine3f transFinal =
            pcl::getTransformation(tramsform_tobe_mapped_[3], tramsform_tobe_mapped_[4], tramsform_tobe_mapped_[5],
                                   tramsform_tobe_mapped_[0], tramsform_tobe_mapped_[1], tramsform_tobe_mapped_[2]);
        Eigen::Affine3f transBetween = transStart.inverse() * transFinal;
        float           x, y, z, roll, pitch, yaw;
        pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);

        if (abs(roll) < surrounding_key_frame_adding_angle_threshold_ &&
            abs(pitch) < surrounding_key_frame_adding_angle_threshold_ &&
            abs(yaw) < surrounding_key_frame_adding_angle_threshold_ &&
            sqrt(x * x + y * y + z * z) < surrounding_key_frame_adding_dist_threshold_)
            return false;

        return true;
    }

    /**
     * Add the laser-odometry factor to the factor graph.
     *  - First key frame: a prior factor anchoring the graph, with loose
     *    variances on yaw and translation.
     *  - Later frames: a BetweenFactor measuring the pose increment from the
     *    previous key frame; the current pose estimate also serves as the
     *    initial value of the new variable node.
     */
    void addOdomFactor() {
        if (p_cloud_key_poses_3d_->points.empty()) {
            // First frame: prior factor (variances in rad^2 / m^2; translation
            // left loose).
            noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances(
                (Vector(6) << 1e-2, 1e-2, M_PI * M_PI, 1e8, 1e8, 1e8).finished());
            gtsam_graph_.add(PriorFactor<Pose3>(0, trans2gtsamPose(tramsform_tobe_mapped_), priorNoise));
            // Initial value for the first variable node.
            initial_estimate_.insert(0, trans2gtsamPose(tramsform_tobe_mapped_));
            return;
        }

        // Odometry increment between the previous key frame and the new pose.
        noiseModel::Diagonal::shared_ptr odometryNoise =
            noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
        gtsam::Pose3 poseFrom = pclPointTogtsamPose3(p_cloud_key_poses_6d_->points.back());
        gtsam::Pose3 poseTo   = trans2gtsamPose(tramsform_tobe_mapped_);
        // Arguments: previous node id, new node id, measured relative pose
        // (observation), noise model.
        gtsam_graph_.add(BetweenFactor<Pose3>(p_cloud_key_poses_3d_->size() - 1, p_cloud_key_poses_3d_->size(),
                                              poseFrom.between(poseTo), odometryNoise));
        // Initial value for the new variable node.
        initial_estimate_.insert(p_cloud_key_poses_3d_->size(), poseTo);
    }

    /**
     * 3. Add a GPS factor.
     *   1) Return early when the GPS queue or the keyframe list is empty, the
     *      trajectory is still short (first/last keyframe < 5 m apart), or the
     *      current pose covariance is already small (no correction needed).
     *   2) Scan the GPS queue for a sample within 0.2 s of the current laser frame.
     *   3) Skip samples whose covariance exceeds the threshold, or that lie within
     *      5 m of the last GPS position actually used.
     *   4) Build a gtsam::GPSFactor from x, y, z and covariance; z may optionally be
     *      replaced by the lidar estimate (GPS altitude is often unreliable).
     *   5) Set a_loop_is_closed_ so the optimizer iterates extra times afterwards
     *      and all historical keyframe poses are updated.
     */
    void addGPSFactor() {
        if (gps_queue_.empty())
            return;

        // wait for system initialized and settles down
        // No keyframes yet, or first/last keyframe closer than 5 m: skip GPS.
        if (p_cloud_key_poses_3d_->points.empty())
            return;
        else {
            if (pointDistance(p_cloud_key_poses_3d_->front(), p_cloud_key_poses_3d_->back()) < 5.0)
                return;
        }

        // pose covariance small, no need to correct
        // Pose covariance already small: no GPS correction needed.
        // NOTE(review): indices (3,3)/(4,4) are presumably x/y of the 6x6
        // [roll,pitch,yaw,x,y,z] marginal — confirm against gtsam's Pose3 ordering.
        if (pose_covariance_(3, 3) < pose_cov_threshold_ && pose_covariance_(4, 4) < pose_cov_threshold_)
            return;

        // last gps position
        static PointType lastGPSPoint;

        while (!gps_queue_.empty()) {
            // Drop GPS messages more than 0.2 s older than the current laser frame.
            if (stamp2Sec(gps_queue_.front().header.stamp) < time_laser_info_cur_ - 0.2) {
                // message too old
                gps_queue_.pop_front();
            }  // Stop once messages are more than 0.2 s newer than the laser frame.
            else if (stamp2Sec(gps_queue_.front().header.stamp) > time_laser_info_cur_ + 0.2) {
                // message too new
                break;
            } else {
                nav_msgs::msg::Odometry thisGPS = gps_queue_.front();
                gps_queue_.pop_front();

                // GPS too noisy, skip
                // GPS covariance too large: sample unusable.
                float noise_x = thisGPS.pose.covariance[0];
                float noise_y = thisGPS.pose.covariance[7];
                float noise_z = thisGPS.pose.covariance[14];
                if (noise_x > gps_cov_threshold_ || noise_y > gps_cov_threshold_)
                    continue;
                // GPS odometry position.
                float gps_x = thisGPS.pose.pose.position.x;
                float gps_y = thisGPS.pose.pose.position.y;
                float gps_z = thisGPS.pose.pose.position.z;
                if (!use_gps_elevation_) {
                    // GPS altitude is unreliable: use the lidar z estimate instead
                    // and trust it almost completely (tiny variance).
                    gps_z   = tramsform_tobe_mapped_[5];
                    noise_z = 0.01;
                }

                // GPS not properly initialized (0,0,0)
                // (0,0,0) means the receiver is not initialized: invalid sample.
                if (abs(gps_x) < 1e-6 && abs(gps_y) < 1e-6)
                    continue;

                // Add GPS every a few meters
                // Only accept a GPS sample every 5 m of travel.
                PointType curGPSPoint;
                curGPSPoint.x = gps_x;
                curGPSPoint.y = gps_y;
                curGPSPoint.z = gps_z;
                if (pointDistance(curGPSPoint, lastGPSPoint) < 5.0)
                    continue;
                else
                    lastGPSPoint = curGPSPoint;

                // Add the GPS factor; variances are floored at 1.0 so the graph
                // never over-trusts a single GPS fix.
                gtsam::Vector Vector3(3);
                Vector3 << max(noise_x, 1.0f), max(noise_y, 1.0f), max(noise_z, 1.0f);
                noiseModel::Diagonal::shared_ptr gps_noise = noiseModel::Diagonal::Variances(Vector3);
                gtsam::GPSFactor gps_factor(p_cloud_key_poses_3d_->size(), gtsam::Point3(gps_x, gps_y, gps_z),
                                            gps_noise);
                gtsam_graph_.add(gps_factor);

                a_loop_is_closed_ = true;
                break;
            }
        }
    }

    /**
     * 4. 添加回环因子（回环信息由独立线程提供）
     *   1）回环队列为空，直接返回
     *   2）遍历回环关系，将所有回环关系加入因子图
     *   3）清空回环关系
     *   4）设置aLoopGpsIsClosed标志位为true，后面因子图优化时会多次迭代且更新所有历史关键帧位姿
     */
    void addLoopFactor() {
        if (loop_index_queue_.empty())
            return;

        // 闭环队列
        for (int i = 0; i < (int) loop_index_queue_.size(); ++i) {
            // 闭环边对应两帧的索引
            int indexFrom = loop_index_queue_[i].first;
            int indexTo   = loop_index_queue_[i].second;
            // 闭环边的位姿变换
            gtsam::Pose3                            poseBetween  = loop_pose_queue_[i];
            gtsam::noiseModel::Diagonal::shared_ptr noiseBetween = loop_noise_queue_[i];
            gtsam_graph_.add(BetweenFactor<Pose3>(indexFrom, indexTo, poseBetween, noiseBetween));
        }

        loop_index_queue_.clear();
        loop_pose_queue_.clear();
        loop_noise_queue_.clear();
        a_loop_is_closed_ = true;
    }

    /**
     * Add factors, run graph optimization, and update the current pose.
     * 1. Only frames far enough from the previous keyframe (distance/angle) are
     *    adopted as keyframes and added to the factor graph.
     * 2. Add the lidar odometry factor:
     *      1) first frame: a prior factor with large variances;
     *      2) later frames: a BetweenFactor measuring the increment from the previous
     *         keyframe pose; the current estimate seeds the new variable node.
     * 3. Add a GPS factor (skipped when the GPS queue or keyframe list is empty, the
     *    trajectory is short, or the pose covariance is small; z may be replaced by
     *    the lidar estimate since GPS altitude is unreliable; sets a_loop_is_closed_).
     * 4. Add loop-closure factors supplied by the independent loop-closure thread,
     *    then clear the loop queues and set a_loop_is_closed_.
     * 5. Optimize the factor graph:
     *      1) feed the new factors and initial values into ISAM2;
     *      2) run one update iteration;
     *      3) when a_loop_is_closed_ is set, run 5 extra iterations;
     *      4) clear the local graph/values (ISAM2 keeps the history internally);
     *      5) append the optimized pose to the keyframe pose clouds;
     *      6) overwrite the current pose with the optimized result;
     *      7) cache the current frame's feature clouds;
     *      8) append the pose to the visualization path.
     */
    void saveKeyFramesAndFactor() {
        // If the motion since the previous keyframe is too small, skip this frame.
        if (saveFrame() == false)
            return;

        // odom factor
        addOdomFactor();

        // gps factor
        addGPSFactor();

        // loop factor
        addLoopFactor();

        // cout << "****************************************************" << endl;
        // gtsam_graph_.print("GTSAM Graph:\n");

        // update iSAM
        // One optimizer iteration with the freshly added factors.
        p_isam_->update(gtsam_graph_, initial_estimate_);
        p_isam_->update();

        // If new GPS or loop factors were added, iterate extra times; the historical
        // keyframe poses are then corrected later in correctPoses().
        if (a_loop_is_closed_ == true) {
            p_isam_->update();
            p_isam_->update();
            p_isam_->update();
            p_isam_->update();
            p_isam_->update();
        }

        // Clear the local graph/values after update; ISAM2 retains the history.
        gtsam_graph_.resize(0);
        initial_estimate_.clear();

        // save key poses
        PointType     thisPose3D;
        PointTypePose thisPose6D;
        Pose3         latestEstimate;

        // Optimization result: the pose of the newest variable node.
        isam_current_estimate_ = p_isam_->calculateEstimate();
        latestEstimate         = isam_current_estimate_.at<Pose3>(isam_current_estimate_.size() - 1);
        // cout << "****************************************************" << endl;
        // isam_current_estimate_.print("Current estimate: ");

        // Append the current pose to cloudKeyPoses3D.
        thisPose3D.x         = latestEstimate.translation().x();
        thisPose3D.y         = latestEstimate.translation().y();
        thisPose3D.z         = latestEstimate.translation().z();
        thisPose3D.intensity = p_cloud_key_poses_3d_->size();  // this can be used as index
        p_cloud_key_poses_3d_->push_back(thisPose3D);

        // Append the current pose to cloudKeyPoses6D.
        thisPose6D.x         = thisPose3D.x;
        thisPose6D.y         = thisPose3D.y;
        thisPose6D.z         = thisPose3D.z;
        thisPose6D.intensity = thisPose3D.intensity;  // this can be used as index
        thisPose6D.roll      = latestEstimate.rotation().roll();
        thisPose6D.pitch     = latestEstimate.rotation().pitch();
        thisPose6D.yaw       = latestEstimate.rotation().yaw();
        thisPose6D.time      = time_laser_info_cur_;
        p_cloud_key_poses_6d_->push_back(thisPose6D);

        // cout << "****************************************************" << endl;
        // cout << "Pose covariance:" << endl;
        // cout << p_isam_->marginalCovariance(isam_current_estimate_.size()-1) << endl <<
        // endl;
        // Marginal covariance of the newest pose (used by addGPSFactor's threshold check).
        pose_covariance_ = p_isam_->marginalCovariance(isam_current_estimate_.size() - 1);

        // save updated transform
        // Overwrite the working pose (transformTobeMapped) with the optimized result.
        tramsform_tobe_mapped_[0] = latestEstimate.rotation().roll();
        tramsform_tobe_mapped_[1] = latestEstimate.rotation().pitch();
        tramsform_tobe_mapped_[2] = latestEstimate.rotation().yaw();
        tramsform_tobe_mapped_[3] = latestEstimate.translation().x();
        tramsform_tobe_mapped_[4] = latestEstimate.translation().y();
        tramsform_tobe_mapped_[5] = latestEstimate.translation().z();

        // save all the received edge and surf points
        // Copy the downsampled corner/surface clouds of the current frame.
        pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());
        pcl::copyPointCloud(*p_laser_cloud_corner_last_ds_, *thisCornerKeyFrame);
        pcl::copyPointCloud(*p_laser_cloud_surf_laset_ds_, *thisSurfKeyFrame);

        // save key frame cloud
        // Store the downsampled feature clouds for this keyframe.
        p_corner_cloud_key_frames_vec_.push_back(thisCornerKeyFrame);
        p_surf_cloud_key_frames_vec_.push_back(thisSurfKeyFrame);

        // save path for visualization
        // Append the new pose to the odometry path.
        updatePath(thisPose6D);
    }

    /**
     * 更新所有历史关键帧位姿
     * 1. 只当aLoopGpsIsClosed标志位为真时才执行历史关键帧位姿更新
     * 2. 从因子图优化器中拿出所有关键帧的位姿（优化结果）
     * 3. 清空全局路径变量，替换成当前的关键帧位姿序列
     * 4. 将优化后的位姿更新为当前的位姿
     */
    void correctPoses() {
        if (p_cloud_key_poses_3d_->points.empty())
            return;

        // 这个标志位在新的回环因子加入或者GPS因子加入会被置为True
        if (a_loop_is_closed_ == true) {
            // clear map cache
            // laserCloudMapContainer缓存的是转换到map坐标系的点云
            // 在更新历史轨迹之后需要清空
            laser_cloud_map_container_.clear();
            // clear path
            global_path_.poses.clear();
            // update key poses
            // 更新因子图中所有变量节点的位姿，也就是所有历史关键帧的位姿
            int numPoses = isam_current_estimate_.size();
            for (int i = 0; i < numPoses; ++i) {
                p_cloud_key_poses_3d_->points[i].x = isam_current_estimate_.at<Pose3>(i).translation().x();
                p_cloud_key_poses_3d_->points[i].y = isam_current_estimate_.at<Pose3>(i).translation().y();
                p_cloud_key_poses_3d_->points[i].z = isam_current_estimate_.at<Pose3>(i).translation().z();

                p_cloud_key_poses_6d_->points[i].x     = p_cloud_key_poses_3d_->points[i].x;
                p_cloud_key_poses_6d_->points[i].y     = p_cloud_key_poses_3d_->points[i].y;
                p_cloud_key_poses_6d_->points[i].z     = p_cloud_key_poses_3d_->points[i].z;
                p_cloud_key_poses_6d_->points[i].roll  = isam_current_estimate_.at<Pose3>(i).rotation().roll();
                p_cloud_key_poses_6d_->points[i].pitch = isam_current_estimate_.at<Pose3>(i).rotation().pitch();
                p_cloud_key_poses_6d_->points[i].yaw   = isam_current_estimate_.at<Pose3>(i).rotation().yaw();

                updatePath(p_cloud_key_poses_6d_->points[i]);
            }

            a_loop_is_closed_ = false;
        }
    }

    void updatePath(const PointTypePose& pose_in) {
        geometry_msgs::msg::PoseStamped pose_stamped;
        pose_stamped.header.stamp    = rclcpp::Time(pose_in.time * 1e9);
        pose_stamped.header.frame_id = odometry_frame_;
        pose_stamped.pose.position.x = pose_in.x;
        pose_stamped.pose.position.y = pose_in.y;
        pose_stamped.pose.position.z = pose_in.z;
        tf2::Quaternion q;
        q.setRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);
        pose_stamped.pose.orientation.x = q.x();
        pose_stamped.pose.orientation.y = q.y();
        pose_stamped.pose.orientation.z = q.z();
        pose_stamped.pose.orientation.w = q.w();

        global_path_.poses.push_back(pose_stamped);
    }

    /**
     * Publish the lidar odometry.
     * 1. Publish the current pose as the globally optimized odometry.
     * 2. Broadcast the TF from the odom frame to the lidar frame.
     * 3. Publish a smooth "incremental" odometry (see note below).
     */
    void publishOdometry() {
        // Publish odometry for ROS (global)
        // Globally optimized lidar odometry result (mapping/odometry).
        nav_msgs::msg::Odometry laserOdometryROS;
        laserOdometryROS.header.stamp         = time_laser_info_stamp_;
        laserOdometryROS.header.frame_id      = odometry_frame_;
        laserOdometryROS.child_frame_id       = "odom_mapping";
        laserOdometryROS.pose.pose.position.x = tramsform_tobe_mapped_[3];
        laserOdometryROS.pose.pose.position.y = tramsform_tobe_mapped_[4];
        laserOdometryROS.pose.pose.position.z = tramsform_tobe_mapped_[5];
        tf2::Quaternion quat_tf;
        quat_tf.setRPY(tramsform_tobe_mapped_[0], tramsform_tobe_mapped_[1], tramsform_tobe_mapped_[2]);
        geometry_msgs::msg::Quaternion quat_msg;
        tf2::convert(quat_tf, quat_msg);
        laserOdometryROS.pose.pose.orientation = quat_msg;
        p_pub_laser_odometry_global_->publish(laserOdometryROS);

        // Publish TF: odom frame -> lidar frame.
        quat_tf.setRPY(tramsform_tobe_mapped_[0], tramsform_tobe_mapped_[1], tramsform_tobe_mapped_[2]);
        tf2::Transform t_odom_to_lidar = tf2::Transform(
            quat_tf, tf2::Vector3(tramsform_tobe_mapped_[3], tramsform_tobe_mapped_[4], tramsform_tobe_mapped_[5]));
        tf2::TimePoint                       time_point = tf2_ros::fromRclcpp(time_laser_info_stamp_);
        tf2::Stamped<tf2::Transform>         temp_odom_to_lidar(t_odom_to_lidar, time_point, odometry_frame_);
        geometry_msgs::msg::TransformStamped trans_odom_to_lidar;
        tf2::convert(temp_odom_to_lidar, trans_odom_to_lidar);
        trans_odom_to_lidar.child_frame_id = "lidar_link";
        p_broadcaster_->sendTransform(trans_odom_to_lidar);

        // Publish odometry for ROS (incremental)
        // Smooth lidar odometry result (mapping/odometry_incremental).
        /**
         * mapping/odometry_incremental is built from scan matching alone, without the
         * factor-graph optimized pose. The LIO-SAM author TixiaoShan explains this in
         * https://github.com/TixiaoShan/LIO-SAM/issues/92.
         * In the computation below, incrementalOdometryAffineFront is the previous
         * frame's pose after factor-graph optimization, while
         * incrementalOdometryAffineBack is the pose cached right after scan matching
         * and before optimization. So odometry_incremental uses the optimization only
         * indirectly and should lag slightly but stay smoother than odometry. In
         * practice the difference appears small, but the code is kept to preserve the
         * author's original design.
         */
        static bool                    lastIncreOdomPubFlag = false;
        static nav_msgs::msg::Odometry laserOdomIncremental;  // incremental odometry msg
        static Eigen::Affine3f         increOdomAffine;       // incremental odometry in affine
        // First message: initialize directly from the global odometry.
        if (lastIncreOdomPubFlag == false) {
            lastIncreOdomPubFlag = true;
            laserOdomIncremental = laserOdometryROS;
            increOdomAffine      = trans2Affine3f(tramsform_tobe_mapped_);
        } else {
            /**
             * incrementalOdometryAffineBack is recorded right after scan2MapOptimization,
             * so the incremental odometry never sees the factor-graph correction. That
             * keeps it smooth and jump-free, suitable for the IMU preintegration module.
             */
            // Pose increment between the previous and the current frame.
            Eigen::Affine3f affineIncre =
                incremental_odometry_affine_front_.inverse() * incremental_odometry_affine_back_;
            increOdomAffine = increOdomAffine * affineIncre;
            float x, y, z, roll, pitch, yaw;
            pcl::getTranslationAndEulerAngles(increOdomAffine, x, y, z, roll, pitch, yaw);
            if (cloud_info_.imu_available == true) {
                if (std::abs(cloud_info_.imu_pitch_init) < 1.4) {
                    double          imuWeight = 0.1;
                    tf2::Quaternion imuQuaternion;
                    tf2::Quaternion transformQuaternion;
                    double          rollMid, pitchMid, yawMid;

                    // slerp roll
                    // Weighted average of the roll angle with the IMU measurement.
                    transformQuaternion.setRPY(roll, 0, 0);
                    imuQuaternion.setRPY(cloud_info_.imu_roll_init, 0, 0);
                    tf2::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight))
                        .getRPY(rollMid, pitchMid, yawMid);
                    roll = rollMid;

                    // slerp pitch
                    // Weighted average of the pitch angle with the IMU measurement.
                    transformQuaternion.setRPY(0, pitch, 0);
                    imuQuaternion.setRPY(0, cloud_info_.imu_pitch_init, 0);
                    tf2::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight))
                        .getRPY(rollMid, pitchMid, yawMid);
                    pitch = pitchMid;
                }
            }
            laserOdomIncremental.header.stamp         = time_laser_info_stamp_;
            laserOdomIncremental.header.frame_id      = odometry_frame_;
            laserOdomIncremental.child_frame_id       = "odom_mapping";
            laserOdomIncremental.pose.pose.position.x = x;
            laserOdomIncremental.pose.pose.position.y = y;
            laserOdomIncremental.pose.pose.position.z = z;
            tf2::Quaternion quat_tf;
            quat_tf.setRPY(roll, pitch, yaw);
            geometry_msgs::msg::Quaternion quat_msg;
            tf2::convert(quat_tf, quat_msg);
            laserOdomIncremental.pose.pose.orientation = quat_msg;
            // covariance[0] doubles as a degeneracy flag for downstream consumers.
            if (is_degenerate_)
                laserOdomIncremental.pose.covariance[0] = 1;
            else
                laserOdomIncremental.pose.covariance[0] = 0;
        }
        p_pub_laser_odometry_incremental_->publish(laserOdomIncremental);
    }
    /**
     * Publish odometry, point clouds, and the trajectory.
     * 1. Publish the historical keyframe pose set.
     * 2. Publish the downsampled surface points of the local map.
     * 3. Publish the current frame's downsampled corner/surface points (map frame).
     * 4. Publish the odometry trajectory.
     */
    void publishFrames() {
        if (p_cloud_key_poses_3d_->points.empty())
            return;
        // publish key poses
        // Historical keyframe pose set.

        publishCloud(p_pub_key_poses_, p_cloud_key_poses_3d_, time_laser_info_stamp_, odometry_frame_);
        // Publish surrounding key frames
        // Downsampled surface-point set of the local map.
        publishCloud(p_pub_recent_key_frames_, p_laser_cloud_surf_from_map_ds_, time_laser_info_stamp_,
                     odometry_frame_);
        // publish registered key frame
        // Current frame's downsampled corner/surface points, transformed into the
        // map frame; only built when someone subscribes.
        if (p_pub_recent_key_frame_->get_subscription_count() != 0) {
            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
            PointTypePose                   thisPose6D = trans2PointTypePose(tramsform_tobe_mapped_);
            *cloudOut += *transformPointCloud(p_laser_cloud_corner_last_ds_, &thisPose6D);
            *cloudOut += *transformPointCloud(p_laser_cloud_surf_laset_ds_, &thisPose6D);
            publishCloud(p_pub_recent_key_frame_, cloudOut, time_laser_info_stamp_, odometry_frame_);
        }
        // publish registered high-res raw cloud
        // Current frame's deskewed raw cloud after registration; only built on demand.
        if (p_pub_cloud_registered_raw_->get_subscription_count() != 0) {
            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
            pcl::fromROSMsg(cloud_info_.cloud_deskewed, *cloudOut);
            PointTypePose thisPose6D = trans2PointTypePose(tramsform_tobe_mapped_);
            *cloudOut                = *transformPointCloud(cloudOut, &thisPose6D);
            publishCloud(p_pub_cloud_registered_raw_, cloudOut, time_laser_info_stamp_, odometry_frame_);
        }
        // publish path
        // Odometry trajectory for visualization.
        if (p_pub_path_->get_subscription_count() != 0) {
            global_path_.header.stamp    = time_laser_info_stamp_;
            global_path_.header.frame_id = odometry_frame_;
            p_pub_path_->publish(global_path_);
        }
    }

    /// Return the latest pose estimate (tramsform_tobe_mapped_) converted to a
    /// 6-DoF pose point ([x, y, z, roll, pitch, yaw]).
    PointTypePose GetPose6d() {
        return trans2PointTypePose(tramsform_tobe_mapped_);
    }
};

/**
 * Stress-test relocalization: repeatedly take the latest estimated pose,
 * perturb x/y (uniform [0, 0.5] m) and yaw (uniform [-pi, pi] rad), and ask
 * the map optimizer to relocalize from the perturbed initial guess.
 * NOTE(review): this loop runs back-to-back without sleeping; each iteration's
 * pacing relies entirely on the cost of relocation() itself.
 */
void relocationTest() {
    // Hardware entropy seeds the Mersenne Twister engine.
    std::random_device rd;
    std::mt19937       gen(rd());
    // Translation perturbation, uniform in [0.0, 0.5] m.
    std::uniform_real_distribution<> dis(0.0, 0.5);
    // Yaw perturbation, uniform in [-pi, pi] rad.
    std::uniform_real_distribution<> rot(-M_PI, M_PI);
    while (rclcpp::ok()) {
        std::vector<float> init_pose_6d(6);
        PointTypePose      newest_pose_6d = gp_map_opt->GetPose6d();
        // Add random error to x, y and yaw; keep z, roll and pitch untouched.
        init_pose_6d[0] = newest_pose_6d.x + dis(gen);
        init_pose_6d[1] = newest_pose_6d.y + dis(gen);
        init_pose_6d[2] = newest_pose_6d.z;
        init_pose_6d[3] = newest_pose_6d.roll;
        init_pose_6d[4] = newest_pose_6d.pitch;
        init_pose_6d[5] = newest_pose_6d.yaw + rot(gen);

        std::cout << "relocalization start" << std::endl;
        std::cout << "real_x:" << newest_pose_6d.x << " real_y:" << newest_pose_6d.y << " real_z:" << newest_pose_6d.z
                  << " real_roll:" << newest_pose_6d.roll << " real_pitch:" << newest_pose_6d.pitch
                  << " real_yaw:" << newest_pose_6d.yaw << std::endl;
        // Label fixed from "ini_yaw" to "init_yaw" for consistency with the others.
        std::cout << "init_x:" << init_pose_6d[0] << " init_y:" << init_pose_6d[1] << " init_z:" << init_pose_6d[2]
                  << " init_roll:" << init_pose_6d[3] << " init_pitch:" << init_pose_6d[4]
                  << " init_yaw:" << init_pose_6d[5] << std::endl;
        gp_map_opt->relocation(init_pose_6d);
    }
}

int main(int argc, char** argv) {
    rclcpp::init(argc, argv);

    rclcpp::NodeOptions options;
    options.use_intra_process_comms(true);
    rclcpp::executors::SingleThreadedExecutor exec;

    auto MO = std::make_shared<mapOptimization>(options);
    exec.add_node(MO);

    RCLCPP_INFO(rclcpp::get_logger("rclcpp"), "\033[1;32m----> Map Optimization Started.\033[0m");

    std::thread loopthread(&mapOptimization::loopClosureThread, MO);
    std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, MO);

    gp_map_opt = MO;
    // test reloaction
    //  std::thread RelocalizationTestThread(&relocationTest);

    exec.spin();

    rclcpp::shutdown();

    loopthread.join();
    visualizeMapThread.join();
    // RelocalizationTestThread.join();

    return 0;
}
