#include "mapOptmization.hpp"

#include <cmath>

#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/PriorFactor.h>

#include <pcl/registration/icp.h>

#include <visualization_msgs/Marker.h>
#include <visualization_msgs/MarkerArray.h>

/*loop clousre*/
float loopClosureFrequency;  //   回环检测频率
// int surroundingKeyframeSize;
float historyKeyframeSearchRadius;    // 回环检测 radius kdtree搜索半径
float historyKeyframeSearchTimeDiff;  //  帧间时间阈值
int historyKeyframeSearchNum;         //   回环时多少个keyframe拼成submap
float historyKeyframeFitnessScore;    // icp 匹配阈值
bool potentialLoopFlag = false;

CloudPtr copy_cloudKeyPoses3D(new CloudType());
CloudPtrPose copy_cloudKeyPoses6D(new CloudTypePose());

ros::Publisher pubHistoryKeyFrames;  //  发布 loop history keyframe submap
ros::Publisher pubIcpKeyFrames;
ros::Publisher pubRecentKeyFrames;
ros::Publisher pubRecentKeyFrame;
ros::Publisher pubCloudRegisteredRaw;
ros::Publisher pubLoopConstraintEdge;

std::map<int, int> loopIndexContainer;  // from new to old
std::vector<std::pair<int, int>> loopIndexQueue;
std::vector<gtsam::Pose3> loopPoseQueue;
// std::vector<gtsam::noiseModel::Diagonal::shared_ptr> loopNoiseQueue;
std::vector<gtsam::noiseModel::Base::shared_ptr> loopNoiseQueue;
std::deque<std_msgs::Float64MultiArray> loopInfoVec;

// 局部关键帧构建的map点云，对应kdtree，用于scan-to-map找相邻点
pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses(new pcl::KdTreeFLANN<PointType>());

// 回环检测三大要素
//  1.设置最小时间差，太近没必要
//  2.控制回环的频率，避免频繁检测，每检测一次，就做一次等待
//  3.根据当前最小距离重新计算等待时间
bool detectLoopClosureDistance(int* latestID, int* closestID) {
    // 当前关键帧帧
    int loopKeyCur = copy_cloudKeyPoses3D->size() - 1;  //  当前关键帧索引
    int loopKeyPre = -1;

    // 当前帧已经添加过闭环对应关系，不再继续添加
    auto it = loopIndexContainer.find(loopKeyCur);
    if (it != loopIndexContainer.end()) return false;
    // 在历史关键帧中查找与当前关键帧距离最近的关键帧集合
    std::vector<int> pointSearchIndLoop;                         //  候选关键帧索引
    std::vector<float> pointSearchSqDisLoop;                     //  候选关键帧距离
    kdtreeHistoryKeyPoses->setInputCloud(copy_cloudKeyPoses3D);  //  历史帧构建kdtree
    kdtreeHistoryKeyPoses->radiusSearch(copy_cloudKeyPoses3D->back(), historyKeyframeSearchRadius, pointSearchIndLoop, pointSearchSqDisLoop, 0);
    // 在候选关键帧集合中，找到与当前帧时间相隔较远的帧，设为候选匹配帧
    for (int i = 0; i < (int)pointSearchIndLoop.size(); ++i) {
        int id = pointSearchIndLoop[i];
        if (abs(copy_cloudKeyPoses6D->points[id].time - lidar_end_time) > historyKeyframeSearchTimeDiff) {
            loopKeyPre = id;
            break;
        }
    }
    if (loopKeyPre == -1 || loopKeyCur == loopKeyPre) return false;
    *latestID = loopKeyCur;
    *closestID = loopKeyPre;

    ROS_INFO("Find loop clousre frame ");
    return true;
}

// 提取key索引的关键帧前后相邻若干帧的关键帧特征点集合，降采样
void loopFindNearKeyframes(CloudPtr& nearKeyframes, const int& key, const int& searchNum) {
    // 提取key索引的关键帧前后相邻若干帧的关键帧特征点集合
    nearKeyframes->clear();
    int cloudSize = copy_cloudKeyPoses6D->size();
    auto surfcloud_keyframes_size = surfCloudKeyFrames.size();
    for (int i = -searchNum; i <= searchNum; ++i) {
        int keyNear = key + i;
        if (keyNear < 0 || keyNear >= cloudSize) continue;

        if (keyNear < 0 || keyNear >= surfcloud_keyframes_size) continue;

        // 注意：cloudKeyPoses6D 存储的是 T_w_b , 而点云是lidar系下的，构建icp的submap时，需要通过外参数T_b_lidar 转换 , 参考pointBodyToWorld 的转换
        *nearKeyframes +=
            *transformPointCloud(surfCloudKeyFrames[keyNear], &copy_cloudKeyPoses6D->points[keyNear]);  //  fast-lio 没有进行特征提取，默认点云就是surf
    }

    if (nearKeyframes->empty()) return;

    // 降采样
    CloudPtr cloud_temp(new CloudType());
    downSizeFilterICP.setInputCloud(nearKeyframes);
    downSizeFilterICP.filter(*cloud_temp);
    *nearKeyframes = *cloud_temp;
}

void performLoopClosure() {
    ros::Time timeLaserInfoStamp = ros::Time().fromSec(lidar_end_time);  //  时间戳
    string odometryFrame = "camera_init";

    if (cloudKeyPoses3D->points.empty() == true) {
        return;
    }

    // 当前关键帧索引，候选闭环匹配帧索引
    int loopKeyCur;
    int loopKeyPre;
    // 在历史关键帧中查找与当前关键帧距离最近的关键帧集合，选择时间相隔较远的一帧作为候选闭环帧
    if (detectLoopClosureDistance(&loopKeyCur, &loopKeyPre) == false) {
        return;
    }

    // 提取
    CloudPtr cureKeyframeCloud(new CloudType());  //  cue keyframe
    CloudPtr prevKeyframeCloud(new CloudType());  //   history keyframe submap
    {
        // 提取当前关键帧特征点集合，降采样
        loopFindNearKeyframes(cureKeyframeCloud, loopKeyCur, 0);  //  将cur keyframe 转换到world系下
        // 提取闭环匹配关键帧前后相邻若干帧的关键帧特征点集合，降采样
        loopFindNearKeyframes(prevKeyframeCloud, loopKeyPre, historyKeyframeSearchNum);  //  选取historyKeyframeSearchNum个keyframe拼成submap
        // 如果特征点较少，返回
        // if (cureKeyframeCloud->size() < 300 || prevKeyframeCloud->size() < 1000)
        //     return;
        // 发布闭环匹配关键帧局部map
        if (pubHistoryKeyFrames.getNumSubscribers() != 0) publishCloud(&pubHistoryKeyFrames, prevKeyframeCloud, timeLaserInfoStamp, odometryFrame);
    }

    // ICP Settings
    pcl::IterativeClosestPoint<PointType, PointType> icp;
    icp.setMaxCorrespondenceDistance(150);  // giseop , use a value can cover 2*historyKeyframeSearchNum range in meter
    icp.setMaximumIterations(100);
    icp.setTransformationEpsilon(1e-6);
    icp.setEuclideanFitnessEpsilon(1e-6);
    icp.setRANSACIterations(0);

    // scan-to-map，调用icp匹配
    icp.setInputSource(cureKeyframeCloud);
    icp.setInputTarget(prevKeyframeCloud);
    CloudPtr unused_result(new CloudType());
    icp.align(*unused_result);

    // 未收敛，或者匹配不够好
    if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore) return;

    std::cout << "icp  success  " << std::endl;

    // 发布当前关键帧经过闭环优化后的位姿变换之后的特征点云
    if (pubIcpKeyFrames.getNumSubscribers() != 0) {
        CloudPtr closed_cloud(new CloudType());
        pcl::transformPointCloud(*cureKeyframeCloud, *closed_cloud, icp.getFinalTransformation());
        publishCloud(&pubIcpKeyFrames, closed_cloud, timeLaserInfoStamp, odometryFrame);
    }

    // 闭环优化得到的当前关键帧与闭环关键帧之间的位姿变换
    float x, y, z, roll, pitch, yaw;
    Eigen::Affine3f correctionLidarFrame;
    correctionLidarFrame = icp.getFinalTransformation();

    // 闭环优化前当前帧位姿
    Eigen::Affine3f tWrong = pclPointToAffine3f(copy_cloudKeyPoses6D->points[loopKeyCur]);
    // 闭环优化后当前帧位姿
    Eigen::Affine3f tCorrect = correctionLidarFrame * tWrong;
    pcl::getTranslationAndEulerAngles(tCorrect, x, y, z, roll, pitch, yaw);  //  获取上一帧 相对 当前帧的 位姿
    gtsam::Pose3 poseFrom = gtsam::Pose3(gtsam::Rot3::RzRyRx(roll, pitch, yaw), gtsam::Point3(x, y, z));
    // 闭环匹配帧的位姿
    gtsam::Pose3 poseTo = pclPointTogtsamPose3(copy_cloudKeyPoses6D->points[loopKeyPre]);
    gtsam::Vector Vector6(6);
    float noiseScore = icp.getFitnessScore();  //  loop_clousre  noise from icp
    Vector6 << noiseScore, noiseScore, noiseScore, noiseScore, noiseScore, noiseScore;
    gtsam::noiseModel::Diagonal::shared_ptr constraintNoise = gtsam::noiseModel::Diagonal::Variances(Vector6);
    std::cout << "loopNoiseQueue   =   " << noiseScore << std::endl;

    // 添加闭环因子需要的数据
    mtx.lock();
    loopIndexQueue.push_back(make_pair(loopKeyCur, loopKeyPre));
    loopPoseQueue.push_back(poseFrom.between(poseTo));
    loopNoiseQueue.push_back(constraintNoise);
    mtx.unlock();

    loopIndexContainer[loopKeyCur] = loopKeyPre;  //   使用hash map 存储回环对
}

// 找到与给定关键帧在一定范围内的所有关键帧，并对其进行下采样
void loopFindNearKeyframesWithRespectTo(CloudPtr& nearKeyframes, const int& key, const int& searchNum, const int _wrt_key) {
    // 提取附近的关键帧
    nearKeyframes->clear();
    int cloudSize = copy_cloudKeyPoses6D->size();    // 获取关键帧云的大小
    for (int i = -searchNum; i <= searchNum; ++i) {  // 在给定关键帧的前后searchNum范围内寻找关键帧
        int keyNear = key + i;
        if (keyNear < 0 || keyNear >= cloudSize)  // 如果keyNear超出范围，则继续下一次循环
            continue;
        // 把找到的关键帧转换到给定的关键帧的坐标系下，并添加到nearKeyframes中
        *nearKeyframes += *transformPointCloud(surfCloudKeyFrames[keyNear], &copy_cloudKeyPoses6D->points[_wrt_key]);
    }

    if (nearKeyframes->empty())  // 如果nearKeyframes为空，则直接返回
        return;

    // 下采样nearKeyframes
    CloudPtr cloud_temp(new CloudType());            // 创建一个临时的点云对象
    downSizeFilterICP.setInputCloud(nearKeyframes);  // 把nearKeyframes设置为输入点云
    downSizeFilterICP.filter(*cloud_temp);           // 对输入点云进行下采样，并保存结果到cloud_temp
    *nearKeyframes = *cloud_temp;                    // 用下采样后的点云替换原始的nearKeyframes
}

void performSCLoopClosure() {
    ros::Time timeLaserInfoStamp = ros::Time().fromSec(lidar_end_time);  //  时间戳
    string odometryFrame = "camera_init";

    if (cloudKeyPoses3D->points.empty() == true) return;

    // find keys
    auto detectResult = scManager.detectLoopClosureID();  // first: nn index, second: yaw diff
    int loopKeyCur = copy_cloudKeyPoses3D->size() - 1;
    ;
    int loopKeyPre = detectResult.first;
    float yawDiffRad = detectResult.second;  // not use for v1 (because pcl icp withi initial somthing wrong...)
    if (loopKeyPre == -1)                    // No loop found
        return;

    std::cout << "SC loop found! between " << loopKeyCur << " and " << loopKeyPre << "." << std::endl;  // giseop

    CloudPtr cureKeyframeCloud(new CloudType());
    CloudPtr prevKeyframeCloud(new CloudType());
    {
        int base_key = 0;
        // 实际上是将相邻histNum叠加在一起去配准，
        loopFindNearKeyframesWithRespectTo(cureKeyframeCloud, loopKeyCur, 0, base_key);                         // giseop
        loopFindNearKeyframesWithRespectTo(prevKeyframeCloud, loopKeyPre, historyKeyframeSearchNum, base_key);  // giseop
        // 如果不叠加，getFitnessScore分数很高，根本上不去
        // loopFindNearKeyframes(cureKeyframeCloud, loopKeyCur, 2);                         // giseop
        // loopFindNearKeyframes(prevKeyframeCloud, loopKeyPre, historyKeyframeSearchNum);  // giseop

        if (cureKeyframeCloud->size() < 300 || prevKeyframeCloud->size() < 1000) return;
        if (pubHistoryKeyFrames.getNumSubscribers() != 0) publishCloud(&pubHistoryKeyFrames, prevKeyframeCloud, timeLaserInfoStamp, odometryFrame);
    }

    // ICP Settings
    static pcl::IterativeClosestPoint<PointType, PointType> icp;
    icp.setMaxCorrespondenceDistance(150);  // giseop , use a value can cover 2*historyKeyframeSearchNum range in meter
    icp.setMaximumIterations(100);
    icp.setTransformationEpsilon(1e-6);
    icp.setEuclideanFitnessEpsilon(1e-6);
    icp.setRANSACIterations(0);

    // Align clouds
    icp.setInputSource(cureKeyframeCloud);
    icp.setInputTarget(prevKeyframeCloud);
    CloudPtr unused_result(new CloudType());
    icp.align(*unused_result);
    // giseop
    // TODO icp align with initial

    if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore) {
        std::cout << "ICP fitness failed (" << icp.getFitnessScore() << " > " << historyKeyframeFitnessScore << "). Reject" << std::endl;
        return;
    } else {
        std::cout << "ICP fitness passed (" << icp.getFitnessScore() << " < " << historyKeyframeFitnessScore << "). Add" << std::endl;
    }

    // publish corrected cloud
    if (pubIcpKeyFrames.getNumSubscribers() != 0) {
        CloudPtr closed_cloud(new CloudType());
        pcl::transformPointCloud(*cureKeyframeCloud, *closed_cloud, icp.getFinalTransformation());
        publishCloud(&pubIcpKeyFrames, closed_cloud, timeLaserInfoStamp, odometryFrame);
    }

    // Get pose transformation
    float x, y, z, roll, pitch, yaw;
    Eigen::Affine3f correctionLidarFrame;
    correctionLidarFrame = icp.getFinalTransformation();

    // giseop
    pcl::getTranslationAndEulerAngles(correctionLidarFrame, x, y, z, roll, pitch, yaw);
    gtsam::Pose3 poseFrom = gtsam::Pose3(gtsam::Rot3::RzRyRx(roll, pitch, yaw), gtsam::Point3(x, y, z));
    gtsam::Pose3 poseTo = gtsam::Pose3(gtsam::Rot3::RzRyRx(0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, 0.0));

    // giseop, robust kernel for a SC loop
    float robustNoiseScore = 0.5;  // constant is ok...
    gtsam::Vector robustNoiseVector6(6);
    robustNoiseVector6 << robustNoiseScore, robustNoiseScore, robustNoiseScore, robustNoiseScore, robustNoiseScore, robustNoiseScore;
    gtsam::noiseModel::Base::shared_ptr robustConstraintNoise;
    robustConstraintNoise = gtsam::noiseModel::Robust::Create(
        gtsam::noiseModel::mEstimator::Cauchy::Create(
            1),  // optional: replacing Cauchy by DCS or GemanMcClure, but with a good front-end loop detector, Cauchy is empirically enough.
        gtsam::noiseModel::Diagonal::Variances(robustNoiseVector6));  // - checked it works. but with robust kernel, map modification may be delayed (i.e,.
                                                                      // requires more true-positive loop factors)

    // Add pose constraint
    mtx.lock();
    loopIndexQueue.push_back(make_pair(loopKeyCur, loopKeyPre));
    loopPoseQueue.push_back(poseFrom.between(poseTo));
    loopNoiseQueue.push_back(robustConstraintNoise);
    mtx.unlock();

    // add loop constriant
    loopIndexContainer.insert(std::pair<int, int>(loopKeyCur, loopKeyPre));  // giseop for multimap
}

// 回环检测线程
void loopClosureThread() {
    if (EN_LOOP) {
        ros::Rate rate(loopClosureFrequency);  //   回环频率
        while (ros::ok() && startFlag) {
            rate.sleep();

            mtx.lock();
            *copy_cloudKeyPoses3D = *cloudKeyPoses3D;
            *copy_cloudKeyPoses6D = *cloudKeyPoses6D;
            mtx.unlock();

            // performLoopClosure();    //  回环检测
            performSCLoopClosure();
            visualizeLoopClosure();  // rviz展示闭环边
        }
    }
}

// 添加闭环因子
void addLoopFactor() {
    if (loopIndexQueue.empty()) return;

    // 闭环队列包含多个闭环候选对，每个对应两个索引值：indexFrom和indexTo，分别表示当前帧（cur）和先前帧（pre）的索引
    for (int i = 0; i < (int)loopIndexQueue.size(); ++i) {
        // 闭环边对应两帧的索引
        int indexFrom = loopIndexQueue[i].first;  //   cur
        int indexTo = loopIndexQueue[i].second;   //    pre
        // 闭环边的位姿变换，表示从当前帧到先前帧的相对位姿变换
        gtsam::Pose3 poseBetween = loopPoseQueue[i];
        auto noiseBetween = loopNoiseQueue[i];  // 闭环边的噪声模型noiseBetween  gtsam::noiseModel::Diagonal::shared_ptr
        gtSAMgraph.add(gtsam::BetweenFactor<gtsam::Pose3>(indexFrom, indexTo, poseBetween, noiseBetween));
    }
    // 处理完所有的闭环候选对后，进行清理
    loopIndexQueue.clear();
    loopPoseQueue.clear();
    loopNoiseQueue.clear();
    aLoopIsClosed = true;
}

// rviz展示闭环边
void visualizeLoopClosure() {
    ros::Time timeLaserInfoStamp = ros::Time().fromSec(lidar_end_time);  //  时间戳
    string odometryFrame = "camera_init";

    if (loopIndexContainer.empty()) return;

    visualization_msgs::MarkerArray markerArray;
    // 闭环顶点
    visualization_msgs::Marker markerNode;
    markerNode.header.frame_id = odometryFrame;
    markerNode.header.stamp = timeLaserInfoStamp;
    markerNode.action = visualization_msgs::Marker::ADD;
    markerNode.type = visualization_msgs::Marker::SPHERE_LIST;
    markerNode.ns = "loop_nodes";
    markerNode.id = 0;
    markerNode.pose.orientation.w = 1;
    markerNode.scale.x = 0.3;
    markerNode.scale.y = 0.3;
    markerNode.scale.z = 0.3;
    markerNode.color.r = 0;
    markerNode.color.g = 0.8;
    markerNode.color.b = 1;
    markerNode.color.a = 1;
    // 闭环边
    visualization_msgs::Marker markerEdge;
    markerEdge.header.frame_id = odometryFrame;
    markerEdge.header.stamp = timeLaserInfoStamp;
    markerEdge.action = visualization_msgs::Marker::ADD;
    markerEdge.type = visualization_msgs::Marker::LINE_LIST;
    markerEdge.ns = "loop_edges";
    markerEdge.id = 1;
    markerEdge.pose.orientation.w = 1;
    markerEdge.scale.x = 0.1;
    markerEdge.color.r = 0.9;
    markerEdge.color.g = 0.9;
    markerEdge.color.b = 0;
    markerEdge.color.a = 1;

    // 遍历闭环
    for (auto it = loopIndexContainer.begin(); it != loopIndexContainer.end(); ++it) {
        int key_cur = it->first;
        int key_pre = it->second;
        geometry_msgs::Point p;
        p.x = copy_cloudKeyPoses6D->points[key_cur].x;
        p.y = copy_cloudKeyPoses6D->points[key_cur].y;
        p.z = copy_cloudKeyPoses6D->points[key_cur].z;
        markerNode.points.push_back(p);
        markerEdge.points.push_back(p);
        p.x = copy_cloudKeyPoses6D->points[key_pre].x;
        p.y = copy_cloudKeyPoses6D->points[key_pre].y;
        p.z = copy_cloudKeyPoses6D->points[key_pre].z;
        markerNode.points.push_back(p);
        markerEdge.points.push_back(p);
    }

    markerArray.markers.push_back(markerNode);
    markerArray.markers.push_back(markerEdge);
    pubLoopConstraintEdge.publish(markerArray);
}

void LoadParamsLoop(ros::NodeHandle& nh) {
    // loop clousre
    nh.param<float>("loopClosureFrequency", loopClosureFrequency, 4.0);
    // nh.param<int>("surroundingKeyframeSize", surroundingKeyframeSize, 50);
    nh.param<float>("historyKeyframeSearchRadius", historyKeyframeSearchRadius, 1.5);
    nh.param<float>("historyKeyframeSearchTimeDiff", historyKeyframeSearchTimeDiff, 30.0);
    nh.param<int>("historyKeyframeSearchNum", historyKeyframeSearchNum, 20);
    nh.param<float>("historyKeyframeFitnessScore", historyKeyframeFitnessScore, 0.3);

    // loop clousre
    // 发布闭环匹配关键帧局部map
    pubHistoryKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("/loop_closure/icp_history_cloud", 1);
    // 发布当前关键帧经过闭环优化后的位姿变换之后的特征点云
    pubIcpKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("/loop_closure/icp_corrected_cloud", 1);
    // 发布闭环边，rviz中表现为闭环帧之间的连线
    pubLoopConstraintEdge = nh.advertise<visualization_msgs::MarkerArray>("/loop_closure/constraints", 1);
}
