#include "cartographer/mapping/internal/2d/local_trajectory_builder_2d.h"
#include <limits>
#include <memory>
#include "absl/memory/memory.h"
#include "cartographer/metrics/family_factory.h"
#include "cartographer/sensor/range_data.h"

namespace cartographer
{
    namespace mapping
    {
        // Convenience aliases for the owning result pointers returned by this
        // builder.  C++11 alias declarations are preferred over 'typedef'.
        using Local2DInsertResPtr = std::unique_ptr<LocalTrajectoryBuilder2D::InsertionResult>;
        using Local2DMatchResPtr = std::unique_ptr<LocalTrajectoryBuilder2D::MatchingResult>;

        // File-local metric handles.  They start as null objects (no-op sinks)
        // so that instrumentation calls are always safe; RegisterMetrics()
        // replaces them with real gauges/histograms from a FamilyFactory.
        static auto *kLocalSlamLatencyMetric = metrics::Gauge::Null();
        static auto *kLocalSlamRealTimeRatio = metrics::Gauge::Null();
        static auto *kLocalSlamCpuRealTimeRatio = metrics::Gauge::Null();
        static auto *kRealTimeCorrelativeScanMatcherScoreMetric = metrics::Histogram::Null();
        static auto *kCeresScanMatcherCostMetric = metrics::Histogram::Null();
        static auto *kScanMatcherResidualDistanceMetric = metrics::Histogram::Null();
        static auto *kScanMatcherResidualAngleMetric = metrics::Histogram::Null();

        // Constructs the local SLAM front-end for one trajectory.
        // 'options' configures the submaps, motion filter, scan matchers and
        // range-data collation; 'expected_range_sensor_ids' lists the range
        // sensors whose data the collator synchronizes into one point cloud.
        LocalTrajectoryBuilder2D::LocalTrajectoryBuilder2D(
            const proto::LocalTrajectoryBuilderOptions2D &options,
            const std::vector<std::string> &expected_range_sensor_ids)
            : options_(options),
              active_submaps_(options.submaps_options()),
              motion_filter_(options_.motion_filter_options()),
              range_data_collator_(expected_range_sensor_ids),
              ceres_scan_matcher_(options_.ceres_scan_matcher_options()),
              real_time_correlative_scan_matcher_(options_.real_time_correlative_scan_matcher_options())
        {
        }

        // All members release their resources through their own destructors.
        LocalTrajectoryBuilder2D::~LocalTrajectoryBuilder2D() = default;

        // Moves 'range_data' into the gravity-aligned frame, crops it to the
        // configured [min_z, max_z] band and voxel-filters both the returns
        // and the misses before handing the scan on to scan matching.
        sensor::RangeData LocalTrajectoryBuilder2D::TransformToGravityAlignedFrameAndFilter(
            const transform::Rigid3f &transform_to_gravity_aligned_frame,
            const sensor::RangeData &range_data) const
        {
            const sensor::RangeData cropped = sensor::CropRangeData(
                sensor::TransformRangeData(range_data, transform_to_gravity_aligned_frame),
                options_.min_z(), options_.max_z());
            const auto voxel_size = options_.voxel_filter_size();
            return sensor::RangeData{cropped.origin,
                                     sensor::VoxelFilter(cropped.returns, voxel_size),
                                     sensor::VoxelFilter(cropped.misses, voxel_size)};
        }

        // Estimates the robot's 2D pose at 'time' by matching
        // 'filtered_aligned_cloud' (gravity-aligned, voxel-filtered returns)
        // against the newest active submap.
        //
        // 'pose_prediction' is the extrapolator's prior.  When enabled, the
        // real-time correlative scan matcher first refines it by brute-force
        // search over (x, y, yaw); the Ceres scan matcher then performs the
        // final non-linear least-squares optimization.  Returns the optimized
        // pose, or the unmodified prediction when no usable submap exists yet.
        Pose2dPtr LocalTrajectoryBuilder2D::ScanMatch(const common::Time time,
                                                      const transform::Rigid2d &pose_prediction,
                                                      const sensor::PointCloud &filtered_aligned_cloud)
        {
            if (active_submaps_.submaps().empty())
            {
                return absl::make_unique<transform::Rigid2d>(pose_prediction);
            }
            // Match against the most recent submap.
            auto &matching_submap = active_submaps_.submaps().front();
            if (!matching_submap || !matching_submap->grid())
            {
                // Fixed: was LOG(INFO) << "ScanMatch unknow error" — garbled
                // message at the wrong severity for an abnormal condition.
                LOG(WARNING) << "ScanMatch: matching submap or its grid is unavailable.";
                return absl::make_unique<transform::Rigid2d>(pose_prediction);
            }
            // The online correlative scan matcher will refine the initial
            // estimate for the Ceres scan matcher.
            transform::Rigid2d initial_ceres_pose = pose_prediction;
            if (options_.use_online_correlative_scan_matching())
            {
                const double score = real_time_correlative_scan_matcher_.Match(
                    pose_prediction, filtered_aligned_cloud,
                    *matching_submap->grid(), &initial_ceres_pose);
                kRealTimeCorrelativeScanMatcherScoreMetric->Observe(score);
            }
            auto pose_observation = absl::make_unique<transform::Rigid2d>();
            ceres::Solver::Summary summary;
            // Formulate scan matching as a least-squares problem and solve it
            // with Ceres; the maximum-likelihood pose estimate is written into
            // 'pose_observation'.
            ceres_scan_matcher_.Match(pose_prediction.translation(), // target translation used by the translation penalty
                                      initial_ceres_pose,            // initial pose (extrapolated, possibly correlative-refined)
                                      filtered_aligned_cloud,        // gravity-aligned point cloud in the local frame
                                      *matching_submap->grid(),      // occupancy grid of the submap to match against
                                      pose_observation.get(),        // output: optimized pose
                                      &summary);                     // output: solver diagnostics
            if (pose_observation)
            {
                kCeresScanMatcherCostMetric->Observe(summary.final_cost);
                const double residual_distance = (pose_observation->translation() - pose_prediction.translation()).norm();
                kScanMatcherResidualDistanceMetric->Observe(residual_distance);
                const double residual_angle = std::abs(pose_observation->rotation().angle() - pose_prediction.rotation().angle());
                kScanMatcherResidualAngleMetric->Observe(residual_angle);
            }
            return pose_observation;
        }

        /*
         * The incoming point cloud has already been transformed into the tracking
         * frame.  Because multiple range sensors may contribute data, the first
         * step merges their readings into one time-synchronized point cloud.
         * If IMU data is used, the extrapolator must be initialized from IMU
         * messages before point-cloud or odometry data can be processed.
         */
        Local2DMatchResPtr LocalTrajectoryBuilder2D::AddRangeData(const std::string &sensor_id,
                                                                  const sensor::TimedPointCloudData &unsynchronized_data)
        {
            LOG(INFO) << "AddRangeData\n";
            // Feed the (sensor id, raw data) pair into the collator, which buffers
            // per-sensor queues and returns a time-synchronized batch of readings.
            auto synchronized_data = range_data_collator_.AddRangeData(sensor_id, unsynchronized_data);
            if (synchronized_data.ranges.empty())
            {
                LOG(INFO) << "Range data collator filling buffer.";
                return nullptr;
            }
            const common::Time &time = synchronized_data.time;
            if (!options_.use_imu_data())
            {
                // Without an IMU the pose must be estimated from scan matching
                // alone, so the extrapolator is created here.  With an IMU, its
                // construction is deferred until the first IMU message arrives.
                InitializeExtrapolator(time);
            }
            if (extrapolator_ == nullptr)
            {
                // Until we've initialized the extrapolator with our first IMU message, we
                // cannot compute the orientation of the rangefinder.
                LOG(INFO) << "Extrapolator not yet initialized.";
                return nullptr;
            }
            CHECK(!synchronized_data.ranges.empty());
            // TODO(gaschler): Check if this can strictly be 0.
            // Per-point times are offsets relative to 'time', so the last point's
            // offset must be <= 0.
            CHECK_LE(synchronized_data.ranges.back().point_time.time, 0.f);
            const common::Time time_first_point = time + common::FromSeconds(synchronized_data.ranges.front().point_time.time);
            if (time_first_point < extrapolator_->GetLastPoseTime())
            {
                LOG(INFO) << "Extrapolator is still initializing.";
                return nullptr;
            }
            // The extrapolator is ready: estimate a robot pose per range point.
            std::vector<transform::Rigid3f> range_data_poses;
            range_data_poses.reserve(synchronized_data.ranges.size());
            bool warned = false;
            for (const auto &range : synchronized_data.ranges)
            {
                const auto timeLastExtra = extrapolator_->GetLastExtrapolatedTime();
                common::Time timePoint = time + common::FromSeconds(range.point_time.time);
                if (timePoint < timeLastExtra)
                {
                    if (!warned)
                    {
                        LOG(ERROR) << "Timestamp jumps backwards from " << timeLastExtra << " to " << timePoint;
                        warned = true;
                    }
                    // Clamp so extrapolation queries stay monotonic in time.
                    timePoint = timeLastExtra;
                }
                // 'range_data_poses' stores the ROBOT pose at each point's capture
                // time (not the point's pose); these per-point poses are what allow
                // motion distortion to be removed below.
                range_data_poses.push_back(extrapolator_->ExtrapolatePose(timePoint).cast<float>());
            }
            if (num_accumulated_ == 0)
            {
                // 'accumulated_range_data_.origin' is uninitialized until the last accumulation.
                accumulated_range_data_ = sensor::RangeData{{}, {}, {}};
            }
            // Drop any returns below the minimum range and convert returns beyond the maximum range into misses.
            for (size_t i = 0; i < synchronized_data.ranges.size(); ++i)
            {
                const sensor::TimedRangefinderPoint &hit = synchronized_data.ranges[i].point_time;
                // The per-sensor origin (a tracking-frame coordinate) times the robot
                // pose at this point's time gives the sensor origin in the local frame.
                const Eigen::Vector3f origin_in_local = range_data_poses[i] * synchronized_data.origins.at(synchronized_data.ranges[i].origin_index);
                // Likewise transform the hit itself into the local frame.
                sensor::RangefinderPoint hit_in_local = range_data_poses[i] * sensor::ToRangefinderPoint(hit);
                // Recompute the hit's distance from its origin, i.e. the range with
                // motion distortion removed.
                const Eigen::Vector3f delta = hit_in_local.position - origin_in_local;
                const float range = delta.norm();
                if (range >= options_.min_range())
                {
                    if (range <= options_.max_range())
                    {
                        accumulated_range_data_.returns.push_back(hit_in_local);
                    }
                    else
                    {
                        // The obstacle is beyond the maximum range, so record a miss
                        // at a fixed distance along the ray from origin to hit instead.
                        hit_in_local.position = origin_in_local + options_.missing_data_ray_length() / range * delta;
                        accumulated_range_data_.misses.push_back(hit_in_local);
                    }
                }
            }
            ++num_accumulated_;
            // At this point every point in accumulated_range_data_ is an
            // independent 3D coordinate expressed in the local frame.
            if (num_accumulated_ >= options_.num_accumulated_range_data())
            {
                const common::Time current_sensor_time = synchronized_data.time;
                absl::optional<common::Duration> sensor_duration;
                if (last_sensor_time_.has_value())
                {
                    sensor_duration = current_sensor_time - last_sensor_time_.value();
                }
                num_accumulated_ = 0;
                last_sensor_time_ = current_sensor_time;
                const transform::Rigid3d gravity_alignment = transform::Rigid3d::Rotation(extrapolator_->EstimateGravityOrientation(time));
                // TODO(gaschler): This assumes that 'range_data_poses.back()' is at time 'time'.
                accumulated_range_data_.origin = range_data_poses.back().translation();
                const auto &transform = gravity_alignment.cast<float>() * range_data_poses.back().inverse();
                const auto &aligned_range_data = TransformToGravityAlignedFrameAndFilter(transform, accumulated_range_data_);
                return AddAccumulatedRangeData(time, aligned_range_data, gravity_alignment, sensor_duration);
            }
            return nullptr;
        }

        // Runs scan matching on one fully accumulated, gravity-aligned scan and
        // feeds the result back into the extrapolator and the active submaps.
        //
        // 'time' is the scan's reference time, 'gravity_aligned_range_data' the
        // cropped/filtered scan in the gravity-aligned frame, 'gravity_alignment'
        // the rotation used for that alignment, and 'sensor_duration' the time
        // since the previous completed scan (if known; used only for metrics).
        // Returns nullptr when the scan is empty, the adaptive filter removes
        // every point, or scan matching fails.
        Local2DMatchResPtr LocalTrajectoryBuilder2D::AddAccumulatedRangeData(
            const common::Time time,
            const sensor::RangeData &gravity_aligned_range_data,
            const transform::Rigid3d &gravity_alignment,
            const absl::optional<common::Duration> &sensor_duration)
        {
            if (gravity_aligned_range_data.returns.empty())
            {
                LOG(WARNING) << "Dropped empty horizontal range data.";
                return nullptr;
            }
            // Computes a gravity aligned pose prediction: first the full 6-DOF
            // pose from the extrapolator...
            const transform::Rigid3d pose_predict_org = extrapolator_->ExtrapolateRobotPose(time); // extrapolator_->ExtrapolatePose(time)
            // ...then projected to 3 DOF (x, y, yaw) as the scan-match initial
            // value.  After gravity alignment the robot can be treated as moving
            // in the horizontal plane, reducing the number of optimized variables.
            const transform::Rigid2d pose_prediction = transform::Project2D(pose_predict_org * gravity_alignment.inverse());
            const sensor::PointCloud &filtered_aligned_cloud = sensor::AdaptiveVoxelFilter(gravity_aligned_range_data.returns, options_.adaptive_voxel_filter_options());
            if (filtered_aligned_cloud.empty())
            {
                return nullptr;
            }
            // local map frame <- gravity-aligned frame
            std::unique_ptr<transform::Rigid2d> pose_estimate_2d = ScanMatch(time, pose_prediction, filtered_aligned_cloud);
            if (pose_estimate_2d == nullptr)
            {
                LOG(WARNING) << "Scan matching failed.";
                return nullptr;
            }
            // Re-embed the optimized 2D pose into 6 DOF and feed it back to the extrapolator.
            const transform::Rigid3d pose_estimate = transform::Embed3D(*pose_estimate_2d) * gravity_alignment;
            extrapolator_->AddPose(time, pose_estimate);
            // Transform the scan into the matched pose, i.e. into the submap frame.
            sensor::RangeData range_data_in_local = TransformRangeData(gravity_aligned_range_data, transform::Embed3D(pose_estimate_2d->cast<float>()));
            // Insert the transformed scan into the submaps; inserting points is in
            // essence an update of the occupancy-grid probabilities.
            std::unique_ptr<InsertionResult> insertion_result = InsertIntoSubmap(
                time, range_data_in_local, filtered_aligned_cloud,
                pose_estimate, gravity_alignment.rotation());
            const auto wall_time = std::chrono::steady_clock::now();
            if (last_wall_time_.has_value())
            {
                const auto wall_time_duration = wall_time - last_wall_time_.value();
                kLocalSlamLatencyMetric->Set(common::ToSeconds(wall_time_duration));
                if (sensor_duration.has_value())
                {
                    kLocalSlamRealTimeRatio->Set(common::ToSeconds(sensor_duration.value()) / common::ToSeconds(wall_time_duration));
                }
            }
            const double thread_cpu_time_seconds = common::GetThreadCpuTimeSeconds();
            if (last_thread_cpu_time_seconds_.has_value())
            {
                const double thread_cpu_duration_seconds = thread_cpu_time_seconds - last_thread_cpu_time_seconds_.value();
                if (sensor_duration.has_value())
                {
                    kLocalSlamCpuRealTimeRatio->Set(common::ToSeconds(sensor_duration.value()) / thread_cpu_duration_seconds); // latency/CPU-load monitoring
                }
            }
            last_wall_time_ = wall_time;
            last_thread_cpu_time_seconds_ = thread_cpu_time_seconds;
            return absl::make_unique<MatchingResult>(MatchingResult{time, pose_estimate, std::move(range_data_in_local),
                                                                    std::move(insertion_result)});
        }

        /*
         * 'range_data_in_local' is the point cloud in the local (map) frame; only
         * this data is added to the submap.  The remaining parameters are packaged
         * into the returned InsertionResult.
         *
         * Map insertion (probability update) logic:
         *   Updating every cell's probability normally multiplies odds Odd(x), or
         *   adds/subtracts Log(Odd(x)).  For large maps the many multiplications
         *   hurt update performance, so most other SLAM systems use Log(Odd(x))
         *   addition.  Cartographer instead trades space for time: it precomputes
         *   update lookup tables so that updating becomes a table lookup — not
         *   even additions are needed — at the cost of extra memory.
         * Lookup-table logic:
         *   Probabilities in [0.1, 0.9] are mapped to integers in [1, 32767].  The
         *   "lookup" is simply indexing a linear array: the index is the current
         *   value in [1, 32767] and the stored entry is the next probability
         *   value.  A cell can be observed as either hit or miss, each with a
         *   fixed, known probability change, so two tables (one for hit, one for
         *   miss) hold all precomputed transitions.
         * Lookup example:
         *   A cell starts unknown with probability 0; one hit raises it to 1, so
         *   the table entry at index 0 is 1; two hits give probability 2, so the
         *   entry at index 1 is 2; and so on.
         */
        Local2DInsertResPtr LocalTrajectoryBuilder2D::InsertIntoSubmap(
            const common::Time time,
            const sensor::RangeData &range_data_in_local,
            const sensor::PointCloud &filtered_aligned_cloud, // filtered raw point cloud
            const transform::Rigid3d &pose_estimate,          // robot pose in the local frame
            const Eigen::Quaterniond &gravity_alignment /* gravity-alignment rotation */)
        {
            if (motion_filter_.IsSimilar(time, pose_estimate))
            {
                // Motion filter: when the poses differ too little (the robot has
                // barely moved), drop the scan instead of inserting it.
                return nullptr;
            }
            // Let the submap wrapper insert the new laser data, i.e. update the
            // grids; returns the updated/inserted Submap2D objects.
            auto insertion_submaps = active_submaps_.InsertRangeData(range_data_in_local);
            return absl::make_unique<InsertionResult>(InsertionResult{
                std::make_shared<const TrajectoryNode::Data>(TrajectoryNode::Data{
                    time,
                    gravity_alignment,
                    filtered_aligned_cloud,
                    {}, // 'high_resolution_point_cloud' is only used in 3D.
                    {}, // 'low_resolution_point_cloud' is only used in 3D.
                    {}, // 'rotational_scan_matcher_histogram' is only used in 3D.
                    pose_estimate}),
                std::move(insertion_submaps)});
        }

        // Feeds one IMU measurement into the pose extrapolator, lazily creating
        // the extrapolator on first use.  Must only be called when the options
        // enable IMU use (enforced by the CHECK below).
        void LocalTrajectoryBuilder2D::AddImuData(const sensor::ImuData &imu_data)
        {
            CHECK(options_.use_imu_data()) << "An unexpected IMU packet was added.";
            LOG(INFO) << "AddImuData\n";
            InitializeExtrapolator(imu_data.time);
            extrapolator_->AddImuData(imu_data);
        }

        // Forwards one odometry measurement to the pose extrapolator.  Data
        // arriving before the extrapolator exists is dropped with a log line.
        void LocalTrajectoryBuilder2D::AddOdometryData(const sensor::OdometryData &odometry_data)
        {
            if (extrapolator_ != nullptr)
            {
                extrapolator_->AddOdometryData(odometry_data);
                return;
            }
            // Until we've initialized the extrapolator we cannot add odometry data.
            LOG(INFO) << "Extrapolator not yet initialized.";
        }

        // Lazily constructs the constant-velocity pose extrapolator and seeds
        // it with an identity pose at 'time'.  No-op when it already exists.
        // The IMU-based extrapolator variant is not supported by this builder
        // (enforced by the CHECK below).
        //
        // Fixed: removed leftover numbered debug logs ("init extrapolator2/0/1")
        // and a commented-out log line in favor of one informative message.
        void LocalTrajectoryBuilder2D::InitializeExtrapolator(const common::Time time)
        {
            if (extrapolator_ != nullptr)
            {
                return;
            }
            const auto &pose_option = options_.pose_extrapolator_options();
            CHECK(!pose_option.use_imu_based());
            const double queue_duration = pose_option.constant_velocity().pose_queue_duration();
            const double imu_gravity_time_const = pose_option.constant_velocity().imu_gravity_time_constant();
            LOG(INFO) << "Initializing pose extrapolator: pose queue duration "
                      << queue_duration << ", IMU gravity time constant "
                      << imu_gravity_time_const;
            extrapolator_ = absl::make_unique<PoseExtrapolator>(common::FromSeconds(queue_duration), imu_gravity_time_const);
            extrapolator_->AddPose(time, transform::Rigid3d::Identity());
        }

        // Replaces the file-local null metric stubs with real instruments
        // created through 'family_factory'.
        void LocalTrajectoryBuilder2D::RegisterMetrics(metrics::FamilyFactory *family_factory)
        {
            // Gauge: wall-clock duration of one accumulation-to-result cycle.
            auto *latency_family = family_factory->NewGaugeFamily(
                "mapping_2d_local_trajectory_builder_latency",
                "Duration from first incoming point cloud in accumulation to local slam "
                "result");
            kLocalSlamLatencyMetric = latency_family->Add({});

            // Gauge: how much faster than real time local SLAM runs.
            auto *real_time_family = family_factory->NewGaugeFamily(
                "mapping_2d_local_trajectory_builder_real_time_ratio",
                "sensor duration / wall clock duration.");
            kLocalSlamRealTimeRatio = real_time_family->Add({});

            // Gauge: sensor time versus CPU time consumed by this thread.
            auto *cpu_ratio_family = family_factory->NewGaugeFamily(
                "mapping_2d_local_trajectory_builder_cpu_real_time_ratio",
                "sensor duration / cpu duration.");
            kLocalSlamCpuRealTimeRatio = cpu_ratio_family->Add({});

            // Histogram: real-time correlative scan matcher scores.
            const auto score_boundaries = metrics::Histogram::FixedWidth(0.05, 20);
            auto *score_family = family_factory->NewHistogramFamily(
                "mapping_2d_local_trajectory_builder_scores", "Local scan matcher scores",
                score_boundaries);
            kRealTimeCorrelativeScanMatcherScoreMetric = score_family->Add({{"scan_matcher", "real_time_correlative"}});

            // Histogram: Ceres scan matcher final costs.
            const auto cost_boundaries = metrics::Histogram::ScaledPowersOf(2, 0.01, 100);
            auto *cost_family = family_factory->NewHistogramFamily(
                "mapping_2d_local_trajectory_builder_costs", "Local scan matcher costs",
                cost_boundaries);
            kCeresScanMatcherCostMetric = cost_family->Add({{"scan_matcher", "ceres"}});

            // Histograms: translational and rotational scan-match residuals.
            const auto residual_boundaries = metrics::Histogram::ScaledPowersOf(2, 0.01, 10);
            auto *residual_family = family_factory->NewHistogramFamily(
                "mapping_2d_local_trajectory_builder_residuals",
                "Local scan matcher residuals", residual_boundaries);
            kScanMatcherResidualDistanceMetric = residual_family->Add({{"component", "distance"}});
            kScanMatcherResidualAngleMetric = residual_family->Add({{"component", "angle"}});
        }
    } // namespace mapping
} // namespace cartographer
