#include "cartographer/mapping/internal/2d/local_trajectory_builder_2d.h"
#include <limits>
#include <memory>
#include "absl/memory/memory.h"
#include "cartographer/metrics/family_factory.h"
#include "cartographer/sensor/range_data.h"
#include "cartographer/mapping/internal/2d/scan_matching/adaptive_matcher_angle_2d.h"
#include "cartographer/mapping/internal/2d/scan_matching/adaptive_scan_matcher_2d.h"
#include "cartographer/rcm/time.h"

using namespace cartographer::mapping::scan_matching;

namespace cartographer
{
    namespace mapping
    {
        // Aliases for the nullable results produced by the local trajectory builder.
        typedef std::unique_ptr<LocalTrajectoryBuilder2D::MatchingResult> MatchResultPtr2D;
        typedef std::unique_ptr<LocalTrajectoryBuilder2D::InsertionResult> InsertResultPtr2D;

        // File-local metric handles. They start as no-op Null() stubs and are
        // replaced with real gauges/histograms in RegisterMetrics().
        static auto *kLocalSlamLatencyMetric = metrics::Gauge::Null();
        static auto *kLocalSlamRealTimeRatio = metrics::Gauge::Null();
        static auto *kLocalSlamCpuRealTimeRatio = metrics::Gauge::Null();
        static auto *kRealTimeCorrelativeScanMatcherScoreMetric = metrics::Histogram::Null();
        static auto *kCeresScanMatcherCostMetric = metrics::Histogram::Null();
        static auto *kScanMatcherResidualDistanceMetric = metrics::Histogram::Null();
        static auto *kScanMatcherResidualAngleMetric = metrics::Histogram::Null();

        // Constructs the 2D local trajectory builder.
        //
        // @param options                   Local SLAM options (submaps, motion filter,
        //                                  scan matchers, range filtering, ...).
        // @param expected_range_sensor_ids Sensor ids the range data collator waits
        //                                  for before emitting a synchronized batch.
        LocalTrajectoryBuilder2D::LocalTrajectoryBuilder2D(const proto::LocalTrajectoryBuilderOptions2D &options,
                                                           const std::vector<std::string> &expected_range_sensor_ids)
            : options_(options)
            , active_submaps_(options.submaps_options())
            , motion_filter_(options_.motion_filter_options())
            //, correlative_matcher_(options_.real_time_correlative_scan_matcher_options())
            , ceres_scan_matcher_(options_.ceres_scan_matcher_options())
            , range_data_collator_(expected_range_sensor_ids)
        {
            dynamic_detector_ = std::make_shared<MultiDynamicDetector>();
            const auto &match_options = options_.real_time_correlative_scan_matcher_options();
            // Coarse matcher selection: the adaptive angle matcher is used here;
            // the stock real-time correlative matcher and the adaptive scan matcher
            // are kept commented out as reference alternatives.
            // correlative_matcher_ = std::make_shared<RealTimeCorrelativeScanMatcher2D>(match_options);
            correlative_matcher_ = std::make_shared<AdaptiveMatcherAngle2D>(match_options);
            // correlative_matcher_ = std::make_shared<AdaptiveScanMatcher2D>(match_options);
        }

        // Out-of-line destructor; all members clean up via RAII.
        LocalTrajectoryBuilder2D::~LocalTrajectoryBuilder2D()
        {
        }

        // Rotates the range data into the gravity-aligned frame, crops it to the
        // configured [min_z, max_z] band, and voxel-filters both returns and misses.
        sensor::RangeData LocalTrajectoryBuilder2D::GravityAlignedAndFilter(const transform::Rigid3f &transform_to_gravity,
                                                                            const sensor::RangeData &range_data) const
        {
            const sensor::RangeData aligned = sensor::TransformRangeData(range_data, transform_to_gravity);
            const sensor::RangeData cropped = sensor::CropRangeData(aligned, options_.min_z(), options_.max_z());
            const auto voxel_size = options_.voxel_filter_size();
            sensor::RangeData filtered;
            filtered.origin = cropped.origin;
            filtered.returns = sensor::VoxelFilter(cropped.returns, voxel_size);
            filtered.misses = sensor::VoxelFilter(cropped.misses, voxel_size);
            return filtered;
        }

        // Adds a batch of timed range data from one sensor. Returns a MatchingResult
        // once enough data has been accumulated and scan matching succeeds, otherwise
        // nullptr (collator still buffering, extrapolator not ready, or more
        // accumulation required).
        MatchResultPtr2D LocalTrajectoryBuilder2D::AddRangeData(const std::string &sensor_id,
                                                                const sensor::TimedPointCloudData &unsynchronized_data)
        {
            // Crop and merge data from multiple range sensors into one synchronized batch.
            auto synchronized_data = range_data_collator_.AddRangeData(sensor_id, unsynchronized_data);
            const auto &time = synchronized_data.time;
            // LOG(INFO) << "[tmp] AddRangeData " << time;
            if (synchronized_data.ranges.empty())
            {
                LOG(INFO) << "range data collator filling buffer.";
                return nullptr;
            }
            // The pose extrapolator is a precondition for the front end and must be
            // initialized first.
            if (!options_.use_imu_data())
            {
                // Without IMU data, linear and angular velocity can only be derived
                // from two consecutive poses output by the front end.
                InitializeExtrapolator(time);
            }
            if (extrapolator_ == nullptr)
            {
                // With IMU enabled the extrapolator is initialized by the IMU handler;
                // reaching here means point cloud data arrived before any IMU data.
                LOG(INFO) << "Extrapolator not yet initialized.";
                return nullptr;
            }
            // Re-assert the point cloud is non-empty (already checked above).
            CHECK(!synchronized_data.ranges.empty());
            // The last point's relative time must be <= 0: cartographer timestamps
            // each point as a (negative) offset from the scan end time.
            CHECK_LE(synchronized_data.ranges.back().point_time.time, 0.f);
            // Time of the most recent extrapolated pose.
            const auto lastPoseTime = extrapolator_->GetLastPoseTime();
            // 'time' is the timestamp of the last point; point_time.time values are
            // negative offsets relative to that end time.
            const auto timeFirstPoint = time + common::FromSeconds(synchronized_data.ranges.front().point_time.time);
            // The extrapolator only extrapolates forward in time, so point times must
            // not precede its last pose time.
            if (timeFirstPoint < lastPoseTime)
            {
                LOG(INFO) << "extrapolator initializing. " << time << " " << timeFirstPoint << " " << lastPoseTime;
                return nullptr;
            }
            // Interpolate a robot pose for every point (note: a pose per point's own
            // timestamp — not the cloud origin's pose — since point times are offsets
            // from the scan end time).
            bool warned = false;
            std::vector<transform::Rigid3f> range_data_poses;
            range_data_poses.reserve(synchronized_data.ranges.size());
            for (const auto &range : synchronized_data.ranges)
            {
                common::Time time_point = time + common::FromSeconds(range.point_time.time);
                // The extrapolator only extrapolates forward; clamp out-of-order points.
                if (time_point < extrapolator_->GetLastExtrapolatedTime())
                {
                    if (!warned)
                    {
                        LOG(ERROR) << "Timestamp of individual range data point jumps backwards from "
                                   << extrapolator_->GetLastExtrapolatedTime() << " to " << time_point;
                        warned = true;
                    }
                    time_point = extrapolator_->GetLastExtrapolatedTime();
                }
                range_data_poses.push_back(extrapolator_->ExtrapolatePose(time_point).cast<float>());
            }
            if (num_accumulated_ == 0)
            {
                // Caveat: the previously processed accumulated data is only released
                // here, i.e. one batch later than strictly necessary.
                accumulated_range_data_ = sensor::RangeData{{}, {}, {}};
            }
            // Iterate over the merged scans. Using the per-point poses interpolated
            // above, compute each scan origin's local-frame position and undistort
            // every point (motion-distortion removal).
            for (size_t i = 0; i < synchronized_data.ranges.size(); ++i)
            {
                // Timed hit point for the current ray.
                const auto &hitPointTimed = synchronized_data.ranges[i].point_time;
                // Origin (relative coordinates) of the scan this point belongs to.
                // Several scans may have been merged, and cartographer's cloud origin
                // is the last point, so the origin offset is not necessarily zero.
                const auto &originOfHit = synchronized_data.origins.at(synchronized_data.ranges[i].origin_index);
                // Origin position in the local frame. Note range_data_poses[i] is the
                // robot pose at this *point's* timestamp, not at the origin's
                // timestamp; the relative offset follows range_data_poses[i] * (originOfHit - hitPoint).
                const auto originInLocal = range_data_poses[i] * originOfHit;
                // The hit's local-frame position is the per-point robot pose applied
                // to the relative point, so every laser point gets its own pose
                // range_data_poses[i].
                sensor::RangefinderPoint hitInLocal = range_data_poses[i] * sensor::ToRangefinderPoint(hitPointTimed);
                // Offset of the hit relative to its origin.
                const auto delta = hitInLocal.position - originInLocal;
                const float range = delta.norm();
                if (range >= options_.min_range())
                {
                    if (range <= options_.max_range())
                    {
                        accumulated_range_data_.returns.push_back(hitInLocal);
                    }
                    else
                    {
                        // Point is too far away: clamp the ray to an intermediate
                        // point and record it as a miss.
                        hitInLocal.position = originInLocal + options_.missing_data_ray_length() / range * delta;
                        accumulated_range_data_.misses.push_back(hitInLocal);
                    }
                }
            }
            ++num_accumulated_;
            if (num_accumulated_ < options_.num_accumulated_range_data())
                return nullptr;
            // Duration since the previous processed batch.
            absl::optional<common::Duration> sensor_duration;
            if (last_sensor_time_.has_value())
                sensor_duration = time - last_sensor_time_.value();
            last_sensor_time_ = time;
            num_accumulated_ = 0;
            // gravity_alignment is the current attitude maintained by the ImuTracker.
            const transform::Rigid3d gravity_alignment = transform::Rigid3d::Rotation(extrapolator_->EstimateGravityOrientation(time));
            // TODO(gaschler): This assumes that 'range_data_poses.back()' is at time
            // cartographer treats the scan end point as the cloud origin.
            accumulated_range_data_.origin = range_data_poses.back().translation();
            // Tracking-frame pose; the inverse maps back to the tracking origin,
            // which is then rotated into the gravity-aligned frame.
            const auto transform_to_gravity = gravity_alignment.cast<float>() * range_data_poses.back().inverse();
            // Voxel-filter to reduce the point count before matching.
            return AddAccumulatedRangeData(time, GravityAlignedAndFilter(transform_to_gravity, accumulated_range_data_), gravity_alignment,
                                           sensor_duration);
        }

        // Refines the predicted pose by matching 'aligned_cloud' against the first
        // active submap: an optional online correlative search provides the initial
        // estimate, which the Ceres scan matcher then optimizes.
        //
        // @param time            Timestamp of the scan (forwarded to the extrapolator).
        // @param pose_prediction Extrapolated 2D pose prior.
        // @param aligned_cloud   Gravity-aligned, adaptively voxel-filtered cloud.
        // @return The optimized 2D pose (never null; falls back to the prediction
        //         when no submap exists yet).
        std::unique_ptr<transform::Rigid2d> LocalTrajectoryBuilder2D::ScanMatch(const common::Time time,
                                                                                const transform::Rigid2d &pose_prediction,
                                                                                const sensor::PointCloud &aligned_cloud)
        {
            if (active_submaps_.submaps().empty())
            {
                // No submap exists for the very first scan, so matching is impossible;
                // fall back to the extrapolated pose.
                return absl::make_unique<transform::Rigid2d>(pose_prediction);
            }
            // The front end maintains two submaps: the first is used for matching,
            // the second only receives inserted range data.
            std::shared_ptr<const Submap2D> matching_submap = active_submaps_.submaps().front();
            // The online correlative scan matcher will refine the initial estimate for the Ceres scan matcher.
            transform::Rigid2d initial_pose = pose_prediction;
            if (options_.use_online_correlative_scan_matching())
            {
                // Brute-force search: generate candidate poses around the prediction on
                // a fixed step grid in (x, y, yaw), score each candidate against the
                // occupancy grid, and keep the best-scoring pose as the refined
                // initial estimate.
                const double score = correlative_matcher_->Match(pose_prediction, aligned_cloud, *matching_submap->grid(), &initial_pose);
                kRealTimeCorrelativeScanMatcherScoreMetric->Observe(score);
            }
            ceres::Solver::Summary summary;
            auto pose_observation = absl::make_unique<transform::Rigid2d>();
            ceres_scan_matcher_.Match(pose_prediction.translation(), initial_pose, aligned_cloud, *matching_submap->grid(),
                                      pose_observation.get(), &summary);
            // NOTE(review): absl::make_unique never returns null, so the former
            // 'if (pose_observation) ... else correlative_matcher_->Failed();' was dead
            // code — Failed() could never run. A genuine Ceres failure would have to
            // be detected via 'summary' (e.g. termination_type) instead.
            // Update the matcher's internal filter with the accepted match.
            correlative_matcher_->Success(pose_prediction, *pose_observation);
            kCeresScanMatcherCostMetric->Observe(summary.final_cost);
            const double residual_distance = (pose_observation->translation() - pose_prediction.translation()).norm();
            kScanMatcherResidualDistanceMetric->Observe(residual_distance);
            const double residual_angle = std::abs(pose_observation->rotation().angle() - pose_prediction.rotation().angle());
            kScanMatcherResidualAngleMetric->Observe(residual_angle);
            extrapolator_->ScanMatchSuccess(time);
            return pose_observation;
        }

        // Scan-matches one accumulated, gravity-aligned batch against the active
        // submap, feeds the posterior pose back to the extrapolator, runs dynamic
        // point detection, inserts the data into the submaps, and records timing
        // metrics.
        //
        // @param gravity_aligned_cloud Voxel-filtered range data already rotated into
        //                              the gravity-aligned frame.
        // @param gravity_alignment     Rotation that produced the alignment above.
        // @param sensor_duration       Time since the previous processed batch, if any.
        // @return MatchingResult, or nullptr when the batch is empty, filtering
        //         leaves no points, or scan matching fails.
        MatchResultPtr2D LocalTrajectoryBuilder2D::AddAccumulatedRangeData(const common::Time time,
                                                                           const sensor::RangeData &gravity_aligned_cloud,
                                                                           const transform::Rigid3d &gravity_alignment,
                                                                           const absl::optional<common::Duration> &sensor_duration)
        {
            if (gravity_aligned_cloud.returns.empty())
            {
                LOG(WARNING) << "[local] dropped empty horizontal range data.";
                return nullptr;
            }
            // Predicted pose of the tracking frame.
            const transform::Rigid3d pose_prediction_extra = extrapolator_->ExtrapolateRobotPose(time);
            // Right-multiplying the tracking pose by the inverse gravity orientation
            // (i.e. rotating back) gives the gravity-aligned pose; the cloud has
            // already been rotated into that frame.
            const transform::Rigid2d pose_prediction = transform::Project2D(pose_prediction_extra * gravity_alignment.inverse());
            // Adaptive voxel filtering only speeds up matching; the filtered cloud is
            // not used for submap insertion.
            const auto aligned_cloud = sensor::AdaptiveVoxelFilter(gravity_aligned_cloud.returns, options_.adaptive_voxel_filter_options());
            if (aligned_cloud.empty())
            {
                LOG(WARNING) << "[local] AdaptiveVoxelFilter return empty data";
                return nullptr;
            }
            // local map frame <- gravity-aligned frame
            auto pose_estimate_2d = ScanMatch(time, pose_prediction, aligned_cloud);
            if (pose_estimate_2d == nullptr)
            {
                LOG(WARNING) << "[local] ScanMatch failed " << time;
                return nullptr;
            }
            // Feed the 3D posterior pose back into the pose extrapolator.
            const transform::Rigid3d pose_estimate = transform::Embed3D(*pose_estimate_2d) * gravity_alignment;
            extrapolator_->AddPose(time, pose_estimate);
            // The gravity-aligned cloud is inserted into the submaps; a new submap's
            // origin is the cloud origin at creation time, so all in-submap poses are
            // 2D (x, y, yaw).
            sensor::RangeData rangeLocal2d = TransformRangeData(gravity_aligned_cloud, transform::Embed3D(pose_estimate_2d->cast<float>()));
            // detect dynamic points
            std::vector<int> dynamicCnts;
            auto rangePose = rangeLocal2d.origin.head<2>();
            LOG(INFO) << "[local] submap size " << active_submaps_.submaps_.size();
            auto &dynOptions = DynamicOptions::GetInstance();
            if (active_submaps_.submaps_.size() > 0 && dynOptions.dynEnable)
            {
                LOG(INFO) << "[local] curr pose " << rangePose(0) << " " << rangePose(1);
                mapping::Grid2D *gridPtr = active_submaps_.submaps_.front()->GridPtr();
                dynamic_detector_->DetectDynPoint(gridPtr, rangeLocal2d, time, dynamicCnts);
            }
            // insert submap
            auto insertion = InsertIntoSubmap(time, rangeLocal2d, aligned_cloud, pose_estimate, gravity_alignment.rotation(), dynamicCnts);
            if (insertion == nullptr)
            {
                // Keep a non-null insertion result so dynamicCnts can still be
                // handed to the caller below.
                insertion = absl::make_unique<InsertionResult>(InsertionResult{nullptr, {}, {}});
            }
            dynamicCnts.swap(insertion->dynamicCnts);
            // record time
            const auto wall_time = std::chrono::steady_clock::now();
            if (last_wall_time_.has_value())
            {
                const auto wall_time_duration = wall_time - last_wall_time_.value();
                kLocalSlamLatencyMetric->Set(common::ToSeconds(wall_time_duration));
                if (sensor_duration.has_value())
                {
                    kLocalSlamRealTimeRatio->Set(common::ToSeconds(sensor_duration.value()) / common::ToSeconds(wall_time_duration));
                }
            }
            const double thread_cpu_time_seconds = common::GetThreadCpuTimeSeconds();
            if (last_thread_cpu_time_seconds_.has_value())
            {
                const double thread_cpu_duration_seconds = thread_cpu_time_seconds - last_thread_cpu_time_seconds_.value();
                if (sensor_duration.has_value())
                {
                    kLocalSlamCpuRealTimeRatio->Set(common::ToSeconds(sensor_duration.value()) / thread_cpu_duration_seconds);
                }
            }
            last_wall_time_ = wall_time;
            last_thread_cpu_time_seconds_ = thread_cpu_time_seconds;
            return absl::make_unique<MatchingResult>(MatchingResult{time, pose_estimate, std::move(rangeLocal2d), std::move(insertion)});
        }

        // Inserts the (already locally transformed) range data into the active
        // submaps, optionally culling points flagged as dynamic. Returns nullptr
        // when the motion filter deems the pose too similar to the last insertion.
        InsertResultPtr2D LocalTrajectoryBuilder2D::InsertIntoSubmap(const common::Time time, const sensor::RangeData &rangeLocal2d,
                                                                     const sensor::PointCloud &aligned_cloud,
                                                                     const transform::Rigid3d &pose_estimate,
                                                                     const Eigen::Quaterniond &gravity_alignment,
                                                                     const std::vector<int> &dynamicCnts)
        {
            if (motion_filter_.IsSimilar(time, pose_estimate))
            {
                LOG(INFO) << "[local] no motion";
                return nullptr;
            }
            LocalSubmap2D insertion_submaps;
            if (!dynamicCnts.empty())
            {
                // assumes dynamicCnts has one entry per point in rangeLocal2d.returns
                // — TODO confirm against DetectDynPoint's contract.
                int index = 0;
                int cullCnt = 0;
                sensor::RangeData cullCloud;
                cullCloud.origin = rangeLocal2d.origin;
                for (auto &point : rangeLocal2d.returns)
                {
                    // NOTE(review): this condition only fails when
                    // dynamicCnts[index] == -2 AND -2 > dynValidMotionTimes, which
                    // cannot hold for a non-negative threshold — so cullCnt likely
                    // never increments and nothing is ever culled. '&&' may have been
                    // intended; verify against the dynamic detector's marker
                    // semantics before changing.
                    if (dynamicCnts[index] != -2 || dynamicCnts[index] <= DynamicOptions::GetInstance().dynValidMotionTimes)
                    {
                        cullCloud.returns.push_back(point);
                    }
                    else
                        cullCnt++;
                    index++;
                }
                // Misses are always kept.
                for (auto &point : rangeLocal2d.misses)
                    cullCloud.misses.push_back(point);
                LOG(INFO) << "[local] cullCnt " << cullCnt;
                insertion_submaps = active_submaps_.InsertRangeData(cullCloud);
            }
            else
            {
                insertion_submaps = active_submaps_.InsertRangeData(rangeLocal2d);
            }
            // The cloud transformed by the ScanMatch-optimized pose was inserted into
            // the submaps above; here the untransformed cloud (gravity-aligned and
            // undistorted, points relative to the cloud origin) is returned.
            return absl::make_unique<InsertionResult>(InsertionResult{std::make_shared<const TrajectoryNode::Data>(TrajectoryNode::Data{
                                                                          time,
                                                                          gravity_alignment,
                                                                          aligned_cloud,
                                                                          {}, // 'high_resolution_point_cloud' is only used in 3D.
                                                                          {}, // 'low_resolution_point_cloud' is only used in 3D.
                                                                          {}, // 'rotational_scan_matcher_histogram' is only used in 3D.
                                                                          pose_estimate,
                                                                          rangeLocal2d}),
                                                                      std::move(insertion_submaps)});
        }

        // Feeds an IMU sample to the pose extrapolator, initializing the
        // extrapolator on first use. Only valid when options_.use_imu_data() is set.
        void LocalTrajectoryBuilder2D::AddImuData(const sensor::ImuData &imu_data)
        {
            CHECK(options_.use_imu_data()) << "An unexpected IMU packet was added.";
            InitializeExtrapolator(imu_data.time);
            extrapolator_->AddImuData(imu_data);
        }

        // Forwards an odometry sample to the pose extrapolator. Samples arriving
        // before the extrapolator exists are dropped (with a log message), since
        // there is nothing to integrate them into yet.
        void LocalTrajectoryBuilder2D::AddOdometryData(const sensor::OdometryData &odometry_data)
        {
            if (extrapolator_ != nullptr)
            {
                extrapolator_->AddOdometryData(odometry_data);
                return;
            }
            // Until we've initialized the extrapolator we cannot add odometry data.
            LOG(INFO) << "Extrapolator not yet initialized.";
        }

        // Creates the constant-velocity pose extrapolator if it does not exist yet
        // and seeds it with an identity pose at 'time'. Idempotent: subsequent
        // calls are no-ops.
        void LocalTrajectoryBuilder2D::InitializeExtrapolator(const common::Time time)
        {
            if (extrapolator_ == nullptr)
            {
                LOG(INFO) << "[local] extrapolator init " << time;
                CHECK(!options_.pose_extrapolator_options().use_imu_based());
                // TODO(gaschler): Consider using InitializeWithImu as 3D does.
                const auto &cv_options = options_.pose_extrapolator_options().constant_velocity();
                extrapolator_ = absl::make_unique<PoseExtrapolator>(common::FromSeconds(cv_options.pose_queue_duration()),
                                                                    cv_options.imu_gravity_time_constant());
                extrapolator_->AddPose(time, transform::Rigid3d::Identity());
            }
        }

        // Replaces the file-local Null() metric placeholders with real instruments
        // created through the supplied family factory.
        void LocalTrajectoryBuilder2D::RegisterMetrics(metrics::FamilyFactory *family_factory)
        {
            auto *latency_family = family_factory->NewGaugeFamily("mapping_2d_local_trajectory_builder_latency",
                                                                  "Duration from first incoming point cloud in accumulation to local slam "
                                                                  "result");
            kLocalSlamLatencyMetric = latency_family->Add({});
            auto *ratio_family = family_factory->NewGaugeFamily("mapping_2d_local_trajectory_builder_real_time_ratio",
                                                                "sensor duration / wall clock duration.");
            kLocalSlamRealTimeRatio = ratio_family->Add({});
            auto *cpu_ratio_family = family_factory->NewGaugeFamily("mapping_2d_local_trajectory_builder_cpu_real_time_ratio",
                                                                    "sensor duration / cpu duration.");
            kLocalSlamCpuRealTimeRatio = cpu_ratio_family->Add({});
            // Fixed-width buckets (20 buckets of width 0.05) for matcher scores.
            const auto score_boundaries = metrics::Histogram::FixedWidth(0.05, 20);
            auto *score_family = family_factory->NewHistogramFamily("mapping_2d_local_trajectory_builder_scores",
                                                                    "Local scan matcher scores", score_boundaries);
            kRealTimeCorrelativeScanMatcherScoreMetric = score_family->Add({{"scan_matcher", "real_time_correlative"}});
            // Exponentially scaled buckets for Ceres costs and pose residuals.
            const auto cost_boundaries = metrics::Histogram::ScaledPowersOf(2, 0.01, 100);
            auto *cost_family = family_factory->NewHistogramFamily("mapping_2d_local_trajectory_builder_costs",
                                                                   "Local scan matcher costs", cost_boundaries);
            kCeresScanMatcherCostMetric = cost_family->Add({{"scan_matcher", "ceres"}});
            const auto residual_boundaries = metrics::Histogram::ScaledPowersOf(2, 0.01, 10);
            auto *residual_family = family_factory->NewHistogramFamily("mapping_2d_local_trajectory_builder_residuals",
                                                                       "Local scan matcher residuals", residual_boundaries);
            kScanMatcherResidualDistanceMetric = residual_family->Add({{"component", "distance"}});
            kScanMatcherResidualAngleMetric = residual_family->Add({{"component", "angle"}});
        }
    } // namespace mapping
} // namespace cartographer
