/*
 * Copyright 2016 The Cartographer Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "cartographer_ros/sensor_bridge.h"

#include "absl/memory/memory.h"
#include "cartographer_ros/msg_conversion.h"
#include "cartographer_ros/time_conversion.h"

namespace cartographer_ros
{
    namespace carto = ::cartographer;

    using carto::transform::Rigid3d;

    namespace
    {
        // Returns `frame_id` unchanged after verifying it does not begin with
        // a slash, which tf2 forbids (see 1.7 of the tf2 migration guide).
        const std::string &CheckNoLeadingSlash(const std::string &frame_id)
        {
            if (frame_id.empty())
            {
                return frame_id;
            }
            CHECK_NE(frame_id[0], '/') << "The frame_id " << frame_id
                                       << " should not start with a /. See 1.7 in "
                                          "http://wiki.ros.org/tf2/Migration.";
            return frame_id;
        }
    } // namespace

    // Constructs a SensorBridge.
    //
    // num_subdivisions_per_laser_scan: number of pieces each laser scan is
    //     split into before being forwarded to the trajectory builder.
    // tracking_frame: target frame all sensor data is transformed into.
    // lookup_transform_timeout_sec: timeout used by the TfBridge for lookups.
    // tf_buffer: tf2 buffer used to resolve transforms (not owned).
    // trajectory_builder: sink for converted sensor data (not owned).
    SensorBridge::SensorBridge(
        const int num_subdivisions_per_laser_scan,
        const std::string &tracking_frame,
        const double lookup_transform_timeout_sec,
        tf2_ros::Buffer *const tf_buffer,
        carto::mapping::TrajectoryBuilderInterface *const trajectory_builder)
        : num_subdivisions_per_laser_scan_(num_subdivisions_per_laser_scan),
          tf_bridge_(tracking_frame, lookup_transform_timeout_sec, tf_buffer),
          trajectory_builder_(trajectory_builder)
    {
    }

    // Converts a ROS odometry message into Cartographer odometry data,
    // expressed in the tracking frame via the supplied `tf_bridge`. Returns
    // nullptr while the transform from the odometry child frame to the
    // tracking frame is unavailable.
    std::unique_ptr<cartographer::sensor::OdometryData> ToOdometryData(
        const nav_msgs::msg::Odometry::ConstSharedPtr &msg, const TfBridge &tf_bridge)
    {
        const carto::common::Time time = FromRos(msg->header.stamp);
        const auto sensor_to_tracking =
            tf_bridge.LookupToTracking(time, CheckNoLeadingSlash(msg->child_frame_id));
        if (sensor_to_tracking == nullptr)
        {
            return nullptr;
        }
        const Rigid3d tracking_pose =
            ToRigid3d(msg->pose.pose) * sensor_to_tracking->inverse();
        return absl::make_unique<carto::sensor::OdometryData>(
            carto::sensor::OdometryData{time, tracking_pose});
    }

    // Converts a ROS odometry message into Cartographer odometry data in the
    // tracking frame. Returns nullptr while the transform from the odometry
    // child frame to the tracking frame is unavailable.
    std::unique_ptr<carto::sensor::OdometryData> SensorBridge::ToOdometryData(
        const nav_msgs::msg::Odometry::ConstSharedPtr &msg)
    {
        const carto::common::Time time = FromRos(msg->header.stamp);
        const auto sensor_to_tracking =
            tf_bridge_.LookupToTracking(time, CheckNoLeadingSlash(msg->child_frame_id));
        if (sensor_to_tracking == nullptr)
        {
            return nullptr;
        }
        // The reported pose is the child frame in the odom frame; composing
        // with the inverse extrinsic yields the tracking frame's pose.
        const Rigid3d pose_of_tracking =
            ToRigid3d(msg->pose.pose) * sensor_to_tracking->inverse();
        return absl::make_unique<carto::sensor::OdometryData>(
            carto::sensor::OdometryData{time, pose_of_tracking});
    }

    // Converts a ROS IMU message into Cartographer IMU data in the tracking
    // frame, using the supplied `tf_bridge` for the frame lookup. Returns
    // nullptr while the IMU-to-tracking transform is unavailable.
    std::unique_ptr<cartographer::sensor::ImuData> ToImuData(
        const sensor_msgs::msg::Imu::ConstSharedPtr &msg, const TfBridge &tf_bridge)
    {
        const carto::common::Time time = FromRos(msg->header.stamp);
        const auto sensor_to_tracking =
            tf_bridge.LookupToTracking(time, CheckNoLeadingSlash(msg->header.frame_id));
        if (sensor_to_tracking == nullptr)
        {
            return nullptr;
        }
        // Only the rotation is applied to the measurements below, so the IMU
        // must sit at the tracking-frame origin for the result to be exact.
        CHECK(sensor_to_tracking->translation().norm() < 1e-5)
            << "The IMU frame must be colocated with the tracking frame. "
               "Transforming linear acceleration into the tracking frame will "
               "otherwise be imprecise.";
        return absl::make_unique<carto::sensor::ImuData>(carto::sensor::ImuData{
            time,
            sensor_to_tracking->rotation() * ToEigen(msg->linear_acceleration),
            sensor_to_tracking->rotation() * ToEigen(msg->angular_velocity)});
    }

    // Converts a ROS IMU message into Cartographer IMU data expressed in the
    // tracking frame. Returns nullptr while the IMU-to-tracking transform is
    // unavailable. Aborts (CHECK) if the message declares its measurements
    // unavailable or the IMU is not colocated with the tracking frame.
    std::unique_ptr<carto::sensor::ImuData> SensorBridge::ToImuData(
        const sensor_msgs::msg::Imu::ConstSharedPtr &msg)
    {
        // Linear acceleration declared unavailable (covariance[0] == -1 is the
        // sensor_msgs/Imu convention for "no measurement").
        CHECK_NE(msg->linear_acceleration_covariance[0], -1)
            << "Your IMU data claims to not contain linear acceleration measurements "
               "by setting linear_acceleration_covariance[0] to -1. Cartographer "
               "requires this data to work. See "
               "http://docs.ros.org/api/sensor_msgs/html/msg/Imu.html.";
        // Angular velocity declared unavailable (same -1 convention).
        CHECK_NE(msg->angular_velocity_covariance[0], -1)
            << "Your IMU data claims to not contain angular velocity measurements "
               "by setting angular_velocity_covariance[0] to -1. Cartographer "
               "requires this data to work. See "
               "http://docs.ros.org/api/sensor_msgs/html/msg/Imu.html.";

        const carto::common::Time time = FromRos(msg->header.stamp);
        const auto sensor_to_tracking = tf_bridge_.LookupToTracking(time, CheckNoLeadingSlash(msg->header.frame_id));
        if (sensor_to_tracking == nullptr)
        {
            return nullptr;
        }
        // Using the IMU frame as the tracking frame is recommended: the IMU
        // rate is high, and any other tracking frame would require rotating
        // every IMU sample, adding computation. With tracking_frame set to
        // e.g. "imu_link" the rotation below is effectively the identity.
        // The IMU origin should coincide with the tracking-frame origin; this
        // check enforces that. Otherwise transforming the measured linear
        // acceleration into the tracking frame would introduce a significant
        // error, which is detrimental to pose estimation.
        CHECK(sensor_to_tracking->translation().norm() < 1e-5)
            << "The IMU frame must be colocated with the tracking frame. "
               "Transforming linear acceleration into the tracking frame will "
               "otherwise be imprecise.";
        return absl::make_unique<carto::sensor::ImuData>(carto::sensor::ImuData{
            time, sensor_to_tracking->rotation() * ToEigen(msg->linear_acceleration),
            sensor_to_tracking->rotation() * ToEigen(msg->angular_velocity)});
    }

    // Converts one odometry message and forwards it to the trajectory builder.
    // Silently drops the message while the required transform is unavailable.
    void SensorBridge::HandleOdometryMessage(
        const std::string &sensor_id, const nav_msgs::msg::Odometry::ConstSharedPtr &msg)
    {
        const auto odometry_data = ToOdometryData(msg);
        if (odometry_data == nullptr)
        {
            return;
        }
        trajectory_builder_->AddSensorData(
            sensor_id,
            carto::sensor::OdometryData{odometry_data->time, odometry_data->pose});
    }

    /*
    Checks whether the GPS reading is a proper fix. If it is not, its accuracy
    cannot be guaranteed (the signal or the surroundings may be poor), so the
    measurement is discarded and a fixed empty pose is forwarded instead. The
    raw data is given as latitude/longitude, which cannot be used for
    positioning directly, so the earth-centered (ECEF) frame is converted into
    the robot's local frame: ComputeLocalFrameFromLatLong derives that
    transform from the first fix, and the latitude/longitude is logged.
    */
    void SensorBridge::HandleNavSatFixMessage(
        const std::string &sensor_id, const sensor_msgs::msg::NavSatFix::ConstSharedPtr &msg)
    {
        const carto::common::Time time = FromRos(msg->header.stamp);
        // No fix: forward a fixed empty (missing) pose instead.
        if (msg->status.status == sensor_msgs::msg::NavSatStatus::STATUS_NO_FIX)
        {
            trajectory_builder_->AddSensorData(
                sensor_id,
                carto::sensor::FixedFramePoseData{time, absl::optional<Rigid3d>()});
            return;
        }
        // Determine the transform from the ECEF origin to the local frame,
        // fixed once from the first valid fix.
        if (!ecef_to_local_frame_.has_value())
        {
            ecef_to_local_frame_ = ComputeLocalFrameFromLatLong(msg->latitude, msg->longitude);
            LOG(INFO) << "Using NavSatFix. Setting ecef_to_local_frame with lat = " << msg->latitude << ", long = " << msg->longitude << ".";
        }
        // Multiplying subsequent GPS readings by this transform effectively
        // subtracts a fixed offset, yielding the relative transforms between
        // GPS readings.
        trajectory_builder_->AddSensorData(
            sensor_id, carto::sensor::FixedFramePoseData{
                           time, absl::optional<Rigid3d>(Rigid3d::Translation(
                                     ecef_to_local_frame_.value() * LatLongAltToEcef(msg->latitude, msg->longitude, msg->altitude)))});
    }

    // Converts a LandmarkList message and forwards it to the trajectory
    // builder. When the sensor-to-tracking transform is available, each
    // landmark observation is re-expressed in the tracking frame first;
    // otherwise the observations are forwarded untransformed.
    void SensorBridge::HandleLandmarkMessage(
        const std::string &sensor_id,
        const cartographer_ros_msgs::msg::LandmarkList::ConstSharedPtr &msg)
    {
        auto landmark_data = ToLandmarkData(*msg);
        const auto sensor_to_tracking = tf_bridge_.LookupToTracking(
            landmark_data.time, CheckNoLeadingSlash(msg->header.frame_id));
        if (sensor_to_tracking != nullptr)
        {
            for (auto &observation : landmark_data.landmark_observations)
            {
                observation.landmark_to_tracking_transform =
                    *sensor_to_tracking *
                    observation.landmark_to_tracking_transform;
            }
        }
        trajectory_builder_->AddSensorData(sensor_id, landmark_data);
    }

    // Converts one IMU message and forwards it to the trajectory builder.
    // Silently drops the message while the required transform is unavailable.
    void SensorBridge::HandleImuMessage(const std::string &sensor_id, const sensor_msgs::msg::Imu::ConstSharedPtr &msg)
    {
        const auto imu_data = ToImuData(msg);
        if (imu_data == nullptr)
        {
            return;
        }
        trajectory_builder_->AddSensorData(
            sensor_id,
            carto::sensor::ImuData{imu_data->time, imu_data->linear_acceleration,
                                   imu_data->angular_velocity});
    }

    // Converts a LaserScan message into a timed point cloud and dispatches it
    // to the subdivision logic in HandleLaserScan.
    void SensorBridge::HandleLaserScanMessage(
        const std::string &sensor_id, const sensor_msgs::msg::LaserScan::ConstSharedPtr &msg)
    {
        auto [point_cloud, time] = ToPointCloudWithIntensities(*msg);
        HandleLaserScan(sensor_id, time, msg->header.frame_id, point_cloud);
    }

    // Converts a MultiEchoLaserScan message into a timed point cloud and
    // dispatches it to the subdivision logic in HandleLaserScan.
    void SensorBridge::HandleMultiEchoLaserScanMessage(
        const std::string &sensor_id,
        const sensor_msgs::msg::MultiEchoLaserScan::ConstSharedPtr &msg)
    {
        auto [point_cloud, time] = ToPointCloudWithIntensities(*msg);
        HandleLaserScan(sensor_id, time, msg->header.frame_id, point_cloud);
    }

    // Converts a PointCloud2 message into a timed point cloud and forwards its
    // points directly to HandleRangefinder (no subdivision).
    void SensorBridge::HandlePointCloud2Message(
        const std::string &sensor_id,
        const sensor_msgs::msg::PointCloud2::ConstSharedPtr &msg)
    {
        auto [point_cloud, time] = ToPointCloudWithIntensities(*msg);
        HandleRangefinder(sensor_id, time, msg->header.frame_id, point_cloud.points);
    }

    // Read-only access to the TfBridge used for all frame lookups.
    const TfBridge &SensorBridge::tf_bridge() const { return tf_bridge_; }

    // Splits one laser scan into `num_subdivisions_per_laser_scan_` consecutive
    // subdivisions and forwards each to HandleRangefinder.
    //
    // `points` must carry per-point times relative to the scan end, so all are
    // <= 0 and the last point is at time 0 relative to `time`. Each
    // subdivision is re-timed so that its own last point sits at relative time
    // 0 and is stamped with the subdivision's end time, letting
    // sensor::Collator interleave other sensor data first. Subdivisions whose
    // timestamp does not advance past the previous one for this sensor (e.g.
    // after a bag loop or time jump) are dropped with a warning.
    void SensorBridge::HandleLaserScan(const std::string &sensor_id,
                                       const carto::common::Time time,
                                       const std::string &frame_id,
                                       const carto::sensor::PointCloudWithIntensities &points)
    {
        if (points.points.empty())
        {
            return;
        }
        // Per-point times are relative to the scan end; the last one must not
        // be positive.
        CHECK_LE(points.points.back().time, 0.f);
        // TODO(gaschler): Use per-point time instead of subdivisions.
        // A scan is usually processed as a single subdivision (parameter == 1).
        for (int i = 0; i != num_subdivisions_per_laser_scan_; ++i)
        {
            const size_t start_index = points.points.size() * i / num_subdivisions_per_laser_scan_;
            const size_t end_index = points.points.size() * (i + 1) / num_subdivisions_per_laser_scan_;
            // Skip empty subdivisions before allocating a point cloud for them.
            if (start_index == end_index)
            {
                continue;
            }
            carto::sensor::TimedPointCloud subdivision(
                points.points.begin() + start_index, points.points.begin() + end_index);
            const double time_to_subdivision_end = subdivision.back().time;
            // `subdivision_time` is the end of the measurement so sensor::Collator will
            // send all other sensor data first.
            const carto::common::Time subdivision_time = time + carto::common::FromSeconds(time_to_subdivision_end);
            auto it = sensor_to_previous_subdivision_time_.find(sensor_id);
            if (it != sensor_to_previous_subdivision_time_.end() && it->second >= subdivision_time)
            {
                LOG(WARNING) << "Ignored subdivision of a LaserScan message from sensor "
                             << sensor_id << " because previous subdivision time "
                             << it->second << " is not before current subdivision time "
                             << subdivision_time;
                continue;
            }
            sensor_to_previous_subdivision_time_[sensor_id] = subdivision_time;
            // Shift per-point times so the subdivision's last point is at 0.
            for (auto &point : subdivision)
            {
                point.time -= time_to_subdivision_end;
            }
            CHECK_EQ(subdivision.back().time, 0.f);
            HandleRangefinder(sensor_id, subdivision_time, frame_id, subdivision);
        }
    }

    // Transforms `ranges` from the sensor frame `frame_id` into the tracking
    // frame and forwards them to the trajectory builder. The data is silently
    // dropped while the sensor-to-tracking transform is unavailable.
    void SensorBridge::HandleRangefinder(const std::string &sensor_id,
                                         const carto::common::Time time,
                                         const std::string &frame_id,
                                         const carto::sensor::TimedPointCloud &ranges)
    {
        if (!ranges.empty())
        {
            // Per-point times are relative to `time` (the scan end), so the
            // last point must not lie in the future.
            CHECK_LE(ranges.back().time, 0.f);
        }
        // TimedPointCloudData requires an intensities argument; this bridge
        // does not forward intensities, so an empty vector is passed.
        // (Renamed from `intensities_`: the trailing underscore is reserved
        // for data members, and this is a plain local.)
        std::vector<float> intensities;
        /*
         * The translation of sensor_to_tracking is used as the origin of the
         * TimedPointCloudData, i.e. the lidar center expressed in the tracking
         * frame.
         *
         * The tracking frame is usually the IMU, the laser scanner, or
         * base_footprint: a frame on the robot body into which all sensor data
         * is transformed so the robot pose can be tracked in one consistent
         * frame.
         *
         * If the tracking frame is the IMU, sensor_to_tracking is the fixed
         * lidar-to-IMU extrinsic determined at mounting time; typically the
         * IMU center coincides with the robot's motion center in the
         * horizontal plane while the lidar center may not.
         */
        const auto sensor_to_tracking = tf_bridge_.LookupToTracking(time, CheckNoLeadingSlash(frame_id));
        if (sensor_to_tracking != nullptr)
        {
            trajectory_builder_->AddSensorData(sensor_id,
                                               carto::sensor::TimedPointCloudData{
                                                   time, sensor_to_tracking->translation().cast<float>(),
                                                   carto::sensor::TransformTimedPointCloud(ranges, sensor_to_tracking->cast<float>()),
                                                   intensities});
        }
    }

} // namespace cartographer_ros
