// Copyright 2020 Tier IV, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//
// Author: v1.0 Yukihiro Saito
//

#include "multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp"

#include "multi_object_tracker/utils/utils.hpp"

// #include <tier4_autoware_utils/geometry/boost_polygon_utils.hpp>
// #include <tier4_autoware_utils/math/normalization.hpp>
// #include <tier4_autoware_utils/math/unit_conversion.hpp>

#include <bits/stdc++.h>
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2/utils.h>

// #ifdef ROS_DISTRO_GALACTIC
// #include <tf2_geometry_msgs/tf2_geometry_msgs.h>
// #else
// #include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
// #endif
// #include "object_recognition_utils/object_recognition_utils.hpp"

#define EIGEN_MPL2_ONLY
#include <Eigen/Core>
#include <Eigen/Geometry>
namespace perception
{
    namespace algorithm
    {
        // using Label = autoware_auto_perception_msgs::msg::ObjectClassification;

        /// @brief Construct a vehicle tracker from an initial lidar detection.
        /// @param time   detection timestamp [s]
        /// @param object initial detection used to seed state and shape
        ///
        /// Initializes measurement-noise parameters, the bicycle motion model
        /// (process noise, wheel-position ratios, motion limits) and the initial
        /// state/covariance of the EKF from the first observation.
        NormalVehicleTracker::NormalVehicleTracker(
            const double &time, const LidarObject &object)
            : Tracker(time, object),
              z_(object.z),
              tracking_offset_(Eigen::Vector2d::Zero())
        {
            object_ = object;

            // Initialize parameters
            // measurement noise covariance: detector uncertainty + ego vehicle motion uncertainty
            float r_stddev_x = 0.5;            // in vehicle coordinate [m]
            float r_stddev_y = 0.4;            // in vehicle coordinate [m]
            float r_stddev_yaw = 20 * deg2rad; // in map coordinate [rad]
            float r_stddev_vel = 1.0;          // in object coordinate [m/s]
            ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0);
            ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0);
            ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0);
            ekf_params_.r_cov_vel = std::pow(r_stddev_vel, 2.0);

            // velocity deviation threshold
            //   if the predicted velocity is close to the observed velocity,
            //   the observed velocity is used as the measurement.
            velocity_deviation_threshold_ = 10 / 3.6; // [m/s]

            // OBJECT SHAPE MODEL
            bounding_box_ = {
                object.length, object.width, object.height};
            // set minimum size
            bounding_box_.length = std::max(bounding_box_.length, 0.3);
            bounding_box_.width = std::max(bounding_box_.width, 0.3);
            bounding_box_.height = std::max(bounding_box_.height, 0.3);

            // Set motion model parameters
            {
                constexpr double q_stddev_acc_long =
                    9.8 * 0.35;                                 // [m/(s*s)] uncertain longitudinal acceleration
                constexpr double q_stddev_acc_lat = 9.8 * 0.15; // [m/(s*s)] uncertain lateral acceleration
                constexpr double q_stddev_yaw_rate_min = 1.5;   // [deg/s] uncertain yaw change rate, minimum
                constexpr double q_stddev_yaw_rate_max = 15.0;  // [deg/s] uncertain yaw change rate, maximum
                constexpr double q_stddev_slip_rate_min =
                    0.3; // [deg/s] uncertain slip angle change rate, minimum
                constexpr double q_stddev_slip_rate_max =
                    10.0;                               // [deg/s] uncertain slip angle change rate, maximum
                constexpr double q_max_slip_angle = 30; // [deg] max slip angle
                constexpr double lf_ratio = 0.3;        // [-] ratio of front wheel position
                constexpr double lf_min = 1.0;          // [m] minimum front wheel position
                constexpr double lr_ratio = 0.25;       // [-] ratio of rear wheel position
                constexpr double lr_min = 1.0;          // [m] minimum rear wheel position
                motion_model_.setMotionParams(
                    q_stddev_acc_long, q_stddev_acc_lat, q_stddev_yaw_rate_min, q_stddev_yaw_rate_max,
                    q_stddev_slip_rate_min, q_stddev_slip_rate_max, q_max_slip_angle, lf_ratio, lf_min, lr_ratio,
                    lr_min);
            }

            // Set motion limits
            {
                constexpr double max_vel = 100 / 3.6;             // [m/s] maximum velocity
                constexpr double max_slip = 30;                   // [deg] maximum slip angle
                motion_model_.setMotionLimits(max_vel, max_slip); // maximum velocity and slip angle
            }

            // Set initial state
            {
                const double x = object.x;
                const double y = object.y;
                const double yaw = object.heading;
                auto pose_cov = object.pose_cov;
                const double vel = 0.0;
                const double &length = bounding_box_.length;

                // initial state covariance
                constexpr double p0_stddev_x = 1.0; // in object coordinate [m]
                constexpr double p0_stddev_y = 0.3; // in object coordinate [m]
                constexpr double p0_stddev_yaw =
                    deg2rad * 25; // in map coordinate [rad]
                // NOTE: std::pow is not constexpr in standard C++ (accepting it
                // is a GCC builtin extension); use plain multiplication so this
                // compiles with clang/MSVC as well.
                constexpr double p0_cov_x = p0_stddev_x * p0_stddev_x;
                constexpr double p0_cov_y = p0_stddev_y * p0_stddev_y;
                constexpr double p0_cov_yaw = p0_stddev_yaw * p0_stddev_yaw;

                // rotate the object-frame position covariance into the map frame
                const double cos_yaw = std::cos(yaw);
                const double sin_yaw = std::sin(yaw);
                const double sin_2yaw = std::sin(2.0 * yaw);
                pose_cov[MSG_COV_IDX::X_X] =
                    p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw;
                pose_cov[MSG_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw;
                pose_cov[MSG_COV_IDX::Y_Y] =
                    p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw;
                pose_cov[MSG_COV_IDX::Y_X] = pose_cov[MSG_COV_IDX::X_Y];
                pose_cov[MSG_COV_IDX::YAW_YAW] = p0_cov_yaw;

                // velocity is unobserved at init: use a very large variance
                constexpr double p0_stddev_vel = 1000 / 3.6; // in object coordinate [m/s]
                const double vel_cov = std::pow(p0_stddev_vel, 2.0);

                const double slip = 0.0;
                const double p0_stddev_slip = deg2rad * 5; // in object coordinate [rad/s]
                const double slip_cov = std::pow(p0_stddev_slip, 2.0);

                // initialize motion model
                motion_model_.initialize(time, x, y, yaw, pose_cov, vel, vel_cov, slip, slip_cov, length);
            }
            object_.has_position_covariance = true;
            /* calc nearest corner index TODO*/
            // setNearestCornerOrSurfaceIndex(self_transform); // this index is used in next measure step
        }

        /// @brief Propagate the motion model to the requested timestamp.
        /// @param time target time [s]
        /// @return true when the state prediction succeeded
        bool NormalVehicleTracker::predict(const double &time)
        {
            const bool predicted = motion_model_.predictState(time);
            return predicted;
        }

        /// @brief Build the measurement object used in the EKF update step.
        /// @param object raw detection
        /// @return copy of @p object with measurement-noise covariances filled in
        ///
        /// The position noise (defined in the object frame) is rotated by the
        /// observed yaw into the map frame and written into the pose covariance.
        LidarObject NormalVehicleTracker::getUpdatingObject(
            const LidarObject &object)
        {
            LidarObject updating_object = object;

            // current (predicted) state
            const double tracked_x = motion_model_.getStateElement(IDX::X);
            const double tracked_y = motion_model_.getStateElement(IDX::Y);
            const double tracked_yaw = motion_model_.getStateElement(IDX::YAW);

            // OBJECT SHAPE MODEL
            // convert to bounding box if input is convex shape
            LidarObject bbox_object;

            bbox_object = object;

            // get offset measurement TODO
            // int nearest_corner_index = utils::getNearestCornerOrSurface(
            //     tracked_x, tracked_y, tracked_yaw, bounding_box_.width, bounding_box_.length, self_transform);
            // utils::calcAnchorPointOffset(
            //     last_input_bounding_box_.width, last_input_bounding_box_.length, nearest_corner_index,
            //     bbox_object, tracked_yaw, updating_object, tracking_offset_);

            // UNCERTAINTY MODEL

            // measurement noise covariance
            // NOTE(review): the previous code branched on the detection label with
            // `if (label == 0) ... else if (label == 0) ...`; the duplicated
            // condition made the "enlarge covariance on label change" branch
            // unreachable, so every path assigned the nominal covariance. The
            // unconditional assignment below preserves that behavior while
            // removing the dead code. TODO: once the label taxonomy is available,
            // enlarge r_cov when the detected label differs from the tracked one
            // (e.g. r_stddev = 2.0 m), as the original comment intended.
            const float r_cov_x = ekf_params_.r_cov_x;
            const float r_cov_y = ekf_params_.r_cov_y;

            // yaw angle fix
            double pose_yaw = object.heading;
            // bool is_yaw_available =
            //     object.kinematics.orientation_availability !=
            //     LidarObjectKinematics::UNAVAILABLE;

            // fill covariance matrix: rotate the object-frame (x, y) noise by the
            // observed yaw into the map frame
            auto &pose_cov = updating_object.pose_cov;
            const double cos_yaw = std::cos(pose_yaw);
            const double sin_yaw = std::sin(pose_yaw);
            const double sin_2yaw = std::sin(2.0 * pose_yaw);
            pose_cov[MSG_COV_IDX::X_X] =
                r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw;      // x - x
            pose_cov[MSG_COV_IDX::X_Y] = 0.5 * (r_cov_x - r_cov_y) * sin_2yaw;  // x - y
            pose_cov[MSG_COV_IDX::Y_Y] =
                r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y
            pose_cov[MSG_COV_IDX::Y_X] = pose_cov[MSG_COV_IDX::X_Y];       // y - x
            pose_cov[MSG_COV_IDX::X_YAW] = 0.0;                            // x - yaw
            pose_cov[MSG_COV_IDX::Y_YAW] = 0.0;                            // y - yaw
            pose_cov[MSG_COV_IDX::YAW_X] = 0.0;                            // yaw - x
            pose_cov[MSG_COV_IDX::YAW_Y] = 0.0;                            // yaw - y
            pose_cov[MSG_COV_IDX::YAW_YAW] = ekf_params_.r_cov_yaw;        // yaw - yaw
            // if (!is_yaw_available)
            // {
            //     pose_cov[MSG_COV_IDX::YAW_YAW] *= 1e3; // yaw is not available, multiply large value
            // }
            auto &twist_cov = updating_object.twist_cov;
            twist_cov[MSG_COV_IDX::X_X] = ekf_params_.r_cov_vel; // vel - vel

            return updating_object;
        }

        /// @brief EKF update with the pose (and, when consistent, the velocity)
        ///        of a detection.
        /// @param object measurement with covariances already prepared by
        ///               getUpdatingObject()
        /// @return true when the motion-model update succeeded
        bool NormalVehicleTracker::measureWithPose(
            const LidarObject &object)
        {
            // current (predicted) state
            const double tracked_vel = motion_model_.getStateElement(IDX::VEL);

            // observed ground speed [m/s]
            // BUG FIX: this was sqrt(vx*vx + vy + vy) — vy was added twice
            // instead of squared, corrupting the speed estimate.
            const double observed_vel = std::hypot(object.vx, object.vy);

            // the velocity measurement is used only when it is consistent with
            // the predicted velocity
            const bool is_velocity_available =
                std::fabs(tracked_vel - observed_vel) < velocity_deviation_threshold_;

            // update
            bool is_updated = false;
            {
                const double x = object.x;
                const double y = object.y;
                const double yaw = object.heading;

                if (is_velocity_available)
                {
                    is_updated = motion_model_.updateStatePoseHeadVel(
                        x, y, yaw, object.pose_cov, observed_vel,
                        object.twist_cov);
                }
                else
                {
                    is_updated = motion_model_.updateStatePoseHead(
                        x, y, yaw, object.pose_cov);
                }
                motion_model_.limitStates();
            }

            // position z: low-pass filter toward the observed height
            constexpr double gain = 0.1;
            z_ = (1.0 - gain) * z_ + gain * object.z;

            return is_updated;
        }

        /// @brief Smooth the tracked bounding-box size toward a new detection.
        /// @param object measurement providing length/width/height
        /// @return always true
        bool NormalVehicleTracker::measureWithShape(
            const LidarObject &object)
        {
            // Exponential moving average of the box dimensions.
            constexpr double alpha = 0.1;       // weight of the new measurement
            constexpr double keep = 1.0 - alpha; // weight of the current estimate

            bounding_box_.length = keep * bounding_box_.length + alpha * object.length;
            bounding_box_.width = keep * bounding_box_.width + alpha * object.width;
            bounding_box_.height = keep * bounding_box_.height + alpha * object.height;

            // remember the raw input size for later corner/offset recovery
            last_input_bounding_box_ = {
                object.length, object.width, object.height};

            // clamp each dimension to the minimum plausible size
            constexpr double min_size = 0.3; // [m]
            bounding_box_.length = std::max(bounding_box_.length, min_size);
            bounding_box_.width = std::max(bounding_box_.width, min_size);
            bounding_box_.height = std::max(bounding_box_.height, min_size);

            // propagate the new length into the motion model (wheelbase etc.)
            motion_model_.updateExtendedState(bounding_box_.length);

            // // update offset into object position
            // motion_model_.adjustPosition(gain * tracking_offset_.x(), gain * tracking_offset_.y());
            // // update offset
            // tracking_offset_.x() = gain_inv * tracking_offset_.x();
            // tracking_offset_.y() = gain_inv * tracking_offset_.y();

            return true;
        }

        bool NormalVehicleTracker::measure(
            const LidarObject &object, const double &time)
        {
            // keep the latest input object
            object_ = object;

            // update classification TODO
            // const auto &current_classification = getClassification();
            // if (object_recognition_utils::getHighestProbLabel(object.classification) == Label::UNKNOWN)
            // {
            //     setClassification(current_classification);
            // }

            // check time gap
            const double dt = motion_model_.getDeltaTime(time);
            if (0.01 /*10msec*/ < dt)
            {
                // RCLCPP_WARN(
                //     logger_,
                //     "NormalVehicleTracker::measure There is a large gap between predicted time and measurement "
                //     "time. (%f)",
                //     dt);
            }

            // update object
            const LidarObject updating_object =
                getUpdatingObject(object);
            measureWithPose(updating_object);
            measureWithShape(updating_object);

            /* calc nearest corner index TODO*/
            // setNearestCornerOrSurfaceIndex(self_transform); // this index is used in next measure step

            return true;
        }

        /// @brief Predict the state to @p time and export it as a LidarObject.
        /// @param time   target time [s]
        /// @param object [in/out] receives the predicted pose, twist,
        ///               covariances and smoothed bounding-box size
        /// @return false when the motion-model prediction fails
        bool NormalVehicleTracker::getTrackedObject(
            const double &time, LidarObject &object) const
        {
            // TODO
            // object = object_recognition_utils::toTrackedObject(object_);
            // object.object_id = getUUID();
            // object.classification = getClassification();

            // BUG FIX: the vectors were default-constructed (size 0), so the
            // comma-initializers below asserted / were UB at runtime. They must
            // be sized to 6 before initialization.
            Eigen::VectorXd pose(6);  // [x, y, z, roll, pitch, yaw]
            Eigen::VectorXd twist(6); // [vx, vy, vz, wx, wy, wz]
            pose << object.x, object.y, object.z, 0, 0, object.heading;
            twist << object.vx, object.vy, 0, 0, 0, object.angular_vz;
            auto &pose_cov = object.pose_cov;
            auto &twist_cov = object.twist_cov;

            // predict from motion model
            if (!motion_model_.getPredictedState(
                    time, pose, pose_cov, twist,
                    twist_cov))
            {
                // RCLCPP_WARN(logger_, "NormalVehicleTracker::getTrackedObject: Failed to get predicted state.");
                return false;
            }

            // recover bounding box from tracking point
            // TODO: use bounding_box_ - last_input_bounding_box_ deltas with
            // utils::recoverFromTrackingPoint once the corner index is tracked:
            // const Eigen::Vector2d recovered_pose = utils::recoverFromTrackingPoint(
            //     pose_with_cov.pose.position.x, pose_with_cov.pose.position.y,
            //     motion_model_.getStateElement(IDX::YAW), dw, dl, last_nearest_corner_index_, tracking_offset_);
            // pose_with_cov.pose.position.x = recovered_pose.x();
            // pose_with_cov.pose.position.y = recovered_pose.y();

            // position: override z with the low-pass-filtered height
            pose[2] = z_;
            object.x = pose[0];
            object.y = pose[1];
            object.z = pose[2];
            // BUG FIX: yaw is element 5 of the 6-vector; index 6 was out of
            // bounds.
            object.heading = pose[5];
            object.vx = twist[0];
            object.vy = twist[1];
            object.pose_cov = pose_cov;
            object.twist_cov = twist_cov;
            // set shape from the smoothed bounding box
            object.length = bounding_box_.length;
            object.width = bounding_box_.width;
            object.height = bounding_box_.height;
            // TODO: yaw-flip fix between object_.heading and the EKF yaw

            return true;
        }
    }
}