#include <cv_bridge/cv_bridge.h>
#include <ros/ros.h>
#include <sensor_msgs/LaserScan.h>
#include <sensor_msgs/image_encodings.h>

#include <rosbag/bag.h>
#include <rosbag/view.h>

#include "../camera_models/include/EquidistantCamera.h"
#include "../camera_models/include/PinholeCamera.h"
#include "LaseCamCalCeres.h"
#include "calcCamPose.h"
#include "config.h"
#include "selectScanPoints.h"
#include "utilities.h"
#include <Eigen/Core>

/// Read a required parameter from the given node handle.
///
/// On success the value is logged and returned. On failure an error is
/// logged, the node handle is shut down, and a value-initialized T is
/// returned (e.g. 0 for arithmetic types, "" for std::string) so callers
/// never observe an indeterminate value.
template <typename T>
T readParam(ros::NodeHandle &n, std::string name)
{
    std::cout << name << std::endl;
    // Value-initialize: the original `T ans;` left arithmetic types
    // uninitialized, which is UB to return on the failure path below.
    T ans{};
    if (n.getParam(name, ans)) {
        ROS_INFO_STREAM("Loaded " << name << ": " << ans);
    } else {
        ROS_ERROR_STREAM("Failed to load " << name);
        n.shutdown();
    }
    return ans;
}

/// Laser-camera extrinsic calibration entry point.
///
/// Pipeline:
///   1. Read config + the AprilTag camera poses saved as apriltag_pose.txt.
///   2. Replay the bag; for each laser scan, extract the points on the
///      calibration board, reject range outliers, and fit a 2D line.
///   3. Accept a scan only after the fitted line has been stable for 5
///      consecutive frames and a tag pose within ~39 ms exists.
///   4. Solve the closed-form initial extrinsic, refine with Ceres, and
///      save Tlc (plus roll/pitch/yaw and translation) to result.yaml.
int main(int argc, char **argv)
{
    ros::init(argc, argv, "LaserCamCal");
    ros::NodeHandle nh;
    ros::NodeHandle pnh("~");

    // Number of consecutive scans whose fitted line endpoints stayed within
    // 5 cm of the previous scan; a scan is only used for calibration once
    // this count reaches 5 (i.e. the board has been held still).
    int continus_line_cnt = 0;
    // Endpoints of the line fitted in the previous scan. Default-constructed
    // Eigen vectors are NOT zero-initialized, so the original code compared
    // against indeterminate memory on the first detection (UB). A large
    // sentinel guarantees the first detected line takes the "line moved"
    // branch deterministically.
    Eigen::Vector3d last_line_start = Eigen::Vector3d::Constant(1e9);
    Eigen::Vector3d last_line_end = Eigen::Vector3d::Constant(1e9);

    std::string config_file;
    config_file = readParam<std::string>(pnh, "config_file");
    readParameters(config_file);

    // Open the recorded bag and restrict the view to the scan + image topics.
    rosbag::Bag bag_input;
    bag_input.open(bag_path, rosbag::bagmode::Read);
    std::vector<std::string> topics;
    topics.push_back(scan_topic_name);
    topics.push_back(img_topic_name);
    rosbag::View views(bag_input, rosbag::TopicQuery(topics));

    // Load the camera poses previously estimated from AprilTag detections.
    std::vector<CamPose> vCampose;
    LoadCamPoseFromTxt(savePath + "apriltag_pose.txt", vCampose);
    std::cout << "Load apriltag pose size: " << vCampose.size() << std::endl;

    if (vCampose.size() < 10) {
        std::cout << "apriltag pose less than 10." << std::endl;
        bag_input.close();
        return 0;
    }

    /// Optionally thin out the tag poses so only sufficiently distinct
    /// keyframes are kept (currently disabled).
#if 0
    std::vector<CamPose> sparseTagpose;
    CamPose             older = vCampose.at(0);
    sparseTagpose.push_back(older);
    double dist_min = 0.20;  // 20 cm
    double theta_min = 3.1415926 * 10 / 180.;  // 10 degrees
    for (int j = 1; j < vCampose.size(); ++j) {
        CamPose newer = vCampose.at(j);
        double  dist = (older.twc - newer.twc).norm();
        double  theta = 2 * std::acos(((older.qwc.inverse() * newer.qwc)).w());
        if ((dist > dist_min) || (fabs(theta) > theta_min)) {
            older = newer;
            sparseTagpose.push_back(older);
        }
    }
    vCampose = sparseTagpose;
#endif

    // Prepare the calibration data: one Oberserve per accepted scan/pose pair.
    std::vector<Oberserve> obs;
    std::vector<double>    camSelectTimestamp;
    // Process the laser scans.
    int ii_cnt = 10;  // only used by the commented-out frame-decimation code below
    for (rosbag::MessageInstance const m : views) {
        if (m.getTopic() != scan_topic_name) {
            continue;
        }
        sensor_msgs::LaserScan::Ptr scan = m.instantiate<sensor_msgs::LaserScan>();
        if (!scan) {
            // instantiate() returns null on a message-type mismatch; the
            // original code would have dereferenced a null pointer here.
            continue;
        }
        std::vector<Eigen::Vector3d> Points;
        TranScanToPoints(*scan, Points);

        //      ii_cnt++;
        //      if(ii_cnt % 20 != 0) continue;

        double timestamp = scan->header.stamp.toSec();

        // Extract the scan points believed to lie on the calibration board.
        std::vector<Eigen::Vector3d> pre_points;
        pre_points = AutoGetLinePts(Points, false);

        // A line was detected: require enough points for a reliable fit.
        if (pre_points.size() <= 30) {
            continue;
        }

#ifdef DEBUG_TEST
        std::cout << std::endl << "scan time: " << std::fixed << std::setprecision(18) << timestamp << std::endl;
        std::cout << "pre_points[start]:" << pre_points[0] << std::endl;
        std::cout << "pre_points[  end]:" << pre_points[pre_points.size() - 1] << std::endl;
#endif

        // Replace obvious range outliers (> 100 m from the sensor) with the
        // previous valid point so a single bad return cannot skew the fit.
        Eigen::Vector3d              lastPoint;
        std::vector<Eigen::Vector3d> post_points;
        bool                         dist_correction = false;
        lastPoint = pre_points[0];
        for (int idx = 0; idx < pre_points.size(); idx++) {
            Eigen::Vector3d point = pre_points[idx];
            double          dist = point.head(2).norm();
            if (dist > 100) {
                point.x() = lastPoint.x();
                point.y() = lastPoint.y();
                dist_correction = true;
            }
            lastPoint = point;
            post_points.push_back(point);
        }

// #ifdef DEBUG_TEST
#if 1
        // Report every point that was replaced by the outlier correction.
        if (dist_correction == true) {
            printf("\n##################### PRE START #########################\n");
            for (int idx = 0; idx < pre_points.size(); idx++) {
                auto  &pre_point = pre_points[idx];
                double dist = pre_point.head(2).norm();
                if (dist > 100) {
                    printf("++++++++++++  pre_points[%d] x/y:%f/%f\n", idx, pre_point.x(), pre_point.y());
                    auto post_point = post_points[idx];
                    printf("------------ post_points[%d] x/y:%f/%f\n", idx, post_point.x(), post_point.y());
                }
            }
            printf("##################### PRE  END  #########################\n");
        }
#endif

        // Fit a 2D line (parameterized by `line`) to the cleaned points.
        Eigen::Vector2d line;
        LineFittingCeres(post_points, line);

#ifdef DEBUG_TEST
        printf("##################### POST START #########################\n");
        for (int idx_ = 0; idx_ < post_points.size(); idx_++) {
            printf("points[%d] x/y:%f/%f\n", idx_, post_points[idx_].x(), post_points[idx_].y());
        }
        printf("##################### POST  END  #########################\n");
        std::cout << "post_points start x/y:" << post_points.front().x() << "/" << post_points.front().y()
                  << std::endl;
        // NOTE: the original dereferenced post_points.end() here (UB); use back().
        std::cout << "post_points end   x/y:" << post_points.back().x() << "/" << post_points.back().y() << std::endl;
#endif

        ShowPoints(Points, post_points, 0.1);

        // The laser line must not be perpendicular to an axis; the endpoint
        // re-projection onto the fitted line below is disabled.
        double x_start(post_points[0].x()), x_end(post_points[post_points.size() - 1].x());
        double y_start(post_points[0].y()), y_end(post_points[post_points.size() - 1].y());
        // if (fabs(x_end - x_start) > fabs(y_end - y_start)) {
        //     y_start = -(x_start * line(0) + 1) / line(1);
        //     y_end = -(x_end * line(0) + 1) / line(1);

        // } else  // possibly perpendicular to the x axis: use y to compute x
        // {
        //     x_start = -(y_start * line(1) + 1) / line(0);
        //     x_end = -(y_end * line(1) + 1) / line(0);
        // }

        // x_start = - x_start;
        // x_end = - x_end;
        // y_start = - y_start;
        // y_end = - y_end;

        std::vector<Eigen::Vector3d> points_on_line;
        points_on_line.push_back(Eigen::Vector3d(x_start, y_start, 0.001));
        points_on_line.push_back(Eigen::Vector3d(x_end, y_end, 0.001));

#ifdef DEBUG_TEST
        printf("##################### LINE FITTING START #########################\n");
        std::cout << "points_on_line start x/y:" << x_start << "/" << y_start << std::endl;
        std::cout << "points_on_line end   x/y:" << x_end << "/" << y_end << std::endl;
        printf("##################### LINE FITTING  END  #########################\n");
#endif

        // Stability gate: only accept a scan once the fitted line endpoints
        // have stayed within 5 cm (in the xy-plane) for 5 consecutive scans.
        // Each stable pose is used exactly once (the count must be exactly 5).
        Eigen::Vector3d delta_line_start = points_on_line[0] - last_line_start;
        Eigen::Vector3d delta_line_end = points_on_line[1] - last_line_end;
        if (delta_line_start.head(2).norm() > 0.05 || delta_line_end.head(2).norm() > 0.05) {
            last_line_start = points_on_line[0];
            last_line_end = points_on_line[1];
            std::cout << "last continus_line_cnt:" << continus_line_cnt << std::endl;
            continus_line_cnt = 0;
            continue;
        } else {
            continus_line_cnt++;
            if (continus_line_cnt != 5) {
                continue;
            }
        }

        // Find the tag pose whose timestamp is closest to this scan.
        double  min_dt = 10000;
        CamPose colsetTagPose;
        for (int i = 0; i < vCampose.size(); ++i) {
            CamPose tmp = vCampose.at(i);
            double  t = fabs(tmp.timestamp - timestamp);
            if (t < min_dt) {
                min_dt = t;
                colsetTagPose = tmp;
            }
        }

        if (min_dt >= 0.039)  // reject pairs more than ~39 ms apart
        {
            continue;
        }
        std::cout << "scan and tag time: " << std::fixed << std::setprecision(18) << timestamp << " "
                  << colsetTagPose.timestamp << std::endl;
        camSelectTimestamp.push_back(colsetTagPose.timestamp);
        /////////////////////////////////////////////////

#ifdef DEBUG_TEST
        // Debug only: rescans the whole bag for the image nearest the selected
        // tag pose and displays it (O(bag size) per accepted scan).
        for (rosbag::MessageInstance const m : views) {
            if (m.getTopic() != img_topic_name) {
                continue;
            }

            sensor_msgs::ImageConstPtr img = m.instantiate<sensor_msgs::Image>();
            double                     img_timestamp = img->header.stamp.toSec();
            if (fabs(colsetTagPose.timestamp - img_timestamp) < 1.0) {
                cv_bridge::CvImagePtr cv_ptr;
                cv_ptr = cv_bridge::toCvCopy(img, sensor_msgs::image_encodings::BGR8);
                cv::Mat img_raw = cv_ptr->image.clone();
                if (img_raw.channels() == 3) {
                    cv::cvtColor(img_raw, img_raw, CV_BGR2GRAY);
                }
                cv::imshow("tag time IMAGE", img_raw);
                cv::waitKey(100);
            } else if (fabs(colsetTagPose.timestamp - img_timestamp) < 1.05 &&
                       1.00 > fabs(colsetTagPose.timestamp - img_timestamp)) {
                cv::waitKey(800);
            }
        }
#endif

        // Build the observation: tag pose inverted to camera-from-tag, plus
        // the scan points and the fitted line endpoints.
        Oberserve ob;
        ob.tagPose_Qca = colsetTagPose.qwc.inverse();
        ob.tagPose_tca = -ob.tagPose_Qca.toRotationMatrix() * colsetTagPose.twc;
        ob.points = post_points;
        ob.points_on_line = points_on_line;
        obs.push_back(ob);

        // Diagnostics for the debug prints below: segment length, distance of
        // its midpoint from the laser, and camera-to-board distance.
        Eigen::Vector2d delta_line;
        Eigen::Vector2d center_point;
        delta_line(0) = x_start - x_end;
        delta_line(1) = y_start - y_end;

        center_point(0) = (x_start + x_end) / 2.0;
        center_point(1) = (y_start + y_end) / 2.0;

        double len = delta_line.norm();
        double distance = center_point.norm();
        double planar_cam_distance = ob.tagPose_tca.norm();

#ifdef DEBUG_TEST
        std::cout << "                 tagPose_tca:" << std::endl << ob.tagPose_tca << std::endl;
        std::cout << "points_on_line           len:" << len << std::endl;
        std::cout << "points_on_line      distance:" << distance << std::endl;
        std::cout << "planar_cam_distance distance:" << planar_cam_distance << std::endl;
#endif
    }

    if (obs.size() < 5) {
        std::cout << "Valid Calibra Data Less" << std::endl;
        bag_input.close();
        return 0;
    }
    std::cout << "obs size: " << obs.size() << std::endl;
    // All bag data consumed; close it on the success path too (the original
    // only closed it on the early-return path above).
    bag_input.close();

    Eigen::Matrix4d Tlc_initial = Eigen::Matrix4d::Identity();
    // Eigen::Matrix3d Rlc_initial;
    // Eigen::Vector3d tlc_initial;

    // Rlc_initial << 0, 0, -1, 1, 0, 0, 0, -1, 0;
    // tlc_initial << 0, 0, 0.6;

    // Eigen::Quaterniond qlc_initial(Rlc_initial);
    // Tlc_initial.block(0, 0, 3, 3) = qlc_initial.toRotationMatrix();
    // Tlc_initial.block(0, 3, 3, 1) = tlc_initial;

    // Closed-form initial solution, then nonlinear refinement with Ceres.
    CamLaserCalClosedSolution(obs, Tlc_initial);

    Eigen::Matrix4d Tcl = Tlc_initial.inverse();
    CamLaserCalibration(obs, Tcl, false);
    // CamLaserCalibration(obs,Tcl, true);

    std::cout << "\n----- Transform from Camera to Laser Tlc is: -----\n" << std::endl;
    Eigen::Matrix4d Tlc = Tcl.inverse();
    std::cout << Tlc << std::endl;

    std::cout << "\n----- Transform from Camera to Laser, euler angles and translations are: -----\n" << std::endl;
    Eigen::Matrix3d Rlc(Tlc.block(0, 0, 3, 3));
    Eigen::Vector3d tlc(Tlc.block(0, 3, 3, 1));
    EulerAngles     rpy = ToEulerAngles(Eigen::Quaterniond(Rlc));
    std::cout << "   roll(rad): " << rpy.roll << " pitch(rad): " << rpy.pitch << " yaw(rad): " << rpy.yaw << "\n"
              << "or roll(deg): " << rpy.roll * 180. / M_PI << " pitch(deg): " << rpy.pitch * 180. / M_PI
              << " yaw(deg): " << rpy.yaw * 180. / M_PI << "\n"
              << "       tx(m): " << tlc.x() << "  ty(m): " << tlc.y() << "   tz(m): " << tlc.z() << std::endl;

    // Sanity check: should print the identity if Rlc is a valid rotation.
    std::cout << "Rlc * Rlc^T:" << Rlc * Rlc.transpose() << std::endl;
    // Save the result to a yaml file.
    cv::Mat cvTlc;
    cv::eigen2cv(Tlc, cvTlc);
    std::string     fn = savePath + "result.yaml";
    cv::FileStorage fs(fn, cv::FileStorage::WRITE);
    fs << "extrinsicTlc" << cvTlc;
    cv::Mat cvrpy;
    cv::eigen2cv(Eigen::Vector3d(rpy.roll, rpy.pitch, rpy.yaw), cvrpy);
    cv::Mat cvtlc;
    cv::eigen2cv(tlc, cvtlc);
    fs << "RollPitchYaw" << cvrpy;
    fs << "txtytz" << cvtlc;
    fs.release();

    std::cout << "\n Result file : " << fn << std::endl;
    std::cout << "\n-------------- Calibration Code End --------------\n" << std::endl;

    ros::spin();
}
