#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <string>
#include <iomanip>
#include <chrono>
#include <opencv2/opencv.hpp>
#include <Eigen/Eigen>
#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <opengv/relative_pose/methods.hpp>
#include <opengv/relative_pose/CentralRelativeAdapter.hpp>
#include <boost/filesystem.hpp>
#include "poseEstimate.hpp"

#include "ResultTrajectory.h"
#include "colors.h"
#include "print.h"

using namespace std;
using namespace cv;
using namespace Eigen;
using namespace boost::filesystem;
using namespace ov_eval;

// Constructor implementation
// Default constructor: no members require explicit initialization.
PoseEstimator::PoseEstimator() = default;

std::vector<std::string> PoseEstimator::split(const std::string &s, char delimiter) {
    vector<string> tokens;
    string token;
    istringstream tokenStream(s);
    while (getline(tokenStream, token, delimiter)) {
        tokens.push_back(token);
    }
    return tokens;
}

// ORB feature matcher (disabled; the SIFT implementation below is used instead)
// void PoseEstimator::find_feature_matches(const cv::Mat &img_1, const cv::Mat &img_2, std::vector<cv::KeyPoint> &keypoints_1, std::vector<cv::KeyPoint> &keypoints_2, std::vector<cv::DMatch> &matches, int nfeatures) {
//   Mat descriptors_1, descriptors_2;
//   Ptr<FeatureDetector> detector = ORB::create(nfeatures);   // limit the number of points
//   Ptr<DescriptorExtractor> descriptor = ORB::create();
//   Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");

//   detector->detect(img_1, keypoints_1);
//   detector->detect(img_2, keypoints_2);

//   descriptor->compute(img_1, keypoints_1, descriptors_1);
//   descriptor->compute(img_2, keypoints_2, descriptors_2);

//   matcher->match(descriptors_1, descriptors_2, matches);

//     double min_dist = 10000, max_dist = 0;
//     for (const auto &m : matches) {
//         double dist = m.distance;
//         if (dist < min_dist) min_dist = dist;
//         if (dist > max_dist) max_dist = dist;
//     }

//     vector<DMatch> good_matches;
//     for (const auto &m : matches) {
//         if (m.distance <= max(2 * min_dist, 30.0)) {
//             good_matches.push_back(m);
//         }
//     }
//     matches.swap(good_matches);
// }

// SIFT feature
// SIFT feature detection and FLANN KNN matching filtered with Lowe's ratio test.
// @param img_1, img_2     input images
// @param keypoints_1/2    [out] detected SIFT keypoints of each image
// @param matches          [out] matches that pass the ratio test (query = img_1, train = img_2)
// @param nfeatures        maximum number of SIFT features to retain per image
void PoseEstimator::find_feature_matches(const cv::Mat &img_1, const cv::Mat &img_2, std::vector<cv::KeyPoint> &keypoints_1, std::vector<cv::KeyPoint> &keypoints_2, std::vector<cv::DMatch> &matches, int nfeatures) {
    Mat descriptors_1, descriptors_2;

    Ptr<SIFT> detector = SIFT::create(nfeatures);
    // detectAndCompute avoids running the SIFT scale-space pyramid twice per image
    // (the previous detect + compute pair did the detection work twice).
    detector->detectAndCompute(img_1, noArray(), keypoints_1, descriptors_1);
    detector->detectAndCompute(img_2, noArray(), keypoints_2, descriptors_2);

    // FLANN asserts on empty descriptor sets — bail out with no matches instead.
    if (descriptors_1.empty() || descriptors_2.empty()) {
        return;
    }

    FlannBasedMatcher matcher(makePtr<flann::KDTreeIndexParams>(5), makePtr<flann::SearchParams>(32));

    // K=2 KNN matching to obtain the best and second-best candidate per query descriptor.
    std::vector<std::vector<DMatch>> knn_matches;
    matcher.knnMatch(descriptors_1, descriptors_2, knn_matches, 2);

    for (const auto &m : knn_matches) {
        // knnMatch may return fewer than 2 neighbours; guard before indexing m[1].
        if (m.size() < 2) {
            continue;
        }
        // Lowe's ratio test (threshold 0.75), written as a multiplication so a
        // zero second-best distance cannot cause a division by zero.
        if (m[0].distance < 0.75f * m[1].distance) {
            matches.push_back(m[0]);  // keep only the best match that passed the test
        }
    }
}

// Recover the relative pose (R and unit-scale t) between two views from matched
// keypoints by estimating the essential matrix and decomposing it with recoverPose.
// @param method  robust estimator flag forwarded to cv::findEssentialMat (e.g. cv::RANSAC)
void PoseEstimator::pose_CV_essential(const std::vector<cv::KeyPoint> &keypoints_1, const std::vector<cv::KeyPoint> &keypoints_2, const std::vector<cv::DMatch> &matches, cv::Mat &R, cv::Mat &t, int method) {
    // Camera intrinsics, TUM Freiburg2 (kept for reference):
    // Mat K = (Mat_<double>(3, 3) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1);
    // Camera intrinsics, EuRoC dataset:
    Mat K = (Mat_<double>(3, 3) << 458.6, 0, 367.2, 0, 457.2, 248.3, 0, 0, 1);

    vector<Point2f> pts_query, pts_train;
    pts_query.reserve(matches.size());
    pts_train.reserve(matches.size());
    for (const auto &match : matches) {
        pts_query.push_back(keypoints_1[match.queryIdx].pt);
        pts_train.push_back(keypoints_2[match.trainIdx].pt);
    }

    // Inlier mask from the robust estimator is reused by recoverPose.
    Mat inlier_mask;
    Mat essential_matrix = findEssentialMat(pts_query, pts_train, K, method, 0.99, 1.0, inlier_mask);
    recoverPose(essential_matrix, pts_query, pts_train, K, R, t, inlier_mask);
}

// Recover the relative pose (R and unit-scale t) between two views by estimating
// the fundamental matrix, converting it to an essential matrix via the intrinsics,
// and decomposing it with recoverPose.
// @param method  robust estimator flag forwarded to cv::findFundamentalMat (e.g. cv::FM_RANSAC)
void PoseEstimator::pose_CV_fundamental(const std::vector<cv::KeyPoint> &keypoints_1, const std::vector<cv::KeyPoint> &keypoints_2, const std::vector<cv::DMatch> &matches, cv::Mat &R, cv::Mat &t, int method) {
    // Camera intrinsics, EuRoC dataset.
    Mat K = (Mat_<double>(3, 3) << 458.6, 0, 367.2, 0, 457.2, 248.3, 0, 0, 1);

    vector<Point2f> points1;
    vector<Point2f> points2;
    points1.reserve(matches.size());
    points2.reserve(matches.size());
    for (const auto &m : matches) {
        points1.push_back(keypoints_1[m.queryIdx].pt);
        points2.push_back(keypoints_2[m.trainIdx].pt);
    }

    // Estimate the fundamental matrix; `mask` now receives the robust-estimator
    // inlier flags (previously it was left empty, so recoverPose ignored the
    // outlier information entirely).
    Mat mask;
    Mat fundamental_matrix = findFundamentalMat(points1, points2, method, 3, 0.99, mask);

    // Essential matrix from the fundamental matrix: E = K^T * F * K.
    Mat essential_matrix = K.t() * fundamental_matrix * K;
    recoverPose(essential_matrix, points1, points2, K, R, t, mask);
}

// Estimate the relative pose between two views with OpenGV's linear eight-point
// algorithm.
// NOTE: opengv::relative_pose::eightpt() returns the 3x3 essential matrix
// (opengv::essential_t), NOT a 3x4 [R|t] transform — the previous code read a
// non-existent 4th column (out-of-range on an Eigen::Matrix3d). The essential
// matrix is therefore decomposed into (R, t) here via cv::recoverPose, which
// performs the cheirality check over the four candidate decompositions.
// NOTE(review): this assumes OpenGV's essential-matrix convention is compatible
// with OpenCV's (x2^T * E * x1 = 0) — verify sign/transpose against the library docs.
std::pair<Eigen::Matrix3d, Eigen::Vector3d> PoseEstimator::pose_GV_eightpt(const std::vector<cv::KeyPoint>& keypoints_1, std::vector<cv::KeyPoint>& keypoints_2, std::vector<cv::DMatch>& matches) {
    // Camera intrinsics, EuRoC dataset.
    Matrix3d K;
    K << 458.6, 0, 367.2, 0, 457.2, 248.3, 0, 0, 1;

    opengv::bearingVectors_t bearings1, bearings2;
    std::vector<Point2f> pixels1, pixels2;  // pixel coords kept for recoverPose's cheirality check
    for (const auto& match : matches) {
        Point2f pt1 = keypoints_1[match.queryIdx].pt;
        Point2f pt2 = keypoints_2[match.trainIdx].pt;
        pixels1.push_back(pt1);
        pixels2.push_back(pt2);
        // Unproject pixels to unit bearing vectors with the pinhole model.
        Vector3d bv1((pt1.x - K(0,2)) / K(0,0),
                     (pt1.y - K(1,2)) / K(1,1), 1.0);
        Vector3d bv2((pt2.x - K(0,2)) / K(0,0),
                     (pt2.y - K(1,2)) / K(1,1), 1.0);
        bearings1.push_back(bv1.normalized());
        bearings2.push_back(bv2.normalized());
    }

    opengv::relative_pose::CentralRelativeAdapter adapter(bearings1, bearings2);
    opengv::essential_t E = opengv::relative_pose::eightpt(adapter);

    // Copy E and K into cv::Mat and let recoverPose select the physically valid
    // (R, t) pair among the four decompositions of the essential matrix.
    Mat E_cv(3, 3, CV_64F), K_cv(3, 3, CV_64F);
    for (int r = 0; r < 3; ++r) {
        for (int c = 0; c < 3; ++c) {
            E_cv.at<double>(r, c) = E(r, c);
            K_cv.at<double>(r, c) = K(r, c);
        }
    }
    Mat R_cv, t_cv;
    recoverPose(E_cv, pixels1, pixels2, K_cv, R_cv, t_cv);

    Matrix3d R;
    Vector3d t;
    for (int r = 0; r < 3; ++r) {
        for (int c = 0; c < 3; ++c) {
            R(r, c) = R_cv.at<double>(r, c);
        }
        t(r) = t_cv.at<double>(r, 0);
    }
    return std::make_pair(R, t);
}

// Estimate the relative pose between two views with OpenGV's eigensolver method.
// NOTE: opengv::relative_pose::eigensolver() returns only the 3x3 rotation
// (opengv::rotation_t) — the previous code read a non-existent 4th column
// (out-of-range on an Eigen::Matrix3d) for the translation. The translation is
// obtained through the eigensolverOutput_t out-parameter instead.
std::pair<Eigen::Matrix3d, Eigen::Vector3d> PoseEstimator::pose_GV_eigensolver(const std::vector<cv::KeyPoint>& keypoints_1, std::vector<cv::KeyPoint>& keypoints_2, std::vector<cv::DMatch>& matches) {
    // Camera intrinsics, EuRoC dataset.
    Matrix3d K;
    K << 458.6, 0, 367.2, 0, 457.2, 248.3, 0, 0, 1;

    opengv::bearingVectors_t bearings1, bearings2;
    for (const auto& match : matches) {
        Point2f pt1 = keypoints_1[match.queryIdx].pt;
        Point2f pt2 = keypoints_2[match.trainIdx].pt;
        // Unproject pixels to unit bearing vectors with the pinhole model.
        Vector3d bv1((pt1.x - K(0,2)) / K(0,0),
                     (pt1.y - K(1,2)) / K(1,1), 1.0);
        Vector3d bv2((pt2.x - K(0,2)) / K(0,0),
                     (pt2.y - K(1,2)) / K(1,1), 1.0);
        bearings1.push_back(bv1.normalized());
        bearings2.push_back(bv2.normalized());
    }

    opengv::relative_pose::CentralRelativeAdapter adapter(bearings1, bearings2);
    // The solver is iterative; the adapter's default rotation (identity) seeds it.
    opengv::eigensolverOutput_t solver_output;
    Matrix3d R = opengv::relative_pose::eigensolver(adapter, solver_output);
    Vector3d t = solver_output.translation;

    return std::make_pair(R, t);
}

// Copy a 3x3 CV_64F cv::Mat into an Eigen 3x3 matrix, element by element.
// Asserts that the input is exactly 3x3.
void PoseEstimator::convertCvMatToEigen(const cv::Mat& cvMat, Eigen::Matrix3d& eigenMat) {
    assert(cvMat.rows == 3 && cvMat.cols == 3);
    for (int row = 0; row < cvMat.rows; ++row) {
        for (int col = 0; col < cvMat.cols; ++col) {
            eigenMat(row, col) = cvMat.at<double>(row, col);
        }
    }
}

// Copy an Eigen 3-vector into a freshly allocated 3x1 CV_64F cv::Mat column vector.
void PoseEstimator::convertEigenToCvMat(const Eigen::Vector3d& eigenVec, cv::Mat& cvMat) {
    cvMat.create(3, 1, CV_64F);
    cvMat.at<double>(0, 0) = eigenVec(0);
    cvMat.at<double>(1, 0) = eigenVec(1);
    cvMat.at<double>(2, 0) = eigenVec(2);
}

// Append one pose line (TUM format: time tx ty tz qx qy qz qw) to `outfile`,
// taking the timestamp from the next line of `timefile` (CSV, nanoseconds in
// the first column).
// Silently skips the write if the time file is exhausted or the line is
// malformed — previously `data[0]` was read without checking that getline
// succeeded or that split() produced any token (undefined behavior at EOF).
void PoseEstimator::savePose(std::ifstream &timefile, std::ofstream &outfile, const cv::Mat &t_cv, const Eigen::Quaterniond &q) {
    std::string line;
    if (!std::getline(timefile, line)) {
        return;  // no more timestamps available
    }
    auto data = split(line, ',');
    if (data.empty() || data[0].empty()) {
        return;  // malformed/empty CSV line
    }
    double timestamp_sec = std::stod(data[0]) / 1e9;  // nanoseconds -> seconds
    // setprecision is sticky, so one call covers every following value.
    outfile << std::setprecision(9) << timestamp_sec << " "
            << t_cv.at<double>(0, 0) << " "
            << t_cv.at<double>(1, 0) << " " << t_cv.at<double>(2, 0) << " "
            << q.x() << " " << q.y() << " " << q.z() << " " << q.w() << std::endl;
}

void PoseEstimator::getRMSE(const std::string& groundTruthPath, const std::string& estimatePath){
    boost::filesystem::path path_gt(groundTruthPath);
    std::vector<double> times;
    std::vector<Eigen::Matrix<double, 7, 1>> poses;
    std::vector<Eigen::Matrix3d> cov_ori, cov_pos;
    ov_eval::Loader::load_data(groundTruthPath, times, poses, cov_ori, cov_pos);

    double length = ov_eval::Loader::get_total_length(poses);
    cerr << "[COMP]: " << times.size() << " poses in " << path_gt.stem().string() << " => length of " << fixed << setprecision(2) << length << " meters" << endl;
    // align_mode （对齐模式）参数的取值有: posyaw、posyawsingle、se3、se3single、sim3和none类型
    ov_eval::ResultTrajectory traj(estimatePath, groundTruthPath, "se3");
    ov_eval::Statistics error_ori, error_pos;
    traj.calculate_ate(error_ori, error_pos);
    cout << "Absolute Trajectory Error" << endl;
    cout << "rmse_ori = " << fixed << setprecision(3) << error_ori.rmse << " | rmse_pos = " << error_pos.rmse << endl;
    cout << "mean_ori = " << error_ori.mean << " | mean_pos = " << error_pos.mean << endl;
    cout << "min_ori  = " << error_ori.min << " | min_pos  = " << error_pos.min << endl;
    cout << "max_ori  = " << error_ori.max << " | max_pos  = " << error_pos.max << endl;
    cout << "std_ori  = " << error_ori.std << " | std_pos  = " << error_pos.std << endl;
    cout << "======================================" << endl;
}