//
// Created by cch on 2022/4/26.
//

#ifndef FRONT_MATCH_FEATUREMATCH_H
#define FRONT_MATCH_FEATUREMATCH_H
#include <cassert>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

#include <eigen3/Eigen/Core>
#include <eigen3/Eigen/Dense>
#include <eigen3/Eigen/Geometry>

#include <opencv2/core.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>

using namespace cv;
using namespace Eigen;
const std::string path = "/home/cch/practise/Front-Match/Data";
/// Base class for frame-to-frame feature matching.
/// Owns the pinhole intrinsics K and provides geometric helpers shared by
/// the concrete matchers (ORBMatch, LKMatch).
class Match {
public:
  // Intrinsics are hard-coded for the bundled test images (fx, fy, cx, cy);
  // presumably the TUM fr2 sequence — TODO confirm against the dataset.
  Match() {
    K = (Mat_<double>(3, 3) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1);
  }

  // Fix: this class is polymorphic (virtual KeyPointMatch), so the
  // destructor must be virtual to make delete-through-base well defined.
  virtual ~Match() = default;

  /// Overridden by derived classes with the actual matching pipeline.
  virtual void KeyPointMatch() {}

  /// Returns true when the rounded pixel lies strictly inside the image,
  /// keeping a 1-pixel safety border.
  /// Fix: the vertical bound previously compared img_y against m_col
  /// (width); it must use m_row (height).
  bool inBorder(const cv::Point2f &pt) const {
    const int BORDER_SIZE = 1;
    const int img_x = cvRound(pt.x);
    const int img_y = cvRound(pt.y);

    return BORDER_SIZE <= img_x && img_x < m_col - BORDER_SIZE &&
           BORDER_SIZE <= img_y && img_y < m_row - BORDER_SIZE;
  }

  // TODO: not implemented yet (intended to build a detection mask).
  void setMask() {}

  // TODO: stub — intended to reject outliers with the fundamental matrix
  // once at least 8 correspondences are available.
  void rejectWithF(std::vector<cv::Point2f> &curPts,
                   std::vector<cv::Point2f> &lastPts) {
    if (lastPts.size() > 8) {
    }
  }

  /// Estimates the essential matrix between two matched point sets.
  /// Fix: the original passed CV_FM_8POINT (a fundamental-matrix method
  /// constant, value 2) into the overload that expects a focal length, so
  /// it silently ran with focal=2.0 and ignored K. Use the calibrated
  /// overload with the stored intrinsics and RANSAC instead.
  cv::Mat ComputeEssentialMatrix(const std::vector<cv::Point2f> &points1,
                                 const std::vector<cv::Point2f> &points2) {
    return cv::findEssentialMat(points1, points2, K, cv::RANSAC);
  }

private:
  // Image dimensions used by inBorder(); zero-initialized so an unset
  // size rejects every point instead of reading indeterminate values.
  int m_col = 0, m_row = 0;
  // Camera intrinsic matrix.
  cv::Mat K;
};

/// ORB descriptor matching between two still frames loaded from disk.
class ORBMatch : public Match {
public:
  /// Loads the two grayscale test frames; asserts if either is missing.
  ORBMatch() {
    m_lastimg = cv::imread(path + "/1.png", 0);
    m_curimg = cv::imread(path + "/2.png", 0);
    assert(m_lastimg.data != nullptr && m_curimg.data != nullptr);
  }

  /// Extracts ORB features in both frames, brute-force matches the binary
  /// descriptors with Hamming distance, and visualizes the matches.
  /// (Dead commented-out ratio-test / pose-recovery code removed.)
  void KeyPointMatch() override {
    matches.clear();
    m_last_descriptor = Detect(m_lastimg, m_last_Pts);
    m_cur_descriptor = Detect(m_curimg, m_cur_Pts);
    assert(m_last_descriptor.data != nullptr &&
           m_cur_descriptor.data != nullptr);

    // Hamming distance is the correct metric for ORB's binary descriptors.
    cv::Ptr<DescriptorMatcher> matcher =
        DescriptorMatcher::create("BruteForce-Hamming");
    matcher->match(m_last_descriptor, m_cur_descriptor, matches);
    ShowTrack(matches);
  }

  /// Detects ORB keypoints and computes their descriptors in one pass.
  /// Fix: the original built two separate ORB instances (one used only as
  /// detector, one only as extractor) and scanned the image twice;
  /// a single detectAndCompute call does both.
  cv::Mat Detect(const cv::Mat &img, std::vector<cv::KeyPoint> &Pts) {
    cv::Mat descriptor;
    cv::Ptr<ORB> orb = ORB::create();
    orb->detectAndCompute(img, cv::noArray(), Pts, descriptor);
    return descriptor;
  }

  /// Draws the matches side by side and blocks until a key is pressed.
  void ShowTrack(std::vector<DMatch> &goodMatches) {
    cv::Mat good_img;

    cv::drawMatches(m_lastimg, m_last_Pts, m_curimg, m_cur_Pts, goodMatches,
                    good_img);
    cv::imshow("2", good_img);
    cv::waitKey(0);
  }

private:
  std::vector<cv::KeyPoint> m_cur_Pts, m_last_Pts;
  std::vector<cv::Point2f> curPts, lastPts;
  cv::Mat m_curimg, m_lastimg;
  cv::Mat ORB_EssentialMatrix;
  cv::Mat R, t;
  cv::Mat m_cur_descriptor, m_last_descriptor;
  std::vector<cv::DMatch> matches, goodMatches;
};

class LKMatch : public Match {
public:
  LKMatch() {
    m_curImg = cv::imread(path + "/1.png", 0);
    m_LastImg = cv::imread(path + "/2.png", 0);
    assert(m_curImg.data != nullptr && m_LastImg.data != nullptr);
  };
  void KeyPointMatch() override {
    m_LastPts.clear();
    m_curPts.clear();

    cv::goodFeaturesToTrack(m_LastImg, m_LastPts, 200, 0.01, MIN_DIST, mask);
    if (m_LastPts.empty()) {
      std::cout << "not enough points" << std::endl;
    }
    cv::calcOpticalFlowPyrLK(m_LastImg, m_curImg, m_LastPts, m_curPts, status,
                             error, cv::Size(21, 21), 3);

    essentialMatrix = ComputeEssentialMatrix(m_LastPts, m_curPts);

    cv::recoverPose(essentialMatrix, m_LastPts, m_curPts, R, t);

    std::cout << "LK" << R << std::endl;
    std::cout << "LK" << t << std::endl;
  }

  inline Matrix3d GetRotation() {
    Eigen::Matrix3d Rotation;
    cv::cv2eigen(R, Rotation);
    return Rotation;
  }

  inline Vector3d GetTranslation() {
    Eigen::Vector3d translation;
    cv::cv2eigen(t, translation);
    return translation;
  }

  inline std::vector<cv::Point2f> GetCurPoints() { return m_curPts; }

  inline std::vector<cv::Point2f> GetLastPoints() { return m_LastPts; }

private:
  cv::Mat m_curImg, m_LastImg;
  std::vector<cv::Point2f> m_curPts, m_LastPts;
  double MIN_DIST = 25.0;
  cv::Mat mask;
  std::vector<uchar> status;
  std::vector<float> error;
  cv::Mat essentialMatrix;
  cv::Mat R, t;
};

#endif // FRONT_MATCH_FEATUREMATCH_H
