//
// Created by 孙强 on 2022/5/13.
//

#include <iostream>
#include <string>
#include <opencv2/core.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/opencv.hpp>

using namespace std;
using namespace cv;

void extract_features(
        vector<string> &image_names,
        vector<vector<KeyPoint>> &key_points_for_all,
        vector<Mat> &descriptor_for_all,
        vector<vector<Vec3b>> &colors_for_all
) {
    // Detect ORB key points + descriptors for every readable image, and record
    // the BGR color under each key point (used later to color the 3D points).
    // All three output vectors stay index-aligned per accepted image.
    key_points_for_all.clear();
    descriptor_for_all.clear();
    colors_for_all.clear();  // was missing: outputs drifted apart on reuse

    Ptr<ORB> orb = ORB::create(5000);
    for (const string &name : image_names) {
        Mat image = imread(name);
        if (image.empty()) continue;  // unreadable file: skip silently

        vector<KeyPoint> key_points;
        Mat descriptor;
        orb->detectAndCompute(image, noArray(), key_points, descriptor);

        // Too few features: image is useless for matching, drop it entirely.
        if (key_points.size() <= 10) continue;

        key_points_for_all.push_back(key_points);
        descriptor_for_all.push_back(descriptor);

        // Sample the pixel color at each key point. Key point coordinates are
        // sub-pixel floats: round to the nearest pixel (instead of truncating)
        // and clamp so Mat::at never reads out of bounds at the image border.
        vector<Vec3b> colors(key_points.size());
        for (size_t i = 0; i < key_points.size(); ++i) {
            const Point2f &p = key_points[i].pt;
            int x = min(max(cvRound(p.x), 0), image.cols - 1);
            int y = min(max(cvRound(p.y), 0), image.rows - 1);
            colors[i] = image.at<Vec3b>(y, x);
        }
        colors_for_all.push_back(colors);
    }
}

void match_features(Mat &query, Mat &train, vector<DMatch> &matches) {
    // Match two ORB descriptor sets with Lowe's ratio test plus an absolute
    // distance cut-off; surviving best matches are appended to `matches`.
    matches.clear();
    if (query.empty() || train.empty()) return;  // knnMatch would have nothing to do

    // ORB descriptors are binary (CV_8U): brute-force matching must use
    // Hamming distance. NORM_L2 is only valid for float descriptors (SIFT/SURF)
    // and is incorrect here.
    vector<vector<DMatch>> knn_matches;
    BFMatcher matcher(NORM_HAMMING);
    matcher.knnMatch(query, train, knn_matches, 2);

    // Smallest distance among matches that pass the ratio test; used below as
    // the baseline for the absolute distance cut-off.
    float min_dist = FLT_MAX;
    for (size_t r = 0; r < knn_matches.size(); ++r) {
        // Fewer than two neighbors (tiny train set): indexing [1] would be UB.
        if (knn_matches[r].size() < 2) continue;

        // Lowe's ratio test: best match must be clearly better than second best.
        if (knn_matches[r][0].distance > 0.6 * knn_matches[r][1].distance)
            continue;

        float dist = knn_matches[r][0].distance;
        if (dist < min_dist) min_dist = dist;
    }

    for (size_t r = 0; r < knn_matches.size(); ++r) {
        // Reject matches that fail the ratio test or whose distance is far
        // above the best observed one (floor of 10 keeps the bound sane when
        // min_dist is very small).
        if (knn_matches[r].size() < 2 ||
            knn_matches[r][0].distance > 0.6 * knn_matches[r][1].distance ||
            knn_matches[r][0].distance > 5 * max(min_dist, 10.0f))
            continue;

        matches.push_back(knn_matches[r][0]);
    }
}

    /*
     * Given the matched points, estimate the essential matrix with
     * findEssentialMat() (new in OpenCV 3.0), then decompose it to recover the
     * relative pose R, T between the two cameras. Note that T is expressed in
     * the second camera's frame — it points from the second camera towards the
     * first (the camera at the world origin) — and has unit length.
     */
    bool find_transform(Mat& K, vector<Point2f>& p1, vector<Point2f>& p2, Mat& R, Mat& T, Mat& mask){
        // Pull focal length (mean of fx, fy) and principal point out of the
        // 3x3 double-precision intrinsic matrix K.
        double focal = 0.5 * (K.at<double>(0) + K.at<double>(4));
        Point2d pp(K.at<double>(2), K.at<double>(5));

        // RANSAC essential-matrix estimation; the mask additionally flags and
        // rejects remaining mismatched pairs.
        Mat E = findEssentialMat(p1, p2, focal, pp, RANSAC, 0.999, 1.0, mask);
        if (E.empty()) return false;

        double inliers = countNonZero(mask);
        cout << (int)inliers << " -in- " << p1.size() << endl;
        // RANSAC output is unreliable once outliers exceed roughly 50%;
        // also require a minimal absolute inlier count.
        if (inliers <= 15 || (inliers / p1.size()) < 0.6)
            return false;

        // Decompose E into R, T. recoverPose returns the number of point pairs
        // that triangulate in front of BOTH cameras (cheirality check); demand
        // that a large fraction of the inliers do.
        int in_front = recoverPose(E, p1, p2, R, T, focal, pp, mask);
        return ((double)in_front) / inliers >= 0.7;
    }



void reconstruct(Mat& K, Mat& R, Mat& T, vector<Point2f>& p1, vector<Point2f>& p2, Mat& structure)
{
    // Build the two camera projection matrices K[I|0] and K[R|T], then
    // triangulate the matched point pairs. triangulatePoints only accepts
    // single-precision floats, hence CV_32FC1 throughout; `structure` comes
    // back as the triangulated points in homogeneous coordinates.
    Mat proj_a(3, 4, CV_32FC1);
    Mat proj_b(3, 4, CV_32FC1);

    // First camera defines the world frame: extrinsics [I | 0].
    Mat::eye(3, 3, CV_32FC1).copyTo(proj_a(Range(0, 3), Range(0, 3)));
    proj_a.col(3).setTo(0);

    // Second camera: extrinsics [R | T], converted from double to float.
    R.convertTo(proj_b(Range(0, 3), Range(0, 3)), CV_32FC1);
    T.convertTo(proj_b.col(3), CV_32FC1);

    // Pre-multiply both by the (float) intrinsics.
    Mat Kf;
    K.convertTo(Kf, CV_32FC1);
    proj_a = Kf * proj_a;
    proj_b = Kf * proj_b;

    triangulatePoints(proj_a, proj_b, p1, p2, structure);
}





