/**
 * 本程序测试LK光流的效果
*/

#include "ygz/Frame.h"
#include "ygz/Settings.h"
#include "ygz/ORBExtractor.h"
#include "ygz/ORBMatcher.h"
#include "ygz/EurocReader.h"
#include "ygz/Viewer.h"
#include "ygz/LKFlow.h"
#include "ygz/IMUData.h"
#include "ygz/Feature.h"

#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <chrono>
#include <memory>
#include <string>
#include <vector>

// Dataset paths for the EuRoC V2_03_difficult sequence.
// NOTE(review): these are machine-specific absolute paths — adjust to your
// local dataset layout (or consider reading them from argv/config).
std::string leftFolder = "/home/gaoxiang/dataset/euroc/V2_03_difficult/cam0/data";
std::string rightFolder = "/home/gaoxiang/dataset/euroc/V2_03_difficult/cam1/data";
std::string timeFolder = "./examples/EuRoC_TimeStamps/V203.txt";
std::string imuFolder = "/home/gaoxiang/dataset/euroc/V2_03_difficult/imu0/data.csv";
// BUG FIX: was "/home/xiang/..." while every other path above uses
// "/home/gaoxiang/..." — made consistent with the rest of the file.
std::string groundTruthFile = "/home/gaoxiang/dataset/euroc/V2_03_difficult/state_groundtruth_estimate0/data.csv";
std::string configFile = "./examples/EuRoC.yaml";

using namespace ygz;

int main(int argc, char **argv) {

    vector<string> vstrImageLeft;
    vector<string> vstrImageRight;
    vector<double> vTimeStamp;
    VecIMU vimus;

    LoadImages(leftFolder, rightFolder, timeFolder, vstrImageLeft, vstrImageRight, vTimeStamp);
    LoadImus(imuFolder, vimus);

    // read camera parameters
    cv::FileStorage fsSettings(configFile, cv::FileStorage::READ);
    assert(fsSettings.isOpened());

    cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r;
    fsSettings["LEFT.K"] >> K_l;
    fsSettings["RIGHT.K"] >> K_r;

    fsSettings["LEFT.P"] >> P_l;
    fsSettings["RIGHT.P"] >> P_r;

    fsSettings["LEFT.R"] >> R_l;
    fsSettings["RIGHT.R"] >> R_r;

    fsSettings["LEFT.D"] >> D_l;
    fsSettings["RIGHT.D"] >> D_r;

    int rows_l = fsSettings["LEFT.height"];
    int cols_l = fsSettings["LEFT.width"];
    int rows_r = fsSettings["RIGHT.height"];
    int cols_r = fsSettings["RIGHT.width"];

    if (K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() || D_l.empty() ||
        D_r.empty() ||
        rows_l == 0 || rows_r == 0 || cols_l == 0 || cols_r == 0) {
        LOG(ERROR) << "ERROR: Calibration parameters to rectify stereo are missing!" << endl;
        return 1;
    }

    cv::Mat M1l, M2l, M1r, M2r;
    cv::initUndistortRectifyMap(K_l, D_l, R_l, P_l.rowRange(0, 3).colRange(0, 3), cv::Size(cols_l, rows_l), CV_32F, M1l,
                                M2l);
    cv::initUndistortRectifyMap(K_r, D_r, R_r, P_r.rowRange(0, 3).colRange(0, 3), cv::Size(cols_r, rows_r), CV_32F, M1r,
                                M2r);

    const int nImages = vstrImageLeft.size();

    // Create camera object
    setting::initSettings();
    float fx = fsSettings["Camera.fx"];
    float fy = fsSettings["Camera.fy"];
    float cx = fsSettings["Camera.cx"];
    float cy = fsSettings["Camera.cy"];
    float bf = fsSettings["Camera.bf"];

    shared_ptr<CameraParam> camera (new CameraParam(fx, fy, cx, cy, bf));

    cv::Mat imLeft, imRight, imLeftRect, imRightRect;

    shared_ptr<Frame> lastFrame = nullptr, currentFrame = nullptr;
    size_t imuIndex = 0;

    float aveTrackedPts = 0;
    float aveTrackedRate = 0;
    float aveTrackTime = 0;
    for (int ni = 0; ni < nImages; ni++) {

        // Read left and right images from file
        imLeft = cv::imread(vstrImageLeft[ni], CV_LOAD_IMAGE_UNCHANGED);
        imRight = cv::imread(vstrImageRight[ni], CV_LOAD_IMAGE_UNCHANGED);

        cv::remap(imLeft, imLeftRect, M1l, M2l, cv::INTER_LINEAR);
        cv::remap(imRight, imRightRect, M1r, M2r, cv::INTER_LINEAR);

        VecIMU vimu;
        double tframe = vTimeStamp[ni];
        currentFrame = shared_ptr<Frame>(new Frame(imLeftRect, imRightRect, tframe, camera, vimu));

        ORBExtractor extractor(ORBExtractor::OPENCV_GFTT);
        extractor.Detect(currentFrame, true, false);
        LOG(INFO)<<"Detected "<<currentFrame->mFeaturesLeft.size()<<" features."<<endl;

        if (lastFrame) {
            // compare the last frame and current
            VecVector2f trackedPts;
            std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();

            // test the opencv's lk flow or the implemented one
            int nTracked = LKFlow(lastFrame, currentFrame, trackedPts);
            // int nTracked = LKFlowCV(lastFrame, currentFrame, trackedPts);

            std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
            double timeCost = std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();
            LOG(INFO) << "LK flow tracked " << nTracked << " points in total " << lastFrame->mFeaturesLeft.size()
                      << ", cost time: " << timeCost << endl;

            // Plot the results
            cv::Mat imgLast, imgCurr;
            cv::cvtColor(lastFrame->mImLeft, imgLast, CV_GRAY2BGR);
            cv::cvtColor(currentFrame->mImLeft, imgCurr, CV_GRAY2BGR);

            for (size_t i = 0; i < trackedPts.size(); i++) {
                if (trackedPts[i][0] > 0 && trackedPts[i][1] > 0) {
                    cv::circle(
                            imgLast,
                            cv::Point2f(lastFrame->mFeaturesLeft[i]->mPixel[0],
                                        lastFrame->mFeaturesLeft[i]->mPixel[1]), 2, cv::Scalar(0, 250, 0));
                    cv::circle(
                            imgCurr,
                            cv::Point2f(trackedPts[i][0], trackedPts[i][1]),
                            2, cv::Scalar(0, 250, 0));
                }
            }

            cv::imshow("last", imgLast);
            cv::imshow("current", imgCurr);
            cv::waitKey(1);

            aveTrackedPts += nTracked;
            aveTrackedRate += float(nTracked) /lastFrame->mFeaturesLeft.size();
            aveTrackTime += timeCost;
        }

        lastFrame = currentFrame;
    }

    aveTrackedPts /= nImages;
    aveTrackedRate /= nImages;
    aveTrackTime /= nImages;
    LOG(INFO)<<"Average tracked points: "<<aveTrackedPts;
    LOG(INFO)<<"Average tracked rate: "<<aveTrackedRate;
    LOG(INFO)<<"Average time cost: "<<aveTrackTime;

    return 0;
}