#include "mainwindow.h"
#include "settings.h"
#include "calibrator.h"

#include <iostream>
#include <sstream>
#include <string>
#include <ctime>
#include <cstdio>
#include <QMessageBox>
#include <QThread>
#include <QDebug>

#include <opencv2/core.hpp>
#include <opencv2/core/utility.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>

using namespace cv;
using namespace std;


// Stores the command-line arguments for later use by calMainRun(); the
// argv array itself remains owned by the caller.
// Fix: the base class (QObject) is listed first in the initializer list,
// matching the actual construction order (base before members) and
// silencing the -Wreorder warning the original ordering produced.
Calibrator::Calibrator(int argc, char** argv, QObject* parent)
    : QObject(parent), m_argc(argc), m_argv(argv) {

}

Calibrator::~Calibrator() {
    // The argv array is not owned by this class (no delete here), so just
    // drop the reference.
    m_argv = nullptr;
}

// Worker entry point for the calibration thread: parses command-line
// options, loads the calibration Settings from an XML file, then runs the
// interactive loop that captures images, detects the board's feature
// points, calibrates the camera and displays the (optionally undistorted)
// result.
// Returns 0 on normal completion, -1 on configuration errors.
int Calibrator::calMainRun()
{
    qDebug() << "标定线程" << QThread::currentThreadId();
    const String keys
        = "{help h usage ? |           | print this message            }"
          "{@settings      |default.xml| input setting file            }"
          "{d              |           | actual distance between top-left and top-right corners of "
          "the calibration grid }"
          "{winSize        | 11        | Half of search window for cornerSubPix }";
    CommandLineParser parser(m_argc, m_argv, keys);
    parser.about("This is a camera calibration sample.\n"
                 "Usage: camera_calibration [configuration_file -- default ./default.xml]\n"
                 "Near the sample file you'll find the configuration file, which has detailed help of "
                 "how to edit it. It may be any OpenCV supported file format XML/YAML.");
    if (!parser.check()) {
        parser.printErrors();
        return 0;
    }

    if (parser.has("help")) {
        parser.printMessage();
        return 0;
    }

    //! [file_read]
    Settings s;
    const string inputSettingsFile = m_argc > 1 ? m_argv[1] : "E:/Desktop/Qt-ImageCal/QtPro/ImageCal/in_VID5.xml";
    FileStorage fs(inputSettingsFile, FileStorage::READ); // Read the settings
    if (!fs.isOpened())
    {
        cout << "Could not open the configuration file: \"" << inputSettingsFile << "\"" << endl;
        parser.printMessage();
        return -1;
    }
    // Read the "Settings" node out of the XML file
    fs["Settings"] >> s;
    fs.release();                                         // close Settings file
    //! [file_read]

    //FileStorage fout("settings.yml", FileStorage::WRITE); // write config as YAML
    //fout << "Settings" << s;

    if (!s.m_GoodInput)
    {
        cout << "Invalid input detected. Application stopping. " << endl;
        return -1;
    }

    int winSize = parser.get<int>("winSize");
    // Use the grid width given on the command line ("-d") if present;
    // otherwise derive it from the square size and the board's column count.
    float grid_width = s.m_SquareSize * (s.m_BoardSize.width - 1);
    bool release_object = false;
    if (parser.has("d")) {
        grid_width = parser.get<float>("d");
        release_object = true;
    }

    vector<vector<Point2f> > imagePoints;  // detected feature points of every accepted input image
    Mat cameraMatrix, distCoeffs;
    Size imageSize;
    int mode = s.m_InputType == Settings::IMAGE_LIST ? CAPTURING : DETECTION;
    clock_t prevTimestamp = 0;
    const Scalar RED(0,0,255), GREEN(0,255,0);
    const char ESC_KEY = 27;

    //! [get_input]
    // Main loop; each iteration branches on `mode`:
    //   CAPTURING  - grab images, detect the board's feature points and display them
    //   CALIBRATED - calibration finished; show (optionally undistorted) frames
    for(;;)
    {
        Mat view;
        bool blinkOutput = false;

        view = s.nextImage();

        //-----  If no more image, or got enough, then stop calibration and show result -------------
        if( mode == CAPTURING && imagePoints.size() >= (size_t)s.m_NrFrames )
        {
            // Enough images collected: compute the calibration parameters
            // (camera intrinsic/extrinsic matrices and distortion coefficients).
            if(this->runCalibrationAndSave(s, imageSize,  cameraMatrix, distCoeffs, imagePoints, grid_width,
                                   release_object))
                mode = CALIBRATED;
            else
                mode = DETECTION;
        }
        if(view.empty())          // If there are no more images stop the loop
        {
            // if calibration threshold was not reached yet, calibrate now
            if( mode != CALIBRATED && !imagePoints.empty() )
                this->runCalibrationAndSave(s, imageSize,  cameraMatrix, distCoeffs, imagePoints, grid_width,
                                      release_object);
            break;
        }
        //! [get_input]

        imageSize = view.size();  // Format input image.
        if( s.m_FlipVertical )    flip( view, view, 0 );

        //! [find_pattern]
        vector<Point2f> pointBuf;

        bool found;

        int chessBoardFlags = CALIB_CB_ADAPTIVE_THRESH | CALIB_CB_NORMALIZE_IMAGE;

        if(!s.m_UseFisheye) {
            // fast check erroneously fails with high distortions like fisheye
            chessBoardFlags |= CALIB_CB_FAST_CHECK;
        }

        switch( s.m_CalibrationPattern ) // Find the board's feature points for the configured pattern type
        {
        case Settings::CHESSBOARD:
            found = findChessboardCorners( view, s.m_BoardSize, pointBuf, chessBoardFlags);
            break;
        case Settings::CIRCLES_GRID:
            found = findCirclesGrid( view, s.m_BoardSize, pointBuf );
            break;
        case Settings::ASYMMETRIC_CIRCLES_GRID:
            found = findCirclesGrid( view, s.m_BoardSize, pointBuf, CALIB_CB_ASYMMETRIC_GRID );
            break;
        default:
            found = false;
            break;
        }
        //! [find_pattern]
        //! [pattern_found]
        if (found)                // If done with success,
        {
            // improve the found corners' coordinate accuracy for chessboard
            if( s.m_CalibrationPattern == Settings::CHESSBOARD)
            {
                Mat viewGray;
                cvtColor(view, viewGray, COLOR_BGR2GRAY);
                cornerSubPix(viewGray, pointBuf, Size(winSize,winSize),
                    Size(-1,-1), TermCriteria( TermCriteria::EPS+TermCriteria::COUNT, 30, 0.0001 ));
            }

            if( mode == CAPTURING &&  // For camera only take new samples after delay time
                (!s.m_InputCapture.isOpened() || clock() - prevTimestamp > s.m_Delay*1e-3*CLOCKS_PER_SEC) )
            {
                imagePoints.push_back(pointBuf);
                prevTimestamp = clock();
                blinkOutput = s.m_InputCapture.isOpened();
            }

            // Draw the corners.
            drawChessboardCorners(view, s.m_BoardSize, Mat(pointBuf), found);
        }
        //! [pattern_found]
        //----------------------------- Output Text ------------------------------------------------
        //! [output_text]
        string msg = (mode == CAPTURING) ? "100/100" :
                      mode == CALIBRATED ? "Calibrated" : "Press 'g' to start";
        int baseLine = 0;
        Size textSize = getTextSize(msg, 1, 1, 1, &baseLine);
        Point textOrigin(view.cols - 2*textSize.width - 10, view.rows - 2*baseLine - 10);

        if( mode == CAPTURING )
        {
            if(s.m_ShowUndistorsed)
                msg = format( "%d/%d Undist", (int)imagePoints.size(), s.m_NrFrames );
            else
                msg = format( "%d/%d", (int)imagePoints.size(), s.m_NrFrames );
        }

        putText( view, msg, textOrigin, 1, 1, mode == CALIBRATED ?  GREEN : RED);

        if( blinkOutput )
            bitwise_not(view, view);  // invert the frame: a brief visual "blink" signaling that a
                                      // sample was just captured (blinkOutput is only set for live capture)
        //! [output_text]
        //------------------------- Video capture  output  undistorted ------------------------------
        //! [output_undistorted]
        if( mode == CALIBRATED && s.m_ShowUndistorsed )
        {
            Mat temp = view.clone();
            if (s.m_UseFisheye)
              cv::fisheye::undistortImage(temp, view, cameraMatrix, distCoeffs);
            else
              undistort(temp, view, cameraMatrix, distCoeffs);
        }
        //! [output_undistorted]
        //------------------------------ Show image and check for input commands -------------------
        //! [await_input]
        imshow("Image View", view);
        char key = (char)waitKey(s.m_InputCapture.isOpened() ? 50 : s.m_Delay);   // keep the frame on screen: 50 ms for live capture, otherwise the configured delay

        if( key  == ESC_KEY )  // ESC quits
            break;

        if( key == 'u' && mode == CALIBRATED )
           s.m_ShowUndistorsed = !s.m_ShowUndistorsed;

        if( s.m_InputCapture.isOpened() && key == 'g' )
        {
            mode = CAPTURING;
            imagePoints.clear();
        }
        //! [await_input]
    }

    // -----------------------Show the undistorted image for the image list ------------------------
    //! [show_results]
    if( s.m_InputType == Settings::IMAGE_LIST && s.m_ShowUndistorsed )
    {
        Mat view, rview, map1, map2;

        if (s.m_UseFisheye)
        {
            Mat newCamMat;
            fisheye::estimateNewCameraMatrixForUndistortRectify(cameraMatrix, distCoeffs, imageSize,
                                                                Matx33d::eye(), newCamMat, 1);
            fisheye::initUndistortRectifyMap(cameraMatrix, distCoeffs, Matx33d::eye(), newCamMat, imageSize,
                                             CV_16SC2, map1, map2);
        }
        else
        {
            // Build the undistortion remap tables.
            // getOptimalNewCameraMatrix computes newCameraMatrix, an optimized
            // refinement of the calibrated cameraMatrix.
            initUndistortRectifyMap(
                cameraMatrix, distCoeffs, Mat(),
                getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 1, imageSize, 0), imageSize,
                CV_16SC2, map1, map2);
        }

        for(size_t i = 0; i < s.m_ImageList.size(); i++ )
        {
            view = imread(s.m_ImageList[i], IMREAD_COLOR);
            if(view.empty())
                continue;
            // undistort the image via the precomputed maps
            remap(view, rview, map1, map2, INTER_LINEAR);
            imshow("Image View", rview);
            // NOTE(review): if the user closes the image window instead of
            // pressing a key, waitKey() blocks forever here in this Qt build
            // (not reproducible under VS2019) — confirm on target platforms.
            char c = (char)waitKey();
            if( c  == ESC_KEY || c == 'q' || c == 'Q' )
                break;
        }
    }
    //! [show_results]

    return 0;
}

// Undistorts a single image using previously-saved calibration parameters
// ("out_camera_data.xml"), shows the original and corrected images and
// writes the result to "undistort.jpg".
// @param path  file path of the image to undistort
void Calibrator::doCal(string path) {

    // Load the image to be corrected.
    Mat src = imread(path);
    // Fix: the original never checked the load result; an unreadable path
    // produced an empty Mat and undistort() would throw/crash.
    if (src.empty()) {
        QMessageBox::critical(nullptr, "警告", "读取图片失败!");
        return;
    }
    Mat distortion = src.clone();
    Mat camera_matrix = Mat(3, 3, CV_32FC1);
    Mat distortion_coefficients;

    // Load the camera intrinsic matrix and distortion coefficients.
    FileStorage file_storage("out_camera_data.xml", FileStorage::READ);
    // Fix: verify the parameter file actually opened before reading nodes.
    if (!file_storage.isOpened()) {
        QMessageBox::critical(nullptr, "警告", "读取标定参数文件失败!");
        return;
    }
    file_storage["camera_matrix"] >> camera_matrix;
    file_storage["distortion_coefficients"] >> distortion_coefficients;
    file_storage.release();

    // Fix: also validate camera_matrix, not only the distortion coefficients.
    if (!distortion_coefficients.data || !camera_matrix.data) {
        QMessageBox::critical(nullptr, "警告", "读取标定参数文件失败!");
        return;
    }

    // Undistort; internally this uses initUndistortRectifyMap + remap.
    undistort(src, distortion, camera_matrix, distortion_coefficients);

    imshow("img", src);
    imshow("undistort", distortion);
    imwrite("undistort.jpg", distortion);
}



// 计算映射误差
double Calibrator::computeReprojectionErrors( const vector<vector<Point3f> >& objectPoints,
                                         const vector<vector<Point2f> >& imagePoints,
                                         const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                                         const Mat& cameraMatrix , const Mat& distCoeffs,
                                         vector<float>& perViewErrors, bool fisheye)
{
    vector<Point2f> imagePoints2;
    size_t totalPoints = 0;
    double totalErr = 0, err;
    perViewErrors.resize(objectPoints.size());

    // 计算每幅图像的映射误差
    for(size_t i = 0; i < objectPoints.size(); ++i )
    {
        if (fisheye)
        {
            fisheye::projectPoints(objectPoints[i], imagePoints2, rvecs[i], tvecs[i], cameraMatrix,
                                   distCoeffs);
        }
        else
        {
            // 使用标定得到的参数计算标定板上角点坐标(objectPoints[i])映射到图像上的坐标(imagePoints2)
            projectPoints(objectPoints[i], rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
        }
        // 使用NORM_L2准则计算实际检测到的特征点坐标(imagePoints[i])与映射计算得到的特征点坐标(imagePoints2)的相似度
        err = norm(imagePoints[i], imagePoints2, NORM_L2);

        size_t n = objectPoints[i].size();
        perViewErrors[i] = (float) std::sqrt(err*err/n);
        totalErr        += err*err;
        totalPoints     += n;
    }

    return std::sqrt(totalErr/totalPoints);
}

// Generates the world coordinates of the board corners (z = 0 plane) for
// the given calibration-pattern type.
void Calibrator::calcBoardCornerPositions(Size boardSize, float squareSize, vector<Point3f>& corners,
                                     Settings::Pattern patternType /*= Settings::CHESSBOARD*/)
{
    corners.clear();

    switch (patternType)
    {
    case Settings::CHESSBOARD:
    case Settings::CIRCLES_GRID:
        // Regular grid: corners sit at integer multiples of the square size.
        for (int row = 0; row < boardSize.height; ++row)
            for (int col = 0; col < boardSize.width; ++col)
                corners.emplace_back(col * squareSize, row * squareSize, 0);
        break;

    case Settings::ASYMMETRIC_CIRCLES_GRID:
        // Asymmetric grid: every other row is shifted by one square along x.
        for (int row = 0; row < boardSize.height; ++row)
            for (int col = 0; col < boardSize.width; ++col)
                corners.emplace_back((2*col + row % 2) * squareSize, row * squareSize, 0);
        break;

    default:
        break;
    }
}

// Runs the actual camera calibration on the collected image points.
// Outputs: cameraMatrix (intrinsics), distCoeffs (distortion), per-view
// rvecs/tvecs (extrinsics), per-view reprojection errors, the overall
// average error, and — in release_object mode — the refined board corner
// positions (newObjPoints).
// Returns true when the estimated parameters pass checkRange().
bool Calibrator::runCalibration( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                            vector<vector<Point2f> > imagePoints, vector<Mat>& rvecs, vector<Mat>& tvecs,
                            vector<float>& reprojErrs,  double& totalAvgErr, vector<Point3f>& newObjPoints,
                            float grid_width, bool release_object)
{
    //! [fixed_aspect]
    cameraMatrix = Mat::eye(3, 3, CV_64F);
    if( s.m_Flag & CALIB_FIX_ASPECT_RATIO )
        cameraMatrix.at<double>(0,0) = s.m_AspectRatio;
    //! [fixed_aspect]
    if (s.m_UseFisheye) {
        distCoeffs = Mat::zeros(4, 1, CV_64F);
    } else {
        distCoeffs = Mat::zeros(8, 1, CV_64F);
    }

    vector<vector<Point3f> > objectPoints(1);
    this->calcBoardCornerPositions(s.m_BoardSize, s.m_SquareSize, objectPoints[0], s.m_CalibrationPattern);  // corner positions in the board's own coordinate frame
    // Pin the top-right corner so the top-left -> top-right distance equals
    // grid_width. When grid_width keeps its default value this assignment is
    // a no-op; when "-d" was given (release_object), this corner becomes the
    // fixed point (iFixedPoint below) while calibrateCameraRO refines the
    // remaining object points.
    objectPoints[0][s.m_BoardSize.width - 1].x = objectPoints[0][0].x + grid_width;
    newObjPoints = objectPoints[0];

    // One copy of the board's corner pattern per captured view — the same
    // physical board is used for every image.
    objectPoints.resize(imagePoints.size(), objectPoints[0]);

    //Find intrinsic and extrinsic camera parameters
    double rms;

    if (s.m_UseFisheye) {
        Mat _rvecs, _tvecs;
        rms = fisheye::calibrate(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, _rvecs,
                                 _tvecs, s.m_Flag);

        // fisheye::calibrate returns all views packed into single Mats; split
        // them row-by-row into the per-view vectors callers expect.
        rvecs.reserve(_rvecs.rows);
        tvecs.reserve(_tvecs.rows);
        for(int i = 0; i < int(objectPoints.size()); i++){
            rvecs.push_back(_rvecs.row(i));
            tvecs.push_back(_tvecs.row(i));
        }
    } else {
        int iFixedPoint = -1;
        if (release_object)
            iFixedPoint = s.m_BoardSize.width - 1;
        // Estimate the camera parameters:
        //   cameraMatrix - intrinsic matrix    distCoeffs - distortion coefficients
        //   rvecs - per-view rotation vectors  tvecs - per-view translation vectors
        rms = calibrateCameraRO(objectPoints, imagePoints, imageSize, iFixedPoint,
                                cameraMatrix, distCoeffs, rvecs, tvecs, newObjPoints,
                                s.m_Flag | CALIB_USE_LU);
    }

    if (release_object) {
        cout << "New board corners: " << endl;
        cout << newObjPoints[0] << endl;
        cout << newObjPoints[s.m_BoardSize.width - 1] << endl;
        cout << newObjPoints[s.m_BoardSize.width * (s.m_BoardSize.height - 1)] << endl;
        cout << newObjPoints.back() << endl;
    }

    cout << "Re-projection error reported by calibrateCamera: "<< rms << endl;

    bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);

    // Recompute the reprojection error against the (possibly refined) object
    // points.
    objectPoints.clear();
    objectPoints.resize(imagePoints.size(), newObjPoints);
    totalAvgErr = this->computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix,
                                            distCoeffs, reprojErrs, s.m_UseFisheye);

    return ok;
}

// Print camera parameters to the output file
// Writes all calibration results (intrinsics, distortion, optional
// extrinsics / image points / refined grid points) to s.m_OutputFileName
// via cv::FileStorage, in XML/YAML depending on the extension.
void Calibrator::saveCameraParams( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                              const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                              const vector<float>& reprojErrs, const vector<vector<Point2f> >& imagePoints,
                              double totalAvgErr, const vector<Point3f>& newObjPoints )
{
    FileStorage fs( s.m_OutputFileName, FileStorage::WRITE );

    // Timestamp the output file with the current local time.
    time_t tm;
    time( &tm );
    struct tm *t2 = localtime( &tm );
    char buf[1024];
    strftime( buf, sizeof(buf), "%c", t2 );

    fs << "calibration_time" << buf;

    if( !rvecs.empty() || !reprojErrs.empty() )
        fs << "nr_of_frames" << (int)std::max(rvecs.size(), reprojErrs.size());
    fs << "image_width" << imageSize.width;
    fs << "image_height" << imageSize.height;
    fs << "board_width" << s.m_BoardSize.width;
    fs << "board_height" << s.m_BoardSize.height;
    fs << "square_size" << s.m_SquareSize;

    if( s.m_Flag & CALIB_FIX_ASPECT_RATIO )
        fs << "fix_aspect_ratio" << s.m_AspectRatio;

    // Emit a human-readable comment listing which calibration flags were set.
    if (s.m_Flag)
    {
        std::stringstream flagsStringStream;
        if (s.m_UseFisheye)
        {
            flagsStringStream << "flags:"
                << (s.m_Flag & fisheye::CALIB_FIX_SKEW ? " +fix_skew" : "")
                << (s.m_Flag & fisheye::CALIB_FIX_K1 ? " +fix_k1" : "")
                << (s.m_Flag & fisheye::CALIB_FIX_K2 ? " +fix_k2" : "")
                << (s.m_Flag & fisheye::CALIB_FIX_K3 ? " +fix_k3" : "")
                << (s.m_Flag & fisheye::CALIB_FIX_K4 ? " +fix_k4" : "")
                << (s.m_Flag & fisheye::CALIB_RECOMPUTE_EXTRINSIC ? " +recompute_extrinsic" : "");
        }
        else
        {
            flagsStringStream << "flags:"
                << (s.m_Flag & CALIB_USE_INTRINSIC_GUESS ? " +use_intrinsic_guess" : "")
                << (s.m_Flag & CALIB_FIX_ASPECT_RATIO ? " +fix_aspectRatio" : "")
                << (s.m_Flag & CALIB_FIX_PRINCIPAL_POINT ? " +fix_principal_point" : "")
                << (s.m_Flag & CALIB_ZERO_TANGENT_DIST ? " +zero_tangent_dist" : "")
                << (s.m_Flag & CALIB_FIX_K1 ? " +fix_k1" : "")
                << (s.m_Flag & CALIB_FIX_K2 ? " +fix_k2" : "")
                << (s.m_Flag & CALIB_FIX_K3 ? " +fix_k3" : "")
                << (s.m_Flag & CALIB_FIX_K4 ? " +fix_k4" : "")
                << (s.m_Flag & CALIB_FIX_K5 ? " +fix_k5" : "");
        }
        fs.writeComment(flagsStringStream.str());
    }

    fs << "flags" << s.m_Flag;

    fs << "fisheye_model" << s.m_UseFisheye;

    fs << "camera_matrix" << cameraMatrix;
    fs << "distortion_coefficients" << distCoeffs;

    fs << "avg_reprojection_error" << totalAvgErr;
    if (s.m_WriteExtrinsics && !reprojErrs.empty())
        fs << "per_view_reprojection_errors" << Mat(reprojErrs);

    // Pack each view's rotation and translation vectors into one row of an
    // Nx6 matrix (rvec | tvec) before writing.
    if(s.m_WriteExtrinsics && !rvecs.empty() && !tvecs.empty() )
    {
        CV_Assert(rvecs[0].type() == tvecs[0].type());
        Mat bigmat((int)rvecs.size(), 6, CV_MAKETYPE(rvecs[0].type(), 1));
        // NOTE(review): depth() == 1 is CV_8S; this mirrors the upstream
        // OpenCV sample, where non-CV_8S vectors are flattened via reshape.
        bool needReshapeR = rvecs[0].depth() != 1 ? true : false;
        bool needReshapeT = tvecs[0].depth() != 1 ? true : false;

        for( size_t i = 0; i < rvecs.size(); i++ )
        {
            Mat r = bigmat(Range(int(i), int(i+1)), Range(0,3));
            Mat t = bigmat(Range(int(i), int(i+1)), Range(3,6));

            if(needReshapeR)
                rvecs[i].reshape(1, 1).copyTo(r);
            else
            {
                //*.t() is MatExpr (not Mat) so we can use assignment operator
                CV_Assert(rvecs[i].rows == 3 && rvecs[i].cols == 1);
                r = rvecs[i].t();
            }

            if(needReshapeT)
                tvecs[i].reshape(1, 1).copyTo(t);
            else
            {
                CV_Assert(tvecs[i].rows == 3 && tvecs[i].cols == 1);
                t = tvecs[i].t();
            }
        }
        fs.writeComment("a set of 6-tuples (rotation vector + translation vector) for each view");
        fs << "extrinsic_parameters" << bigmat;
    }

    // Optionally dump the raw detected image points, one row per view.
    if(s.m_WritePoints && !imagePoints.empty() )
    {
        Mat imagePtMat((int)imagePoints.size(), (int)imagePoints[0].size(), CV_32FC2);
        for( size_t i = 0; i < imagePoints.size(); i++ )
        {
            Mat r = imagePtMat.row(int(i)).reshape(2, imagePtMat.cols);
            Mat imgpti(imagePoints[i]);
            imgpti.copyTo(r);
        }
        fs << "image_points" << imagePtMat;
    }

    // Optionally dump the (possibly refined) board corner positions.
    if( s.m_WriteGrid && !newObjPoints.empty() )
    {
        fs << "grid_points" << newObjPoints;
    }
}

// Convenience wrapper: runs the calibration, reports the result on stdout
// and, on success, persists all parameters via saveCameraParams().
// Returns true when calibration succeeded.
bool Calibrator::runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                           vector<vector<Point2f> > imagePoints, float grid_width, bool release_object)
{
    vector<Mat> rotations, translations;
    vector<float> viewErrors;
    vector<Point3f> refinedObjPoints;
    double avgError = 0;

    const bool succeeded = this->runCalibration(s, imageSize, cameraMatrix, distCoeffs, imagePoints,
                                                rotations, translations, viewErrors, avgError,
                                                refinedObjPoints, grid_width, release_object);

    cout << (succeeded ? "Calibration succeeded" : "Calibration failed")
         << ". avg re projection error = " << avgError << endl;

    if (succeeded)
        this->saveCameraParams(s, imageSize, cameraMatrix, distCoeffs, rotations, translations,
                               viewErrors, imagePoints, avgError, refinedObjPoints);
    return succeeded;
}

