//
// Created by shine on 2019/11/11.
//

#include "SOFSampler.h"
#include "MakeFileDir.h"

#include <vector>

//#define InvSample

int main(int argc, char *argv[]){

    SOFSampler sampler(argv[1], argv[2], argv[3]);
    sampler.OpticalFlowEst();

    return 0;
}


/// Opens the raw YUV420p video file, derives the video name from its path,
/// computes the per-frame byte count and total frame count, and creates the
/// output directory tree.
/// @param filePath path to the raw .yuv file
/// @param width    frame width in pixels (decimal string)
/// @param height   frame height in pixels (decimal string)
SOFSampler::SOFSampler(const string &filePath, const string &width, const string &height) {

    _filePath = filePath;
    // Video name = path basename without its extension.
    _videoName = _filePath.substr(_filePath.find_last_of('/') + 1, _filePath.find_last_of('.') - _filePath.find_last_of('/') - 1);
    _videoHeight = atoi(height.c_str());
    _videoWidth = atoi(width.c_str());
    // Guard: atoi() returns 0 on garbage, which previously caused a
    // division by zero when computing _frameCount below.
    if (_videoWidth <= 0 || _videoHeight <= 0) {
        cout << "invalid frame size: " << width << " x " << height << endl;
        exit(1);
    }
    // NOTE(review): heap-allocated and never freed in this translation unit;
    // confirm the destructor (not shown here) deletes _frameSize.
    _frameSize = new Size(_videoWidth, _videoHeight);
    _framePixelNums = _videoHeight * _videoWidth * 3 / 2;  // YUV420p: 1.5 bytes/pixel
    _frameIndex = 0;
    cout << "视频名:" << _videoName << endl;
    cout<< "视频像素:"<< _videoWidth << "　*　" << _videoHeight << endl;

    _fin.open(filePath, ios_base::in|ios_base::binary);
    if(_fin.fail())
    {
        cout << "the input video file is error" << endl;
        exit(1);
    }

    // Frame count = total file size / bytes per frame.
    _fin.seekg(0, ios::end);      // seek to end of stream
    streampos ps = _fin.tellg();  // total byte count
    cout << "Number of pixel: " << ps << endl;
    _frameCount = ps / _framePixelNums;
    cout << "frameNumber: " << _frameCount << endl;
    _fin.close();

#ifdef InvSample
    createDirectory("OutPutInv/" + _videoName + "/OpticalFlow/");
    createDirectory("OutPutInv/" + _videoName + "/SamplePoints/");
    createDirectory("OutPutInv/" + _videoName + "/OpticalFeature/");
#else
    createDirectory("OutPut/" + _videoName + "/OpticalFlow/");
    createDirectory("OutPut/" + _videoName + "/SamplePoints/");
    createDirectory("OutPut/" + _videoName + "/OpticalFeature/");
#endif

}

/// Decodes the raw YUV420p file frame by frame, runs dense DualTVL1 optical
/// flow between consecutive frames, and writes the flow visualization,
/// sample-point file and (forward mode only) feature overlays per frame.
/// Fixes over the original: fopen() result checked, fread() short-read
/// detected (previously a truncated file yielded a garbage frame), and the
/// frame buffer is a std::vector instead of a leaked new[] allocation.
void SOFSampler::OpticalFlowEst() {

    _fileIn = fopen(_filePath.c_str(), "rb+");
    if (_fileIn == nullptr) {
        cout << "cannot open input video file: " << _filePath << endl;
        return;
    }

    // One YUV420p frame (w * h * 3/2 bytes); released automatically.
    std::vector<unsigned char> yuvBuf(_framePixelNums);

    Ptr<DenseOpticalFlow> tvl1 = createOptFlow_DualTVL1();

    for(_frameIndex = 0; _frameIndex < _frameCount; _frameIndex++){

        // Stop on a short or failed read (truncated input file).
        if (fread(yuvBuf.data(), sizeof(unsigned char), _framePixelNums, _fileIn)
                != static_cast<size_t>(_framePixelNums)) {
            cout << "short read at frame " << _frameIndex << ", stopping" << endl;
            break;
        }

        _yuvFrame.create(_videoHeight * 3 / 2, _videoWidth, CV_8UC1);
        memcpy(_yuvFrame.data, yuvBuf.data(), _framePixelNums);

        cv::cvtColor(_yuvFrame, _curRGBFrame, CV_YUV2BGR_I420);
        printf("第 %d 帧\n", _frameIndex);

        // Flow needs two frames; skip the very first iteration.
        if(_preRGBFrame.data){

            cv::cvtColor(_preRGBFrame, _preGray, CV_BGR2GRAY);
            cv::cvtColor(_curRGBFrame, _curGray, CV_BGR2GRAY);

#ifdef InvSample
            // Inverse sampling: flow from current frame back to previous.
            tvl1->calc(_curGray, _preGray, _flow);
            SOFSampler::drawOpticalFlow(_flow, _flowOutput);
            SOFSampler::writeOpticalFlow("OutPutInv/" + _videoName + "/OpticalFlow/");
            SOFSampler::writeSamplePointsFile("OutPutInv/" + _videoName + "/SamplePoints/");
#else
            // Forward flow: previous -> current frame.
            tvl1->calc(_preGray, _curGray, _flow);
            SOFSampler::drawOpticalFlow(_flow, _flowOutput);
            SOFSampler::writeOpticalFlow("OutPut/" + _videoName + "/OpticalFlow/");
            SOFSampler::writeSamplePointsFile("OutPut/" + _videoName + "/SamplePoints/");
            SOFSampler::drawOpticalFeature();
            SOFSampler::writeOpticalFeature("OutPut/" + _videoName + "/OpticalFeature/");
#endif

        }
        else{
            cout << "waitting for the first frame to calculate optical flow" << endl;
        }

        // Let the user abort with ESC.
        int c = waitKey(30);
        if ((char) c == 27) {
            break;
        }

        std::swap(_preRGBFrame, _curRGBFrame);
    }

    fclose(_fileIn);
    destroyAllWindows();
    cout << "Finished......" << endl;
}

/// Persist the rendered flow visualization (_flowOutput) for the current
/// frame index as a PNG under the given directory prefix.
void SOFSampler::writeOpticalFlow(string prefix) {

    const string outPath = prefix + "opticalFlow_" + to_string(_frameIndex) + ".png";
    imwrite(outPath, _flowOutput);
}

void SOFSampler::drawOpticalFlow(const Mat_<Point2f>& flow, Mat& dst, float maxMotion)
{
    dst.create(flow.size(), CV_8UC3);
    dst.setTo(Scalar::all(0));

    // determine motion range:
    float maxRad = maxMotion;

    if (maxMotion <= 0)
    {
        maxRad = 1;
        for (int y = 0; y < flow.rows; ++y)
        {
            for (int x = 0; x < flow.cols; ++x)
            {
                Point2f u = flow(y, x);

                if (!isFlowCorrect(u))
                    continue;

                maxRad = max(maxRad, sqrt(u.x * u.x + u.y * u.y));
            }
        }
    }

    for (int y = 0; y < flow.rows; ++y)
    {
        for (int x = 0; x < flow.cols; ++x)
        {
            Point2f u = flow(y, x);

            if (isFlowCorrect(u))
                dst.at<Vec3b>(y, x) = computeColor(u.x / maxRad, u.y / maxRad);
        }
    }
}

/// Map a normalized flow vector (fx, fy) to a BGR color using the standard
/// Middlebury color wheel: hue encodes direction, saturation encodes
/// magnitude; magnitudes above 1 are dimmed to mark "out of range".
Vec3b SOFSampler::computeColor(float fx, float fy) {

    // Segment lengths of the color wheel, chosen for perceptual uniformity
    // (more shades between red and yellow than between yellow and green).
    const int RY = 15;
    const int YG = 6;
    const int GC = 4;
    const int CB = 11;
    const int BM = 13;
    const int MR = 6;
    const int NCOLS = RY + YG + GC + CB + BM + MR;
    static Vec3i colorWheel[NCOLS];
    static bool initialized = false;

    // Lazily build the wheel once: six linear RGB ramps laid end to end.
    if (!initialized)
    {
        int idx = 0;
        for (int i = 0; i < RY; ++i) colorWheel[idx++] = Vec3i(255, 255 * i / RY, 0);
        for (int i = 0; i < YG; ++i) colorWheel[idx++] = Vec3i(255 - 255 * i / YG, 255, 0);
        for (int i = 0; i < GC; ++i) colorWheel[idx++] = Vec3i(0, 255, 255 * i / GC);
        for (int i = 0; i < CB; ++i) colorWheel[idx++] = Vec3i(0, 255 - 255 * i / CB, 255);
        for (int i = 0; i < BM; ++i) colorWheel[idx++] = Vec3i(255 * i / BM, 0, 255);
        for (int i = 0; i < MR; ++i) colorWheel[idx++] = Vec3i(255, 0, 255 - 255 * i / MR);
        initialized = true;
    }

    const float rad = sqrt(fx * fx + fy * fy);
    // Angle normalized to [-1, 1], then mapped onto the wheel.
    const float a = atan2(-fy, -fx) / (float)CV_PI;
    const float fk = (a + 1.0f) / 2.0f * (NCOLS - 1);
    const int k0 = static_cast<int>(fk);
    const int k1 = (k0 + 1) % NCOLS;
    const float blend = fk - k0;

    Vec3b pix;

    // Interpolate between the two neighboring wheel entries per channel.
    for (int ch = 0; ch < 3; ch++)
    {
        const float col0 = colorWheel[k0][ch] / 255.f;
        const float col1 = colorWheel[k1][ch] / 255.f;

        float col = (1 - blend) * col0 + blend * col1;

        if (rad <= 1)
            col = 1 - rad * (1 - col); // increase saturation with radius
        else
            col *= .75; // out of range

        pix[2 - ch] = static_cast<uchar>(255.f * col);  // wheel is RGB; output is BGR
    }

    return pix;

}

/// Samples the dense flow field on a 2-px grid (5-px border excluded),
/// converts each sample to spherical coordinates (degrees), weights it by
/// the great-circle distance spanned by its flow vector, and writes
/// "<count>" followed by one "yaw pitch weight" line per sample to a
/// per-frame text file under the given prefix.
/// Fixes over the original: (1) the inner re-declarations of
/// samplePointWithWeight shadowed the outer variable, so an uninitialized
/// struct was pushed and written to disk; (2) the spherical law of cosines
/// was evaluated with degree values where sin/cos require radians; (3) the
/// acos() argument is now clamped so rounding can never produce NaN.
void SOFSampler::writeSamplePointsFile(string prefix) {

    ofstream outFile(prefix + "samplePoints_of_" + to_string(_frameIndex) + ".txt");
    vector<SamplePointWithWeight> SamplePointList;

    const double DEG2RAD = M_PI / 180.0;

    for(int y = 5; y < _flow.rows - 5; y += 2){
        for(int x = 5; x < _flow.cols - 5; x += 2){

            Point2f fxy = _flow.at<Point2f>(y, x);
            Point2d startPoint(x, y), endPoint(x + fxy.x, y + fxy.y);

            SpherePointPair spherePointPair;
            SamplePointWithWeight samplePointWithWeight;

            // Pixel-centered equirectangular mapping:
            // yaw in [-180, 180), pitch in [-90, 90).
            spherePointPair.startYaw = (double(x) + 0.5) / double(_flow.cols) * 360.0 - 180.0;
            spherePointPair.startPitch = 90.0 - (double(y) + 0.5) / double(_flow.rows) * 180.0;

            /// Tracked end point fell outside the image: no usable flow here.
            if(endPoint.x < 0 || endPoint.x > _flow.cols || endPoint.y < 0 || endPoint.y > _flow.rows){

#ifdef InvSample
                // Inverse mode keeps the sample with zero weight.
                samplePointWithWeight.yaw = spherePointPair.startYaw;
                samplePointWithWeight.pitch = spherePointPair.startPitch;
                samplePointWithWeight.weight = 0;
#else
                // Forward mode drops untrackable samples entirely.
                continue;
#endif

            }

            /// End point is inside the image: compute its spherical weight.
            else{

                spherePointPair.endYaw = (endPoint.x) / _flow.cols * 360.0 - 180.0;
                spherePointPair.endPitch = 90.0 - (endPoint.y + 0.5) / _flow.rows * 180.0;

                // Spherical law of cosines:
                //   cos(d) = sin(p1)sin(p2) + cos(p1)cos(p2)cos(y1 - y2)
                // where p is pitch (latitude) and y is yaw (longitude),
                // evaluated in radians.
                const double p1 = spherePointPair.startPitch * DEG2RAD;
                const double p2 = spherePointPair.endPitch * DEG2RAD;
                const double dyaw = (spherePointPair.startYaw - spherePointPair.endYaw) * DEG2RAD;
                double cosd = sin(p1) * sin(p2) + cos(p1) * cos(p2) * cos(dyaw);
                cosd = max(-1.0, min(1.0, cosd));  // guard acos() against rounding
                spherePointPair.sphereDistance = acos(cosd);

#ifdef InvSample
                samplePointWithWeight.yaw = spherePointPair.startYaw;
                samplePointWithWeight.pitch = spherePointPair.startPitch;
#else
                samplePointWithWeight.yaw = spherePointPair.endYaw;
                samplePointWithWeight.pitch = spherePointPair.endPitch;
#endif
                samplePointWithWeight.weight = spherePointPair.sphereDistance;

            }

            SamplePointList.push_back(samplePointWithWeight);

        }
    }

    // Header line is the sample count; then one "yaw pitch weight" per line.
    outFile << SamplePointList.size() << endl;
    for(vector<SamplePointWithWeight>::iterator iter = SamplePointList.begin(); iter != SamplePointList.end(); iter ++){
       outFile <<  iter -> yaw << " " << iter -> pitch << " " << iter -> weight << endl;
    }

}

/// Spherical linear interpolation between two quaternions.
/// @param starting first quaternion (4 doubles)
/// @param ending   second quaternion (4 doubles); may be negated in place
///                 to force the shorter arc
/// @param result   interpolated quaternion (4 doubles, written)
/// @param t        interpolation parameter in [0, 1]
void SOFSampler::slerp(double *starting, double *ending, double *result, double t) {
    // Dot product of the two quaternions.
    double cosa = 0.0;
    for (int i = 0; i < 4; ++i)
        cosa += starting[i] * ending[i];

    // Opposite handedness: negate one endpoint so the interpolation
    // travels the shorter great-circle arc.
    if ( cosa < 0.0f )
    {
        for (int i = 0; i < 4; ++i)
            ending[i] = -ending[i];
        cosa = -cosa;
    }

    double k0, k1;

    // Nearly parallel inputs: sin(angle) is ill-conditioned, so fall back
    // to plain linear interpolation.
    if ( cosa > 0.9995f )
    {
        k0 = 1.0f - t;
        k1 = t;
    }
    else {
        const double sina = sqrt(1.0f - cosa * cosa);
        const double a = atan2(sina, cosa);
        k0 = sin((1.0f - t) * a) / sina;
        k1 = sin(t * a) / sina;
    }

    // Weighted blend of the two endpoints.
    for (int i = 0; i < 4; ++i)
        result[i] = starting[i] * k0 + ending[i] * k1;
}

/// Draw the two barbs of an arrowhead at pEnd, pointing away from pStart.
/// @param len       barb length in pixels
/// @param alpha     barb half-angle in degrees off the shaft direction
/// @param thickness line thickness; @param lineType cv line type
/// Note: only the head is drawn; the shaft line itself is not.
void SOFSampler::drawArrow(cv::Mat &img, cv::Point pStart, cv::Point pEnd, int len, int alpha, cv::Scalar &color,
                           int thickness, int lineType) {
    const double PI = 3.1415926;
    // Shaft direction measured from the tip back toward the tail.
    const double angle = atan2((double)(pStart.y - pEnd.y), (double)(pStart.x - pEnd.x));

    // One barb rotated +alpha degrees off the shaft, one rotated -alpha.
    Point barb;
    barb.x = pEnd.x + len * cos(angle + PI * alpha / 180);
    barb.y = pEnd.y + len * sin(angle + PI * alpha / 180);
    line(img, pEnd, barb, color, thickness, lineType);

    barb.x = pEnd.x + len * cos(angle - PI * alpha / 180);
    barb.y = pEnd.y + len * sin(angle - PI * alpha / 180);
    line(img, pEnd, barb, color, thickness, lineType);
}

/// Visualize sparse flow samples on a 30-px grid: mark each tracked end
/// point on the current frame, and on the previous frame plus an earth
/// equirectangular texture draw the great-circle path (via quaternion
/// slerp) and an arrowhead for each sufficiently large flow vector.
/// Reads _preRGBFrame, _curRGBFrame, _flow, _frameSize; writes
/// _preFeatureFrame, _curFeatureFrame, _earthERP.
/// NOTE(review): "earthERP.png" is loaded from the working directory on
/// every call and the imread result is not checked — confirm the file
/// exists, otherwise resize() will throw on an empty Mat.
void SOFSampler::drawOpticalFeature() {

    _preFeatureFrame = _preRGBFrame.clone();
    _curFeatureFrame = _curRGBFrame.clone();
    _earthERP = cv::imread("earthERP.png", IMREAD_COLOR);
    cv::resize(_earthERP, _earthERP, *_frameSize);

    for(int y = 30; y < _preFeatureFrame.rows; y += 30){
        for(int x = 30; x < _preFeatureFrame.cols; x += 30){

            // Random color is generated but only the fixed colors below are
            // actually used for drawing.
            cv::RNG &rng = theRNG();
            Scalar color = Scalar(rng(256), rng(256), rng(256));
            Point2f fxy = _flow.at<Point2f>(y, x);

            // Pixel coordinates: grid point and its flow-displaced end point.
            Point2d startPoint(x, y), endPoint(x + fxy.x, y + fxy.y);
            circle(_curFeatureFrame, endPoint, 3, CV_RGB(0,0,255),-1);

            // Normalized image coordinates in [0, 1].
            Point2d startNormalizedPoint(startPoint.x / _preFeatureFrame.cols, startPoint.y / _preFeatureFrame.rows);
            Point2d endNormalizedPoint(endPoint.x / _preFeatureFrame.cols, endPoint.y / _preFeatureFrame.rows);

            // Spherical coordinates (radians): yaw in [-pi, pi], pitch in [-pi/2, pi/2].
            SphericalCoordinatePoint scpStart, scpEnd;

            scpStart.yaw = (startNormalizedPoint.x - 0.5) * 2 * M_PI;
            scpStart.pitch = (0.5 - startNormalizedPoint.y) * M_PI;

            scpEnd.yaw = (endNormalizedPoint.x - 0.5) * 2 * M_PI;
            scpEnd.pitch = (0.5 - endNormalizedPoint.y) * M_PI;

            // 3-D Cartesian coordinates on the unit sphere.
            RectangularCoordinatePoint rcpStart, rcpEnd;

            rcpStart.x = cos(scpStart.pitch) * cos(scpStart.yaw);
            rcpStart.y = sin(scpStart.pitch);
            rcpStart.z = -cos(scpStart.pitch) * sin(scpStart.yaw);

            rcpEnd.x = cos(scpEnd.pitch) * cos(scpEnd.yaw);
            rcpEnd.y = sin(scpEnd.pitch);
            rcpEnd.z = -cos(scpEnd.pitch) * sin(scpEnd.yaw);

            for (float t = 0; t < 1; t += 0.01)// trace the interpolated curve
            {

                // Quaternion slerp between the two unit vectors
                // (embedded as pure quaternions with scalar part 0).
                double p[4] = {0, rcpStart.x, rcpStart.y, rcpStart.z};
                double q[4] = {0, rcpEnd.x, rcpEnd.y, rcpEnd.z};
                double r[4] = {0};
                slerp(p, q, r, t);

                // Back to 3-D Cartesian coordinates.
                RectangularCoordinatePoint rcpInnerPoint;
                rcpInnerPoint.x = r[1];
                rcpInnerPoint.y = r[2];
                rcpInnerPoint.z = r[3];

                // Back to spherical coordinates.
                SphericalCoordinatePoint scpInnerPoint;
                scpInnerPoint.yaw = atan2(-rcpInnerPoint.z, rcpInnerPoint.x);
                scpInnerPoint.pitch = asin(rcpInnerPoint.y / sqrt(rcpInnerPoint.x * rcpInnerPoint.x + rcpInnerPoint.y * rcpInnerPoint.y + rcpInnerPoint.z * rcpInnerPoint.z));

                // Back to normalized image coordinates...
                Point2d normalizedInnerPoint(scpInnerPoint.yaw / (2 * M_PI) + 0.5, -(scpInnerPoint.pitch / M_PI - 0.5));
                // ...and to pixel coordinates.
                Point2d innerPixelPoint(normalizedInnerPoint.x * _curFeatureFrame.cols, normalizedInnerPoint.y * _curFeatureFrame.rows);

                /// Plot one point of the curve (radius 0 = single pixel).
                circle(_preFeatureFrame, innerPixelPoint, 0, Scalar(255, 0, 255),-1);
                circle(_earthERP, innerPixelPoint, 0, Scalar(255, 0, 255),-1);

            }

            /// Skip arrowheads for small flow vectors (< 3 px displacement).
            double distance = sqrt((startPoint.x - endPoint.x) * (startPoint.x - endPoint.x) + (startPoint.y - endPoint.y) * (startPoint.y - endPoint.y));
            if(distance > 3)
            {
                Scalar arrowColor = Scalar(255, 0, 255);
                // NOTE(review): first call truncates distance to int before
                // dividing, second passes the double — presumably both are
                // meant to compute the same barb length; verify intent.
                drawArrow(_preFeatureFrame, startPoint, endPoint, int(distance)/3, 50, arrowColor, 1, 4);
                drawArrow(_earthERP, startPoint, endPoint, distance/3, 50, arrowColor, 1, 4);
            }
        }
    }

}

/// Save the feature-annotated previous frame and the earth-ERP overlay for
/// the frame that was just processed, as PNGs under the given prefix.
void SOFSampler::writeOpticalFeature(string prefix) {

    const string suffix = to_string(_frameIndex) + ".png";
    cv::imwrite(prefix + "opticalFeature_" + suffix, _preFeatureFrame);
    cv::imwrite(prefix + "earthFlow_" + suffix, _earthERP);
}