/*****************************************************************************
*   Markerless AR desktop application.
******************************************************************************
*   by Khvedchenia Ievgen, 5th Dec 2012
*   http://computer-vision-talks.com
******************************************************************************
*   Ch3 of the book "Mastering OpenCV with Practical Computer Vision Projects"
*   Copyright Packt Publishing 2012.
*   http://www.packtpub.com/cool-projects-with-opencv/book
*****************************************************************************/

////////////////////////////////////////////////////////////////////
// File includes:
#include "ARDrawingContext.hpp"
#include "ARPipeline.hpp"
#include "DebugHelpers.hpp"

////////////////////////////////////////////////////////////////////
// Standard includes:
#include <opencv2/opencv.hpp>
#include <Windows.h>
#include <gl/gl.h>
#include <gl/glu.h>
#include <fstream>
#include <iostream>
#include <string>

/**
 * Processes a recorded video or live view from a web-camera and allows you to adjust homography refinement and 
 * reprojection threshold in runtime.
 */
void processVideo(const cv::Mat& patternImage, CameraCalibration& calibration, cv::VideoCapture& capture);

/**
 * Processes a single image. The processing goes in a loop, so the
 * detection can be steered at runtime via the homography refinement switch and 
 * the reprojection threshold.
 */
void processSingleImage(const cv::Mat& patternImage, CameraCalibration& calibration, const cv::Mat& image);

/**
 * Performs the full detection routine on a camera frame and draws the scene using the drawing context.
 * In addition, this function draws an overlay with debug information on top of the AR window.
 * When 'flag' is false the pattern detection/pose update step is skipped and only the background and overlay are refreshed.
 * Returns true if the processing loop should be stopped; otherwise - false.
 */
bool processFrame(const cv::Mat& cameraFrame, ARPipeline& pipeline, ARDrawingContext& drawingCtx, bool flag);

int main(int argc, const char * argv[])
{
    // Change this calibration to yours:
    //CameraCalibration calibration(526.58037684199849f, 524.65577209994706f, 318.41744018680112f, 202.96659047014398f);//ori
	//CameraCalibration calibration(587.58642996017329f, 594.97930673771259f, 319.50000000000000f, 239.50000000000000f);
	CameraCalibration calibration;
  
	int choice;
	std::cout<<"Is background a picture or live camera?\n1. a picture\n2. live camera\nchoose: ";
	std::cin>>choice;
	//argc = 3;
	argv[1] = "PyramidPattern.jpg";
	if (choice == 1){
		argc = 3;
		argv[2]="PyramidPatternTest.bmp";
		calibration.create(526.58037684199849f, 524.65577209994706f, 318.41744018680112f, 202.96659047014398f);
	}
	else{
		argc = 2;
		//argv[2] = "";
		calibration.create(575.59922331157532f, 546.05254101167429f, 319.50000000000000f, 239.50000000000000f);
		calibration.importDistorsionCoeff("cam_lifecam02.out");
	}

    if (argc < 2)
    {
        std::cout << "Input image not specified" << std::endl;
        std::cout << "Usage: markerless_ar_demo <pattern image> [filepath to recorded video or image]" << std::endl;
        return 1;
    }

    // Try to read the pattern:
    cv::Mat patternImage = cv::imread(argv[1]);
	//cv::Mat patternImage = cv::imread("PyramidPattern.jpg");
    if (patternImage.empty())
    {
        std::cout << "Input image cannot be read" << std::endl;
        return 2;
    }

    if (argc == 2)// live camera
    {
        processVideo(patternImage, calibration, cv::VideoCapture(0));
    }
    else if (argc == 3)
    {
        std::string input = argv[2];
        //cv::Mat testImage = cv::imread(input);
		cv::Mat testImage = cv::imread(input);
        if (!testImage.empty())
        {
            processSingleImage(patternImage, calibration, testImage);
        }
        else //record video
        {
            cv::VideoCapture cap;
            if (cap.open(input))
            {
                processVideo(patternImage, calibration, cap);
            }
        }
    }
    else
    {
        std::cerr << "Invalid number of arguments passed" << std::endl;
        return 1;
    }

    return 0;
}

void processVideo(const cv::Mat& patternImage, CameraCalibration& calibration, cv::VideoCapture& capture)
{
    // Timing instrumentation: ts/te/tu hold GetTickCount() stamps (ms) and
    // the per-frame delta; results for the first 20 frames are appended to
    // "temp.txt" and echoed to stdout.
//#if _DEBUG
	DWORD ts, te, tu;
	int count = 0;
	
//#endif
	int framecount = 0;
    // Grab first frame to get the frame dimensions
    cv::Mat currentFrame; 
	cv::Mat prevFrame; // last live frame, re-shown while paused
    capture >> currentFrame;
	prevFrame = currentFrame.clone();

    // Check the capture succeeded:
    if (currentFrame.empty())
    {
        std::cout << "Cannot open video capture device" << std::endl;
        return;
    }

    cv::Size frameSize(currentFrame.cols, currentFrame.rows);

    // Measure how long building the pattern description takes and log it,
    // followed by the column header for the per-frame timings.
//#if _DEBUG
	ts = GetTickCount();
//#endif
    ARPipeline pipeline(patternImage, calibration, false);
//#if _DEBUG
	te = GetTickCount();
	std::ofstream file("temp.txt");
	file << "PatternBuild " << te-ts << std::endl;
	//file << "Frame     FeatureExtract Homography RefineHomography FindPattern Total" << std::endl;
	file << "Frame     FeatureExtract Homography RefineHomography FindPattern Total" << std::endl;
	file.close();
//#endif
    ARDrawingContext drawingCtx("Markerless AR", frameSize, calibration);

    bool shouldQuit = false;
    do
    {
        // While paused, keep re-processing the last captured frame instead of
        // pulling a new one from the capture device.
		if (!pipeline.pause){
			prevFrame = currentFrame.clone();
			capture >> currentFrame;
		} else {
			currentFrame = prevFrame.clone();
		}
        if (currentFrame.empty())
        {
            shouldQuit = true;
            continue;
        }

        // Start the timing sample and write a "Frame N" prefix for the first
        // 20 frames (the extra space keeps single-digit frame numbers
        // column-aligned with two-digit ones).
//#if _DEBUG
		if (count < 20){
			ts = GetTickCount();
			std::ofstream file("temp.txt", std::ios::out|std::ios::app);
			if (count < 10)
				file << "Frame " << count << "  ";
			else
				file << "Frame " << count << " ";
			file.close();
		}
//#endif
        // NOTE(review): framecount is reset with "% 1" below, so it is always
        // 0 here and the detection flag passed to processFrame is always true;
        // the else branch is effectively dead. Presumably "% 1" was meant to
        // be a larger frame-skip modulus -- confirm the intended value.
		if (framecount == 0)
			shouldQuit = processFrame(currentFrame, pipeline, drawingCtx, true);
		else
			shouldQuit = processFrame(currentFrame, pipeline, drawingCtx, false);
		framecount ++;
		framecount = framecount%1;

        // Close out the timing sample for this frame and append it to the log.
//#if _DEBUG
		if (count < 20){
			te = GetTickCount();
			tu = te - ts;
			std::cout << tu << std::endl;
			std::ofstream file("temp.txt", std::ios::out|std::ios::app);
			file << tu << std::endl;
			file.close();
			count ++;
		}
//#endif
    } while (!shouldQuit);

}

void processSingleImage(const cv::Mat& patternImage, CameraCalibration& calibration, const cv::Mat& image)
{
    // Build the detection pipeline and an AR window sized to the input image.
    cv::Size frameSize(image.cols, image.rows);
    ARPipeline pipeline(patternImage, calibration, false);
    ARDrawingContext drawingCtx("Markerless AR", frameSize, calibration);

    // Re-process the same image until the user asks to quit; looping lets the
    // runtime switches (refinement toggle, RANSAC threshold) be adjusted live.
    for (bool quitRequested = false; !quitRequested; )
    {
        quitRequested = processFrame(image, pipeline, drawingCtx, true);
    }
}

bool processFrame(const cv::Mat& cameraFrame, ARPipeline& pipeline, ARDrawingContext& drawingCtx, bool flag)
{
    // Work on a copy so the status overlay never touches the caller's frame.
    cv::Mat overlay = cameraFrame.clone();

    // Render the current runtime-switch state in the top-left corner.
    const char* pauseText = pipeline.pause
        ? "Pause   ('p' to play)"
        : "Play   ('p' to pause)";
    cv::putText(overlay, pauseText, cv::Point(10,15), CV_FONT_HERSHEY_PLAIN, 1, CV_RGB(0,200,0));

    const char* refineText = pipeline.m_patternDetector.enableHomographyRefinement
        ? "Pose refinement: On   ('h' to switch off)"
        : "Pose refinement: Off  ('h' to switch on)";
    cv::putText(overlay, refineText, cv::Point(10,30), CV_FONT_HERSHEY_PLAIN, 1, CV_RGB(0,200,0));

    cv::putText(overlay, "RANSAC threshold: " + ToString(pipeline.m_patternDetector.homographyReprojectionThreshold) + "( Use'-'/'+' to adjust)", cv::Point(10, 45), CV_FONT_HERSHEY_PLAIN, 1, CV_RGB(0,200,0));

    // Hand the annotated frame to the renderer as the new background.
    drawingCtx.updateBackground(overlay);

    if (flag)
    {
        // Run pattern detection on the clean frame and refresh the pose.
        drawingCtx.isPatternPresent = pipeline.processFrame(cameraFrame);
        drawingCtx.patternPose = pipeline.getPatternLocation();
    }

    // Request redraw of the window:
    drawingCtx.updateWindow();

    // Dispatch on the keyboard input (5 ms poll).
    bool quit = false;
    switch (cv::waitKey(5))
    {
    case '+':
    case '=':
        // Raise the RANSAC reprojection threshold, capped at 10.
        pipeline.m_patternDetector.homographyReprojectionThreshold += 0.2f;
        pipeline.m_patternDetector.homographyReprojectionThreshold = min(10.0f, pipeline.m_patternDetector.homographyReprojectionThreshold);
        break;
    case '-':
        // Lower the RANSAC reprojection threshold, floored at 0.
        pipeline.m_patternDetector.homographyReprojectionThreshold -= 0.2f;
        pipeline.m_patternDetector.homographyReprojectionThreshold = max(0.0f, pipeline.m_patternDetector.homographyReprojectionThreshold);
        break;
    case 'h':
        pipeline.m_patternDetector.enableHomographyRefinement = !pipeline.m_patternDetector.enableHomographyRefinement;
        break;
    case 'p':
        pipeline.pause = !pipeline.pause;
        break;
    case 27:   // ESC
    case 'q':
        quit = true;
        break;
    default:
        break;
    }

    return quit;
}


