/*#include <opencv/cv.h>
#include "opencv2/opencv.hpp"
#include <opencv/highgui.h>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/nonfree/features2d.hpp>
#include <opencv2/flann/flann.hpp>
#include <opencv2/flann/flann_base.hpp>
#include <opencv2/highgui/highgui.hpp>
 */
#include <string>
#include "Tracker.h"


using namespace std;
using namespace cv;
/*
Rect getBounds(double x,double y,double x_offset,int y_offset,Size frameSize){
	double tmpl_top  = y + y_offset;
	double tmpl_down = y - y_offset;
	double tmpl_left = x - x_offset;
	double tmpl_right= x + x_offset;
	
	if (tmpl_down <= 0){
		tmpl_down= 0;
	}
	if (tmpl_top >= frameSize.height-1){
		tmpl_top = frameSize.height-1;
	}
	if (tmpl_left <= 0){
		tmpl_left = 0;
	}
	if (tmpl_right >= frameSize.width){
		tmpl_right = frameSize.width;
	}

	Rect result(tmpl_left, tmpl_down,tmpl_right-tmpl_left,tmpl_top - tmpl_down);
	return result;
}

void ncr_descriptors_tracking(const char* filename){
	VideoCapture cap(filename);
	if(!cap.isOpened())  // check if we succeeded
		throw "cannot open video capture";
	Mat edges;
	Mat frame;
	cap >> frame;
	cvtColor(frame, frame, CV_BGR2GRAY);
	int frame_index = 1;
	
	vector<vector<KeyPoint>	> framesKeypoints;
	SiftFeatureDetector surf(2500);
	vector<KeyPoint> keypoints;
	surf.detect(frame, keypoints);
	
	SurfDescriptorExtractor surfDesc;
	Mat lastDescriptors;
	surfDesc.compute(frame, keypoints, lastDescriptors);
	framesKeypoints.push_back(vector<KeyPoint>());
	Mat prev_frame = frame;
	
	for (int i = 0; i < keypoints.size(); i++) {
		if (!(keypoints[i].pt.x<0 || keypoints[i].pt.x > frame.rows || keypoints[i].pt.y<0 || keypoints[i].pt.y > frame.cols)) {
			framesKeypoints[0].push_back(keypoints[i]);

		}
	}

	for(;;)
	{
		if (!cap.grab()) {
			break;
		}
		cap.retrieve(frame); // get a new frame from camera
		cvtColor(frame, frame, CV_BGR2GRAY);
		Size frameSize  = frame.size();

		vector<KeyPoint> newKeypoints;
		vector<KeyPoint>& lastKeypoints = framesKeypoints.back();
		for(int i=0; i<lastKeypoints.size();i++){

			Rect template_rec = getBounds(lastKeypoints[i].pt.x, lastKeypoints[i].pt.y, TEMPLATE_SIZE, TEMPLATE_SIZE, frameSize);

			Rect roiRec = getBounds(lastKeypoints[i].pt.x, lastKeypoints[i].pt.y, ROI_SIZE, ROI_SIZE, frameSize);

			Mat res = cvCreateImage(cvSize(roiRec.width-template_rec.width+1,
											 roiRec.height-template_rec.height+1),
									  IPL_DEPTH_32F, 1);

			matchTemplate(Mat(frame,roiRec), Mat(prev_frame, template_rec) , res, CV_TM_CCORR_NORMED);
			Point    minloc, maxloc;
			double   minval, maxval;
			
			minMaxLoc(res, &minval, &maxval, &minloc, &maxloc, Mat());
			KeyPoint newLocatedKeypoint = lastKeypoints[i];
			newLocatedKeypoint.pt.x = maxloc.x+roiRec.x + TEMPLATE_SIZE;
			newLocatedKeypoint.pt.y = maxloc.y+roiRec.y + TEMPLATE_SIZE;
			newKeypoints.push_back(newLocatedKeypoint);
		}

		framesKeypoints.push_back(newKeypoints);
		Mat frameWithKeypoints;
		drawKeypoints(frame,newKeypoints,frameWithKeypoints);
		imshow( "Display window", frameWithKeypoints);
		cvWaitKey(3000);
		prev_frame = frame;
		frame_index++;
		
	}
	return;
}
 */
int main (int argc, char * const argv[]) {
    // Tracker tuning parameters, preserved from the original call site.
    // NOTE(review): exact semantics (presumably a match ratio and a count
    // threshold) are defined in Tracker.h — confirm against that header.
    constexpr double kTrackerRatio     = 0.70;
    constexpr int    kTrackerThreshold = 50;

    // Use the first command-line argument as the input video path when one
    // is supplied; otherwise pass an empty filename through unchanged
    // (Tracker decides how to interpret an empty path).
    string filename("");
    if (argc > 1) {
        filename = argv[1];
    }

    //ncr_descriptors_tracking("test.mov");
    Tracker testTracker(filename, kTrackerRatio, kTrackerThreshold);
    testTracker.myCalibrateCamera();
    testTracker.track();

    return 0;  // explicit success status (previously implicit)
}
