#ifndef TRACKANDCLASSIFYTEST_HPP
#define TRACKANDCLASSIFYTEST_HPP

#include <cv.h>
#include <highgui.h>
#include <ml.h>

#include "HandInterface.hpp"

#include "AbstractHandTracker.hpp"

#include <iostream>
//TODO clean up includes

#include "gtest.h"



#include "HiVisualiser.hpp"

using namespace cv;
using namespace hi;

//FIX: This is only really a pseudo-header!

namespace {
	
	// Abstract fixture: runs a hand tracker over a tagged test video while
	// classifying the tracked hand patch (open/closed) with an SVM.
	// Concrete subclasses select the tracker by implementing setHandTracker().
	// NOTE(review): the original comment here named
	// PerPixelProbabilityTransformerTest — apparently a copy/paste leftover.
	class TrackAndClassifyTest : public ::testing::Test {
	public:
		
		AbstractHandTracker *ht; // tracker under test; created via setHandTracker(), deleted in TearDown()
		ResourceTracker *rt;     // config/resource access; created in SetUp(), deleted in TearDown()
		
		cv::Ptr< cv::VideoCapture > capPtr;                       // test video, opened in initTrackerTest()
		cv::Ptr< VideoPointTagSequence > pointTagSequencePtr;     // per-frame ground-truth hand positions
		cv::Ptr< VideoGestureTagSequence > gestureTagSequencePtr; // per-frame gesture tags ('o' open / 'c' closed)
		cv::Mat frame;                                            // current video frame
		
		HandLocaliser* hl; // finds the initial hand rect; NOTE(review): never deleted in TearDown (leak)
		
		cv::Rect trackingRect; // most recent tracker output
		
		double errorThreshold; // max tolerated per-frame position error; set in initTrackerTest()
		
		int localisedHandInFrame; // NOTE(review): appears unused in this file
		
	protected:
		
		virtual void SetUp();
		
		virtual void TearDown();
		
		// Concrete subclasses must create *ht for the given tracker code.
		virtual void setHandTracker(int trackerCode)=0;
		
		// Opens video + tag sequences, localises the hand; returns the frame
		// number at which localisation succeeded.
		int initTrackerTest(const char* videoNameString);
		
		// Full track-and-classify run; asserts per-frame error stays below threshold.
		void testTracker(int trackerCode, const char* videoNameString);
		
		// Trains an SVM from collected hand-image samples and saves it to disk.
		void trainSVM(Mat& trainingData, Mat& trainingResponses );
	};
	
};


void ::TrackAndClassifyTest::SetUp() {
	rt = new ResourceTracker( "../config/runtimeSettings/cfg.xml" );
	rt->loadCategory("TestData");
}

void ::TrackAndClassifyTest::TearDown() {
	delete rt;
	delete ht;
}

int ::TrackAndClassifyTest::initTrackerTest(const char* videoNameString) {
	//HiVisualiser::addWindow( "Track and Classify Test" );
	
	capPtr = rt->getFileVideoCapture( videoNameString );
	pointTagSequencePtr = rt->getPointTags( videoNameString );
	gestureTagSequencePtr = rt->getGestureTags( videoNameString );
	
	//FIXNOW
	int tagCount = pointTagSequencePtr->tagCount;
	
	hl = new HandLocaliser();
	
	int localisationframeNumber = 0; //NOTE: frame zero is never tracked as it is always used by 
	
	for ( ; localisationframeNumber < tagCount; localisationframeNumber++ ) {
		try {
			(*capPtr) >> frame;
			pointTagSequencePtr->getNextPointTag();
			gestureTagSequencePtr->getNextGestureTag();
			trackingRect = hl->localiseHand( frame );
			
			cout << "Face found. First tracking frame:" << endl;
			
			cerr << localisationframeNumber + 1 << ";" ;
			
			break;
		}
		catch (cv::Exception e) {
			if ( e.code == HI_ERR_NOFACEFOUND ) {
				continue;
			} else {
				throw;
			}
			
		}
	}
	
	errorThreshold  = 0.75 * (trackingRect.width + trackingRect.height);
	
	forceInside( frame.size(), trackingRect);
	
	return localisationframeNumber;
}

// Runs the tracker selected by trackerCode over the tagged test video:
// localises the hand, then per frame (a) tracks it and accumulates the
// position error against the point tags, and (b) classifies the tracked
// 100x100 grey patch open/closed with an SVM loaded from "svms.xml",
// comparing against the gesture tags. Records timing/accuracy properties
// and fails the test if any frame's error exceeds errorThreshold.
void ::TrackAndClassifyTest::testTracker(int trackerCode, const char* videoNameString ) {
	
	//VISUALISATION
	HiVisualiser::addWindow( "TrackAndClassifyTest" );
//	HiVisualiser::addWindow("A");
	HiVisualiser::moveWindow("TrackAndClassifyTest", 700, 150);
	
	// -- SVM load/prepare -- //
	// Loads the pre-trained gesture classifier "svm1" from svms.xml.
	CvSVM svm = CvSVM();
	svm.load( "svms.xml", "svm1" );
	
	RecordProperty( "videoNameString ", videoNameString);
	int localisationFrameNumber = initTrackerTest( videoNameString ); //returns number of first frame where face was found
	int firstTrackingFrame = localisationFrameNumber+1;
	
	setHandTracker( trackerCode );
	
	//FIXNOW!
	int tagCount = pointTagSequencePtr->tagCount;
	
	double errorSum = 0; 
	
	bool frameErrorBelowThreshold = true;
	
	
	// -- ML Classifier data prep -- //
	int tagsLeft = tagCount - localisationFrameNumber; //FIX don't assume all tags of both kinds present!
	
	// One flattened 100x100 grey patch per frame: a row of trainingData.
	Size perSampleSize( 100, 100 );
	int perFrameDataDimension = 100*100;
	Mat trainingData( tagsLeft, perFrameDataDimension, CV_32FC1, Scalar::all(0) );
	Mat trainingResponses( tagsLeft, 1, CV_32FC1, Scalar::all(0) );
	
	Mat dataSampleForPrediction( 1, perFrameDataDimension, CV_32FC1, Scalar::all(0) );
	
	int dataSampleIndex = 0;
	int correctPredictions = 0;
	
	//TODO RAT char is 1 byte, float is 4?
	//cout << "charsize in bytes=" << sizeof(char) << endl;
	Mat handImgGrey;
	
	// -- Prepare timing data -- //
	
	double t = 0;
	t = (double)cvGetTickCount();
	
	//TODO RAT
	for( int frameCount = firstTrackingFrame; frameCount<tagCount; frameCount++) {
	//for( int frameCount = firstTrackingFrame; frameCount<50; frameCount++) {
		// -- get next frame -- //
		(*capPtr) >> frame;
		
		//VISUALISATION
		Mat imgToDraw;
		frame.copyTo(imgToDraw);
		HiVisualiser::windowMatrixMap["TrackAndClassifyTest"] = imgToDraw;
		
		// -- tracking frame -- //
		trackingRect = ht->track( frame );
		
		Point groundTruth = pointTagSequencePtr->getNextPointTag();
		
		// -- get and display gesture tag -- //
		char gestureTag = gestureTagSequencePtr->getNextGestureTag();
		
		// -- adding sample to gesture training data -- //
		
		//FIXNOW RESIZE!!!
		// NOTE(review): fixed 100x100 window anchored at the tracked rect's
		// top-left corner instead of resizing trackingRect (see FIXNOW).
		// This can reach outside the frame near the right/bottom edges.
		Rect testSaR( trackingRect.x, trackingRect.y, 100, 100 );
		
		Mat handImg( frame, testSaR );
		
		cvtColor( handImg, handImgGrey, CV_BGR2GRAY );
		
		CV_Assert( handImgGrey.isContinuous() );
		
		int cols = handImgGrey.cols, rows = handImgGrey.rows;
		cols *= rows; //as the array is continuous, we may treat it as one long stretch of memory
		rows = 1;
		
		// Normalise pixel bytes to [0,1] floats, filling both the training
		// row for this frame and the single-row prediction sample.
		const uchar* Mi = handImgGrey.ptr<uchar>(0);
		for(int j = 0; j < cols; j++) {
			trainingData.at<float>( dataSampleIndex, j ) = Mi[j]/255.0; //TODO, float is 4 bytes here too?
			dataSampleForPrediction.at<float>( 0, j ) = Mi[j]/255.0;
		}
		
		// NOTE(review): all of the response-labelling code below is commented
		// out, so trainingResponses stays all-zero. If trainSVM() is ever
		// re-enabled (see bottom of this function), the labels must be filled
		// in first — presumably with the "OK" 0.0/1.0 variant below.
		{
//		for(int j = 0; j < perFrameDataDimension; j++) {
//			testDataOut.at<float>( j/100, j % 100 ) = trainingData.at<float>( 100, j );
//		}
		
		
		//trainingResponses.at<float>( dataSampleIndex, 0 ) = (float) gestureTag; //BROKEN!
//		
		//BROKEN!
//		switch (gestureTag) {
//			case 'o':
//				trainingResponses.at<float>( dataSampleIndex, 0 ) = 0.0;
//				break;
//			case 'c':
//				trainingResponses.at<float>( dataSampleIndex, 0 ) = 1.0;
//				break;
//			default:
//				CV_Error( CV_StsError, "Unrecognised tag in gesture tags!"); //throws exception!
//				break;
//		}
		
		//OK
//		switch (gestureTag) {
//			case 'o':
//				trainingResponses.at<float>( dataSampleIndex, 0 ) = 0.0;
//				break;
//			case 'c':
//				trainingResponses.at<float>( dataSampleIndex, 0 ) = 1.0;
//				break;
//			default:
//				CV_Error( CV_StsError, "Unrecognised tag in gesture tags!"); //throws exception!
//				break;
//		}
//		
		}
		dataSampleIndex++;
		
		//resize( src, dataMatrix, Size(100,100), 
		
		float gesturePrediction = svm.predict( dataSampleForPrediction );
		
		bool taggedOpen;
		bool predictedOpen;
		
		//VISUALISATION
		//FIX use ref/& return from getMatrix method? 
		// Draw tag marker: red = open ('o'), green = closed ('c').
		// CV_Error in the default case throws, so taggedOpen is always
		// assigned on the paths that continue past this switch.
		switch (gestureTag) {
			case 'o':
				circle(imgToDraw, Point(5, 5), 4, Scalar(0, 0, 255), 2);
				taggedOpen = true;
				break;
			case 'c':
				circle(imgToDraw, Point(5, 5), 2, Scalar(0, 255, 0), 2);
				taggedOpen = false;
				break;
			default:
				CV_Error( CV_StsError, "Unrecognised tag in gesture tags!"); //throws exception!
				break;
		}
		
		// NOTE(review): exact float comparison — assumes svm.predict returns
		// exactly the 0.0/1.0 class labels it was trained with; confirm.
		if ( gesturePrediction == 0.0) {
			predictedOpen = true;
			circle(imgToDraw, Point(20, 5), 4, Scalar(0, 0, 255), 2); //FIX add existence check!
		} else if ( gesturePrediction == 1.0) {
			predictedOpen = false;
			circle(imgToDraw, Point(20, 5), 2, Scalar(0, 255, 0), 2);
		} else {
			CV_Error( CV_StsError, "Unexpected prediction!"); //throws exception!
		}		
		
		if ( taggedOpen == predictedOpen ) {
			correctPredictions++;
		}
		
		// -- computing tracking error -- //

		Point trackingRectCentre = Point( trackingRect.x + trackingRect.width/2, trackingRect.y + trackingRect.height/2 );
		
		int dX = groundTruth.x - trackingRectCentre.x;
		int dY = groundTruth.y - trackingRectCentre.y;
		
		// Euclidean distance between tag and tracked-rect centre.
		double frameError = std::sqrt( dX*dX + dY*dY );
		
		if (frameError > errorThreshold) {
			frameErrorBelowThreshold = false;
		}
		
		errorSum += frameError;
		
		//VISUALISATION
		HiVisualiser::refreshWindow( "TrackAndClassifyTest" );
		waitKey(1);
		//			char c = waitKey(0);
		//			if( c == 27 ) break;
	}
	

	Mat testDataOut( 100, 100, CV_32FC1, Scalar::all(0) );
	CV_Assert( testDataOut.isContinuous() );
	
	cout << "samplecount=" << dataSampleIndex << endl;
	cout << "correct predictions=" << correctPredictions << endl;
	cout << "fraction correct="<< correctPredictions / (float) dataSampleIndex << endl;
	
	// Retired visualisation/debug experiments, kept for reference.
	{
//OK
//	for(int j = 0; j < perFrameDataDimension; j++) {
//		testDataOut.at<float>( j/100, j % 100 ) = trainingData.at<float>( 100, j );
//	}
	
//OK
//	for(int j = 0; j < perFrameDataDimension; j++) {
//		testDataOut.at<float>( j/100, j % 100 ) = handImgGrey.at<uchar>( j/100, j%100 )/255.0; //FIXNOW, does this work on a 64 bit machine?
//		//FIXNOW indices right way round?
//	}

//OK
//	for (int i = 0; i < 100; i++) {
//		for(int j = 0; j < 100; j++) {
//			testDataOut.at<float>( i, j ) = handImgGrey.at<uchar>( i, j )/255.0; //FIXNOW, does this work on a 64 bit machine?
//			//FIXNOW indices right way round?
//		}
//	}

//OK
//	testDataOut.data = (uchar*) trainingData.ptr<float>( 0 );
//	HiVisualiser::windowMatrixMap["A"] = testDataOut;
//	HiVisualiser::refreshWindow( "A" );
//	waitKey(0);
	
	
//OK
//	for( int i = 0; i < dataSampleIndex; i++) {
//		testDataOut.data = (uchar*) trainingData.ptr<float>( i );
//		//char tag = trainingResponses.at<char>( dataSampleIndex, 0 );
//		HiVisualiser::windowMatrixMap["A"] = testDataOut;
//		HiVisualiser::refreshWindow( "A" );
//		waitKey(33);
//	}
	
	
//	for( int i = 0; i < dataSampleIndex; i++) {
//		testDataOut.data = (uchar*) trainingData.ptr<float>( i );
//		char gestureTag = (char) trainingResponses.at<float>( i, 0 );  //BROKEN!!! SEE ABOVE !!! USE 0.0 and 1.0
//		
//		switch (gestureTag) {
//			case 'o':
//				circle(testDataOut, Point(5, 5), 2, Scalar::all(0), 2);
//				circle(testDataOut, Point(5, 5), 4, Scalar::all(1.0), 2);
//				break;
//			case 'c':
//				circle(testDataOut, Point(5, 5), 2, Scalar::all(1.0), 2);
//				circle(testDataOut, Point(5, 5), 4, Scalar::all(0.0), 2);
//				break;
//			default:
//				CV_Error( CV_StsError, "Unrecognised tag in gesture tags!"); //throws exception!
//				break;
//		}
//		
//		
//		HiVisualiser::windowMatrixMap["A"] = testDataOut;
//		HiVisualiser::refreshWindow( "A" );
//		waitKey(33);
//	}
	}
	
	//trainSVM( trainingData, trainingResponses );

	
	t = (double)cvGetTickCount() - t;
	
	// NOTE(review): divides by tagCount, but only tagCount -
	// firstTrackingFrame frames contributed to errorSum — the average is
	// biased low when localisation took several frames.
	double avgError = errorSum/tagCount;
	
	//Note: properties should have no spaces in names to give well-formed xml
	RecordProperty( "TagCount", tagCount );
	RecordProperty( "ErrorThreshold", errorThreshold );
	RecordProperty( "AvgPositionError ", avgError);
	double timeTrackingOnly = t/((double)cvGetTickFrequency()*1e6);
	RecordProperty( "TimeTrackingOnly", timeTrackingOnly );
	RecordProperty( "TrackingFPS", tagCount/timeTrackingOnly );
	
	//To make it easy to record data to a csv -- //NOTE: Also data recorded during init
	std::cerr << tagCount << ";";
	std::cerr << errorThreshold << ";";
	std::cerr << frameErrorBelowThreshold << ";";
	std::cerr << avgError << ";";
	std::cerr << timeTrackingOnly << ";";
	std::cerr << tagCount/timeTrackingOnly << std::endl;
	
	
	ASSERT_TRUE( frameErrorBelowThreshold );
	
}

// Trains an SVM gesture classifier from the collected per-frame hand-image
// samples (one flattened patch per row of trainingData, labels in
// trainingResponses) and saves the result to "first.svm.xml".
void ::TrackAndClassifyTest::trainSVM(Mat& trainingData, Mat& trainingResponses ) {

	//FIXNOW
	//FULLTEST //FULL TEST
	// Use the named constants the original magic numbers stood for:
	// CvSVM::C_SVC == 100, CvSVM::RBF == 2 (per the original's own comments).
	CvSVMParams svmParams( CvSVM::C_SVC,
						  CvSVM::RBF,
						  1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
						  NULL,
						  TermCriteria( TermCriteria::MAX_ITER | TermCriteria::EPS, 100, 1e-20 ) ); //No idea if this is appropriate

	CvSVM svm; // default-construct directly; the copy-init form was redundant
	
	// train_auto cross-validates over the built-in parameter grids; k_fold = 2.
	svm.train_auto( trainingData, trainingResponses, Mat(), Mat(), svmParams, 2 ); //Not ok idx? //586, 10, 20
	
	svm.save( "first.svm.xml",  "firstsvm" );

}

// -- Main Method -- //

// Test entry point: hand gtest the command line, then execute every
// registered test case.
int main(int argc, char **argv) {
	//TODO RAT? REMOVE? REVISE?
	// A disabled option parser used to live here. Intended usage was:
	//   XTest.autotest.out [ gtest flags ] [ --record_tracking=name ] [ 2>outputfile ]
	// where --record_tracking=name set AbstractHandTrackerTest::trackingRecordName
	// from the argument following the '='.
	
	::testing::InitGoogleTest(&argc, argv);
	const int testResult = RUN_ALL_TESTS();
	return testResult;
}


// -- Example test -- //

//
//TEST_F(TrackAndClassifyTest, LKGREYaaBPMSaaOF0 ) {
//	testTracker( 0, "locVid1c" );
//}

#endif