#include <cv.h>
#include <highgui.h>

#include "HandInterface.hpp"
#include "TrackDataServer.hpp"

#include <boost/thread.hpp>

#include <iostream>

using namespace cv;
using namespace hi;

// Constructs the runner around the sink that receives track data.
// Both barriers are sized for 2 parties: the run() loop plus exactly one
// worker thread (runTracker; runClassifier's launch is commented out).
// ht/hl start null and are created later in run()/localise().
SingleTrackerRunner::SingleTrackerRunner( Ptr< AbstractTrackDataServer > trackDataServerPtr ) : trackDataServerPtr( trackDataServerPtr ),
		ht(0), hl(0), running(false), nextFrameReadyBar(2), computationCompleteBar(2), isHandActive(false), isHandActiveBuffer(false) {
}

// NOTE(review): `ht` (from HandTracker::init in run()) and `hl`
// (new'd in localise()) are not released here — confirm they are
// smart pointers (cv::Ptr) or are freed elsewhere, otherwise they leak.
SingleTrackerRunner::~SingleTrackerRunner() {
}

// Main pipeline: retries hand localisation until it succeeds, builds the
// gesture classifier and tracker, then spawns the tracker thread and
// drives a frame loop synchronised through two 2-party barriers:
//   nextFrameReadyBar      — frame published, tracking may start
//   computationCompleteBar — tracking result available
// Every 100 frames it prints "<camera seconds>;<total seconds>" to stdout.
void SingleTrackerRunner::run() {
	running = true;
	
	// Keep trying to localise until a hand is found or stop() is called.
	while (running && localise() == -1 ) {}
	if (!running) return;
	
	//set up classifier
	gc = Ptr< AbstractGestureClassifier >( new BoostGestureClassifier( "C0", "C0", hl->getProbabilityTransformer() ) );
	
	//set up tracker
	//paramset3
	ht = HandTracker::init( 10.0, 0.1, 100.0, 10.0,
						   trackingRect, frame, Ptr< Segmenter >( new Segmenter( hl->getProbabilityTransformer() ) ) );

	// -- run tracker and classifier -- //
	// NOTE(review): trackerThread is never joined; if stop() is called the
	// worker may remain blocked in a barrier wait — confirm shutdown path.
	boost::thread trackerThread( boost::bind( &SingleTrackerRunner::runTracker, this ) );
	//boost::thread classifierThread( boost::bind( &SingleTrackerRunner::runClassifier, this ) );
	
	
	//TIMING
	double prevTime = (double)cvGetTickCount();
	int frames = 0;
	
	double camTimeTotal = 0;
	
	presetNextFrame();
	
	while (running) {
		// Snapshot the previous iteration's results before the tracker
		// thread starts writing trackingRect for the new frame.
		trackingRectBuffer = trackingRect;
		isHandActiveBuffer = isHandActive;
		loadNextFrame();
		nextFrameReadyBar.wait(); //allow tracking of next rect to start while processing data from previous rect
		
		//tracking post-processing: camera grab time is measured separately
		double preCamTime = (double)cvGetTickCount();
		presetNextFrame();
		camTimeTotal += (double)cvGetTickCount() - preCamTime;
		
		// Publish the previous frame's hand centre to the data server.
		trackDataServerPtr->putTrackData( isHandActiveBuffer, trackingRectBuffer.x+trackingRectBuffer.width/2, trackingRectBuffer.y+trackingRectBuffer.height/2 );
		
		computationCompleteBar.wait(); //wait for tracking to complete
//		waitKey(1);//helps visualiser run more smoothly
		
		// TIMING: every 100 frames report camera time and wall time.
		frames++;
		if ( (frames % 100) == 0) {
			double now = (double)cvGetTickCount();
			double camseconds = camTimeTotal/((double)cvGetTickFrequency()*1e6);
			double secondsTotal = (now - prevTime)/((double)cvGetTickFrequency()*1e6);
			prevTime = now;
			
			std::cout << camseconds << ";";
			std::cout << secondsTotal << std::endl;
			camTimeTotal = 0;
		}
		
		//DO ACTUAL PROFILING, DONT COUNT TIME FOR SET NEXT
		
	}
}

void SingleTrackerRunner::stop() { running = false; }

// -- protected methods -- //


//TIMING — per-100-frame tracker profiling counters used by runTracker().
// Anonymous namespace gives them internal linkage: the generically named
// globals previously leaked into the program's global namespace (ODR risk).
namespace {
int frameCount = 0;
double trackTime = 0;
}

void SingleTrackerRunner::runTracker() {
	// -- tracking hand --//
	while (running) {
		nextFrameReadyBar.wait();
		
		double preTrackTime = (double)cvGetTickCount();
		
		trackingRect = ht->track( frame );
		
		trackTime += (double)cvGetTickCount() - preTrackTime;
		
		if ((++frameCount) == 100) {
			double secs = trackTime/((double)cvGetTickFrequency()*1e6);
			std::cout << secs << ";";
			frameCount = 0;
			trackTime = 0;
		}
		
		computationCompleteBar.wait();
	}
}

// Classifier worker loop (currently not launched — its thread creation is
// commented out in run()). Crops the previous frame to the buffered track
// rectangle and asks the gesture classifier whether the hand is active.
void SingleTrackerRunner::runClassifier() {
	while (running) {
		nextFrameReadyBar.wait();
		
		// Read trackingRectBuffer rather than trackingRect: the tracker
		// thread is concurrently updating the latter.
		Mat handImg = prevFrame( trackingRectBuffer );
		
		isHandActive = gc->isHandActive( handImg );
		
		computationCompleteBar.wait();
	}
}

int SingleTrackerRunner::localise() {
	hl = new HandLocaliser();
	try {
		setNextFrame();
		//trackingRect = hl->localiseFace( frame );
		trackingRect = hl->localiseHand( frame );
		cout << "Face found" << endl;
	}
	catch (cv::Exception e) {
		if ( e.code == HI_ERR_NOFACEFOUND ) {
			return -1;
		} else {
			throw;
		}
	}
	
	//forcing rectangle to stay inside frame
	forceInside( frame.size(), trackingRect );
	
	return 0;
}


