#include <cv.h>
#include <highgui.h>

#include "HandInterface.hpp"

#include <boost/thread.hpp>

using namespace cv;
using namespace hi;

// Constructs the runner in an idle state.
// Both barriers start with 4 participants: the run() loop itself plus the
// primary tracker, primary classifier, and face tracker threads (see run();
// the barriers are grown by increment() when the secondary pair is spawned).
// ht and hl start null and are allocated lazily in run()/localise().
DoubleTrackerRunner::DoubleTrackerRunner( Ptr< AbstractTrackDataServer > trackDataServerPtr ) : trackDataServerPtr( trackDataServerPtr ),
ht(0), hl(0), running(false), runningFaceTracker(false), nextFrameReadyBar(4), computationCompleteBar(4),
isHandActive(false), isHandActiveBuffer(false), isSecondaryActive(false), isSecondaryActiveBuffer(false){
}

DoubleTrackerRunner::~DoubleTrackerRunner() {
	// NOTE(review): ht, hl, ft and ht2 are assigned from `new` / factory calls
	// but never released here. If they are raw pointers (they are initialised
	// with 0 in the constructor, and ft/hl are assigned with plain `new`) this
	// leaks on destruction — confirm ownership against HandInterface.hpp.
}

// Main orchestration loop.
//   Phase 0: primary hand tracker (seeded from the located face) runs together
//            with a separate face tracker.
//   Phase 1: once the hand rect stops intersecting the face rect, a secondary
//            tracker/classifier pair is spawned on the face region.
//   Phase 2: once the secondary rect clears the face rect, the face tracker is
//            retired and both hands are reported to the track-data server.
// All worker threads run in lock-step with this loop via two barriers:
// nextFrameReadyBar (a new frame is published) and computationCompleteBar
// (all per-frame results are in).
void DoubleTrackerRunner::run() {
	running = true;
	
	// Keep retrying face localisation until it succeeds or stop() is called.
	while (running && localise() == -1 ) {}
	if (!running) return;
	
	//prepare hand classifier
	gc = Ptr< AbstractGestureClassifier >( new BoostGestureClassifier( "C0", "C0", hl->getProbabilityTransformer() ) );
	//prepare secondary hand classifier
	gc2 = Ptr< AbstractGestureClassifier >( new BoostGestureClassifier( "C0", "C0", hl->getProbabilityTransformer() ) );
	
	//init handtracker with paramset3
	ht = HandTracker::init( 10.0, 0.1, 100.0, 10.0,
						   trackingRect, frame, Ptr< Segmenter >( new Segmenter( hl->getProbabilityTransformer() ) ) );
	
   // -- run primary tracker and classifier -- //
   boost::thread trackerThread( boost::bind( &DoubleTrackerRunner::runTracker, this ) );
   boost::thread classifierThread( boost::bind( &DoubleTrackerRunner::runClassifier, this ) );
	
	// -- run face tracker -- //
	//prepare face tracker
	ft = new FaceTracker( hl->getProbabilityTransformer() );
	faceTrackingRect = trackingRect; //copy rect
	runningFaceTracker = true;
	
	boost::thread faceTrackerThread( boost::bind( &DoubleTrackerRunner::runFaceTracker, this ) );

	// 0 = single hand + face; 1 = secondary spawned, face still tracked;
	// 2 = both hands tracked, face tracker retired.
	int processPhase = 0;

	//VISUALISATION
	Ptr< Segmenter > visualisationSegmenter( new Segmenter( hl->getProbabilityTransformer() ) );
	
	while (running) {
		// Snapshot the rects/flags the worker threads write, so the rest of
		// this iteration (and the classifier threads, which read the buffered
		// copies) see stable values.
		trackingRectBuffer = trackingRect;
		faceTrackingRectBuffer = faceTrackingRect;
		isHandActiveBuffer = isHandActive;
		if (processPhase > 0) {
			secondaryTrackingRectBuffer = secondaryTrackingRect;
			isSecondaryActiveBuffer = isSecondaryActive;
			//Refer to HIGeom-inl.hpp for documentation on rectOfSecondInFirst
			primaryInSecondaryIntersectionRect = rectOfSecondInFirst( secondaryTrackingRectBuffer, trackingRectBuffer );
		}
		
		//VISUALISATION: draw face (blue), primary hand (green), secondary
		//hand (red) and the primary-in-secondary overlap (yellow) onto a copy
		//of the current frame.
		{
			Mat imgToDraw;
			frame.copyTo(imgToDraw);
			HiVisualiser::windowMatrixMap["doubletracker"] = imgToDraw;
			
			if (processPhase<2) {
				rectangle( imgToDraw, Point( faceTrackingRect.x, faceTrackingRect.y ),
						  Point( faceTrackingRect.x + faceTrackingRect.width, faceTrackingRect.y + faceTrackingRect.height ),
						  Scalar(255,0,0) );
			}
			rectangle( imgToDraw, Point( trackingRect.x, trackingRect.y ),
					  Point( trackingRect.x + trackingRect.width, trackingRect.y + trackingRect.height ),
					  Scalar(0,255,0) );
			if (processPhase>0) {
				rectangle( imgToDraw, Point( secondaryTrackingRect.x, secondaryTrackingRect.y ),
						  Point( secondaryTrackingRect.x + secondaryTrackingRect.width, secondaryTrackingRect.y + secondaryTrackingRect.height ),
						  Scalar(0,0,255) );
				if ( primaryInSecondaryIntersectionRect.width > 0 ) {
					//Note: To draw primaryInSecondaryIntersectionRect, in the correct frame of reference, use secondaryTrackingRectBuffer coords as offset
					int intersectionRectX = primaryInSecondaryIntersectionRect.x + secondaryTrackingRectBuffer.x;
					int intersectionRectY = primaryInSecondaryIntersectionRect.y+secondaryTrackingRectBuffer.y;
					rectangle( imgToDraw,
							  Point( intersectionRectX, intersectionRectY ),
							  Point( intersectionRectX + primaryInSecondaryIntersectionRect.width,
									intersectionRectY + primaryInSecondaryIntersectionRect.height ),
							  Scalar(0,255,255) );
					//TODO RAT
					// Blend the segmented contour mask (30%) onto the
					// secondary-hand region of the visualisation image.
					Mat handImg( frame, secondaryTrackingRectBuffer );
					visualisationSegmenter->segment( handImg, primaryInSecondaryIntersectionRect );
					if (visualisationSegmenter->didSegmentation()) {
						//mask for LK features
						Mat maskInBGR;
						
						cvtColor( visualisationSegmenter->getContourImage(), maskInBGR, CV_GRAY2BGR );
						
						Mat imgToDraw_handRect( imgToDraw, secondaryTrackingRectBuffer );
						
						imgToDraw_handRect = imgToDraw_handRect + ( 0.3 * maskInBGR );
					}
				}
			}
			HiVisualiser::refreshWindow("doubletracker");
			waitKey(1);//helps visualiser run more smoothly for strange reasons
		}
		
		setNextFrame();
		nextFrameReadyBar.wait(); //allow tracking of next rect to start while processing data from previous rect
		
		//tracking post-processing and data queue for send
		switch (processPhase) {
			case 0:
				// Hand has separated from the face: promote the face rect to
				// seed the secondary hand tracker. Each barrier is grown by 2
				// (one increment per new worker) before launching the threads.
				if (!rectanglesIntersect( trackingRectBuffer, faceTrackingRectBuffer )) {
					processPhase++;
					secondaryTrackingRect = faceTrackingRectBuffer;
					//init handtracker with paramset3
					ht2 = HandTracker::init( 10.0, 0.1, 100.0, 10.0,
													faceTrackingRectBuffer, frame, Ptr< Segmenter >( new Segmenter( hl->getProbabilityTransformer() ) ) );
					// -- run secondary tracker and classifier -- //
					nextFrameReadyBar.increment();
					computationCompleteBar.increment();
					boost::thread secondaryTrackerThread( boost::bind( &DoubleTrackerRunner::runSecondaryTracker, this ) );
					nextFrameReadyBar.increment();
					computationCompleteBar.increment();
					boost::thread secondaryClassifierThread( boost::bind( &DoubleTrackerRunner::runSecondaryClassifier, this ) );
				}
				break;
			case 1:
				// While the secondary rect still overlaps the face, report
				// only the primary hand's centre.
				if (rectanglesIntersect( secondaryTrackingRectBuffer, faceTrackingRectBuffer )) {
					trackDataServerPtr->putTrackData( isHandActiveBuffer, trackingRectBuffer.x+trackingRectBuffer.width/2,
													 trackingRectBuffer.y+trackingRectBuffer.height/2 );
					break;
				} else {
					//this is essentially a preamble to the default case. The preamble is only required during phase 1
					processPhase++;
					runningFaceTracker = false;
					//WARNING!!!
					//falls through to default!
				}
			default:
				// Phase 2: report the centres of both hands.
				trackDataServerPtr->putTrackData( isHandActiveBuffer, trackingRectBuffer.x+trackingRectBuffer.width/2,
												 trackingRectBuffer.y+trackingRectBuffer.height/2,
												 isSecondaryActiveBuffer, secondaryTrackingRectBuffer.x+secondaryTrackingRectBuffer.width/2,
												 secondaryTrackingRectBuffer.y+secondaryTrackingRectBuffer.height/2 );
				break;
		}

		computationCompleteBar.wait(); //wait for tracking to complete
	}
}

// Face tracker thread body. Active during phases 0 and 1 only; run() clears
// runningFaceTracker when entering phase 2 (and stop() clears it too).
void DoubleTrackerRunner::runFaceTracker() {
	while (runningFaceTracker) {
		nextFrameReadyBar.wait();
		ft->track( frame, faceTrackingRect );
		computationCompleteBar.wait();
	}
	// This thread leaves the barrier group permanently, so shrink both
	// barriers' participant counts to keep the remaining threads in step.
	nextFrameReadyBar.decrement();
	computationCompleteBar.decrement();
}

// Primary hand tracker thread body: advances trackingRect once per frame,
// kept in lock-step with the main loop by the two barriers.
void DoubleTrackerRunner::runTracker() {
	for (;;) {
		if (!running) break;

		nextFrameReadyBar.wait();       // main loop has published the next frame
		trackingRect = ht->track( frame );
		computationCompleteBar.wait();  // signal this frame's result is ready
	}
}
						   
void DoubleTrackerRunner::runClassifier() {
   while (running) {
	   nextFrameReadyBar.wait();
	   
	   Mat handImg( prevFrame, trackingRectBuffer );//NOTE: uses trackingRectBuffer for thread safety
	   
	   isHandActive = gc->isHandActive( handImg );
	   
	   computationCompleteBar.wait();
   }
}

// Secondary hand tracker thread body; started mid-session by run() (phase 0
// -> 1 transition), hence the extra initial barrier wait to fall into phase
// with the threads launched at startup.
void DoubleTrackerRunner::runSecondaryTracker() {
	computationCompleteBar.wait();//note: this single wait call is required to keep all threads working in phase
	while (running) {
		nextFrameReadyBar.wait();
		// NOTE(review): the visualiser in run() treats an intersection as
		// present when width > 0, while this test uses width < 0 for "no
		// intersection" — a zero-width rect therefore takes the two-argument
		// track() branch. Confirm rectOfSecondInFirst's no-overlap convention
		// (HIGeom-inl.hpp); this may be an off-by-one on the comparison.
		if ( primaryInSecondaryIntersectionRect.width < 0 ) { //no intersection;
			secondaryTrackingRect = ht2->track( frame );
		} else {
			// overlap with the primary hand: pass the intersection rect so
			// the tracker can account for it (see Segmenter usage in run())
			secondaryTrackingRect = ht2->track( frame, primaryInSecondaryIntersectionRect );
		}

		computationCompleteBar.wait();
	}
}

void DoubleTrackerRunner::runSecondaryClassifier() {
	computationCompleteBar.wait();//note: this single wait call is required to keep all threads working in phase
	while (running) {
		nextFrameReadyBar.wait();
		
		Mat handImg( prevFrame, secondaryTrackingRectBuffer );//NOTE: uses secondaryTrackingRectBuffer for thread safety
		
		isSecondaryActive = gc2->isHandActive( handImg );
		
		computationCompleteBar.wait();
	}
}

// Requests shutdown of all worker loops. The threads observe these flags at
// their next loop-condition check; this call does not join them.
void DoubleTrackerRunner::stop() {
	runningFaceTracker = false;
	running = false;
}

// -- protected methods -- //
int DoubleTrackerRunner::localise() {
	hl = new HandLocaliser();
	try {
		setNextFrame();
		trackingRect = hl->localiseFace( frame );
		//trackingRect = hl->localiseHand( frame );
		cout << "Face found" << endl;
	}
	catch (cv::Exception e) {
		if ( e.code == HI_ERR_NOFACEFOUND ) {
			return -1;
		} else {
			throw;
		}
	}
	
	//forcing rectangle to stay inside frame
	forceInside( frame.size(), trackingRect );
	
	return 0;
}


