#include <stdio.h>
#include "segmentacao.h"
#include "klt.h"
#include "camshift.h"
#include "trajectory.h"
#include "recognition.h"

#define TRACKING_CAMSHIFT
//#define TRACKING_KLT

#define VIDEO_INPUT

#define DRAWING

#define FEATURES 25

IplImage *global = NULL;

/*
 * Paint `global` (3-channel BGR, allocated in main with the same size) as a
 * binarized copy of the single-channel mask `image`: any nonzero mask pixel
 * becomes white (255,255,255), zero stays black.
 *
 * NOTE(review): assumes `global` is non-NULL and matches `image` in
 * width/height — true for the call site in main, but worth asserting.
 * The name "bla" is kept for callers; something like mask_to_global_bgr
 * would be clearer.
 */
void bla(IplImage *image) {
	int i, j;
	for (i = 0; i < image->height; i++) {
		/* Hoist the per-row base addresses out of the inner loop instead of
		 * recomputing `imageData + i*widthStep` for every pixel. */
		uchar *mask_row = (uchar *)(image->imageData + i * image->widthStep);
		uchar *out_row  = (uchar *)(global->imageData + i * global->widthStep);
		for (j = 0; j < image->width; j++) {
			/* Binarize: any nonzero mask value maps to full white. */
			uchar v = mask_row[j] ? 255 : 0;
			out_row[j * global->nChannels + 0] = v;
			out_row[j * global->nChannels + 1] = v;
			out_row[j * global->nChannels + 2] = v;
		}
	}
}

int main (void){
	skinRangeParam skinRange;
	IplImage *img, *img2,*img3 = NULL, *imgaux;
	
	#ifdef VIDEO_INPUT
	CvCapture *capture = cvCreateFileCapture("../videos/raul4.avi");
	if (capture == NULL) {
		printf("Houston! We gotta a problem!\n");
    exit(0);
	}
	img = cvQueryFrame(capture);
	#else
	//#ifdef CAMERA_INPUT
	CvCapture *capture = cvCaptureFromCAM(0);
	if( !capture ) {
		printf("Erro na camera!\n");
		return 1;
	}
	img = cvQueryFrame(capture);
	#endif
	
	
	char c = 0;
	CvPoint points[9], points_chosen[3];
	CvRect boxes[3];
	CvPoint2D32f *features = new CvPoint2D32f[20],*features2 = new CvPoint2D32f[20];
	char status[20];
	int cont_features = FEATURES;
	bool recalculate = true;
	trajectory trajetoria;
	
	
  skinRange = initializeGlobal();
  
  cvNamedWindow("HSV", CV_WINDOW_AUTOSIZE); 
  cvMoveWindow("HSV", 700, 100);
  
  cvCreateTrackbar("Max Hue","HSV",&skinRange.h_max,255,NULL);
  cvCreateTrackbar("Min Hue","HSV",&skinRange.h_min,255,NULL);
  cvCreateTrackbar("Max Saturation","HSV",&skinRange.s_max,255,NULL);
  cvCreateTrackbar("Min Saturation","HSV",&skinRange.s_min,255,NULL);
	
	//img = cvLoadImage("../imagens/moutza_double_chiracsmall.jpg");//cvQueryFrame(capture);
  /*img = cvLoadImage("../imagens/brain-hand-gestures.jpg");
	if (img == NULL)
  {
    printf("Houston! We gotta a problem!\n");
    exit(0);
  }*/
  
  img2 = cvCreateImage(cvGetSize(img),IPL_DEPTH_8U,1);
	imgaux = cvCreateImage(cvGetSize(img),IPL_DEPTH_8U,3);
	global = cvCreateImage(cvGetSize(img),IPL_DEPTH_8U,3);
	
	int *hist_h = new int[img->height];
	int *hist_v = new int[img->width];
  
	initializeTrajectory(&trajetoria);
	
	while (c != 'q'){
		
		//if ( c == 'a') {
			img = cvQueryFrame(capture);
			if (!img) break;
		//}
		
		cvCvtColor(img,imgaux,CV_RGB2HSV);
				
		updateSkinRange(imgaux,img2,skinRange);
		applyDilateErode(img2,2,5,3);
		
		bla(img2);
		
		if (recalculate){
			computeHistogramProfiles(img2,hist_h,hist_v,img->height,img->width);
			findIntersectionPoints(hist_h,img->height,hist_v,img->width,points);
			choosePoints(img2,points, points_chosen);

			for(int i=0; i < 3; i++) {
				boxes[i] = calculateBox(img2,points_chosen[i]);
			}
			#ifdef TRACKING_KLT
			cont_features = FEATURES;
			
			if (startKLT(img2,boxes[0],&cont_features,features)) {
				printf("Achou %d features\n",cont_features);
				for(int i = 0; i < cont_features; i++){
					features[i].x += boxes[0].x;
					features[i].y += boxes[0].y;
					cvCircle(global,cvPoint(features[i].x , features[i].y),5,cvScalar(0,255,0),-1,8);
				}
			}
			#endif
			recalculate = false;
		}
		
		#ifdef TRACKING_CAMSHIFT
		CvConnectedComp comp;
		CvBox2D box;
		
		camshiftTracking(img2,&(boxes[0]),&comp,&box);
		//boxes[0] = comp.rect;
		//cvEllipseBox( global, box, CV_RGB(255,0,0), 3, CV_AA, 0 );
		
		cvCircle(global,cvPoint(cvRound(box.center.x) , cvRound(box.center.y)),5,cvScalar(0,255,0),-1,8);
		#ifdef DRAWING
		cvRectangle(global,cvPoint(boxes[0].x,boxes[0].y),cvPoint(boxes[0].x + boxes[0].width, boxes[0].y + boxes[0].height),cvScalar(0,0,255),1);
		//cvRectangle(global,cvPoint(boxes[1].x,boxes[1].y),cvPoint(boxes[1].x + boxes[1].width, boxes[1].y + boxes[1].height),cvScalar(0,255,0),1);
		//cvRectangle(global,cvPoint(boxes[2].x,boxes[2].y),cvPoint(boxes[2].x + boxes[2].width, boxes[2].y + boxes[2].height),cvScalar(255,0,0),1);
		#endif
		CvPoint centroid_aux = cvPoint(cvRound(box.center.x),cvRound(box.center.y));
		updateTrajectory(&trajetoria,boxes[0],centroid_aux);
		gestureRecognition(&trajetoria);
		drawTrajectory(global,&trajetoria);
		
		#endif
		
		#ifdef TRACKING_KLT
		if (img3){
			int count = 0;
			kltTracking(img2,img3,features,features2,cont_features,status);
			CvPoint point_min,point_max;
			point_min.x = img->width + 1;
			point_min.y = img->height + 1;
			point_max.x = -1;
			point_max.y = -1;
			for(int i = 0; i < cont_features; i++){
				features[i].x = features2[i].x;
				features[i].y = features2[i].y;
				if (cvRound(features[i].x) < point_min.x) {
					point_min.x = cvRound(features[i].x);
				}
				
				if (cvRound(features[i].y) < point_min.y) {
					point_min.y = cvRound(features[i].y);
				}
				
				if (cvRound(features[i].x) > point_max.x) {
					point_max.x = cvRound(features[i].x);
				}
				
				if (cvRound(features[i].y) > point_max.y) {
					point_max.y = cvRound(features[i].y);
				}
				cvCircle(global,cvPoint(cvRound(features2[i].x) , cvRound(features2[i].y)),5,cvScalar(0,0,255),-1,8);
				if (!status[i])
					count++;
			}
			
			cvRectangle(global,point_min,point_max,cvScalar(0,0,255),1);
			
			
			printf("count = %d\n",count);
			if (count > 10)
				recalculate = true;
		}
		
		img3 = img2;
		#endif
		
		cvShowImage("Caixas", global); 
		cvShowImage("HSV",imgaux);
		
		c = cvWaitKey(66);
	}
	
	cvReleaseImage(&img);
	cvReleaseImage(&img2);
	cvReleaseImage(&imgaux);
	cvReleaseImage(&global);
	cvReleaseCapture( &capture );

	
	return(0);
}