#include <math.h>
#include <stdio.h>
#include <string.h>
#include <time.h>

#include <cv.h>
#include <highgui.h>

#include "config.h"

// Names of the four HighGUI display windows.
static const char* wndbrg	= "brg"		;   // raw BGR input frame
static const char* wdnmotion= "motion"	;   // motion-detection output
static const char* wdnseuil	= "seuil"	;   // thresholded binary mask
static const char* wdnresultv="resultv"	;   // colour-filtered result

// various tracking parameters (in seconds)
const double MHI_DURATION = 1;      // max age of motion kept in the MHI
const double MAX_TIME_DELTA = 0.5;  // upper delta for gradient/segmentation
const double MIN_TIME_DELTA = 0.05; // lower delta for the motion gradient
// number of cyclic frame buffer used for motion detection
// (should, probably, depend on FPS)
const int N = 4;

// ring image buffer (N grayscale frames, allocated in update_mhi)
IplImage **buf = 0;
int last = 0;  // index of the most recently written frame in buf

// temporary images (lazily (re)allocated in update_mhi)
IplImage *mhi = 0; // MHI: motion history image
IplImage *orient = 0; // orientation
IplImage *mask = 0; // valid orientation mask
IplImage *segmask = 0; // motion segmentation map
CvMemStorage* storage = 0; // temporary storage

// parameters:
//  img - input video frame
//  dst - resultant motion picture
//  args - optional parameters
// Update the motion-history image (MHI) from a new frame and draw the
// detected motion components into dst.
//
// Parameters:
//   img            - input video frame (BGR, 8-bit, 3 channels)
//   dst            - output image the motion components are drawn into
//   diff_threshold - threshold applied to the inter-frame difference
//
// Uses the file-level ring buffer `buf` plus mhi/orient/mask/segmask and
// `storage`; all of them are (re)allocated lazily in here.
static void  update_mhi( IplImage* img, IplImage* dst, int diff_threshold )
{
    double timestamp = (double)clock()/CLOCKS_PER_SEC; // get current time in seconds
    CvSize size = cvSize(img->width,img->height); // get current frame size
    int i, idx1 = last, idx2;
    IplImage* silh;
    CvSeq* seq;
    CvRect comp_rect;
    double count;
    double angle;
    CvPoint center;
    double magnitude;
    CvScalar color;

    // Allocate the buffers on the first call, or reallocate them when the
    // frame size changes.
    // BUGFIX: this condition used to be `if( 1==1 )` (the real test was
    // commented out), which reallocated and zeroed the ring buffer and MHI
    // on EVERY frame, destroying the accumulated motion history.
    if( !mhi || mhi->width != size.width || mhi->height != size.height )
    {
        if( buf == 0 ) {
            buf = (IplImage**)malloc(N*sizeof(buf[0]));
            memset( buf, 0, N*sizeof(buf[0]));
        }

        for( i = 0; i < N; i++ ) {
            cvReleaseImage( &buf[i] );
            buf[i] = cvCreateImage( size, IPL_DEPTH_8U, 1 );
            cvZero( buf[i] );
        }
        cvReleaseImage( &mhi );
        cvReleaseImage( &orient );
        cvReleaseImage( &segmask );
        cvReleaseImage( &mask );

        mhi = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        cvZero( mhi ); // clear MHI at the beginning
        orient = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        segmask = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        mask = cvCreateImage( size, IPL_DEPTH_8U, 1 );
    }

    cvCvtColor( img, buf[last], CV_BGR2GRAY ); // convert frame to grayscale

    idx2 = (last + 1) % N; // index of (last - (N-1))th frame
    last = idx2;

    silh = buf[idx2];
    cvAbsDiff( buf[idx1], buf[idx2], silh ); // get difference between frames

    cvThreshold( silh, silh, diff_threshold, 1, CV_THRESH_BINARY ); // and threshold it
    cvUpdateMotionHistory( silh, mhi, timestamp, MHI_DURATION ); // update MHI

    // Convert MHI to a blue 8u image (recent motion scaled to 0..255).
    cvCvtScale( mhi, mask, 255./MHI_DURATION,
                (MHI_DURATION - timestamp)*255./MHI_DURATION );
    cvZero( dst );
    cvCvtPlaneToPix( mask, 0, 0, 0, dst );

    // Calculate motion gradient orientation and valid orientation mask.
    cvCalcMotionGradient( mhi, mask, orient, MAX_TIME_DELTA, MIN_TIME_DELTA, 3 );

    if( !storage )
        storage = cvCreateMemStorage(0);
    else
        cvClearMemStorage(storage);

    // Segment motion: get the sequence of motion components.
    // segmask is the marked motion-components map; it is not used further.
    seq = cvSegmentMotion( mhi, segmask, storage, timestamp, MAX_TIME_DELTA );

    // Iterate through the motion components found in this frame.
    for( i = 0; i < seq->total; i++ )
    {
        comp_rect = ((CvConnectedComp*)cvGetSeqElem( seq, i ))->rect;
        // BUGFIX: divide by 2.0 — the old integer division truncated the
        // radius before storing it into the double `magnitude`.
        magnitude = (comp_rect.width + comp_rect.height) / 2.0;
        // Reject components that are too small or too large.
        if( comp_rect.width <= 5 || comp_rect.width > 50 ||
            comp_rect.height <= 5 || comp_rect.height > 50 )
            continue;
        color = CV_RGB(255,0,0);

        // Restrict all further computation to the component's ROI.
        cvSetImageROI( silh, comp_rect );
        cvSetImageROI( mhi, comp_rect );
        cvSetImageROI( orient, comp_rect );
        cvSetImageROI( mask, comp_rect );

        // Calculate the dominant motion orientation inside the component.
        angle = cvCalcGlobalOrientation( orient, mask, mhi, timestamp, MHI_DURATION);
        angle = 360.0 - angle;  // adjust for images with top-left origin

        count = cvNorm( silh, 0, CV_L1, 0 ); // number of points within silhouette ROI

        cvResetImageROI( mhi );
        cvResetImageROI( orient );
        cvResetImageROI( mask );
        cvResetImageROI( silh );

        // Skip components with too little motion inside them.
        if( count < comp_rect.width*comp_rect.height * 0.05 )
            continue;

        // Draw a clock with an arrow indicating the direction.
        center = cvPoint( (comp_rect.x + comp_rect.width/2),
                          (comp_rect.y + comp_rect.height/2) );

        cvCircle( dst, center, cvRound(magnitude), color, 3, CV_AA, 0 );
        cvLine( dst, center, cvPoint( cvRound( center.x + magnitude*cos(angle*CV_PI/180) * 0.8),
                cvRound( center.y - magnitude*sin(angle*CV_PI/180))), color, 3, CV_AA, 0 );
    }
}

// Colour-filter one BGR frame and feed the result to the motion detector.
// Keeps only pixels in two red hue bands that are also strongly saturated
// and bright, shows the intermediate images in the display windows, then
// runs update_mhi() on the filtered frame. Always returns 0.
//
// NOTE: pixel access goes through IplImage's `char* imageData`; on
// platforms where `char` is signed, 8-bit channel values >= 128 read back
// as negative ints — all the comparisons below rely on that.
static int traitercouleurs(IplImage* brgFrame)
{
	// Working copies of the image for the transforms.
	IplImage* seuilFrame	= cvCreateImage( cvGetSize(brgFrame), IPL_DEPTH_8U, 1 );
	IplImage* hsvFrame 		= cvCreateImage( cvGetSize(brgFrame), IPL_DEPTH_8U, 3 );
	IplImage* hsvFrame2v	= cvCreateImage( cvGetSize(brgFrame), IPL_DEPTH_8U, 3 );
	IplImage* resultFramev	= cvCreateImage( cvGetSize(brgFrame), IPL_DEPTH_8U, 3 );
	
	// Convert BGR -> HSV.
	cvCvtColor( brgFrame, hsvFrame, CV_BGR2HSV );
	// For each pixel...
	int x;
	int y;
	for(y = 0; y < (cvGetSize(hsvFrame).height); y++) 
	{
		for(x = 0; x < (cvGetSize(hsvFrame).width); x++) 
		{
			// Channel values, read through signed char (128..255 -> -128..-1).
			int val_H = (hsvFrame->imageData + hsvFrame->widthStep*y)[x*3  ];
			int val_S = (hsvFrame->imageData + hsvFrame->widthStep*y)[x*3+1];
			int val_V = (hsvFrame->imageData + hsvFrame->widthStep*y)[x*3+2];
			
			// Default output pixel: keep the hue, zero saturation and value.
			(hsvFrame2v->imageData + hsvFrame2v->widthStep*y)[ x*3  ] = val_H;
			(hsvFrame2v->imageData + hsvFrame2v->widthStep*y)[ x*3+1] = 0;
			(hsvFrame2v->imageData + hsvFrame2v->widthStep*y)[ x*3+2] = 0;
			
			// Default mask pixel: not selected.
			(seuilFrame->imageData + seuilFrame->widthStep*y)[x] = 0;
			
			if( (val_H >=-85 && val_H <=-75) ||	// purplish red: signed -85..-75, i.e. unsigned 171..181 (-90..-75 for a wider band)
				(val_H >=0 && val_H <=10) ) 	// orange red: 0..10
			{
				
				if (val_S<0 ) // not too grey: signed saturation < 0, i.e. S >= 128 (excludes white)
				{
					(hsvFrame2v->imageData + hsvFrame2v->widthStep*y)[ x*3+1] = val_S;
					
					if ( val_V<=0) // bright enough: signed value <= 0, i.e. V >= 128 — NOTE(review): also matches V == 0 (black); confirm intended
				    {
						(seuilFrame->imageData + seuilFrame->widthStep*y)[ x    ] = 255;
						(hsvFrame2v->imageData + hsvFrame2v->widthStep*y)[ x*3+2] = val_V;
				    }
				}
			}
		}
	} // end of the per-pixel loops
    
	// Convert the filtered HSV image back to displayable BGR.
	// NOTE(review): the second call converts resultFramev in place
	// (same size/type); appears to work, but confirm this OpenCV build
	// supports in-place cvCvtColor.
	cvCvtColor( hsvFrame2v, resultFramev, CV_HSV2RGB );
	cvCvtColor( resultFramev, resultFramev, CV_RGB2BGR );
	
    cvShowImage( wndbrg		, brgFrame		);
    cvShowImage( wdnseuil	, seuilFrame	);
    cvShowImage( wdnresultv	, resultFramev	);
    
    
    // Run motion detection on the colour-filtered frame and display it.
    IplImage* motion = 0;
    motion = cvCreateImage( cvSize(resultFramev->width,resultFramev->height), 8, 3 );
    cvZero( motion );
    motion->origin = resultFramev->origin;
    update_mhi( resultFramev, motion, 30 );
    cvShowImage(wdnmotion	, motion	);
	cvReleaseImage( &motion	);
    
	// Free the working images.
	cvReleaseImage( &seuilFrame	);
	cvReleaseImage( &hsvFrame	);
	cvReleaseImage( &hsvFrame2v	);
	cvReleaseImage( &resultFramev);
    return 0;
}

// Entry point of the motion-detection test.
//   argc == 1 : iterate over the still images listed in `names` (config.h);
//   argc == 2 : grab frames from camera #1;
//   otherwise : play the video file `folder` + `film` (config.h).
// Returns 0 (also on capture failure, after printing a diagnostic).
int test6motion(int argc, char** argv)
{
	IplImage* brgFrame;

	// Create and lay out the four display windows.
	cvNamedWindow( wndbrg		, 2);
	cvNamedWindow( wdnresultv	, 2);
	cvNamedWindow( wdnmotion	, 2);
	cvNamedWindow( wdnseuil		, 2);
	cvMoveWindow(wndbrg		,   0,  500);
	cvMoveWindow(wdnresultv	, 320,  500);
	cvMoveWindow(wdnseuil	, 640,  500);
	cvMoveWindow(wdnmotion	, 920,  500);

	if (argc != 1)
	{
		double timestamp;
		double timestamp2;
		CvCapture* camera;
		if (argc == 2)
		{
			// Capture from the camera.
			camera = cvCaptureFromCAM( 1 );
			printf("<test6motion> : caméra\n");
		}
		else
		{
			// Capture from a video file.
			// BUGFIX: build the path with snprintf — the old strcat pair
			// could overflow the 40-byte buffer.
			char nomFilm[40] = "";
			snprintf(nomFilm, sizeof nomFilm, "%s%s", folder, film);
			camera = cvCaptureFromFile( nomFilm );
			printf("<test6motion> : vidéo : %s\n", nomFilm);
		}
		if( !camera )
		{
			// BUGFIX: the capture handle was never checked; a missing
			// camera/file crashed inside cvGrabFrame.
			printf("<test6motion> : échec d'ouverture de la capture\n");
		}
		else
		{
			timestamp = (double)clock()/CLOCKS_PER_SEC; // get current time in seconds
			for(;;)
			{
				// Grab and decode the next frame.
				if( !cvGrabFrame( camera ) )
					break; // end of stream
				brgFrame = cvRetrieveFrame( camera );
				if( !brgFrame )
					break; // BUGFIX: cvRetrieveFrame may return NULL
				traitercouleurs(brgFrame);
				if( cvWaitKey(10) >= 0 )
					break;
				timestamp2 = (double)clock()/CLOCKS_PER_SEC;
				printf("<test6motion> : temps entre deux images : %f\n", timestamp2 - timestamp);
				timestamp = timestamp2;
			}
			cvReleaseCapture( &camera	);
			// Do NOT release brgFrame here: it is owned by the capture.
		}
	}
	else
	{
		int numImage;
		for( numImage = 0; names[numImage] != 0; numImage++ )
		{
			char nomImage[40] = "";
			int touche;
			// BUGFIX: snprintf instead of strcat (overflow-safe path build).
			snprintf(nomImage, sizeof nomImage, "%s%s", folder, names[numImage]);
			brgFrame = cvLoadImage( nomImage, 1 );
			if( !brgFrame )
			{
				printf("<test6motion> Couldn't load %s\n", names[numImage] );
				continue;
			}
			traitercouleurs(brgFrame);
			touche = cvWaitKey(0);
			// BUGFIX: release before the ESC test — the old code leaked
			// the loaded image when breaking out of the loop.
			cvReleaseImage( &brgFrame);
			if( (char)touche == 27 ) // ESC stops the slideshow
				break;
		}
	}

	cvDestroyWindow( wndbrg		);
	cvDestroyWindow( wdnresultv	);
	cvDestroyWindow( wdnmotion	);
	cvDestroyWindow( wdnseuil	);
	return 0;
}