/*************************************************************************
    Projet  -  Sport Collectif pour Robots
                             -------------------
    Auteur  -  Basile Wolfrom
*************************************************************************/

//---- Realisation de la classe <VideoSource> (fichier VideoSource.cpp) --

//---------------------------------------------------------------- INCLUDE

//-------------------------------------------------------- Include systeme
#ifdef __APPLE__	// mac
#include <opencv/openCV.h>
#include <opencv/highgui.h>
#endif


#ifdef __linux__	// gcc (egcs) on linux
#include <opencv.hpp>
#include <highgui/highgui.hpp>
#endif

#include <pthread.h>
#include "sys/time.h"

//------------------------------------------------------ Include personnel
#include "VideoSource.h"
#include "../config.h"
#include "../util.h"


using namespace cv;

//------------------------------------------------------------- Constantes
#define GUI_FPS						30 				// GUI refresh rate (frames per second)
#define GUI_SLEEP_TIME				(1000 / GUI_FPS)	// ms per frame; parenthesized so the macro expands safely inside larger expressions
#define OUTLIER_THRESHOLD_INIT		100000.0			// outlier threshold for the init phase (floating point)
#define OUTLIER_THRESHOLD			100000.0			// outlier threshold for the tracking loop (floating point)
#define SHIFT						0.2				// fractional border offset used to seed the barycentres (floating point)

//----------------------------------------------------------- Types prives

// Parameter bundle smuggled through OpenCV's void* mouse-callback user-data
// pointer: carries the target VideoSource instance and the member function
// to dispatch the event to (see pMouseCallbackWrapper).
struct mouseCallbackPm
{
	VideoSource* pThis;	// instance the callback is forwarded to
	void (VideoSource::*pFunc)(int event, int x, int y, int flags);	// member handler (e.g. VideoSource::on_mouse)
};

//----------------------------------------------------------- Declarations

extern cv::Vec2d afficheObjectifRobotG;
extern cv::Vec2d afficheObjectifRobotD;

//----------------------------------------------------------------- PUBLIC
//-------------------------------------------------------- Fonctions amies

//----------------------------------------------------- Methodes publiques

bool VideoSource::LancerAnalyse()
{
	if(analyseThreadLoop)
	{
		//le thread est deja lance
		return false;
	}
	analyseThreadLoop = true;

	LOG4CPLUS_INFO(loggerVision, "Lancement Vision");

	//lancer le thread d'analyse d'image
	visionParams = new val_st<VideoSource>;
	visionParams->pThis = this;
	visionParams->pFunc = this->visionThreadWrapper();
	if( pthread_create(&analyseThreadId, NULL, &pThreadWrapper<VideoSource>, static_cast<void*>(visionParams)) != 0 )
	{
		//echec de la creation du thread
		return false;
	}
	return true;
}

// Stops the image-analysis thread and waits for it to terminate.
// Returns -1 when no thread is running, otherwise the pthread_join()
// return code (0 on success).
int VideoSource::ArreterAnalyse()
{
	// Check-and-clear the running flag under the mutex so we do not race
	// with LancerAnalyse() or the worker's isAnalyseThreadRunning() reads.
	pthread_mutex_lock(&lockAnalyseThreadLoop);
	if(!analyseThreadLoop) {
		pthread_mutex_unlock(&lockAnalyseThreadLoop);
		return -1;
	}
	analyseThreadLoop = false;
	pthread_mutex_unlock(&lockAnalyseThreadLoop);

	LOG4CPLUS_INFO(loggerVision, "Arret Vision");

	// Wait for the image-analysis thread to terminate
	int cr = pthread_join(analyseThreadId, NULL);

	// Cleanup; reset the pointer so a second call cannot double-delete.
	delete visionParams;
	visionParams = NULL;

	return cr;
}

//------------------------------------------------- Surcharge d'opérateurs
//VideoSource & VideoSource::operator = ( const $MODULE$ & un$MODULE$ )
// Algorithme :
//
//{
//} //----- Fin de operator =


//-------------------------------------------- Constructeurs - destructeur

// Builds a video source tracking a field of _largeurTerrain x _longueurTerrain
// pixels; detection results are fanned out to every queue in _queueList.
VideoSource::VideoSource(int _largeurTerrain, int _longueurTerrain, list<MsgQueue<DataVision>*>& _queueList)
{
	if (pthread_mutex_init(&lockAnalyseThreadLoop, NULL) != 0)
	{
		// The loop-flag mutex could not be created: the start/stop logic is
		// unusable, so at least trace the failure instead of staying silent.
		LOG4CPLUS_ERROR(loggerVision, "[VI] pthread_mutex_init failed for lockAnalyseThreadLoop");
	}
	analyseThreadLoop = false;
	waiting_corners = true;	// InitField() loops until 4 corners are clicked
	corners = 0;

	bRect_reel = Rect_<double>(0, 0, 104, 165); 	// Real field dimensions (table football)
	bRect = Rect_<double>(0, 0, 441, 700); 			// Display dimensions in pixels

	// Keep a copy of the message queues the vision thread publishes to
	list<MsgQueue<DataVision>*>::iterator i;
	for(i=_queueList.begin(); i != _queueList.end(); ++i)
	{
		queueList.push_back(*i);
	}

	width = _largeurTerrain;
	height = _longueurTerrain;

	// Seed the clustering with barycentres near the robots' expected start
	// corners; SHIFT is a fractional offset from the image border.
	previousBaryCentre_1.x = height - (double)SHIFT * (double)height / bRect.height;
	previousBaryCentre_1.y = (double)SHIFT * (double)width / bRect.width;
	previousBaryCentre_2.x = height - (double)SHIFT * (double)height / bRect.height;
	previousBaryCentre_2.y = width - (double)SHIFT * (double)width / bRect.width;

	previousBar_1_blanc.x = previousBaryCentre_1.x;
	previousBar_1_blanc.y = previousBaryCentre_1.y;
	previousBar_2_blanc.x = previousBaryCentre_2.x;
	previousBar_2_blanc.y = previousBaryCentre_2.y;

	LOG4CPLUS_TRACE(loggerVision, "[VI] Appel au constructeur de <VideoSource>");
}


// Destructor: releases the loop-flag mutex.
// NOTE(review): ArreterAnalyse() is not called here — if the analysis thread
// is still running, the mutex is destroyed while potentially in use.
// Presumably callers are expected to stop the thread first — TODO confirm.
VideoSource::~VideoSource( )
{
	pthread_mutex_destroy(&lockAnalyseThreadLoop);
	LOG4CPLUS_TRACE(loggerVision, "Appel au destructeur de <VideoSource>");

  //  cvReleaseCapture(&capture);
}


// Mouse handler used during field calibration (InitField): each left click
// records one field corner in pixel coordinates until 4 have been collected,
// after which waiting_corners turns false.
void VideoSource::on_mouse(int event, int x, int y, int flags)
{
	// Guard against extra clicks after the 4th corner: the callback can still
	// fire while the window is being torn down, and writing p_corners[4]
	// would be an out-of-bounds access.
	if(event==CV_EVENT_LBUTTONDOWN && corners < 4)
	{
		p_corners[corners][0] = x;
		p_corners[corners][1] = y;
		corners++;
		waiting_corners = corners < 4;
	}
}

void pMouseCallbackWrapper(int event, int x, int y, int flags, void* dat_s)
{
        struct mouseCallbackPm* v = static_cast<mouseCallbackPm*>(dat_s);
        VideoSource* pThis = v->pThis;
        void ((VideoSource::*pFunc)(int event, int x, int y, int flags)) = v->pFunc;
        //Launch method
        (pThis->*pFunc)(event, x, y, flags);
}

// Grabs one frame, warps it into the calibrated field rectangle and runs an
// initial robot detection to decide which detected robot is the left one
// (idRobot1/idRobot2). Returns false when the capture is unusable or no
// frame could be grabbed, true otherwise.
bool VideoSource::init(CvCapture* capture)
{
	if (!capture)
	{
		LOG4CPLUS_WARN(loggerVision, "Capture is NULL");
		return false;
	}

	// Todo : factor out frame acquisition + warp (duplicated in visionThread)
	IplImage *frame = 0;
	Ptr<IplImage> image = 0;

	// Source quadrilateral: the 4 field corners clicked during InitField()
	CvPoint2D32f c2[4];
	CvPoint2D32f c1[4] = {cvPoint2D32f(p_corners[0][0],p_corners[0][1]),
			cvPoint2D32f(p_corners[1][0],p_corners[1][1]),
			cvPoint2D32f(p_corners[2][0],p_corners[2][1]),
			cvPoint2D32f(p_corners[3][0],p_corners[3][1])};

	// Detection result
	PosKhepera positionsRobots;

	frame = cvQueryFrame(capture);
	if (!frame)
	{
		// cvQueryFrame can return NULL (camera unplugged, read error);
		// dereferencing it below would segfault.
		LOG4CPLUS_WARN(loggerVision, "Capture returned no frame during init");
		return false;
	}
	if(!image)
	{
		image = cvCreateImage(cvSize(width, height), 8, 3);
		image->origin = frame->origin;
		// Destination quadrilateral: the full rectified image
		c2[0] = cvPoint2D32f(image->width,image->height);
		c2[1] = cvPoint2D32f(image->width,0);
		c2[2] = cvPoint2D32f(0,0);
		c2[3] = cvPoint2D32f(0,image->height);
	}

	// Compute the perspective transform and rectify the frame
	mmat = cvCreateMat(3, 3, CV_32FC1);
	mmat = cvGetPerspectiveTransform(c1, c2, mmat);
	cvWarpPerspective(frame,image,mmat);

	// Locate the robots on the rectified frame
	positionsRobots = trouverNosKheperas(image, OUTLIER_THRESHOLD_INIT);

	// Assign left/right robot ids from the initial x positions
	if(positionsRobots.position1[0] < positionsRobots.position2[0]){
		idRobot1 = (int)ROBOT_GAUCHE;
		idRobot2 = (int)ROBOT_DROITE;
	}
	else{
		idRobot2 = (int)ROBOT_GAUCHE;
		idRobot1 = (int)ROBOT_DROITE;
	}

	return true;
}


// Interactive calibration: opens the camera in a "Terrain" window and lets
// the user click the 4 field corners (collected by on_mouse through the
// mouse-callback trampoline). Returns false when no camera is available,
// true otherwise (including when the user aborts with ESC).
bool VideoSource::InitField()
{
	CvCapture* capture = cvCreateCameraCapture(CV_CAP_ANY); //cvCaptureFromCAM(ParamHandler::handler.getParam<int>("-cam"));
	if (!capture) {
		return false;
	}

	cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH, 1600);
	cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT, 1200);

	cvNamedWindow("Terrain", CV_WINDOW_NORMAL);
	cvResizeWindow("Terrain", 600, 450);

	// Route mouse events back to this instance through the trampoline.
	// The struct only needs to outlive the loop below, so the stack is fine.
	mouseCallbackPm mouseCallbackParams;
	mouseCallbackParams.pThis = this;
	mouseCallbackParams.pFunc = this->mouseCallbackWrapper();
	cvSetMouseCallback("Terrain", &pMouseCallbackWrapper, static_cast<void*>(&mouseCallbackParams));

	LOG4CPLUS_INFO(loggerVision, "Cliquez sur les 4 coins du terrain, en commençant par votre côté et dans le sens de la longueur");

	// Show the live feed until 4 corners are clicked or ESC is pressed
	for (IplImage* frame = 0; waiting_corners; )
	{
		frame = cvQueryFrame(capture);
		if (!frame)
		{
			continue;
		}

		cvShowImage("Terrain", frame);
		//cvResizeWindow( "Terrain", 600, 450 );

		if ((char)cvWaitKey(GUI_SLEEP_TIME) == 27)	// ESC aborts calibration
		{
			break;
		}
	}

	cvReleaseCapture(&capture);
	cvDestroyWindow("Terrain");
	LOG4CPLUS_TRACE(loggerVision, "[VISION] Fin de l'initField");
	return true;
}


//---------------------------------------------------------------- PROTEGE

//----------------------------------------------------- Methodes protegees

// Thread-safe read of the analysis-loop flag (shared with LancerAnalyse /
// ArreterAnalyse, which write it under the same mutex).
bool VideoSource::isAnalyseThreadRunning()
{
	pthread_mutex_lock(&lockAnalyseThreadLoop);
	const bool running = analyseThreadLoop;
	pthread_mutex_unlock(&lockAnalyseThreadLoop);
	return running;
}

// Main analysis loop, run on the worker thread started by LancerAnalyse():
// grabs camera frames, warps them into the calibrated field rectangle,
// locates the ball and both robots, converts pixel coordinates into real
// field coordinates and publishes a DataVision message on every registered
// queue. Exits when the capture fails, isAnalyseThreadRunning() turns false,
// or the user presses ESC in the video window.
void VideoSource::visionThread()
{
	// Debug windows for the colour-filter masks produced by trouverNosKheperas
	if(SHOW_WINDOWS)
	{
		cvNamedWindow("Bleu");
		cvResizeWindow( "Bleu", LARGEUR_TERRAIN_IMAGE_X, LONGUEUR_TERRAIN_IMAGE_Y );
		cvNamedWindow("Blanc");
		cvResizeWindow( "Blanc", LARGEUR_TERRAIN_IMAGE_X, LONGUEUR_TERRAIN_IMAGE_Y );
		//cvCreateTrackbar("Tolerance", "Blanc", &COLOR_TOLERANCE_WHITE, 256, 0);
		//cvCreateTrackbar("Tolerance", "Bleu", &COLOR_TOLERANCE_BLUE, 256, 0);
	}

	LOG4CPLUS_TRACE(loggerVision, "[VISION] Lancement de VideoSource");

	CvCapture* capture = NULL;
	capture = cvCreateCameraCapture (CV_CAP_ANY);//cvCaptureFromCAM(ParamHandler::handler.getParam<int>("-cam"));

	if (capture == NULL) {
		LOG4CPLUS_ERROR(loggerVision, "[VISION] Capture failed.");
		return;
	}

	cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_WIDTH, CAM_WIDTH);
	cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_HEIGHT, CAM_HEIGHT);

	if(SHOW_FLUX_VIDEO)
	{
		LOG4CPLUS_TRACE(loggerVision, "[VISION] Name & Resize window");
		cvNamedWindow("FluxVideo");
		cvResizeWindow( "FluxVideo", LARGEUR_TERRAIN_IMAGE_X, LONGUEUR_TERRAIN_IMAGE_Y );
	}

	IplImage *frame = 0;
	Ptr<IplImage> image = 0;	// rectified frame, lazily allocated on the first iteration

	// c1 = source quadrilateral (the 4 corners clicked during InitField),
	// c2 = destination quadrilateral (filled in once image exists)
	CvPoint2D32f c2[4];
	CvPoint2D32f c1[4] = {cvPoint2D32f(p_corners[0][0],p_corners[0][1]),
			cvPoint2D32f(p_corners[1][0],p_corners[1][1]),
			cvPoint2D32f(p_corners[2][0],p_corners[2][1]),
			cvPoint2D32f(p_corners[3][0],p_corners[3][1])};

	// Per-iteration result variables
	timeval time;
	long timeStamp;
	Position positionBalle;
	Position positionRobot1;
	Position positionRobot2;
	PosKhepera positionsRobots;
	CvPoint balleP;		// ball position in pixels (for drawing)
	CvPoint robot1;		// robot positions in pixels (for drawing)
	CvPoint robot2;
	CvPoint direction1;	// heading endpoints in pixels (for drawing)
	CvPoint direction2;

	CvPoint robot1Obj;	// objective markers (for drawing)
	CvPoint robot2Obj;

	// Initialise the robot positions by detecting them on the first frame
	init(capture);

	// NOTE(review): init() already allocated mmat with cvCreateMat; this
	// second allocation leaks the first matrix — TODO confirm and remove one.
	mmat = cvCreateMat(3, 3, CV_32FC1);

	while( (frame = cvQueryFrame(capture)) && isAnalyseThreadRunning() )
	{
		// Defensive NULL check (the while condition already exits on a NULL
		// frame, so this branch should be unreachable)
		if(!frame)
		{
			LOG4CPLUS_WARN(loggerVision, "Pas de frame à afficher.");
			continue;
		}

		// Timestamp in milliseconds. NOTE(review): the whole expression is
		// evaluated in double because of the + 0.5, then truncated back to
		// long — the + 0.5 has no rounding effect since the integer part is
		// already exact; TODO confirm intent.
		gettimeofday(&time, NULL);
		timeStamp = time.tv_sec*1000 + time.tv_usec/1000 + 0.5;
		if(!image)
		{
			// First frame: allocate the rectified image and fix the
			// destination quadrilateral to its full extent
			image = cvCreateImage(cvSize(width, height), 8, 3);
			image->origin = frame->origin;
			c2[0] = cvPoint2D32f(image->width,image->height);
			c2[1] = cvPoint2D32f(image->width,0);
			c2[2] = cvPoint2D32f(0,0);
			c2[3] = cvPoint2D32f(0,image->height);
		}

		// Compute the perspective transform and rectify the frame
		mmat = cvGetPerspectiveTransform(c1, c2, mmat);
		cvWarpPerspective(frame,image,mmat);

		// Locate the ball
		Vec2d balle = trouverBalleImage(image);

		// Locate the robots
		positionsRobots = trouverNosKheperas(image, OUTLIER_THRESHOLD);

		if(SHOW_FLUX_VIDEO)
		{
			// Keep the pixel-space positions for drawing before they are
			// rescaled to field coordinates below
			balleP.x= balle[0];
			balleP.y= balle[1];

			robot1.x= positionsRobots.position1[0];
			robot1.y= positionsRobots.position1[1];

			robot2.x= positionsRobots.position2[0];
			robot2.y= positionsRobots.position2[1];

			// Heading endpoint = position + 10x the unit direction vector
			direction1.x= positionsRobots.direction1[0]*10 + positionsRobots.position1[0];
			direction1.y= positionsRobots.direction1[1]*10 + positionsRobots.position1[1];

			direction2.x= positionsRobots.direction2[0]*10 + positionsRobots.position2[0];
			direction2.y= positionsRobots.direction2[1]*10 + positionsRobots.position2[1];
		}

		// Rescale pixel values to real-field units and flip the y axis
		// (field origin at the opposite side)
		balle[0] = balle[0] * bRect_reel.width / bRect.width;
		balle[1] = -(balle[1] * bRect_reel.height / bRect.height) + LONGUEUR_TERRAIN_Y;
		positionsRobots.position1[0] = positionsRobots.position1[0] * bRect_reel.width / bRect.width;
		positionsRobots.position1[1] = LONGUEUR_TERRAIN_Y - positionsRobots.position1[1] * bRect_reel.height / bRect.height;
		positionsRobots.position2[0] = positionsRobots.position2[0] * bRect_reel.width / bRect.width;
		positionsRobots.position2[1] = LONGUEUR_TERRAIN_Y - positionsRobots.position2[1] * bRect_reel.height / bRect.height;
		positionsRobots.direction1[1] = -positionsRobots.direction1[1];
		positionsRobots.direction2[1] = -positionsRobots.direction2[1];

		// Package the ball and robot coordinates with the common timestamp
		positionBalle.coord = balle;
		positionBalle.timestamp = timeStamp;

		positionRobot1.coord = positionsRobots.position1;
		positionRobot1.orientation = positionsRobots.direction1;
		positionRobot1.timestamp = timeStamp;
		positionRobot2.coord = positionsRobots.position2;
		positionRobot2.orientation = positionsRobots.direction2;
		positionRobot2.timestamp = timeStamp;


		// Publish the measurements

		// Build the message (idRobot1/idRobot2 were decided by init())
		DataVision data;
		data.posBalle = positionBalle;
		data.posRobots[idRobot1] = positionRobot1;
		data.posRobots[idRobot2] = positionRobot2;

		// Fan the message out to every registered queue
		list<MsgQueue<DataVision>*>::iterator i;
		for(i=queueList.begin(); i != queueList.end(); ++i)
		{
			 (*i)->push(data);
		}

		if(SHOW_FLUX_VIDEO)
		{
			// Draw the ball
			cvCircle (image, balleP, 20, CV_RGB(0,255,0), 3, 8, 0 );

			// Draw the two robots
			cvCircle (image, robot1, 20, CV_RGB(255,255,0), 3, 8, 0 );
			cvCircle (image, robot2, 20, CV_RGB(0,255,255), 3, 8, 0 );

			// Draw the heading markers
			cvCircle (image, direction1, 20, CV_RGB(255,255,255), 3, 8, 0 );
			cvCircle (image, direction2, 20, CV_RGB(0,0,0), 3, 8, 0 );


			// Draw the current objectives (set elsewhere through the
			// afficheObjectifRobotG/D externs), converted back to pixels
			robot1Obj.x = afficheObjectifRobotG[0] * bRect.width / bRect_reel.width;
			robot1Obj.y = (LONGUEUR_TERRAIN_Y - afficheObjectifRobotG[1]) * bRect.height / bRect_reel.height ;
			robot2Obj.x = afficheObjectifRobotD[0] * bRect.width / bRect_reel.width;
			robot2Obj.y = (LONGUEUR_TERRAIN_Y - afficheObjectifRobotD[1]) * bRect.height / bRect_reel.height;

			cvCircle (image, robot1Obj, 10, CV_RGB(111,111,0), 3, 8, 0 );
			cvCircle (image, robot2Obj, 10, CV_RGB(111,111,111), 3, 8, 0 );


			// Display the annotated frame
			cvShowImage("FluxVideo", image);
		}

		// Pace the loop; ESC exits
		char c = cvWaitKey(GUI_SLEEP_TIME);
		if(c==27)
			break;
	}
	cvReleaseCapture(&capture);
	cvDestroyWindow("FluxVideo");
	//return NULL;
}



//------------------------------------------------------------------ PRIVE

//------------------------------------------------------- Methodes privees

// Detects the ball on the rectified frame: keeps the pixels whose channel 2
// (red, assuming BGR order) dominates both other channels by
// COLOR_TOLERANCE_RED, erodes the resulting mask, then returns the
// barycentre of the surviving pixels as (x, y) in image coordinates.
// Returns (0, 0) when no pixel survives the filter.
Vec2d VideoSource::trouverBalleImage(const Ptr<IplImage> image)
{
	Vec2d position;

	const int srcStep = image->widthStep;
	const int srcChannels = image->nChannels;
	uchar* srcData = (uchar*)image->imageData;

	// Single-channel binary mask of "red enough" pixels
	IplImage* mask = cvCreateImage(cvSize(image->width, image->height), 8, 1);
	mask->origin = image->origin;
	const int maskStep = mask->widthStep;
	const int maskChannels = mask->nChannels;
	uchar* maskData = (uchar*)mask->imageData;

	// Pass 1: threshold on red dominance
	for (int row = 0; row < image->height; ++row)
	{
		for (int col = 0; col < image->width; ++col)
		{
			const uchar* px = srcData + row * srcStep + col * srcChannels;
			const bool isRed = (px[2] > COLOR_TOLERANCE_RED + px[1])
					&& (px[2] > COLOR_TOLERANCE_RED + px[0]);
			maskData[row * maskStep + col * maskChannels] = isRed ? 255 : 0;
		}
	}

	// Remove isolated pixels
	cvErode(mask, mask, 0, 2);

	// Pass 2: barycentre of the remaining pixels
	int sumRow = 0;
	int sumCol = 0;
	int count = 0;
	for (int row = 0; row < image->height; ++row)
	{
		for (int col = 0; col < image->width; ++col)
		{
			if (maskData[row * maskStep + col * maskChannels] == 255)
			{
				sumRow += row;
				sumCol += col;
				++count;
			}
		}
	}
	if (count != 0)
	{
		// Note the axis swap: x is the column average, y the row average.
		position[1] = (int)(sumRow / count);
		position[0] = (int)(sumCol / count);
	}
	cvReleaseImage(&mask);

	return position;
}


// Detects the two Khepera robots in the lower half of the rectified frame.
// Pipeline: (1) threshold the blue markers, (2) run a 2-means clustering
// seeded with the barycentres from the previous call to split the blue
// pixels into two robots, (3) threshold the white markers inside a square
// window around each blue barycentre to find each robot's heading marker,
// (4) convert each (blue, white) barycentre pair into a centre position and
// a unit direction vector.
// NOTE(review): the outlier_threshold parameter is never used in this body —
// presumably planned outlier rejection that was never implemented; confirm.
PosKhepera VideoSource::trouverNosKheperas(const IplImage * const image, double outlier_threshold)
{
	int image_step = image->widthStep;
	int image_channels = image->nChannels;
	uchar * image_data = (uchar *)image->imageData;

	// Single-channel binary mask, reused for the blue then the white filter
	IplImage * filtered = cvCreateImage(cvSize(image->width, image->height), 8, 1);
	filtered->origin = image->origin;
	int filtered_step = filtered->widthStep;
	int filtered_channels = filtered->nChannels;
	uchar * filtered_data = (uchar *)filtered->imageData;

	// Keep only the blue pixels (lower half of the image only)
	for (int i = image->height / 2 ; i < image->height; i++)
	{
//		for (int j = image->width ; j < image->width; j++)
		for (int j = 0; j < image->width; j++)
		{
			if (((image_data[i * image_step + j * image_channels]) > (COLOR_TOLERANCE_BLUE + image_data[i
					* image_step + j * image_channels + 1])) && ((image_data[i * image_step + j
					* image_channels]) > (COLOR_TOLERANCE_BLUE + image_data[i * image_step + j
					* image_channels + 2])))
				filtered_data[i * filtered_step + j * filtered_channels] = 255;
			else
				filtered_data[i * filtered_step + j * filtered_channels] = 0;
		}
	}

	//cvErode(filtered,filtered,0,2);

	// 2-means clustering: recover the two Kheperas' blue barycentres.
	// Note: barycentres store (x = row, y = column) throughout this function.
	CvPoint point, barycentre_1, barycentre_2;
	CvPoint bar_1_blanc, bar_2_blanc;


	// Seed the clustering with the previous frame's result (temporal coherence)
	barycentre_1 = previousBaryCentre_1;
	barycentre_2 = previousBaryCentre_2;
	bar_1_blanc = previousBar_1_blanc;
	bar_2_blanc = previousBar_2_blanc;

	int sum_1_x;
	int sum_1_y;
	int count_1;
	int sum_2_x;
	int sum_2_y;
	int count_2;
	for(int h = 0; h<NOMBRE_ITERATION_CLUSTERING; h++)
	{
		sum_1_x = 0;
		sum_1_y = 0;
		sum_2_x = 0;
		sum_2_y = 0;
		count_1 = 0;
		count_2 = 0;

		for(int i= filtered->height / 2;i< filtered->height;i++)
		{
			for(int j=0;j<filtered->width;j++)
			{
				if(filtered_data[i*filtered_step + j*filtered_channels]==255)
				{
					// NOTE(review): the -1 sentinel branches only trigger if a
					// barycentre was seeded to -1, which the constructor never
					// does — presumably dead code with the current init; confirm.
					if(barycentre_1.x==-1)
					{
						barycentre_1.x = i;
						barycentre_1.y = j;
						sum_1_x = i;
						sum_1_y = j;
						count_1++;
					}
					else if(barycentre_2.x==-1)
					{
						barycentre_2.x = i;
						barycentre_2.y = j;
						sum_2_x = i;
						sum_2_y = j;
						count_2++;
					}
					else
					{
						// Assign the point to the nearest cluster
						point.x=i;
						point.y=j;
						if(squareDistance(&barycentre_1, &point) < squareDistance( &barycentre_2, &point))
						{
							sum_1_x += i;
							sum_1_y += j;
							count_1++;
						}
						else
						{
							sum_2_x += i;
							sum_2_y += j;
							count_2++;
						}
					} // else
				} // blue pixel
			} // for j
		} // for i
		// Update the cluster centres (only when both clusters got points,
		// otherwise the previous barycentres are kept)
		if(count_1!=0 && count_2!=0)
		{
			barycentre_1.x = (int)(sum_1_x/count_1);
			barycentre_1.y = (int)(sum_1_y/count_1);
			barycentre_2.x = (int)(sum_2_x/count_2);
			barycentre_2.y = (int)(sum_2_y/count_2);
	    if (SHOW_WINDOWS)
		    cvShowImage("Bleu",filtered);
		}
	}// for h

	// Now keep only the white pixels (heading markers), same lower half
//	for (int i = 0; i < image->height / 2; i++)
	for (int i = image->height / 2; i < image->height; i++)
	{
		for (int j = 0; j < image->width; j++)
		{
			if (image_data[i * image_step + j * image_channels] + COLOR_TOLERANCE_WHITE >= 255
				&& image_data[i * image_step + j* image_channels + 1] + COLOR_TOLERANCE_WHITE >= 255
				&& image_data[i * image_step + j* image_channels + 2] + COLOR_TOLERANCE_WHITE >= 255 )
			// (alternative filter: green marker instead of white)
			/*if (((image_data[i * image_step + j * image_channels + 1]) > (COLOR_TOLERANCE_GREEN + image_data[i
				    * image_step + j * image_channels + 0])) && ((image_data[i * image_step + j
					* image_channels + 1]) > (COLOR_TOLERANCE_GREEN + image_data[i * image_step + j
					* image_channels + 2])))*/
				filtered_data[i * filtered_step + j * filtered_channels] = 255;
			else
				filtered_data[i * filtered_step + j * filtered_channels] = 0;
		}
	}

	//cvErode(filtered,filtered,0,2);
	// Debug copy of the white mask, annotated with the search windows below
	IplImage * affiche;
	  if (SHOW_WINDOWS){
		affiche = cvCreateImage(cvSize(image->width, image->height), 8, 1);
		cvCopy(filtered,affiche);
	 }

	// White barycentre inside a TAILLE_RECTANGLE_KHEPERA window around each
	// blue barycentre (bounds-checked against the lower half of the image)
	sum_1_x = 0;
	sum_1_y = 0;
	count_1 = 0;
	sum_2_x = 0;
	sum_2_y = 0;
	count_2 = 0;
	for(int i=barycentre_1.x-TAILLE_RECTANGLE_KHEPERA;i<barycentre_1.x+TAILLE_RECTANGLE_KHEPERA;i++)
	{
		for(int j=barycentre_1.y-TAILLE_RECTANGLE_KHEPERA;j<barycentre_1.y+TAILLE_RECTANGLE_KHEPERA;j++)
		{
		   if (SHOW_WINDOWS && i >= 0 && i < image->height && j>=0 && j<image->width){
		    	CvPoint point;
		    	point.y=i;
		    	point.x=j;
		    	cvCircle(affiche,point ,5, cvScalar(125), 1, 8, 0 );
		    }
			if(i >= image->height / 2 && i < image->height && j>=0 && j<image->width && filtered_data[i * filtered_step + j * filtered_channels]==255)
			{
				sum_1_x += i;
				sum_1_y += j;
				count_1++;
			}
		}
	}
	if(count_1!=0)
	{
		bar_1_blanc.x = (int)(sum_1_x / count_1);
		bar_1_blanc.y = (int)(sum_1_y / count_1);
	}
	for(int i=barycentre_2.x-TAILLE_RECTANGLE_KHEPERA;i<barycentre_2.x+TAILLE_RECTANGLE_KHEPERA;i++)
	{
		for(int j=barycentre_2.y-TAILLE_RECTANGLE_KHEPERA;j<barycentre_2.y+TAILLE_RECTANGLE_KHEPERA;j++)
		{
			if(i >= image->height / 2 && i < image->height && j>=0 && j<image->width && filtered_data[i * filtered_step + j * filtered_channels]==255)
			{
				sum_2_x += i;
				sum_2_y += j;
				count_2++;
			}
		}
	}

	if(count_2!=0)
	{
		bar_2_blanc.x = (int)(sum_2_x / count_2);
		bar_2_blanc.y = (int)(sum_2_y / count_2);
	}


	// Robot 1: centre = midpoint of blue and white barycentres, direction =
	// normalized (white - blue) vector. Note the (row, col) -> (y, x) swap.
    Vec2d position;
    Vec2d direction;

    position[1] = (double)barycentre_1.x;
    position[0] = (double)barycentre_1.y;

    previousBaryCentre_1 = barycentre_1;

    direction[1] = (double)bar_1_blanc.x;
    direction[0] = (double)bar_1_blanc.y;

    previousBar_1_blanc = bar_1_blanc;

    Vec2d temp(
    		(position[0] + direction[0])/2,
    		(position[1] + direction[1])/2 );

    // NOTE(review): size is 0 when the blue and white barycentres coincide,
    // which would make the direction components NaN — TODO confirm this
    // cannot happen in practice.
    double size = norm(direction - position);
	direction[0] = (direction[0] - position[0]) / size;
	direction[1] = (direction[1] - position[1]) / size;

	position = temp;

	PosKhepera positionsRetour;
	positionsRetour.position1=position;
	positionsRetour.direction1=direction;

	// Robot 2: same computation with the second barycentre pair
	position[1] = (double)barycentre_2.x;
	position[0] = (double)barycentre_2.y;

	previousBaryCentre_2 = barycentre_2;
	direction[1] = (double)bar_2_blanc.x;
	direction[0] = (double)bar_2_blanc.y;

	previousBar_2_blanc = bar_2_blanc;

    temp = Vec2d(
			(position[0] + direction[0])/2,
			(position[1] + direction[1])/2 );

	size = norm(direction - position);
	direction[0] = (direction[0] - position[0]) / size;
	direction[1] = (direction[1] - position[1]) / size;

	position = temp;

	positionsRetour.position2=position;
	positionsRetour.direction2=direction;


   if (SHOW_WINDOWS)
   {
	    cvShowImage("Blanc",affiche);
	    cvReleaseImage(&affiche);
    }
	cvReleaseImage(&filtered);

    return positionsRetour;
}
