#include "controler.h"

#include <iomanip>
#include <sstream>

	// Constructor: put the controller in a known idle state.
	// FIX: bdsequence, videoCapture, stop and mode were previously left
	// uninitialized although other methods test them (e.g. `if(!videoCapture)`
	// in processingFrame and `if (mode)` in initialiazeVideoProcessing).
	controler::controler() {
		this->controlLoading = false;
		this->currentFrame = 0;
		this->stop = false;
		this->mode = false;          // database/recording mode off by default
		this->bdsequence = 0;        // no database opened yet
		this->videoCapture = 0;      // no video opened yet
	}
	// Open (or create) the sequence database backed by `filename`.
	// NOTE(review): allocates a new sqlprocessor without freeing a previous
	// one — calling this twice leaks the first handle; confirm callers only
	// invoke it once per controller lifetime.
	void controler::initBD(QString filename) {
		bdsequence = new sqlprocessor(filename);
	}
	// Ask the SQL processor to run its information-extraction step.
	// FIX: the original `return bdsequence->...;` returned an expression from
	// a void function — misleading, and ill-formed if the callee is non-void.
	void controler::extractData() {
		bdsequence->extractionInformations();
	}
	// Return the database information serialized as XML (delegates to the
	// SQL processor; content format is defined there).
	QString controler::recupererInformations() {
		return bdsequence->extractionInformationsXML();
	}
	void controler::recupererInformationsBase() {
		nbplan = bdsequence->getNombrePlan();
		nbimage = bdsequence->getNombreImage();
		nbobjet = bdsequence->getNombreObjet();
		nbbriques = bdsequence->getNombreBriquesElementaires();
		nbbriquesreelles =  bdsequence->getNombreBrique();
	}
	int controler::getNBPlan() {
		return nbplan;
	}
	int controler::getNBImage() {
		return nbimage;
	}
	int controler::getNBObjet() {
		return nbobjet;
	}
	int controler::getNBBrique() {
		return nbbriques;
	}
	int controler::getNBBrik() {
		return nbbriquesreelles;
	}
	// Open the AVI file, read its basic properties (framerate, size, frame
	// count, codec), build the first 200x200 preview pixmap and, when mode
	// is enabled, create the result video writers.
	// FIXES: bail out on a capture/decode failure instead of dereferencing
	// null; release the preview buffer (previously leaked); query the FPS
	// property once instead of twice.
	void controler::initialiazeVideoProcessing(QString filename){
		// video processing
		this->videoFilename = filename;
		this->stop = false;
		QByteArray encodedFileName = videoFilename.toUtf8();
		char * charFName = encodedFileName.data();
		this->videoCapture = cvCaptureFromAVI(charFName);
		if (!videoCapture){
			qDebug()<<"Impossible to read the file";
			return;	// controlLoading stays false: nothing was loaded
		}
		this->controlLoading = true;
		this->videoFramerate = (int) cvGetCaptureProperty(videoCapture, CV_CAP_PROP_FPS );
		this->videoSize.height = (int) cvGetCaptureProperty(videoCapture, CV_CAP_PROP_HEIGHT_FRAME_HEIGHT );
		this->videoSize.width = (int) cvGetCaptureProperty(videoCapture, CV_CAP_PROP_FRAME_WIDTH );
		fps = this->videoFramerate; // same capture property, no second query
		cvGrabFrame(videoCapture);
		// Seek to the end to learn the total frame count, then rewind.
		cvSetCaptureProperty(videoCapture, CV_CAP_PROP_POS_AVI_RATIO, 1. );
		this->numberFrame = (int) cvGetCaptureProperty(videoCapture, CV_CAP_PROP_POS_FRAMES );
		cvSetCaptureProperty(videoCapture, CV_CAP_PROP_POS_FRAMES, 0. );
		IplImage *image;
		image=cvRetrieveFrame(videoCapture);
		if (!image){
			qDebug()<<"Impossible to read the file";
			return;	// codec present but first frame could not be decoded
		}
		this->depth=image->depth;
		this->channels=image->nChannels;
		this->formatVideo = (int) cvGetCaptureProperty(videoCapture, CV_CAP_PROP_FOURCC );
		 /*
		   CV_FOURCC('P','I','M','1')    = MPEG-1 codec
		   CV_FOURCC('M','J','P','G')    = motion-jpeg codec (does not work well)
		   CV_FOURCC('M', 'P', '4', '2') = MPEG-4.2 codec
		   CV_FOURCC('D', 'I', 'V', '3') = MPEG-4.3 codec
		   CV_FOURCC('D', 'I', 'V', 'X') = MPEG-4 codec
		   CV_FOURCC('U', '2', '6', '3') = H263 codec
		   CV_FOURCC('I', '2', '6', '3') = H263I codec
		   CV_FOURCC('F', 'L', 'V', '1') = FLV1 codec
		   CV_FOURCC('X', 'V', 'I', 'D') = XVID codec
		 */

		// First preview: `image` is owned by the capture and must not be
		// released; `resize` is ours and must be (previously leaked).
		IplImage *resize;
		resize = cvCreateImage(cvSize(200,200),image->depth,image->nChannels);
		cvResize(image,resize);
		this->pix1 = QPixmap::fromImage(IplToQImage(resize));
		cvReleaseImage(&resize);
		this->initializeVideoProcessor();
		processor.initialize(videoSize);
		if (mode){
			// Result recorders: one writer per intermediate result, all with
			// the 'cvid' codec at 25 fps and the source frame size.
			std::ostringstream temp_oss;
			temp_oss<<"VIDEOBASE/";
			temp_oss<<indexSequence;
			temp_oss<<".avi";
			writer0 = cvCreateVideoWriter(temp_oss.str().data(),CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer1 = cvCreateVideoWriter("result/FO.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer2 = cvCreateVideoWriter("result/CMVT.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer3 = cvCreateVideoWriter("result/SIP.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer4 = cvCreateVideoWriter("result/SIPC.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer5 = cvCreateVideoWriter("result/STIP.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer6 = cvCreateVideoWriter("result/STIPC.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer7 = cvCreateVideoWriter("result/LINES.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer8 = cvCreateVideoWriter("result/CIRCLES.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer9 = cvCreateVideoWriter("result/FACEBODY.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer10 = cvCreateVideoWriter("result/MAPSIP.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
			writer11 = cvCreateVideoWriter("result/MAPSTIP.avi",CV_FOURCC('c', 'v', 'i', 'd'),25. ,videoSize);
		}
	}
	// Register the video sequence in the database and remember its index.
	// The camera type depends on the processor's camera mode.
	// NOTE(review): fps*numberFrame looks odd for a duration — seconds would
	// be numberFrame/fps; confirm the intended unit with the schema.
	void controler::initialiazeVideoDatabase(QString filename){
		std::ostringstream oss;
		oss<<fps*numberFrame;
		const QString cameraType = (processor.cameraMode==0) ? QString("mobile") : QString("fixe");
		indexSequence = bdsequence->insertSequence("avi",QString(oss.str().data()),numberFrame,filename,cameraType);
	}

	// Return the preview pixmap for the requested panel (1..9). Any other
	// value — including 1 — yields the original-frame pixmap.
	QPixmap controler::getPixmapVideo(int type){
		switch(type) {
			case 2: return pix2;
			case 3: return pix3;
			case 4: return pix4;
			case 5: return pix5;
			case 6: return pix6;
			case 7: return pix7;
			case 8: return pix8;
			case 9: return pix9;
			default: return pix1; // covers type == 1 as well
		}
	}
	int controler::getVideoFramerate(){
				return this->videoFramerate;
			}
		int controler::getFormatImageDepth(){
			return depth;
				}
		int controler::getFormatImageChannels(){
			return channels;
		}
		int controler::getFPS(){
			return fps;
		}
		int controler::getFormatVideo(){
					return this->formatVideo;
		}
		int controler::getCompressionVideo(){
					return this->compressionVideo;
		}
	double controler::getCurrentFramerate(){
		return this->currentFramerate;
	}
	int controler::getNumberFrame(){
		return this->numberFrame;
	}
	int controler::getCurrentFrame(){
		return this->currentFrame;
	}
	QString controler::getResolution(){
		return mytools.convertResolutionIntegerToQString(videoSize.height,videoSize.width);
	}
	// Hand the capture and the current frame index to the processor, then
	// run its preprocessing pass. Call order matters: the capture and frame
	// index must be set before preprocessing() runs.
	void controler::preprocessingFrame(){
		processor.setCapture(videoCapture);
		processor.setCurrentFrame(currentFrame);
		processor.preprocessing();
	}
	// Process the current frame: run the video processor, refresh the
	// preview pixmaps and — in database mode — record the intermediate
	// result videos and store the extracted features. Also measures the
	// achieved framerate with the Windows high-resolution counter.
	// FIXES: the three 200x200 preview buffers were released only inside the
	// `if (mode)` block, leaking three IplImages per frame when mode was
	// false — releases moved after the block; guarded a possible division by
	// zero in the framerate computation; removed a dead clone/release pair.
	void controler::processingFrame(){
			QueryPerformanceFrequency(&Frequency);
			QueryPerformanceCounter(&Before);
			if(!videoCapture)
			{
				qDebug()<<"Impossible to read the file";
			}
			if (currentFrame!=numberFrame-1){

				IplImage *resize;
				IplImage *resizeNB;
				IplImage *resizeNB8U;
				// Image capture and processing of the current frame
				processor.setCapture(videoCapture);
				processor.setCurrentFrame(currentFrame);
				processor.processing(mode);

				// 200x200 preview buffers: colour, grey (native depth), grey 8U
				resize = cvCreateImage(cvSize(200,200),processor.getOriginalImage()->depth,processor.getOriginalImage()->nChannels);
				resizeNB = cvCreateImage(cvSize(200,200),processor.getOriginalImage()->depth,1);
				resizeNB8U = cvCreateImage(cvSize(200,200),8,1);
				// 11 ORIGINAL
				cvResize(processor.getOriginalImage(),resize);
				this->pix1 = QPixmap::fromImage(IplToQImage(resize));
				if (mode){
					// 12 CMVT (camera movement compensation)
					cvResize(processor.getCompensationMouvement(),resize);
					this->pix2 = QPixmap::fromImage(IplToQImage(resize));
					// 13 HISTO
					cvResize(processor.getHistogramme(),resize);
					this->pix3 = QPixmap::fromImage(IplToQImage(resize));
					// 21 Retina (mean image, single channel)
					cvResize(processor.getMeanImage(),resizeNB);
					this->pix4 = QPixmap::fromImage(IplToQImage(resizeNB));
					// 23 FO (optical flow) — pix5 (object mask) is disabled
					cvResize(processor.getOpticFlowImage(),resize);
					this->pix6 = QPixmap::fromImage(IplToQImage(resize));
					// 31 SIP
					cvResize(processor.getSIPIImage(),resizeNB);
					this->pix7 = QPixmap::fromImage(IplToQImage(resizeNB));
					// 32 STIP (colour or grey depending on the processor output)
					if (processor.getSTIPIImage()->nChannels==3){
						cvResize(processor.getSTIPIImage(),resize);
						this->pix8 = QPixmap::fromImage(IplToQImage(resize));
					} else {
						cvResize(processor.getSTIPIImage(),resizeNB);
						this->pix8 = QPixmap::fromImage(IplToQImage(resizeNB));
					}
					// 33 HOUGH lines
					cvResize(processor.getHoughLine(),resize);
					this->pix9 = QPixmap::fromImage(IplToQImage(resize));

					// recorder: dump the intermediate images to the AVI writers
					cvWriteFrame(writer0,processor.getOriginalImage()); // original

					IplImage *temp1 = cvCloneImage(processor.getOpticFlowImage());
					cvConvertImage(processor.getOpticFlowImage(),temp1, CV_CVTIMG_FLIP);
					cvWriteFrame(writer1,temp1); //FO
					cvReleaseImage(&temp1);

					IplImage *temp2 = cvCloneImage(processor.getCompensationMouvement());
					cvWriteFrame(writer2,temp2); //CMVT
					cvReleaseImage(&temp2);

					// SIP (writer3), mask (writer4), retina (writer6) and
					// detection (writer9) recordings are currently disabled.

					IplImage *temp5 = cvCloneImage(processor.getSTIPIImage());
					cvWriteFrame(writer5,temp5); //STIP
					cvReleaseImage(&temp5);

					IplImage *temp7 = cvCloneImage(processor.getHoughLine());
					cvConvertImage(processor.getHoughLine(),temp7, CV_CVTIMG_FLIP);
					cvWriteFrame(writer7,temp7); //LINES
					cvReleaseImage(&temp7);

					cvWriteFrame(writer8,processor.getHoughCircle()); // CIRCLES

					cvWriteFrame(writer10,processor.getSTIPIImage()); //MAPSIP

					IplImage *temp11 = cvCloneImage(processor.getSTIPMAPImage());
					cvWriteFrame(writer11,temp11); //MAPSTIP
					cvReleaseImage(&temp11);

					//////////
					// Pre processing of data: cache this frame's global features
					objetNumber=processor.getNombreDeBox();
					sip_number_points=processor.getSIPNumberPoints();
					stip_number_points=processor.getSTIPNumberPoints();
					hough_lines=processor.getHoughLineNumber();
					hough_circles=processor.getHoughCircleNumber();
					hough_orientation=processor.getHoughOrientation();
					if (processor.cameraMode==0){
						// movement compensation only meaningful for a mobile camera
						cmvt_orientation=processor.getCMVTOrientation();
						cmvt_percentage=processor.getCMVTPercentage();
						cmvt_intensity=processor.getCMVTIntensity();
					}
					color_dominant1=processor.getColorDominant1();
					color_dominant2=processor.getColorDominant2();
					color_luminance=processor.getColorLuminance();
					// TODO sensmvt: movement direction not computed yet,
					// empty strings are stored below.
					QString sensmvtH, sensmvtV;

					bdsequence->insertImages(indexSequence,currentFrame,
									 processor.getNombreDeBox(),
									 processor.getSIPNumberPoints(),
									 processor.getSTIPNumberPoints(),
									 processor.getHoughLineNumber(),
									 processor.getHoughCircleNumber(),
									 processor.getHoughOrientation(),
									 processor.getCMVTOrientation(),
									 processor.getCMVTPercentage(),
									 processor.getCMVTIntensity(),
									 processor.getColorDominant1(),
									 processor.getColorDominant2(),
									 processor.getColorDominant3(),
									 processor.getColorLuminance(),
									 sensmvtV,
									 sensmvtH);
				}
				// FIX: release the preview buffers in every mode (previously
				// only released when mode was true, leaking otherwise).
				cvReleaseImage(&resize);
				cvReleaseImage(&resizeNB);
				cvReleaseImage(&resizeNB8U);
				if (processor.getNombreDeBox()!=0){
					// Store per-object features: bounding box, SIP/STIP counts
					// per quadrant, optical-flow intensity/orientation per
					// quadrant, compacity, orientation and the box centre.
					for(int compteurObjet=0;compteurObjet<processor.getNombreDeBox();compteurObjet++){
						bdsequence->insertObjectInformation(bdsequence->countInformations(),compteurObjet+1,
							  processor.getListeBoundingBox()[compteurObjet*2].x,
							  processor.getListeBoundingBox()[compteurObjet*2].y,
							  processor.getListeBoundingBox()[compteurObjet*2+1].x,
							  processor.getListeBoundingBox()[compteurObjet*2+1].y,
							  processor.getSIPNumberPointsByObject()[compteurObjet*4],//sip1
							  processor.getSIPNumberPointsByObject()[compteurObjet*4+1],//sip2
							  processor.getSIPNumberPointsByObject()[compteurObjet*4+2],//sip3
							  processor.getSIPNumberPointsByObject()[compteurObjet*4+3],//sip4
                              processor.getSTIPNumberPointsByObject()[compteurObjet*4],//stip1
                              processor.getSTIPNumberPointsByObject()[compteurObjet*4+1],//stip2
                              processor.getSTIPNumberPointsByObject()[compteurObjet*4+2],//stip3
                              processor.getSTIPNumberPointsByObject()[compteurObjet*4+3],//stip4
                              processor.getFOIntensityByObject()[compteurObjet*4],//foi1
                              processor.getFOIntensityByObject()[compteurObjet*4+1],//foi2
                              processor.getFOIntensityByObject()[compteurObjet*4+2],//foi3
                              processor.getFOIntensityByObject()[compteurObjet*4+3],//foi4
                              processor.getFOOrientationByObject()[compteurObjet*4],//foo1
                              processor.getFOOrientationByObject()[compteurObjet*4+1],//foo2
                              processor.getFOOrientationByObject()[compteurObjet*4+2],//foo3
                              processor.getFOOrientationByObject()[compteurObjet*4+3],//foo4
                              processor.getCompacityByObject()[compteurObjet],
                              processor.getOrientation()[compteurObjet],
                              (processor.getListeBoundingBox()[compteurObjet*2].x+processor.getListeBoundingBox()[compteurObjet*2+1].x)/2,
                              (processor.getListeBoundingBox()[compteurObjet*2].y+processor.getListeBoundingBox()[compteurObjet*2+1].y)/2,
                              0
							  );
					}
				}

				currentFrame=currentFrame+1;

			} else {
				// Last frame reached: stop the loop and close the recorders.
				stop = true;
				if (mode){
				cvReleaseVideoWriter( &writer0 );
				cvReleaseVideoWriter( &writer1 );
				cvReleaseVideoWriter( &writer2 );
				cvReleaseVideoWriter( &writer3 );
				cvReleaseVideoWriter( &writer4 );
				cvReleaseVideoWriter( &writer5 );
				cvReleaseVideoWriter( &writer6 );
				cvReleaseVideoWriter( &writer7 );
				cvReleaseVideoWriter( &writer8 );
				cvReleaseVideoWriter( &writer9 );
				cvReleaseVideoWriter( &writer10 );
				cvReleaseVideoWriter( &writer11 );
				}
			}
			// update informations
		    cvSetCaptureProperty(videoCapture, CV_CAP_PROP_POS_FRAMES, currentFrame );
			QueryPerformanceCounter(&After);
			time = 1000 * (After.QuadPart - Before.QuadPart) / Frequency.QuadPart;
			// FIX: guard against a zero elapsed time (frame processed faster
			// than the counter resolution) before dividing.
			if (time > 0)
				currentFramerate = 1000/time;
		}


	// Convert an IplImage to a QImage. OpenCV rows are stored top-down in
	// BGR order; the QImage is filled bottom-up (vertical flip) in RGB.
	// FIX: the original always read data[x*nChannels+2]/[+1] even for
	// single-channel images (callers pass grey previews), misreading
	// neighbouring pixels as colour channels and potentially reading past
	// the last row. Grey images now replicate the single channel.
	QImage controler::IplToQImage(const IplImage *newImage)
	{
		QImage qtemp;
		if(newImage && newImage->width>0)
		{
			const int nch = newImage->nChannels;
			char* data = newImage->imageData;
			qtemp= QImage(newImage->width, newImage->height,QImage::Format_RGB32 );

			for( int y = 0; y < newImage->height; y++, data +=newImage->widthStep ){
				uint *row = (uint*)qtemp.scanLine(newImage->height-y-1);
				for( int x = 0; x < newImage->width; x++)
				{
					if (nch >= 3){
						// BGR -> RGB
						row[x] = qRgb(data[x * nch+2],
								data[x * nch+1],
								data[x * nch]);
					} else {
						// grey: same value on all three channels
						row[x] = qRgb(data[x * nch],
								data[x * nch],
								data[x * nch]);
					}
				}
			}
		}
		return qtemp;
	}

	bool controler::getStop(){
		return this->stop;
	}
	void controler::initStop(){
		this->stop=false;
	}

	void controler::zeroCurrentFrame(){
			this->currentFrame=0;
	}

	// Build a fresh video processor configured with the default tuning
	// parameters, grouped by feature extractor.
	void controler::initializeVideoProcessor(){
		processor = videoprocessor();
		// Spatial interest points
		processor.SIP_SIGMA_SPATIAL = 1.5;
		processor.SIP_THRESHOLD = 150;
		// Optical flow
		processor.OF_NB_FEATURES = 400;
		processor.OF_MINIMUM_QUALITY = 0.01;
		processor.OF_MINIMUM_DISTANCE = 0.01;
		processor.OF_WINDOW_SIZE = 3;
		processor.OF_NB_PYRAMID = 5;
		// Hough lines
		processor.LINE_RHO = 1;
		processor.LINE_THETA = CV_PI/180;
		processor.LINE_THRESHOLD = 100;
		// Hough circles
		processor.CIRCLE_DP = 2;
		processor.CIRCLE_RADIUS_MIN = 100;
		processor.CIRCLE_RADIUS_MAX = 200;
		// Segmentation
		processor.FILTRAGE = 15;
		processor.SIZEMIN = 10;
	}
	double controler::get_nb_objects(){
		return objetNumber;
	}
	double controler::get_sip_number_points(){
		return sip_number_points;
	}
	double controler::get_stip_number_points(){
		return stip_number_points;
	}
	double controler::get_hough_lines(){
		return hough_lines;
	}
	double controler::get_hough_circles(){
		return hough_circles;
	}
	double controler::get_hough_orientation(){
		return hough_orientation;
	}
	//double controler::get_hough_focalisation(){
	//	return hough_focalisation;
	//}
	double controler::get_cmvt_intensity(){
		return cmvt_intensity;
	}
	double controler::get_cmvt_orientation(){
		return cmvt_orientation;
	}
	double controler::get_cmvt_percentage(){
		return cmvt_percentage;
	}
	double controler::get_color_luminance(){
		return color_luminance;
	}
	double controler::get_color_dominant1(){
		return color_dominant1;
	}
	double controler::get_color_dominant2(){
		return color_dominant2;
	}
	bool controler::getSTIPCalculating(){
		return this->processor.getSTIPCalculating();
	}
	int controler::countSequences(){
		return bdsequence->countSequences();
	}
	int controler::set_Mode(bool value){
		return mode=value;
	}

	// Delete every stored sequence from the database.
	void controler::eraseAllBD(){
		bdsequence->eraseAll();
	}
	// Delete only the most recently inserted sequence.
	void controler::eraseLastBD(){
		bdsequence->eraseLast();
	}
	// Build the path of a support frame image, zero-padding the index to six
	// digits: e.g. 42 -> ".\support\000042.png".
	// FIX: the hand-rolled six-branch padding chain is replaced by
	// std::setw/std::setfill (identical output for all non-negative indices,
	// including >= 1000000 which prints unpadded); removed the unused local
	// temp_s.
	QString controler::frameNameAvecIndice(int indice){
		std::ostringstream temp_oss;
		temp_oss<<".\\support\\"<<std::setfill('0')<<std::setw(6)<<indice<<".png";
		return temp_oss.str().data();
	}
