#include "vcCodec.h"

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/timeb.h>

#include <iostream>

#include <highgui.h>

#include "Configuration/Robot.h"
#include "nxColor.h"


#ifndef PI
#define PI 3.141592653
#endif

using namespace std;

vcCodec::vcCodec()
{ 
	// Create the CMVision color-blob tracker used by the identify* methods.
	mVision = new CMVision();
	//allocate memory for points
	// Four calibration pairs (image pixel -> real-plane coordinate) consumed
	// by getProjectionMatrix().  NOTE(review): allocated with malloc but never
	// released in the destructor — leak; pair with free() if fixed.
	imagePoints = (CvPoint*)malloc( 4 * sizeof(imagePoints[0]));
	realPoints = (CvPoint2D32f*)malloc( 4 * sizeof(realPoints[0]));
	// Start with all four pairs zeroed.
	erasePoints();
	
	
} 

bool vcCodec::getPoint(int pointIndex, CvPoint* imagePoint, CvPoint2D32f* realPoint)
{
	// Copy out the calibration pair stored at pointIndex (valid slots 0..3).
	// Returns false, leaving the outputs untouched, when the index is invalid.
	if(pointIndex >= 0 && pointIndex <= 3)
	{
		*imagePoint = imagePoints[pointIndex];
		*realPoint = realPoints[pointIndex];
		return true;
	}
	return false;
}
CvPoint* vcCodec::getImagePoints()
{
	// Raw pointer to the internal 4-element image-point array (not a copy);
	// valid for the lifetime of this object.
	return imagePoints;
}

CvPoint2D32f* vcCodec::getRealPoints()
{
	// Raw pointer to the internal 4-element real-plane point array (not a
	// copy); valid for the lifetime of this object.
	return realPoints;
}


bool vcCodec::setPoint(int pointIndex,CvPoint imagePoint,CvPoint2D32f realPoint)
{
	// Store one image/real calibration pair; only slots 0..3 exist.
	// Returns whether the index was accepted.
	const bool validIndex = (pointIndex >= 0) && (pointIndex <= 3);
	if(validIndex)
	{
		imagePoints[pointIndex] = imagePoint;
		realPoints[pointIndex] = realPoint;
	}
	return validIndex;
}
void vcCodec::erasePoints()
{
	// Reset all four calibration pairs to the origin.
	for(int i = 0; i < 4; i++)
	{
		imagePoints[i] = cvPoint(0,0);
		realPoints[i] = cvPoint2D32f(0,0);
	}
}

//based on "DESARROLLO DE UN SISTEMA SEÑALADOR PARA PRESENTACIONES" by D. David Millan Escriva
CvMat* vcCodec::getProjectionMatrix()
{
	/* Computes the 3x3 planar homography H that maps the four stored image
	 * points onto their corresponding real-plane points (projective geometry).
	 * Fixing h33 = 1, the remaining 8 unknowns h = (h11..h32) satisfy the
	 * linear system A*h = u built from the four point correspondences, so
	 * h = inv(A)*u.  The result is stored in the transMatrix member (backed by
	 * the hM array) and a pointer to it is returned — valid only while this
	 * object is alive.
	*/
	//Matrix U
	
	//array used for matrix U
	double uM[8] = {realPoints[0].x,realPoints[0].y,realPoints[1].x,realPoints[1].y,
					realPoints[2].x,realPoints[2].y,realPoints[3].x,realPoints[3].y};
	
	//array used for matrix A				
	double aM[] = { imagePoints[0].x, imagePoints[0].y, 1, 0, 0, 0, -realPoints[0].x*imagePoints[0].x,-realPoints[0].x*imagePoints[0].y,
					0, 0, 0, imagePoints[0].x, imagePoints[0].y, 1, -realPoints[0].y*imagePoints[0].x,-realPoints[0].y*imagePoints[0].y,
					imagePoints[1].x, imagePoints[1].y, 1, 0, 0, 0, -realPoints[1].x*imagePoints[1].x,-realPoints[1].x*imagePoints[1].y,
					0, 0, 0, imagePoints[1].x, imagePoints[1].y, 1, -realPoints[1].y*imagePoints[1].x,-realPoints[1].y*imagePoints[1].y,
					imagePoints[2].x, imagePoints[2].y, 1, 0, 0, 0, -realPoints[2].x*imagePoints[2].x,-realPoints[2].x*imagePoints[2].y,
					0, 0, 0, imagePoints[2].x, imagePoints[2].y, 1, -realPoints[2].y*imagePoints[2].x,-realPoints[2].y*imagePoints[2].y,
					imagePoints[3].x, imagePoints[3].y, 1, 0, 0, 0, -realPoints[3].x*imagePoints[3].x,-realPoints[3].x*imagePoints[3].y,
					0, 0, 0, imagePoints[3].x, imagePoints[3].y, 1, -realPoints[3].y*imagePoints[3].x,-realPoints[3].y*imagePoints[3].y};

	

	//debug dump of the 8x8 system matrix A
	for(int i=0;i<8;i++)
	{
		for(int j=0;j<8;j++)
		{
			printf("%f ",aM[i*8+j]);
		}
		printf("\n");
	}
	//matrices used with opencv to easily define operations between them
	CvMat MU,MA;
	
	
	cvInitMatHeader( &MA, 8, 8, CV_64FC1, aM );
	cvInitMatHeader( &MU, 8, 1, CV_64FC1, uM );
	
	//we have to invert matrix MA
	CvMat MAinv;
	double aInv[64];
	cvInitMatHeader( &MAinv, 8, 8, CV_64FC1, aInv );
	cvmInvert(&MA,&MAinv);
	
	
	//solve for the 8-vector h = inv(A)*u
	CvMat Mh;
	double h[8];
	
	
	cvInitMatHeader( &Mh, 8, 1, CV_64FC1, h );
	cvmMul(&MAinv,&MU,&Mh);
	//Build the 3x3 matrix H (h33 fixed to 1)
	for(int i=0;i<8;i++)
	            hM[i]=h[i];
	hM[8]=1.0;
	
	//debug dump of the resulting homography H
	for(int i=0;i<3;i++)
	{
		for(int j=0;j<3;j++)
		{
			printf("%f ",hM[i*3+j]);
		}
		printf("\n");
	}
	
	//Store H in the transMatrix member (header over the hM array)
	cvInitMatHeader(&transMatrix,3,3,CV_64FC1,hM);
	
	//Calculate invert matrix
	//cvmInvert(&transMatrix,&invTransMatrix);
	
	return &transMatrix;
}

double* vcCodec::getMatrixProyArray()
{
	// Raw pointer to the 9-element row-major homography array backing
	// transMatrix (filled by getProjectionMatrix()); not a copy.
	return hM;

}

//taken from "DESARROLLO DE UN SISTEMA SEÑALADOR PARA PRESENTACIONES" by D. David Millan Escriva
CvPoint2D32f vcCodec::TransformPoint(CvPoint point)
{
	// Applies the stored homography transMatrix to an image point and returns
	// the corresponding point on the real-world plane.  transMatrix must have
	// been filled by getProjectionMatrix() first.
	CvPoint2D32f aux;
	CvMat Maux,Maux1;

	
	//Arrays holding the points in homogeneous coordinates
	double mp[]={point.x,point.y,1};//the given point
	double mp1[3];//result of the equation p'=H*p
	//Initialize the OpenCV matrix headers over the arrays
	cvInitMatHeader( &Maux, 3, 1, CV_64FC1, mp);
	cvInitMatHeader( &Maux1, 3, 1, CV_64FC1, mp1);
	//Perform the operation MH*Mp=Mp'
	//cvmMul(&transMatrix,&Maux,&Maux1);
	cvMatMulAdd( &transMatrix, &Maux, 0, &Maux1 );
	//The result is a 3x1 matrix p1; the equivalent point is x'=p1[0]/p1[2], y'=p1[1]/p1[2]
	//p1[2] is the scale factor (the lambda of the projective equation)
	aux.x=mp1[0]/mp1[2];
	aux.y=(mp1[1]/mp1[2]);
	//Return the obtained point
	return (aux);
}
	



vcCodec::~vcCodec()
{ 
	delete mVision;
	// FIX: release the calibration point arrays allocated with malloc() in
	// the constructor — they were previously leaked.
	free(imagePoints);
	free(realPoints);
}
 
IplImage* vcCodec::selectImgArea(IplImage *cvImg,CvPoint init,CvPoint final)
{
	// Restricts cvImg's ROI (region of interest) to the rectangle spanned by
	// init/final.  The rectangle is offset by the current ROI origin, so
	// consecutive calls zoom in cumulatively.  Returns cvImg on success, or
	// NULL when the image is missing or the rectangle is degenerate/out of
	// bounds (in which case the ROI is left unchanged).
	if(cvImg)
	{
		if(cvImg->width>=final.x && cvImg->height>=final.y && init.x!=final.x && init.y!=final.y)
		{
			//Get former ROI of the image, for consecutive zoom in
			CvRect initPointRect = cvGetImageROI(cvImg);
			
			//Select Region of interest in IplImage
			cvSetImageROI( cvImg , cvRect(init.x+initPointRect.x,init.y+initPointRect.y,(int)(final.x-init.x),(int)(final.y-init.y)) );
	
			return cvImg;
		}
		else
			//FIX: message previously said "vcCodec.pp"
			cout<<"vcCodec.cpp: Error on selected area, bigger than image!"<<endl;
			
		
	}
	return NULL;
}







wxBitmap vcCodec::cvImg2wxBitmap(IplImage *cvImg, int oWidth, int oHeight)
{
	// Converts an IplImage (honouring its current ROI) into a wxBitmap,
	// optionally scaled to oWidth x oHeight (pass -1,-1 to skip scaling).
	// Returns an invalid bitmap when cvImg is NULL.
	if(cvImg){
		IplImage* RGBImg = cvImg;
		
		//Size of image ROI (Region of interest, in case zoom is being used)
		CvSize size; 
		int step;	// bytes per source row (widthStep), including any padding
		
		
		//get rawdata info of the image (points at the ROI origin)
		unsigned char *rawData;
		cvGetRawData( RGBImg, &rawData,&step,&size);

		//number of channels of image
		int channels  = RGBImg->nChannels;
		wxImage wxImg;
		
		//Repack the ROI rows into a tightly packed buffer for wxImage.
		//FIX: advance source rows by `step` (widthStep).  The previous code
		//used RGBImg->width*channels, which is wrong whenever rows carry
		//alignment padding and produced skewed output images.
		unsigned char data[size.height*size.width*channels];
		
	
		for(int i=0;i<size.height;i++)
		{
			for(int j=0;j<size.width*channels;j++)
			{
				
				data[i*size.width*channels +j]=rawData[i*step+j];
	
			}
			
		}
			
		//TRUE: wxImage borrows `data` without copying; safe here because
		//Scale()/the wxBitmap constructor below copy the pixels before return.
		wxImg = wxImage( size.width, size.height, data,TRUE );
	
		// convert to bitmap to be used by the window to draw
		if(oWidth!=-1 && oHeight!=-1)
			return wxBitmap( wxImg.Scale(oWidth,oHeight));
		else
			return wxBitmap((char*)data,size.width, size.height);
		
	}
	else{
		//No image loaded
		return NULL;
	} 
}
double vcCodec::getElapsedSeconds()
{
	// Seconds elapsed since the previous call of this function
	// (0.0 on the very first call).
	timeb now;
	ftime(&now);
	const double nowSeconds = now.time + now.millitm * 0.001;
	static double previousSeconds = nowSeconds;	// set once, on the first call
	const double delta = nowSeconds - previousSeconds;
	previousSeconds = nowSeconds;
 
	return delta;
}

double vcCodec::calculateFps()
{
	// Frames-per-second estimate, refreshed roughly once per second; call once
	// per frame.  Returns -1 on calls between refreshes.
	static int frames = 0;
	static double accumulated = 0;
	double fps = -1;

	accumulated += getElapsedSeconds();
	frames++;

	if (accumulated > 1)
	{
		fps = frames / accumulated;
		frames = 0;
		accumulated -= 1;
	}
	return fps;
}

bool vcCodec::CMvisionInitialize(int width, int height)
{
	// Initialize the CMVision tracker for frames of the given size; forwards
	// CMVision's success flag.
	//mVision->enable(CMV_DENSITY_MERGE);
	return mVision->initialize(width,height);

}

void vcCodec::loadCMVisionColors(nxColor* colorList[],int colorListIndex)
{
	// Registers colorListIndex colors from colorList with the CMVision blob
	// tracker: each color's mean RGB becomes its visual (display) color and
	// its low/high thresholds ("umbral" = threshold) become the segmentation
	// limits for channel i.
	CMVision::color_info colorX;
	nxColor *color;
	
	
	for(int i = 0; i<colorListIndex;i++)
	{
		color = colorList[i];
		
		rgb colorRGB;
		colorRGB.red = (unsigned char) color->getMeanValue(RGB_CODE).val[0];
		colorRGB.green = (unsigned char) color->getMeanValue(RGB_CODE).val[1];
		colorRGB.blue = (unsigned char) color->getMeanValue(RGB_CODE).val[2];
		
		printf("R:%d G:%d B:%d\n", colorRGB.red, colorRGB.green, colorRGB.blue);

		colorX.color = colorRGB;
		colorX.name = color->getName();
		colorX.merge = 0;
		colorX.expected_num = 1;
		
		// NOTE(review): thresholds are read with HSV_CODE but stored in
		// CMVision's y/u/v fields — presumably the tracker is fed HSV data;
		// confirm against the capture pipeline.
		colorX.y_low = (int) color->getLowUmbral(HSV_CODE).val[0];
		colorX.u_low = (int) color->getLowUmbral(HSV_CODE).val[1];
		colorX.v_low = (int) color->getLowUmbral(HSV_CODE).val[2];
		

		colorX.y_high = (int) color->getHighUmbral(HSV_CODE).val[0];
		colorX.u_high = (int) color->getHighUmbral(HSV_CODE).val[1];
		colorX.v_high = (int) color->getHighUmbral(HSV_CODE).val[2];
		
		mVision->setThreshold(i, colorX.y_low,colorX.y_high, colorX.u_low,colorX.u_high, colorX.v_low,colorX.v_high);
		mVision->setColorInfo(i,colorX);
		
	}

}

bool vcCodec::saveCMVisionConf(char *filename)
{
	// Persist the current CMVision color/threshold configuration to filename;
	// forwards CMVision's success flag.
	return mVision->saveOptions(filename);
}
 
void vcCodec::identifyRobots(IplImage* imgDst, RobotIdentification* rid,bool isProyected, bool lensCorrEn)
{
	// Draws every successfully identified robot onto imgDst: its centre, a
	// line from its base-color blob to its robot-color blob, and a text label
	// with name, position and orientation.
	//   isProyected: report position/angle in real-plane coordinates (via the
	//                homography, see TransformPoint) instead of pixels.
	//   lensCorrEn:  undistort the drawn points with correctLensDist() first.

	//get total of robots to be identified
	int numRobots = rid->getRobotsNumber();
	
	//radius and color of center of robot
	int radius = 2;
	CvScalar color = cvScalarAll(40);
	
	//font for name of robot
	CvFont laFont;
	cvInitFont(&laFont,CV_FONT_HERSHEY_SIMPLEX,0.3f,0.3f);
	
	for(int i=0;i<numRobots;i++)
	{
		//get identified robot
		Robot* robot = rid->getRobot(i);
		
		//if it was successfully identified, draw center in image
		if(robot!=NULL)
		{
			//get basecolor pos
			CvPoint baseColorPos = robot->getBaseColorPos();
			//get robotcolor pos
			CvPoint robotColorPos = robot->getRobotColorPos();
			
			//get center of robot
			CvPoint center = robot->getCenter();
			
			//get robotName
			char* name = robot->getName();
			
			//get RobotAngle
			double angle = robot->getOrientation(ANGLE_DEGREES);

			//FIX: label buffer on the stack — was `new char[50]`, leaked on
			//every robot every frame; snprintf guards against overflow when
			//robot names are long.
			char text[64];
			
			if(lensCorrEn)
			{
				CvPoint corrCenter;
				correctLensDist(center,&corrCenter);
				
				CvPoint baseColor;
				CvPoint robotColor;
				
				correctLensDist(baseColorPos,&baseColor);
				correctLensDist(robotColorPos,&robotColor);
				
				//Convert center to coordinates of plane
				if(isProyected)
				{
					CvPoint2D32f baseColorCenter;
					CvPoint2D32f robotColorCenter;
					CvPoint2D32f centerR;
					
					//shadows the pixel-space angle on purpose: the projected
					//angle is recomputed from the transformed points
					float angle;
					
					baseColorCenter = TransformPoint(baseColor);
					robotColorCenter = TransformPoint(robotColor);
					
					centerR = cvPoint2D32f((baseColorCenter.x +robotColorCenter.x) / 2,(baseColorCenter.y +robotColorCenter.y) / 2);
					angle = atan2((baseColorCenter.y - robotColorCenter.y) , (baseColorCenter.x - robotColorCenter.x));
					
					snprintf(text,sizeof(text),"%s-(%4.1f, %4.1f)-a:%4.1f",name,centerR.x,centerR.y,angle);
				}
				else
					snprintf(text,sizeof(text),"%s-(%d, %d)-a:%4.1f",name,corrCenter.x,corrCenter.y,angle);
				
				cvPutText(imgDst,text,corrCenter,&laFont,cvScalarAll(255));
				cvCircle(imgDst, corrCenter,radius,color,CV_FILLED);
				cvLine(imgDst,baseColor,robotColor,cvScalarAll(100)); 
			}
			else
			{
				if(isProyected)
				{
					CvPoint2D32f centerR = TransformPoint(center);
					CvPoint2D32f baseColorCenter = TransformPoint(baseColorPos);
					CvPoint2D32f robotColorCenter = TransformPoint(robotColorPos);
					
					//corrected angle, computed from the transformed points
					//(the raw getOrientation value is deliberately ignored)
					float angle;
					angle = atan2((baseColorCenter.y - robotColorCenter.y) , (baseColorCenter.x - robotColorCenter.x));

					angle = 180 * angle / PI + 45;

					snprintf(text,sizeof(text),"%s-(%4.1f, %4.1f)-a:%4.1f",name,centerR.x,centerR.y,angle);
				}
				else
					snprintf(text,sizeof(text),"%s-(%d, %d)-a:%4.1f",name,center.x,center.y,angle);
				
				cvPutText(imgDst,text,center,&laFont,cvScalarAll(255));
				cvCircle(imgDst, center,radius,color,CV_FILLED);
				cvLine(imgDst,baseColorPos,robotColorPos,cvScalarAll(100)); 
			}
		}
	}

}


void vcCodec::identifyColors(CMVision* mVision, IplImage* imgDst, int colorsNumber, int minArea, int maxArea, bool lensCorrEn)
{
	// Draws every CMVision blob whose area lies in [minArea, maxArea] onto
	// imgDst: a filled bounding box in the color's visual RGB, a dot at the
	// blob centre and the blob's index as a label.
	//   lensCorrEn: undistort the drawn points with correctLensDist() first.

	//position of blob centers
	float x;
	float y;

	CvScalar colorError = cvScalarAll(180);
	//font for numbers of blobs
	CvFont laFont;
	cvInitFont(&laFont,CV_FONT_HERSHEY_SIMPLEX,0.3f,0.3f);
	
	//search for all colors		
	for (int ch = 0; ch < colorsNumber; ++ch)
	{
		// Get the descriptive color
		rgb c = mVision->getColorVisual(ch);

		//FIX: label buffer on the stack — was `new char[5]`, leaked per color
		//every frame; sized and snprintf-bounded so large indices cannot
		//overflow it.
		char text[16];
		
		// Grab the regions for this color
		CMVision::region* r = NULL;
		
		//index to numerate the blobs
		int ix=0;
		//get all regions of this color
		for (r = mVision->getRegions(ch); r != NULL; r = r->next)
		{

			snprintf(text,sizeof(text),"%d",ix);
			// get the area first
			int area   = (r->area);
			
			if(area>=minArea && area<=maxArea)
			{
				//center of blob
				x      = r->cen_x;
				y      = r->cen_y;
				
				//bounding box of blob     
				int left   = (r->x1);
				int right  = (r->x2);
				int top    = (r->y1);
				int bottom = (r->y2);
				
				if(lensCorrEn)
				{
					//points with correction of lens distortion    
					CvPoint corrCenter = cvPoint((int)x,(int)y);
					correctLensDist(corrCenter,&corrCenter);
					
					CvPoint corrUpper= cvPoint(left,top);
					CvPoint corrLower = cvPoint(right,bottom);
					
					correctLensDist(corrUpper,&corrUpper);
					correctLensDist(corrLower,&corrLower);
					
					cvRectangle( imgDst, corrUpper, corrLower, cvScalar(c.red,c.green,c.blue),CV_FILLED);
					cvCircle(imgDst, corrCenter,1,cvScalarAll(0),CV_FILLED);
					cvPutText(imgDst,text,corrCenter,&laFont,cvScalarAll(255));
				}
				else
				{								
					cvRectangle( imgDst, cvPoint(left,top), cvPoint(right,bottom), cvScalar(c.red,c.green,c.blue),CV_FILLED);
					cvCircle(imgDst, cvPoint((int)x,(int)y),1,cvScalarAll(0),CV_FILLED);
					cvPutText(imgDst,text,cvPoint((int)x,(int)y),&laFont,cvScalarAll(255));
				}
				
			}
			ix++;

		}
	}	
}

bool vcCodec::correctLensDist(CvPoint pointSrc,CvPoint *pointDst)
{
	// Approximately compensates the radial lens distortion of pixel pointSrc
	// using the camera's intrinsic and distortion parameters, writing the
	// corrected pixel into *pointDst.  Returns false (leaving *pointDst
	// untouched) when calibration parameters are not available.
	float* distParams;
	float* intrinsicParams;
	
	distParams = getDistParams();
	intrinsicParams = getIntrinsicParams();
	
	
	if(distParams!=NULL && intrinsicParams!=NULL)
	{
		//principal point (image centre) from the intrinsic matrix
		float u0 = intrinsicParams[2];
		float v0 = intrinsicParams[5];
		//focal lengths f/dx and f/dy
		float fx = intrinsicParams[0];
		float fy = intrinsicParams[4];
		
		float x = (pointSrc.x-u0);
		float y = (pointSrc.y-v0);
		
		//now we have the coordinates related to the lens (normalized)
		x/=fx;
		y/=fy;
		
		//squared radial distance from the optical axis
		float r = x * x + y * y;  

		//first-order inverse of the radial distortion model; the factor is
		//identical for both axes, so compute it once (was computed twice)
		float radial = 1 + distParams[0]*r + distParams[1]*r*r;
		
		pointDst->x = (int)((x/radial)*fx + u0);
		pointDst->y = (int)((y/radial)*fy + v0);
		
		return true;
	}
	else
		return false;
	
}

void vcCodec::setCalibParams()
{
	// Loads previously saved camera calibration parameters from
	// "calibrationcv.ini" into mCalibFilter, caches them in the cameraParams
	// member, and dumps the intrinsic matrix and distortion coefficients to
	// stdout.  Success/failure is only reported on stdout.
	bool state = mCalibFilter->LoadCameraParams("calibrationcv.ini");
	
	cameraParams = (CvCamera *) mCalibFilter->GetCameraParams(0);
		if(cameraParams != 0){
			printf("Intrinsic %i:\n", 0);
			for(int i=0; i<3; ++i) {
				for(int j=0; j<3; ++j) {
					printf("%5.2f ",cameraParams->matrix[i*3 + j]);
				}
				printf( "\n");
			}
			
			// the 4 distortion coefficients, printed as a 2x2 table
			for(int i=0;i<2;i++)
			{
				for(int j=0;j<2;j++)
				{
					printf("%5.5f ",cameraParams->distortion[i*2 +j]);
				}
				printf("\n");
			}
		}
		
		
	
	if(!state)
		cout<<"No Success!"<<endl;
	else
		cout<<"Success!"<<endl;
	

}

void vcCodec::enableCalib(bool enable)
{
	// Toggle the calibration mode flag read elsewhere in the class.
	isCalibEnabled = enable;

}

double* vcCodec::getEtalonParams()
{
	// Raw pointer to the chessboard (etalon) parameter array used by
	// calibrateCam(); not a copy.
	return EtalonParams;
}
void vcCodec::setEtalonParams(double *params)
{
	// Stores the chessboard (etalon) parameter array for calibrateCam().
	// NOTE(review): only the pointer is kept — the caller must keep the array
	// alive; ownership is not taken.
	EtalonParams = params;
}


list<string> vcCodec::GetFilesInFolder(const char *pFoldername, const char* pPrefix, const char* pSuffix){
	// Returns the sorted entries of pFoldername whose names start with pPrefix
	// and end with pSuffix (either filter may be NULL to disable it).  Entries
	// beginning with '.' are skipped.  Each returned string is pFoldername
	// with the entry name appended, so pFoldername should normally end in '/'.
	DIR  *pDir;
	struct dirent *ptdirent;
	pDir = opendir(pFoldername);
	bool bMatch;
	list<string> vFileList;
	if (pDir)
	{
		while ((ptdirent = readdir(pDir)) != NULL)
		{
			bMatch = true;
			string strFilename(ptdirent->d_name);
			if(pPrefix != NULL){
				size_t nPrefix = strlen(pPrefix);
				// FIX: a name shorter than the prefix can never match; the old
				// test only rejected long-enough names that differed, letting
				// short names slip through the filter.
				if((strFilename.size() < nPrefix) || (strFilename.compare(0, nPrefix, pPrefix) != 0)){
					bMatch = false;
				}
			}
			if(pSuffix != NULL){
				size_t nSuffix = strlen(pSuffix);
				// FIX: same short-name bug as the prefix check above.
				if((strFilename.size() < nSuffix) || (strFilename.compare(strFilename.size()-nSuffix, nSuffix, pSuffix) != 0)){
					bMatch = false;
				}
			}
			if(strFilename.compare(0,1,".") == 0){
					bMatch = false;
			}
			if(bMatch){
				strFilename.insert(0, pFoldername);
				vFileList.push_back(strFilename);
			}
		}
		closedir(pDir);
	}
	vFileList.sort();
	// (the previous empty/non-empty branch returned the same list either way)
	return vFileList;
}


vector<string> vcCodec::getImageFiles (  )
{
	// Collects the calibration images ("calib-*.png") found in calibFilesPath,
	// echoing each path to stdout, and returns them as a vector.
	vector<string> imageFiles;
	list<string> found = GetFilesInFolder(calibFilesPath, "calib-", ".png");

	list<string>::iterator it;
	for ( it = found.begin(); it != found.end(); ++it )
	{
		printf("%s\n", it->c_str());
		imageFiles.push_back(*it);
	}
	return imageFiles;
}

void vcCodec::calibrateCam()
{
	int width=-1,height=-1;
	//Get the path of the images
	vector<string> files=getImageFiles ( );

	//Load imagesvcCodec::
	// Your should place first the left image and then the right image, eg, left0, right0, left1, right1, ..., leftn-1 , rightn-1

	IplImage **images=new IplImage*[files.size() ];
	for ( unsigned int i=0;i<files.size();i++ )
	{
		images[i]=cvLoadImage ( files[i].c_str() );
		assert ( images[i]!=NULL );
	//check all images have equal size
		if ( width==-1 ) {
			width=images[i]->width; 
			height=images[i]->height;
			cout<<width<<endl;
		}
		else {
			if( !(width==images[i]->width && height==images[i]->height ))
			{
				
				cout<<"Images are not of the same size!"<<endl;
				return;
			}
		}
	}
	
	//Configure calibration paramters
	//First, set the pattern information
	mCalibFilter->SetEtalon ( CV_CALIB_ETALON_CHESSBOARD, EtalonParams );
	//sets the number of cameras a calibrar, no se afectan los resultados si fuera solo una por una
	mCalibFilter->SetCameraCount ( 1 );
	//Sets the number of images avaialable
	mCalibFilter->SetFrames ( files.size() );
	

	for ( unsigned int i=0 ; i<files.size(); i++ )
	{
		mCalibFilter->FindEtalon ( images+i );//detect chessboard
		mCalibFilter->DrawPoints ( images+i ); //draw the detected points in the image
		mCalibFilter->Push(); //add the points to the set of points detected
	}
		

	if(!mCalibFilter->IsCalibrated())
	{
		cout<<"Calibration did not work. Try again with new images!"<<endl;
		return;
	}else
		cout<<"Camera Calibrated!"<<endl;
		
	
	cameraParams = (CvCamera *) mCalibFilter->GetCameraParams(0);
	if(cameraParams != 0){
		printf("Intrinsic %i:\n", 0);
		for(int i=0; i<3; ++i) {
			for(int j=0; j<3; ++j) {
				printf("%2.2f ",cameraParams->matrix[i*3 + j]);
			}
			printf( "\n");
		}
	}
	//Ok, now everithing is done. Save the results
	mCalibFilter->SaveCameraParams ( "calibrationcv.ini" );	
	
}

char* vcCodec::getImgDir()
{
	// Directory searched by getImageFiles() for calibration images.
	return calibFilesPath;
}

void vcCodec::setImgDir(char* dir)
{
	// Set the calibration-image directory.  NOTE(review): only the pointer is
	// stored — the caller must keep the string alive; ownership is not taken.
	calibFilesPath = dir;
}

float* vcCodec::getDistParams()
{
	// Lens distortion coefficients from the camera calibration.
	// NOTE(review): the real lookup is commented out, so this currently always
	// returns NULL — which makes correctLensDist() a no-op that returns false.
	/*
	if(cameraParams != NULL){
		return cameraParams->distortion;
	}
	else*/
		return NULL;
}

float* vcCodec::getIntrinsicParams()
{
	// Intrinsic camera matrix (row-major 3x3) from the camera calibration.
	// NOTE(review): the real lookup is commented out, so this currently always
	// returns NULL — which makes correctLensDist() a no-op that returns false.
	/*
	if(cameraParams != NULL){
		return cameraParams->matrix;
	}
	else*/
		return NULL;
}

/*
void vcCodec::applyAllColorsFilter(IplImage *imgSrc,IplImage* imgMsk, IplImage* imgColor, vcColor** colorList,int colorNumber,int colorCode)
{
	
	cvResetImageROI(imgSrc);
	
	IplImage* cvImg = cvCreateImage(cvGetSize(imgSrc),8,3);
	cvCopy(imgSrc,cvImg);
	//Converting to colorCode chosen for filter
	switch(colorCode)
	{
		case HLS_CODE:
			cvCvtColor(imgSrc,cvImg,CV_RGB2HLS);
			break;
		case HSV_CODE:
			cvCvtColor(imgSrc,cvImg,CV_RGB2HSV);
			break;
		case RGB_CODE:
			//nothing to do, already in RGB code!
			break;
	}
			
	
	IplImage* maskTemp = cvCreateImage(cvGetSize(cvImg),8,1);
	IplImage* maskOr = cvCreateImage(cvGetSize(cvImg),8,1);
	IplImage* colorTemp = cvCreateImage(cvGetSize(cvImg),8,3);
	
	vcColor* color;		
	
	for(int i=0;i<colorNumber;i++) 
	{
		color=colorList[i];
		if(i==0){
			cvInRangeS( cvImg,color->getLowUmbral(colorCode), color->getHighUmbral(colorCode), imgMsk );
			fillColor(imgMsk,imgColor,color);
			
		}
		else
		{
			cvInRangeS( cvImg,color->getLowUmbral(colorCode), color->getHighUmbral(colorCode), maskTemp );
			fillColor(maskTemp,colorTemp,color);
			cvNot(imgMsk,maskOr);
			cvOr(imgMsk,maskTemp,imgMsk,maskOr);
			cvXor(imgColor,colorTemp,imgColor);
		}
	
	}
	cvReleaseImage(&maskTemp);
	cvReleaseImage(&maskOr);
	
	cvReleaseImage(&colorTemp);
	
	cvReleaseImage(&cvImg);

}

void vcCodec::applyColorFilter(IplImage *imgSrc,IplImage* imgMsk,vcColor* color,int colorCode)
{
	
	cvResetImageROI(imgSrc);
	
	IplImage* cvImg = cvCreateImage(cvGetSize(imgSrc),8,3);
	cvCopy(imgSrc,cvImg);
	//Converting to colorCode chosen for filter
	switch(colorCode)
	{
		case HLS_CODE:
			cvCvtColor(imgSrc,cvImg,CV_RGB2HLS);
			break;
		case HSV_CODE:
			cvCvtColor(imgSrc,cvImg,CV_RGB2HSV);
			break;
		case RGB_CODE:
			//nothing to do, already in RGB code!
			break;
	}
			
	
	
	
	cvInRangeS( cvImg,color->getLowUmbral(colorCode), color->getHighUmbral(colorCode), imgMsk );
	
	cvReleaseImage(&cvImg);

}
	
void vcCodec::fillColor(IplImage* imgMsk,IplImage* imgColor,vcColor* color)
{
	cvSetZero(imgColor);
	cvSet(imgColor,color->getMeanValue(RGB_CODE),imgMsk);	
}	 
	
void vcCodec::findColor(IplImage *imgMsk, IplImage* imgDst,int threshold, int minArea, int maxArea)	
{	//cvShowImage("smooth", imgCpy1);
	CBlobResult blobs;
	blobs = CBlobResult( imgMsk, NULL, threshold, false );
	
	//cvSet(imgDst,cvScalar(0,0,0));
	//cvSet(imgDst,color->getMeanValue(RGB_CODE),imgMsk);
	
	int i;
	// delare a single blob
	CBlob Blob;
	// some varscvReleaseImage(&imgCv);
	blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_GREATER, minArea );// area <150
	blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_LESS, maxArea );// area <150
	int iMaxx, iMinx, iMaxy, iMiny;
	// for each blob
	for  (i=0; i<blobs.GetNumBlobs(); ++i)
	{
		// get the blob info
		Blob = blobs.GetBlob(i);
		// get max, and min co-ordinates
		CBlobGetXCenter getXc;
		CBlobGetYCenter getYc;
		double blobCentre[2];
		blobCentre[0] = getXc(Blob);
		blobCentre[1] = getYc(Blob);
		//printf("\n\t\tCentre: (%.0f,%.0f)", blobCentre[0], blobCentre[1]);
		
		
		iMaxx=(int)Blob.MaxX();
		iMinx=(int)Blob.MinX();
		iMaxy=(int)Blob.MaxY();
		iMiny=(int)Blob.MinY();
		
		
		// mark centre
		cvLine( imgDst, cvPoint((int)blobCentre[0], (int)blobCentre[1]), cvPoint((int)blobCentre[0], (int)blobCentre[1]), CV_RGB(50, 50 , 50), 4, 8, 0 );
		
		// mark box around blob
		cvRectangle( imgDst, cvPoint(iMinx , iMiny ), cvPoint ( iMaxx, iMaxy ), CV_RGB(150, 150, 150), 1, 8, 0);
		// print the blob centres
		//printf("\n%d, X: %d, Y: %d\n", i, iMeanx, iMeany);
	}// each blob
	 

	
}

void vcCodec::removeBckGndGRAY(IplImage *imgSrc,IplImage *imgDst,int lowGray, int highGray,int colorCode)
{

	cvResetImageROI(imgSrc);
	
	
	IplImage* cvImg = cvCreateImage(cvGetSize(imgSrc),8,3);
	
	cvCopy(imgSrc,cvImg);
	//Converting to RGB
	switch(colorCode)
	{
		case HLS_CODE:
			cvCvtColor(imgSrc,cvImg,CV_HLS2RGB);
			break;
		case HSV_CODE:
			cvCvtColor(imgSrc,cvImg,CV_HSV2RGB);
			break;
		case RGB_CODE:
			//nothing to do, already in RGB code!
			break;
	}

	int height = cvGetDimSize(cvImg,0);
	int width = cvGetDimSize(cvImg,1);
	
	for(int i=0;i<height;i++)
		{
			for(int j=0;j<width;j++)
			{
				
				CvScalar val = cvGet2D(cvImg,i,j);
				
				double R =val.val[0];
				double G =val.val[1];
				double B =val.val[2];
				
				
				double comp1 = fabs(R-G);
				double comp2 = fabs(G-B);
				
				if(R>lowGray && G>lowGray && B>lowGray && R<highGray && G<highGray && B<highGray) 
				{
					if(comp1<CMP_DIF && comp2<CMP_DIF){ 
						cvSet2D(cvImg,i,j,cvScalar(0,0,0));
					}
				}
				
			}
			
		}
	cvCopy(cvImg,imgDst);
	cvReleaseImage(&cvImg);

}
*/
