/*
Finger Paint Program written for the IEEE student Chapter of UIUC.
We are using this program to demo our multi-touch screen using Total Internal Reflection
This test program takes video input of an FTIR touchscreen, processes it, and creates
green 'paint' wherever fingers are located. 
Erik Johnson, 2/20/2008

Use: Run OpenCVTest.exe
Watch output from input video file
Press 'b' to capture the background and get a clearer image
press 'c' to clear the display
*/

//include files
#include <math.h>
#include <stdio.h>

#include "cv.h"
#include "highgui.h"
#include "BlobResult.h"


//Global Variable declarations

CBlobResult blobs; //Results of blob detection from BlobResult.cpp
IplImage* display; //Display buffer of 800 by 600 pixels

int displayHeight=600; //Display characteristics- Modify to change display height
int displayWidth=800; //Display chracterirstics- Modify to change dispaly width

//No need to define cicle image, we are using cvDrawCircle
//IplImage* circle;
//int circleSize=30;

//threshold values
int upperthreshold=240; //upper limit of brightness to be filtered out- max=255
int lowerthreshold=70;  //lower limit of brightness to be filtered out- min=0

IplImage *background; //background image that will be used for background subtraction

//Rolling average background is not being used- just static background subtraction
/*
//size of background buffer
int backgroundSize=50;
IplImage *background [50];
//average frame
IplImage *avgFrame;
int lastFrameIndex;
*/

/*Program State
 * 1= Configuration/Initialization
 * 2= Main loop
 * negative number= Debug
 Starting State is 1*/ 

int state=1;

//This function is run during the initialization of the camera, to set up screen boundaries
//It displays four dots, one at the top, bottom, left and right of the screen
//the user presses the screen at the projected locations, and this allows mapping from the
//camera image to the output screen
/*void configure()
{}*/

//The Background variable stores the last frames of the image (see backgroundSize)
//each time a new frame comes in, it replaces the oldest frame
//the new background is averaged over the stored frames to get an average background
//the average background is then subtracted from the current frame and the new frame is returned
/*
IplImage* backgroundSubtraction (IplImage* currentFrame)
{
	IplImage *subtractedFrame;
	IplImage *currentBackground;
	int currentHeight, currentWidth, currentStep, currentChannels;
	uchar *currentData;
	uchar *subtractedData;
	uchar *avgData;
	uchar *lastFrameData;
	uchar *currentBackgroundData;
	double subtractionData1, subtractionData2;
			
	int i,j,k;
	
	//current frame data
	currentHeight    = currentFrame->height;
	currentWidth     = currentFrame->width;
	currentStep      = currentFrame->widthStep;
	currentChannels  = currentFrame->nChannels;
	currentData      = (uchar *)currentFrame->imageData;
	//subtracted frame data
	subtractedFrame= cvCreateImage(cvSize(currentWidth,currentHeight),IPL_DEPTH_8U,3);
	subtractedData      = (uchar *)subtractedFrame->imageData;
	//average frame data
	avgData      = (uchar *)avgFrame->imageData;
	lastFrameData = (uchar *)background[lastFrameIndex]->imageData;
	
	//subtract background
	for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	{
		//if (currentData[i*currentStep+j*currentChannels+k]-avgData[i*currentStep+j*currentChannels+k]>0)
			if (currentData[i*currentStep+j*currentChannels+k]-50>=0)
		    //subtractedData[i*currentStep+j*currentChannels+k]=currentData[i*currentStep+j*currentChannels+k]-avgData[i*currentStep+j*currentChannels+k];
				subtractedData[i*currentStep+j*currentChannels+k]=currentData[i*currentStep+j*currentChannels+k]-50;
		else
			subtractedData[i*currentStep+j*currentChannels+k]=255;
	}
	
	background[lastFrameIndex]=currentFrame;
	if (lastFrameIndex<backgroundSize)
	{
		lastFrameIndex++;
	
	}
	else
	{
		lastFrameIndex=0;
	}
	for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	{
		
		subtractionData1=lastFrameData[i*currentStep+j*currentChannels+k]/(double)backgroundSize;
		subtractionData2=avgData[i*currentStep+j*currentChannels+k];
		avgData[i*currentStep+j*currentChannels+k]=subtractionData2-subtractionData1;//NOTE:a lot of resolution is probably lost here by dividing by 100- use floats?
		
		subtractionData1=currentData[i*currentStep+j*currentChannels+k]/(double)backgroundSize;
		subtractionData2=avgData[i*currentStep+j*currentChannels+k];
		avgData[i*currentStep+j*currentChannels+k]=subtractionData2+subtractionData1;//NOTE:a lot of resolution is probably lost here by dividing by 100- use floats?
	
		
		int sum=0;
		int value=0;
		for (int b=0; b<backgroundSize; b++)
		{
			currentBackgroundData      = (uchar *)background[b]->imageData;
			sum=sum+currentBackgroundData[i*currentStep+j*currentChannels+k];
		}
		value=sum/backgroundSize;
		avgData[i*currentStep+j*currentChannels+k]=value;
	}

	return subtractedFrame;
}

void backgroundInitialization(IplImage *blankFrame)
{
	int blankHeight, blankWidth, blankStep, blankChannels;
		uchar *blankData;
		uchar *avgData;
		
		int i,j,k;
		
		//get circle buffer data
		blankHeight    = blankFrame->height;
			     blankWidth     = blankFrame->width;
			     blankStep      = blankFrame->widthStep;
			     blankChannels  = blankFrame->nChannels;
			     blankData      = (uchar *)blankFrame->imageData;
		for(i=0;i<blankHeight;i++) for(j=0;j<blankWidth;j++) for(k=0;k<blankChannels;k++)
	      blankData[i*blankStep+j*blankChannels+k]=0;
		
		for(i=0; i<backgroundSize; i++)
		{
			background[i]=blankFrame;
		}	
		lastFrameIndex=0;
		avgFrame= cvCreateImage(cvSize(blankWidth,blankHeight),IPL_DEPTH_8U,3);
		avgData      = (uchar *)avgFrame->imageData;
		for(i=0;i<blankHeight;i++) for(j=0;j<blankWidth;j++) for(k=0;k<blankChannels;k++)
			      avgData[i*blankStep+j*blankChannels+k]=0;
}
*/


//This function recieves the current frame and applies a threshold filter
//values below a certain intensity value are set to zero, values above are set to one (255)
//There is also an upper threshold to filter out bright lights or sunlight
IplImage* thresholdFilter (IplImage* currentFrame)
{
	int i,j,k;
	int currentHeight, currentWidth, currentStep, currentChannels;
	uchar *currentData;
			
	currentHeight    = currentFrame->height;
	currentWidth     = currentFrame->width;
	currentStep      = currentFrame->widthStep;
	currentChannels  = currentFrame->nChannels;
	currentData      = (uchar *)currentFrame->imageData;
			     
	for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	{
		//When the pixel is between the two threshold values, set it to white
		if (currentData[i*currentStep+j*currentChannels+k]<upperthreshold && currentData[i*currentStep+j*currentChannels+k]>lowerthreshold)
			currentData[i*currentStep+j*currentChannels+k]=255;
		else //when the pixel is not between the two values, set it to black
			currentData[i*currentStep+j*currentChannels+k]=0;
	}
	return currentFrame;
}

//This function manages background subtraction. The background image is subtracted
//pixel by pixel from the current image. The subtracted image is then return
IplImage* backgroundSubtraction (IplImage* currentFrame)
{
	int i,j,k;
	int currentHeight, currentWidth, currentStep, currentChannels;
		uchar *currentData;
		uchar *backgroundData;
	currentHeight    = currentFrame->height;
		     currentWidth     = currentFrame->width;
		     currentStep      = currentFrame->widthStep;
		     currentChannels  = currentFrame->nChannels;
		     currentData      = (uchar *)currentFrame->imageData;
	backgroundData = (uchar *)background->imageData;
	
	IplImage *subtractedFrame;
	subtractedFrame= cvCreateImage(cvSize(currentWidth,currentHeight),IPL_DEPTH_8U,3);
	uchar *subtractedData= (uchar *)subtractedFrame->imageData;
	
	 for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	 {
		 //If the values of the current pixel minus the background pixel is greater than 0, set the subtracted image to that pixel
		 if (currentData[i*currentStep+j*currentChannels+k]-backgroundData[i*currentStep+j*currentChannels+k]>=0)
			 subtractedData[i*currentStep+j*currentChannels+k]=currentData[i*currentStep+j*currentChannels+k]-backgroundData[i*currentStep+j*currentChannels+k];
		 //Otherwise, set it to zero
		 else
			 subtractedData[i*currentStep+j*currentChannels+k]=0;
	 }
	 return subtractedFrame;
}


//using cvDrawCircle, these functions are not needed
/*
//Initializes Circle buffer so we don't waste time drawing circles all the time
void initializeCircle()
{
	int circleHeight, circleWidth, circleStep, circleChannels;
	uchar *circleData;
	
	int i,j,k;
	
	//get circle buffer data
	circleHeight    = circle->height;
		     circleWidth     = circle->width;
		     circleStep      = circle->widthStep;
		     circleChannels  = circle->nChannels;
		     circleData      = (uchar *)circle->imageData;
	int circleRadius=circleSize/2;
    for(i=0;i<circleHeight;i++) for(j=0;j<circleWidth;j++) for(k=0;k<circleChannels;k++)
    {
      if (((i-circleRadius)*(i-circleRadius)+(j-circleRadius)*(j-circleRadius))<circleRadius*circleRadius)
      {  
    	  circleData[i*circleStep+j*circleChannels+k]=1;
      }
      else
      {
    	  circleData[i*circleStep+j*circleChannels+k]=0;
      }
    }  
}

//draws a circle with the desired color at the desired xpos and ypos
//the desired pos is at the upper left corner of the circle to be drawn
//color is added to the display buffer, not overwritten
void drawCircle (int red, int blue, int green, int xpos, int ypos)
{
	//circle data values
	int circleHeight, circleWidth, circleStep, circleChannels;
	uchar *circleData;
	
	//display data values
		int displayHeight, displayWidth, displayStep, displayChannels;
		uchar *displayData;
	
	//navigation indexes
	int i,j;
	int displayIndex, circleIndex;
	
	//get circle buffer data
	circleHeight    = circle->height;
		     circleWidth     = circle->width;
		     circleStep      = circle->widthStep;
		     circleChannels  = circle->nChannels;
		     circleData      = (uchar *)circle->imageData;
	
	displayHeight    = display->height;
	     displayWidth     = display->width;
	     displayStep      = display->widthStep;
	     displayChannels  = display->nChannels;
	     displayData      = (uchar *)display->imageData;
	//fill channels of display with circle value times the red, blue and green multipliers
	for(i=0;i<circleHeight;i++) for(j=0;j<circleWidth;j++)
	{
	          displayIndex=(i+ypos)*displayStep+(j+xpos)*displayChannels;
	          circleIndex=i*circleStep+j*circleChannels;
	          if(circleData[circleIndex]==1)
	          {	  
	        	  	displayData[displayIndex]=blue;
	        	  	displayData[displayIndex+1]=green;
	        	  	displayData[displayIndex+2]=red;
	          }
	}
}
*/
int distance(int x1, int x2, int y1, int y2)
{
	int xdistance=x1-x2;
	int ydistance=y1-y2;
	xdistance=xdistance*xdistance;
	ydistance=ydistance*ydistance;
	return sqrt(xdistance+ydistance);
}


// MAIN
int main() {

	//display text variables
	CvFont font;
	double hscale=1.0;
	double vscale=1.0;
	int lineWidth=1;
	cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX|CV_FONT_ITALIC, hscale, vscale,0,lineWidth);
	
	//Configuration variables
	int centerx, centery;
	int upperleftx, upperlefty;
	int upperrightx, upperrighty;
	int lowerleftx, lowerlefty;
	int lowerrightx, lowerrighty;
	

	
	int configstate=0;
	
	//keyboard stroke
	int key;
	
	//variables for navigating through images
	 int height,width,step,channels;
		  uchar *data;
		  
	//variables for navigating through the display image
	int displayStep,displayChannels;
		  uchar *displayData;		  

		  //loop variables
		  int i,j,k;
	
	//variables for tracking blob centers
	double x, y;
		  

	//framebuffer for backgroundsubtraction
	//rectified frame for threshold filter
	IplImage *subtractedFrame;
	IplImage *rectifiedFrame;
		  
	//
	
	//Display output as 800x600 8 bit three channel image	  
	display= cvCreateImage(cvSize(displayWidth,displayHeight),IPL_DEPTH_8U,3);
	//gather information about display image
	     displayStep      = display->widthStep;
	     displayChannels  = display->nChannels;
	     displayData      = (uchar *)display->imageData;
	
	//initalize display to white backgound (255)
    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
      displayData[i*displayStep+j*displayChannels+k]=255;
   
    
	//CHANGE THIS FOR USING THE CAMERA, THIS USES TEST VIDEO
    CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
    if( !capture ) {
      fprintf( stderr, "ERROR: capture is NULL \n" );
      getchar();
      return -1;
    }
//initialize background
      //Grab one test frame
      // Get one frame
      IplImage* frame = cvQueryFrame( capture );
      if( !frame ) {
        fprintf( stderr, "ERROR: frame is null...\n" );
        getchar();
      }
      //get data about the test frame
      height    = frame->height;
       width     = frame->width;
       step      = frame->widthStep;
       channels  = frame->nChannels;
       data      = (uchar *)frame->imageData;
      
      //create a blank background for intitialization 
      background= cvCreateImage(cvSize(width,height),IPL_DEPTH_8U,channels);
      uchar *backgroundData=(uchar *)background->imageData;
      for(i=0;i<height;i++) for(j=0;j<width;j++) for(k=0;k<3;k++)
            backgroundData[i*step+j*channels+k]=0;      
 
      
  // Create a window in which the captured images will be presented
  cvNamedWindow( "FingerPaint", CV_WINDOW_AUTOSIZE );

  // Show the image captured from the camera in the window and repeat
  while( 1 ) {
    // Get one frame
    IplImage* frame = cvQueryFrame( capture );
    if( !frame ) {
      fprintf( stderr, "ERROR: frame is null...\n" );
      getchar();
      break;
    }
	//subtract background
	subtractedFrame=backgroundSubtraction(frame);
	//create rectified image to recieve filter data. Note that it is grayscale (1 channel) this is necessary for blobdetect
	rectifiedFrame= cvCreateImage(cvSize(width,height),IPL_DEPTH_8U,1);

	
	
	//convert subtractedframe to grayscale
	cvCvtColor(subtractedFrame, rectifiedFrame, CV_BGR2GRAY);

	//filter rectified image
	rectifiedFrame=thresholdFilter(rectifiedFrame);
	// Do not release the frame!

	cvFlip(rectifiedFrame, rectifiedFrame,-1);
	
	//find blobs in rectified frame
	blobs = CBlobResult( rectifiedFrame, NULL, 250, true );

	//only include blobs with area less than 100
	blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_LESS, 15 );
	//include any blobs with area greater than 1
	blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_GREATER, 5 );
	
    if (state==1)//initialization state. 
    {
    	if (configstate==0)
    	{
    		cvPutText(display, "Welcome to FingerPaint", cvPoint (displayWidth/2-200, displayHeight/2-50),&font,cvScalar(0,0,0));
    		cvPutText(display, "Please press 's' to start configuring the program", cvPoint (displayWidth/2-200, displayHeight/2+50),&font,cvScalar(0,0,0));
    		//cvCircle(display, cvPoint(displayWidth/2,displayHeight/2), 10, cvScalar(0,0,255),-1);
        	cvShowImage( "FingerPaint", display );   
        	//Navigate through the blobs
    	}
    	//upper left
    	else if (configstate==1)
    	    	{
    	    		cvCircle(display, cvPoint(10,10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display);
    	        	CBlob currentBlob;     
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        		if (currentBlob.Area()<20)
    	        		{
    	        	    upperleftx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    upperlefty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    configstate=2;
    	        	    displayStep      = display->widthStep;
    	        	    displayChannels  = display->nChannels;
    	        	    displayData      = (uchar *)display->imageData;
    	        	       	        	       		
    	        	    //initalize display to white backgound (255)
    	        	    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	    	displayData[i*displayStep+j*displayChannels+k]=255;
    	        		}
    	        	}
    	    	}
    	//upperright
    	else if (configstate==2)
    	    	{
    	    		cvCircle(display, cvPoint(displayWidth-10,10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display );
    	        	CBlob currentBlob;     
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        	    upperrightx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    upperrighty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	   
    	        	    if (distance (upperrightx,upperleftx,upperrighty,upperlefty)>50)
    	        	    {
    	        	    	configstate=3;
    	        	        displayStep      = display->widthStep;
    	        	        displayChannels  = display->nChannels;
    	        	       	displayData      = (uchar *)display->imageData;
    	        	       		
    	        	        //initalize display to white backgound (255)
    	        	       	for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	       	    displayData[i*displayStep+j*displayChannels+k]=255;
    	        	    }
    	        	}
    	    	}
    	//lowerleft
    	else if (configstate==3)
    	    	{
    	    		cvCircle(display, cvPoint(10,displayHeight-10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display );
    	        	CBlob currentBlob;     
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        	    lowerleftx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    lowerlefty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    if (distance (lowerleftx,upperleftx,lowerlefty,upperlefty)>50 && distance(lowerleftx,upperrightx,lowerlefty,upperrighty)>50)
    	        	    {
    	        	        configstate=4;
    	        	        displayStep      = display->widthStep;
    	        	        displayChannels  = display->nChannels;
    	        	        displayData      = (uchar *)display->imageData;
    	        	        	        	       		
    	        	        //initalize display to white backgound (255)
    	        	        for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	            displayData[i*displayStep+j*displayChannels+k]=255;
    	        	        }
    	        	}
    	    	}
    	//lower right
    	else if (configstate==4)
    	    	{
    	    		cvCircle(display, cvPoint(displayWidth-10,displayHeight-10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display );
    	        	CBlob currentBlob;     
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        	    lowerrightx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    lowerrighty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	   
    	
    	        	    if (distance (lowerrightx,upperleftx,lowerrighty,upperlefty)>50 && distance(lowerrightx,upperrightx,lowerrighty,upperrighty)>50 && distance(lowerrightx,lowerleftx,lowerrighty,lowerlefty)>50)
    	        	    {
    	        	    	state=2;
    	        	    	displayStep      = display->widthStep;
    	        	    	displayChannels  = display->nChannels;
    	        	    	displayData      = (uchar *)display->imageData;
    	        	    	    	        	       		
    	        	    	//initalize display to white backgound (255)
    	        	    	for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	    	displayData[i*displayStep+j*displayChannels+k]=255;
    	        	     }
    	        	    
    	        	}
    	    	}
    }
    else if (state==2)//main program state
    {
    	//blobs.PrintBlobs("filteredblobs.txt");

    	//calculate display/camera ratio
    	double ratio=displayWidth/width;
    	double displayLocationX=0;
    	double displayLocationY=0;
    	
    	CBlob currentBlob;
    
    	// from the filtered blobs, get the blob with biggest perimeter
    	//blobs.GetNthBlob( CBlobGetPerimeter(), 0, blobWithBiggestPerimeter );
     
    	//Navigate through the blobs
    	for (i=0;i<blobs.GetNumBlobs();i++)
    	{
    		//get next blob
    		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    		//find position of next blob
    		x=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    		//find y position
    		y=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    		//Draw a circle at that x and y point
    		//First transform camera coordinates into display coordinates
    		displayLocationX=x-upperleftx;
    		displayLocationY=y-upperlefty;
    		//scale it up for display
    		displayLocationX=displayLocationX*ratio;
    		displayLocationY=displayLocationY*ratio;
    		//currentBlob.FillBlob(display,cvScalar(0,255,0),upperleftx,upperlefty);
    		cvCircle(display, cvPoint(x,y), 10, cvScalar(0,255,0),-1);
    	}
    	//show the display image. This can be changed to view different stages of the output
    	cvShowImage( "FingerPaint", display );

    }
    else// debug state
    {
    	cvShowImage( "FingerPaint", rectifiedFrame );
    }
    	//wait for key input
    key=cvWaitKey(10);
    if (key=='b')//capture background image
    {
    	data      = (uchar *)frame->imageData;
    	backgroundData      = (uchar *)background->imageData;
   		for(i=0;i<height;i++) for(j=0;j<width;j++) for(k=0;k<3;k++)
   			backgroundData[i*step+j*channels+k]=data[i*step+j*channels+k];
    }
    else if (key=='c') //clear background
    {
    		     displayStep      = display->widthStep;
    		     displayChannels  = display->nChannels;
    		     displayData      = (uchar *)display->imageData;
    		
    		//initalize display to white backgound (255)
    	    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	      displayData[i*displayStep+j*displayChannels+k]=255;
    }
    else if (key=='d') //debug state
    {
       	state=-1*state;//inverting state will toggle into and out of debug mode
    }
    else if (key=='s') //start configuration
    {
           	configstate=1;//inverting state will toggle into and out of debug mode
    }
   
	if(frame)
		delete frame;
	
    //If ESC key pressed, Key=0x10001B under OpenCV 0.9.7(linux version),
    //remove higher bits using AND operator
    if( (cvWaitKey(10) & 255) == 27 ) break;
    
	if(frame)
		delete frame;
  }

  if(frame)
	  delete frame;
  if(subtractedFrame)
	delete subractedFrame;
  if(rectifiedFrame)
	delete rectifiedFrame;
  if(displayData)
	 delete displayData;
  if(data)
	 delete data;

  
  // Release the capture device housekeeping
  cvReleaseCapture( &capture );
  cvDestroyWindow( "FingerPaint" );
  
  if(capture)
	 delete capture;
  
  return 0;
}
