/*
Finger Paint Program written for the IEEE student Chapter of UIUC.
We are using this program to demo our multi-touch screen using Frustrated Total Internal Reflection (FTIR)
This test program takes video input of an FTIR touchscreen, processes it, and creates
green 'paint' wherever fingers are located. 
Erik Johnson, 2/20/2008

Use: Run OpenCVTest.exe
A window will open with a white background, containing some text.
This initial state waits for the user to input 's'.
When the user inputs 's', four red circles will appear in succession. The user must touch these circles in order
for the program to synch the display and the camera.

Once this is done, the user is free to draw.
The default color is black, and by touching the colored squares in the upper right, the user can select different colors

During program operation, the user can hit the following keyboard inputs

Press 'd' for debug mode, which will display the filter output image
Press 'b' to capture the background and get a clearer image
press 'c' to clear the display

VERSION 2.0- Color Selection Added, old code removed
*/

//include files
#include "cv.h"
#include "highgui.h"
#include <stdio.h>
#include <math.h>
#include "BlobResult.h"

#define PI 3.14159265


//Global Variable declarations

CBlobResult blobs; //Results of blob detection from BlobResult.cpp
CBlob currentBlob; //temporary current blob variable for tracking through blobs
IplImage* display; //Display buffer of 800 by 600 pixels (the paint canvas shown to the user)

int displayHeight=600; //Display characteristics- Modify to change display height
int displayWidth=800; //Display characteristics- Modify to change display width

//threshold values used by thresholdFilter()
int upperthreshold=255; //upper limit of brightness to be filtered out- max=255
int lowerthreshold=70;  //lower limit of brightness to be filtered out- min=0

IplImage *background; //background image, captured with the 'b' key and subtracted from every frame
/*Program State
 * 1= Configuration/Initialization
 * 2= Main loop
 * negative number= Debug (the 'd' key negates state to toggle debug on/off)
 Starting State is 1*/

int state=1;

//This function receives the current frame and applies a band threshold, in place.
//Pixels strictly between lowerthreshold and upperthreshold (file globals) become
//white (255); everything else — too dark, or bright lights/sunlight above the
//upper limit — becomes black (0). Returns the same image that was passed in.
IplImage* thresholdFilter (IplImage* currentFrame)
{
	uchar *pixels = (uchar *)currentFrame->imageData;
	int rows  = currentFrame->height;
	int cols  = currentFrame->width;
	int step  = currentFrame->widthStep;
	int chans = currentFrame->nChannels;

	for (int row = 0; row < rows; row++)
	{
		//widthStep may include row padding, so advance by step rather than cols*chans
		uchar *rowStart = pixels + row * step;
		for (int idx = 0; idx < cols * chans; idx++)
		{
			uchar v = rowStart[idx];
			//keep only mid-range intensities: white inside the band, black outside
			rowStart[idx] = (v > lowerthreshold && v < upperthreshold) ? 255 : 0;
		}
	}
	return currentFrame;
}

//This function manages background subtraction. The stored background image is
//subtracted pixel by pixel from the current frame; differences that would be
//negative are clamped to zero. Returns a newly allocated image holding the
//result — the CALLER owns it and must free it with cvReleaseImage.
IplImage* backgroundSubtraction (IplImage* currentFrame)
{
	int i,j,k;
	int currentHeight, currentWidth, currentStep, currentChannels;
	uchar *currentData;
	uchar *backgroundData;

	currentHeight    = currentFrame->height;
	currentWidth     = currentFrame->width;
	currentStep      = currentFrame->widthStep;
	currentChannels  = currentFrame->nChannels;
	currentData      = (uchar *)currentFrame->imageData;
	backgroundData   = (uchar *)background->imageData;

	//BUG FIX: was "cvCrcceateImage", which does not exist and broke the build.
	//Also allocate with the frame's own channel count instead of hard-coding 3,
	//since the loop below indexes the result with currentChannels.
	IplImage *subtractedFrame;
	subtractedFrame = cvCreateImage(cvSize(currentWidth,currentHeight),IPL_DEPTH_8U,currentChannels);
	uchar *subtractedData = (uchar *)subtractedFrame->imageData;

	for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	{
		//uchar operands promote to int, so the difference can legitimately be negative
		int diff = currentData[i*currentStep+j*currentChannels+k]
		         - backgroundData[i*currentStep+j*currentChannels+k];
		//clamp negative differences to zero
		subtractedData[i*currentStep+j*currentChannels+k] = (diff >= 0) ? (uchar)diff : 0;
	}
	return subtractedFrame;
}

//Euclidean distance between the points (x1,y1) and (x2,y2).
//Note the argument order: both x coordinates first, then both y coordinates.
double distance(double x1, double x2, double y1, double y2)
{
	double dx = x1 - x2;
	double dy = y1 - y2;
	return sqrt(dx * dx + dy * dy);
}


// MAIN
int main() {

	//color values, initialized to black
	int blue=0;
	int green=0;
	int red=0;
	
	//display text variables
	CvFont font, smallFont;
	double hscale=1.0;
	double vscale=1.0;
	int lineWidth=1;
	cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX|CV_FONT_ITALIC, hscale, vscale,0,lineWidth);
	cvInitFont(&smallFont, CV_FONT_HERSHEY_SIMPLEX, .5*hscale, .5*vscale,0,lineWidth);
	
	//Configuration variables- these will be filled in during the config state to synch the display and camera
	double upperleftx, upperlefty;
	double upperrightx, upperrighty;
	double lowerleftx, lowerlefty;
	double lowerrightx, lowerrighty;
	
	//configstate
	//0 is waiting for the user to begin configuration
	//1 is determining the upper left corner
	//2 is determining the upper right corner
	//3 is determining the lower left corner
	//4 is the final state and will determine the lower right corner then proceed immediately to the main state
	int configstate=0;
	
	//keyboard stroke
	int key;
	
	//variables for navigating through images
	 int height,width,step,channels;
		  uchar *data;
		  
	//variables for navigating through the display image
	int displayStep,displayChannels;
		  uchar *displayData;		  

	//loop variables
	int i,j,k;
	
	//variables for tracking blob centers
	double x, y;

	//variables for mapping camera blob locations to the display screen
	//Beta measures the angle by which the active camera area is rotated from the screen
	//Theta measures the angle by which the current touch is rotated from the screen
	//vertRatio and horRation measure the number of display pixels to camera pixels in the vertical and horizontal directions.
	double angleBeta;
	double angleTheta;
	double radius;
	double vertRatio;
	double horRatio;
	double displayLocationX;
	double displayLocationY;

	//framebuffer for backgroundsubtraction
	//rectified frame for threshold filter
	IplImage *subtractedFrame;
	IplImage *rectifiedFrame;
	
	//Display output as 800x600 8 bit three channel image	  
	display= cvCreateImage(cvSize(displayWidth,displayHeight),IPL_DEPTH_8U,3);
	//gather information about display image
	     displayStep      = display->widthStep;
	     displayChannels  = display->nChannels;
	     displayData      = (uchar *)display->imageData;
	
	//initalize display to white backgound (255)
    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
      displayData[i*displayStep+j*displayChannels+k]=255;
   
    
	//CHANGE THIS FOR USING THE CAMERA, THIS USES TEST VIDEO
    CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
    if( !capture ) {
      fprintf( stderr, "ERROR: capture is NULL \n" );
      getchar();
      return -1;
    }
//initialize background
      //Grab one test frame
      // Get one frame
      IplImage* frame = cvQueryFrame( capture );
      if( !frame ) {
        fprintf( stderr, "ERROR: frame is null...\n" );
        getchar();
      }
      //get data about the test frame
      height    = frame->height;
       width     = frame->width;
       step      = frame->widthStep;
       channels  = frame->nChannels;
       data      = (uchar *)frame->imageData;
      
      //create a blank background for intitialization 
      background= cvCreateImage(cvSize(width,height),IPL_DEPTH_8U,channels);
      uchar *backgroundData=(uchar *)background->imageData;
      for(i=0;i<height;i++) for(j=0;j<width;j++) for(k=0;k<3;k++)
            backgroundData[i*step+j*channels+k]=0;      
 
      //initialize rectified frame as a grayscale image (1 channel) with height and width equal to the camera
      //it must be grayscale for use with blob detect
  	rectifiedFrame= cvCreateImage(cvSize(width,height),IPL_DEPTH_8U,1);
      
      
  // Create a window in which the captured images will be presented
  cvNamedWindow( "FingerPaint", CV_WINDOW_AUTOSIZE );
  cvNamedWindow( "Camera", CV_WINDOW_AUTOSIZE );
  
  // Show the image captured from the camera in the window and repeat
  while( 1 ) {
    // Get one frame
    IplImage* frame = cvQueryFrame( capture );
    if( !frame ) {
      fprintf( stderr, "ERROR: frame is null...\n" );
      getchar();
      break;
    }
	//subtract background
	subtractedFrame=backgroundSubtraction(frame);
	
	//convert subtractedframe to grayscale
	cvCvtColor(subtractedFrame, rectifiedFrame, CV_BGR2GRAY);

	
	//filter rectified image
	rectifiedFrame=thresholdFilter(rectifiedFrame);

	cvFlip(rectifiedFrame, rectifiedFrame,-1);
	
	//find blobs in rectified frame
	blobs = CBlobResult( rectifiedFrame, NULL, 250, true );

	//only include blobs with area less than 25
	blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_LESS, 100 );
	//include any blobs with area greater than 5
	blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_GREATER, 10 );
	
    if (state==1)//initialization state. 
    {
    	//Wait for user to begin program by pressing 's'
    	if (configstate==0)
    	{
    		cvPutText(display, "Welcome to FingerPaint", cvPoint (displayWidth/2-200, displayHeight/2-50),&font,cvScalar(0,0,0));
    		cvPutText(display, "Please press 's' to start configuring the program", cvPoint (displayWidth/2-200, displayHeight/2+50),&font,cvScalar(0,0,0));
        	cvShowImage( "FingerPaint", display );   
    	}
    	//configure upper left corner
    	else if (configstate==1)
    	    	{
    	    		cvCircle(display, cvPoint(10,10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display);     
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        		if (currentBlob.Area()<20)
    	        		{
    	        	    upperleftx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    upperlefty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    configstate=2;
    	        	    //initalize display to white backgound (255)
    	        	    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	    	displayData[i*displayStep+j*displayChannels+k]=255;
    	        		}
    	        	}
    	    	}
    	//configure upperright corner
    	else if (configstate==2)
    	    	{
    	    		cvCircle(display, cvPoint(displayWidth-10,10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display );     
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        	    upperrightx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    upperrighty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	   
    	        	    //make sure it doesn't register the upperleft corner as the upperright corner
    	        	    if (distance (upperrightx,upperleftx,upperrighty,upperlefty)>50)
    	        	    {
    	        	    	configstate=3;
    	        	        //initalize display to white backgound (255)
    	        	       	for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	       	    displayData[i*displayStep+j*displayChannels+k]=255;
    	        	    }
    	        	}
    	    	}
    	//lowerleft
    	else if (configstate==3)
    	    	{
    	    		cvCircle(display, cvPoint(10,displayHeight-10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display );  
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        	    lowerleftx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    lowerlefty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //make sure it doesnt register previous touches as the lower left corner
    	        	    if (distance (lowerleftx,upperleftx,lowerlefty,upperlefty)>50 && distance(lowerleftx,upperrightx,lowerlefty,upperrighty)>50)
    	        	    {
    	        	        configstate=4;
    	        	        	        	       		
    	        	        //initalize display to white backgound (255)
    	        	        for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	            displayData[i*displayStep+j*displayChannels+k]=255;
    	        	        }
    	        	}
    	    	}
    	//lower right
    	else if (configstate==4)
    	    	{
    	    		cvCircle(display, cvPoint(displayWidth-10,displayHeight-10), 10, cvScalar(0,0,255),-1);
    	        	cvShowImage( "FingerPaint", display );    
    	        	//Navigate through the blobs
    	        	for (i=0;i<blobs.GetNumBlobs();i++)
    	        	{
    	        		//get next blob
    	        		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	        	    //find position of the blob
    	        	    lowerrightx=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	    //find y position
    	        	    lowerrighty=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	        	   
    	        	    //make sure it doesn't use previous touches as lower right corner
    	        	    if (distance (lowerrightx,upperleftx,lowerrighty,upperlefty)>50 && distance(lowerrightx,upperrightx,lowerrighty,upperrighty)>50 && distance(lowerrightx,lowerleftx,lowerrighty,lowerlefty)>50)
    	        	    {
    	        	    	state=2;
    	        	    	    	        	       		
    	        	    	//initalize display to white backgound (255)
    	        	    	for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	        	    	displayData[i*displayStep+j*displayChannels+k]=255;
    	        	    	//calculate angle of rotation using right triangle trig on the upper left and right points
    	    	        	angleBeta=asin((upperlefty-upperrighty)/distance(upperrightx,upperleftx,upperrighty,upperlefty));//beta in radians, measures how much camera is rotated compared to display
    	    	        	//using asin, we assume the camera is not rotated more than ninety degrees, so there is no need to check the quadrant
    	    	        	//calculate horizontal and vertical scaling ratios
    	    	        	horRatio=(displayWidth-20)/(distance(upperrightx,upperleftx,upperrighty,upperlefty));//Find the number of display pixels per camera pixel in active region
    	    	        	vertRatio=(displayHeight-20)/(distance(upperleftx,lowerleftx,upperlefty,lowerlefty));
    	    	        	int betadegree=180/PI*angleBeta;
    	    	        	printf("angleBeta=%d",betadegree);
    	    	        	printf("horRatio=%d",(int)horRatio);
    	    	        	printf("vertRatio=%d",(int)vertRatio);
    	        	     }
    	        	    
    	        	}
    	        	
    	    	}
    }
    else if (state==2)//main program state
    {
    	//display color selection boxes, boxed in black if they are selected, i.e. color=255;
    	    if (blue==255)
    	    {
    	    	cvRectangle(display, cvPoint(10,10), cvPoint(80,80), cvScalar(0,0,0), -1);
    	   	}
    	    else
    	    {
    	    	cvRectangle(display, cvPoint(10,10), cvPoint(80,80), cvScalar(255,255,255), -1);
    	    }
    	    if(green==255)
    	   	{
    	   		cvRectangle(display, cvPoint(10,90), cvPoint(80,160), cvScalar(0,0,0), -1);
    	   	}
    	   	else
    	   	{
    	   		cvRectangle(display, cvPoint(10,90), cvPoint(80,160), cvScalar(255,255,255), -1);
    	   	}
    	   	if(red==255)
    	   	{
        		cvRectangle(display, cvPoint(10,170), cvPoint(80,240), cvScalar(0,0,0), -1);
   	    	}
    	   	else
    	   	{
    	   		cvRectangle(display, cvPoint(10,170), cvPoint(80,240), cvScalar(255,2550,255), -1);
    	   	}
  	    	
    	   	//draw color boxes
   	    	cvRectangle(display, cvPoint(20,20), cvPoint(70,70), cvScalar(255,0,0), -1);
   	    	cvRectangle(display, cvPoint(20,100), cvPoint(70,150), cvScalar(0,255,0), -1);
    	   	cvRectangle(display, cvPoint(20,180), cvPoint(70,230), cvScalar(0,0,255), -1);
    	   	cvRectangle(display, cvPoint(20,260), cvPoint(70,310), cvScalar(0,0,0),1);
    	   	cvPutText (display,"Clear",cvPoint(25,290), &smallFont, cvScalar(0,0,0));
     
    	//Navigate through the blobs
    	for (i=0;i<blobs.GetNumBlobs();i++)
    	{
    		//get next blob
    		blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    		//find position of next blob in camera image
    		x=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    		//find y position in camera image
    		y=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    		
    		/*THIS SECTION ROTATES CAMERA IMAGE TO SYNC IT WITH THE DISPLAY
    		FIX FOR Version 3.0, move this section to a seperate function*/
    		
    		//Find angle of rotation (theta) and radius from upperleft corner
    		radius=distance(upperleftx,x,upperlefty,y);
    		angleTheta=asin((upperlefty-y)/(radius));
    		//NOW WE NEED TO CHECK FOR QUADRANTS, As theta may be greater than 90 degrees, depending on beta
    		if (x<upperleftx)
    		{
    			angleTheta=-(PI+angleTheta);
    		}
    		//Now, shift angle over by beta
    		int thetadegree=180/PI*angleTheta;
    		//printf("angleTheta=%d",thetadegree);
    		angleTheta=angleTheta-angleBeta;
    		//thetadegree=180/PI*angleTheta;
    		 //   	printf("angleTheta=%d",thetadegree);
    		//Check for Validity of angle, the corrected angle must be between 0 and -pi/2
    		if (angleTheta<0 && angleTheta>-PI/2)
    		{
    			//Now, recompute x and y coordinates as the angle is valid
    			x=radius*cos(angleTheta);//it will now be normalized with upper left corner as 0,0 point
    			y=-radius*sin(angleTheta);//it will now be normalized with upper left corner as 0,0 point
    		
    			//printf("x=%d", (int)x);
    			//printf("y=%d",(int)y);
    			
    			displayLocationX=x*horRatio;//scale up x and y to match display size
    			displayLocationY=y*horRatio;
    			
    			displayLocationX=displayLocationX+10;//shift over by 10 to allow a slight margin at the display edge
    			displayLocationY=displayLocationY+10;
    		
    			//printf("displayx=%d",(int)displayLocationX);
    			//printf("displayy=%d",(int)displayLocationY);
    			
    			//check 'buttons' to see if they've been hit
    			if (displayLocationX<70 && displayLocationX>20 && displayLocationY>20 && displayLocationY<70)//red
    		    {
    	   			blue=255;
  		  		}
   		  		else if (displayLocationX<70 && displayLocationX>20 && displayLocationY>100 && displayLocationY<150)//blue
   		   		{
   		   			green=255;
   		   		}
  		   		else if (displayLocationX<70 && displayLocationX>20 && displayLocationY>180 && displayLocationY<230)//green
  		   		{
  		   			red=255;
    			}
   		   		else if (displayLocationX<70 && displayLocationX>20 && displayLocationY>260 && displayLocationY<310)//clear
  		   		{
   	    			red=0;
  		   			blue=0;
   	    			green=0;
  		   		}	
   		   		else{
    			//Draw a circle at that x and y points
    			cvCircle(display, cvPoint((int)displayLocationX,(int)displayLocationY), 20, cvScalar(blue,green,red),-1);
   		   		}
    		}
    	}
    	//show the display image. This can be changed to view different stages of the output
    	cvShowImage( "FingerPaint", display );
    }
    else// debug state
    {
    	cvShowImage( "FingerPaint", rectifiedFrame );
    }
    	//wait for key input
    key=cvWaitKey(10);
    if (key=='b')//capture background image
    {
    	data      = (uchar *)frame->imageData;
    	backgroundData      = (uchar *)background->imageData;
   		for(i=0;i<height;i++) for(j=0;j<width;j++) for(k=0;k<3;k++)
   			backgroundData[i*step+j*channels+k]=data[i*step+j*channels+k];
    }
    else if (key=='c') //clear background
    {		
    		//initalize display to white backgound (255)
    	    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	      displayData[i*displayStep+j*displayChannels+k]=255;
    }
    else if (key=='d') //debug state
    {
       	state=-1*state;//inverting state will toggle into and out of debug mode
    }
    else if (key=='s') //start configuration
    {
           	configstate=1;//inverting state will toggle into and out of debug mode
    }
    //If ESC key pressed, Key=0x10001B under OpenCV 0.9.7(linux version),
    //remove higher bits using AND operator
    
    
    if( (cvWaitKey(10) & 255) == 27 ) break;
    cvShowImage( "Camera", frame );
    
    if(frame)
    	delete frame;
  }

  // Release the capture device housekeeping
  cvReleaseCapture( &capture );
  cvDestroyWindow( "FingerPaint" );
  
  if(displayData)
	  delete displayData;
  if(capture)
	  delete capture;
  if(frame)
	  delete frame;
  
  return 0;
}