/*
Finger Paint Program written for the IEEE student Chapter of UIUC.
We are using this program to demo our multi-touch screen using Total Internal Reflection
This test program takes video input of an FTIR touchscreen, processes it, and creates
green 'paint' wherever fingers are located. 
Erik Johnson, 2/20/2008

Use: Run OpenCVTest.exe
Watch output from input video file
Press 'b' to capture the background and get a clearer image
press 'c' to clear the display

VERSION 1.0
*/

//include files
#include "cv.h"
#include "highgui.h"
#include <stdio.h>
#include "BlobResult.h"


//Global Variable declarations

CBlobResult blobs; //Results of blob detection from BlobResult.cpp
IplImage* display; //Display buffer of 800 by 600 pixels

int displayHeight=600; //Display characteristics- Modify to change display height
int displayWidth=800; //Display characteristics- Modify to change display width

//No need to define cicle image, we are using cvDrawCircle
//IplImage* circle;
//int circleSize=30;

//threshold values
int upperthreshold=240; //upper limit of brightness to be filtered out- max=255
int lowerthreshold=70;  //lower limit of brightness to be filtered out- min=0

IplImage *background; //background image that will be used for background subtraction

//Rolling average background is not being used- just static background subtraction
/*
//size of background buffer
int backgroundSize=50;
IplImage *background [50];
//average frame
IplImage *avgFrame;
int lastFrameIndex;
*/
/*int state=0;
*/
//This function is run during the initialization of the camera, to set up screen boundaries
//It displays four dots, one at the top, bottom, left and right of the screen
//the user presses the screen at the projected locations, and this allows mapping from the
//camera image to the output screen
/*void configure()
{}*/

//The Backround variable stores the last 100 frames of the image
//each time a new frame comes in, it replaces the oldest frame
//the new background is averaged over the last 100 frames to get an average background
//the average background is then subracted from the current frame and the new frame is returned
/*
IplImage* backgroundSubtraction (IplImage* currentFrame)
{
	IplImage *subtractedFrame;
	IplImage *currentBackground;
	int currentHeight, currentWidth, currentStep, currentChannels;
	uchar *currentData;
	uchar *subtractedData;
	uchar *avgData;
	uchar *lastFrameData;
	uchar *currentBackgroundData;
	double subtractionData1, subtractionData2;
			
	int i,j,k;
	
	//current frame data
	currentHeight    = currentFrame->height;
	currentWidth     = currentFrame->width;
	currentStep      = currentFrame->widthStep;
	currentChannels  = currentFrame->nChannels;
	currentData      = (uchar *)currentFrame->imageData;
	//subtracted frame data
	subtractedFrame= cvCreateImage(cvSize(currentWidth,currentHeight),IPL_DEPTH_8U,3);
	subtractedData      = (uchar *)subtractedFrame->imageData;
	//average frame data
	avgData      = (uchar *)avgFrame->imageData;
	lastFrameData = (uchar *)background[lastFrameIndex]->imageData;
	
	//subtract background
	for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	{
		//if (currentData[i*currentStep+j*currentChannels+k]-avgData[i*currentStep+j*currentChannels+k]>0)
			if (currentData[i*currentStep+j*currentChannels+k]-50>=0)
		    //subtractedData[i*currentStep+j*currentChannels+k]=currentData[i*currentStep+j*currentChannels+k]-avgData[i*currentStep+j*currentChannels+k];
				subtractedData[i*currentStep+j*currentChannels+k]=currentData[i*currentStep+j*currentChannels+k]-50;
		else
			subtractedData[i*currentStep+j*currentChannels+k]=255;
	}
	
	background[lastFrameIndex]=currentFrame;
	if (lastFrameIndex<backgroundSize)
	{
		lastFrameIndex++;
	
	}
	else
	{
		lastFrameIndex=0;
	}
	for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	{
		
		subtractionData1=lastFrameData[i*currentStep+j*currentChannels+k]/(double)backgroundSize;
		subtractionData2=avgData[i*currentStep+j*currentChannels+k];
		avgData[i*currentStep+j*currentChannels+k]=subtractionData2-subtractionData1;//NOTE:a lot of resolution is probably lost here by dividing by 100- use floats?
		
		subtractionData1=currentData[i*currentStep+j*currentChannels+k]/(double)backgroundSize;
		subtractionData2=avgData[i*currentStep+j*currentChannels+k];
		avgData[i*currentStep+j*currentChannels+k]=subtractionData2+subtractionData1;//NOTE:a lot of resolution is probably lost here by dividing by 100- use floats?
	
		
		int sum=0;
		int value=0;
		for (int b=0; b<backgroundSize; b++)
		{
			currentBackgroundData      = (uchar *)background[b]->imageData;
			sum=sum+currentBackgroundData[i*currentStep+j*currentChannels+k];
		}
		value=sum/backgroundSize;
		avgData[i*currentStep+j*currentChannels+k]=value;
	}

	return subtractedFrame;
}

void backgroundInitialization(IplImage *blankFrame)
{
	int blankHeight, blankWidth, blankStep, blankChannels;
		uchar *blankData;
		uchar *avgData;
		
		int i,j,k;
		
		//get circle buffer data
		blankHeight    = blankFrame->height;
			     blankWidth     = blankFrame->width;
			     blankStep      = blankFrame->widthStep;
			     blankChannels  = blankFrame->nChannels;
			     blankData      = (uchar *)blankFrame->imageData;
		for(i=0;i<blankHeight;i++) for(j=0;j<blankWidth;j++) for(k=0;k<blankChannels;k++)
	      blankData[i*blankStep+j*blankChannels+k]=0;
		
		for(i=0; i<backgroundSize; i++)
		{
			background[i]=blankFrame;
		}	
		lastFrameIndex=0;
		avgFrame= cvCreateImage(cvSize(blankWidth,blankHeight),IPL_DEPTH_8U,3);
		avgData      = (uchar *)avgFrame->imageData;
		for(i=0;i<blankHeight;i++) for(j=0;j<blankWidth;j++) for(k=0;k<blankChannels;k++)
			      avgData[i*blankStep+j*blankChannels+k]=0;
}
*/


//This function receives the current frame and applies a threshold filter
//values below a certain intensity value are set to zero, values above are set to one (255)
//There is also an upper threshold to filter out bright lights or sunlight
IplImage* thresholdFilter (IplImage* currentFrame)
{
	//Cache the frame geometry once; the image is modified in place.
	const int frameHeight   = currentFrame->height;
	const int frameWidth    = currentFrame->width;
	const int frameStep     = currentFrame->widthStep;
	const int frameChannels = currentFrame->nChannels;
	uchar *frameData        = (uchar *)currentFrame->imageData;

	for (int row = 0; row < frameHeight; row++)
	{
		for (int col = 0; col < frameWidth; col++)
		{
			for (int ch = 0; ch < frameChannels; ch++)
			{
				uchar *pixel = &frameData[row*frameStep + col*frameChannels + ch];
				//Strictly between the two global thresholds -> white; otherwise -> black.
				//(uses the file-level globals lowerthreshold/upperthreshold)
				*pixel = (*pixel > lowerthreshold && *pixel < upperthreshold) ? 255 : 0;
			}
		}
	}
	//Returns the same image that was passed in (filtered in place).
	return currentFrame;
}

//This function manages background subtraction. The global background image is
//subtracted pixel by pixel from the current image, clamping at zero so the
//result never wraps below black. The subtracted image is then returned.
//NOTE: the caller owns the returned image and must free it with cvReleaseImage.
IplImage* backgroundSubtraction (IplImage* currentFrame)
{
	int i,j,k;
	//layout of the incoming frame
	int currentHeight    = currentFrame->height;
	int currentWidth     = currentFrame->width;
	int currentStep      = currentFrame->widthStep;
	int currentChannels  = currentFrame->nChannels;
	uchar *currentData    = (uchar *)currentFrame->imageData;
	uchar *backgroundData = (uchar *)background->imageData;

	//BUGFIX: allocate the output with the same channel count as the input.
	//It was hard-coded to 3 channels, so the indexing below (which uses
	//currentStep/currentChannels) would write out of bounds for any
	//non-3-channel frame.
	IplImage *subtractedFrame = cvCreateImage(cvSize(currentWidth,currentHeight),IPL_DEPTH_8U,currentChannels);
	uchar *subtractedData = (uchar *)subtractedFrame->imageData;

	for(i=0;i<currentHeight;i++) for(j=0;j<currentWidth;j++) for(k=0;k<currentChannels;k++)
	{
		int idx = i*currentStep+j*currentChannels+k;
		//If the current pixel is at least as bright as the background pixel,
		//keep the difference; otherwise clamp to zero (no negative brightness)
		if (currentData[idx] >= backgroundData[idx])
			subtractedData[idx] = currentData[idx] - backgroundData[idx];
		else
			subtractedData[idx] = 0;
	}
	return subtractedFrame;
}


//using cvDrawCircle, these functions are not needed
/*
//Initializes Circle buffer so we don't waste time drawing circles all the time
void initializeCircle()
{
	int circleHeight, circleWidth, circleStep, circleChannels;
	uchar *circleData;
	
	int i,j,k;
	
	//get circle buffer data
	circleHeight    = circle->height;
		     circleWidth     = circle->width;
		     circleStep      = circle->widthStep;
		     circleChannels  = circle->nChannels;
		     circleData      = (uchar *)circle->imageData;
	int circleRadius=circleSize/2;
    for(i=0;i<circleHeight;i++) for(j=0;j<circleWidth;j++) for(k=0;k<circleChannels;k++)
    {
      if (((i-circleRadius)*(i-circleRadius)+(j-circleRadius)*(j-circleRadius))<circleRadius*circleRadius)
      {  
    	  circleData[i*circleStep+j*circleChannels+k]=1;
      }
      else
      {
    	  circleData[i*circleStep+j*circleChannels+k]=0;
      }
    }  
}

//draws a circle with the desired color at the desired xpos and ypos
//the desired pos is at the upper left corner of the circle to be drawn
//color is added to the display buffer, not overwritten
void drawCircle (int red, int blue, int green, int xpos, int ypos)
{
	//circle data values
	int circleHeight, circleWidth, circleStep, circleChannels;
	uchar *circleData;
	
	//display data values
		int displayHeight, displayWidth, displayStep, displayChannels;
		uchar *displayData;
	
	//navigation indexes
	int i,j;
	int displayIndex, circleIndex;
	
	//get circle buffer data
	circleHeight    = circle->height;
		     circleWidth     = circle->width;
		     circleStep      = circle->widthStep;
		     circleChannels  = circle->nChannels;
		     circleData      = (uchar *)circle->imageData;
	
	displayHeight    = display->height;
	     displayWidth     = display->width;
	     displayStep      = display->widthStep;
	     displayChannels  = display->nChannels;
	     displayData      = (uchar *)display->imageData;
	//fill channels of display with circle value times the red, blue and green multipliers
	for(i=0;i<circleHeight;i++) for(j=0;j<circleWidth;j++)
	{
	          displayIndex=(i+ypos)*displayStep+(j+xpos)*displayChannels;
	          circleIndex=i*circleStep+j*circleChannels;
	          if(circleData[circleIndex]==1)
	          {	  
	        	  	displayData[displayIndex]=blue;
	        	  	displayData[displayIndex+1]=green;
	        	  	displayData[displayIndex+2]=red;
	          }
	}
}
*/


// MAIN
int main() {

	//keyboard stroke
	int key;
	
	//variables for navigating through images
	 int height,width,step,channels;
		  uchar *data;
		  
	//variables for navigating through the display image
	int displayStep,displayChannels;
		  uchar *displayData;		  

		  //loop variables
		  int i,j,k;
	
	//variables for tracking blob centers
	double x, y;
		  

	//framebuffer for backgroundsubtraction
	//rectified frame for threshold filter
	IplImage *subtractedFrame;
	IplImage *rectifiedFrame;
		  
	//
	
	//Display output as 800x600 8 bit three channel image	  
	display= cvCreateImage(cvSize(displayWidth,displayHeight),IPL_DEPTH_8U,3);
	//gather information about display image
	     displayStep      = display->widthStep;
	     displayChannels  = display->nChannels;
	     displayData      = (uchar *)display->imageData;
	
	//initalize display to white backgound (255)
    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
      displayData[i*displayStep+j*displayChannels+k]=255;
   
    
	//CHANGE THIS FOR USING THE CAMERA, THIS USES TEST VIDEO
    CvCapture* capture = cvCaptureFromAVI( "simple-2point.avi" );//REPLACE THIS VIDEO NAME WITH VIDEO FILE YOU WANT TO USE
      if( !capture ) {
        fprintf( stderr, "ERROR: capture is NULL \n" );
        getchar();
        return -1;
      }
//initialize background
      //Grab one test frame
      // Get one frame
      IplImage* frame = cvQueryFrame( capture );
      if( !frame ) {
        fprintf( stderr, "ERROR: frame is null...\n" );
        getchar();
      }
      //get data about the test frame
      height    = frame->height;
       width     = frame->width;
       step      = frame->widthStep;
       channels  = frame->nChannels;
       data      = (uchar *)frame->imageData;
      
      //create a blank background for intitialization 
      background= cvCreateImage(cvSize(width,height),IPL_DEPTH_8U,channels);
      uchar *backgroundData=(uchar *)background->imageData;
      for(i=0;i<height;i++) for(j=0;j<width;j++) for(k=0;k<3;k++)
            backgroundData[i*step+j*channels+k]=0;      
 
      
  // Create a window in which the captured images will be presented
  cvNamedWindow( "FingerPaint", CV_WINDOW_AUTOSIZE );

  // Show the image captured from the camera in the window and repeat
  while( 1 ) {
    // Get one frame
    IplImage* frame = cvQueryFrame( capture );
    if( !frame ) {
      fprintf( stderr, "ERROR: frame is null...\n" );
      getchar();
      break;
    }
    //subtract background
    subtractedFrame=backgroundSubtraction(frame);
    //create rectified image to recieve filter data. Note that it is grayscale (1 channel) this is necessary for blobdetect
    rectifiedFrame= cvCreateImage(cvSize(width,height),IPL_DEPTH_8U,1);
    
    //convert subtractedframe to grayscale
    cvCvtColor(subtractedFrame, rectifiedFrame, CV_BGR2GRAY);
    
    //filter rectified image
    rectifiedFrame=thresholdFilter(rectifiedFrame);
    // Do not release the frame!
    
    //find blobs in rectified frame
    blobs = CBlobResult( rectifiedFrame, NULL, 250, true );
    
    //only include blobs with area less than 100
    blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_LESS, 100 );
    //include any blobs with area greater than 1
    blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_GREATER, 1 );
    
    //blobs.PrintBlobs("filteredblobs.txt");

    CBlob currentBlob;
    
    // from the filtered blobs, get the blob with biggest perimeter
    //blobs.GetNthBlob( CBlobGetPerimeter(), 0, blobWithBiggestPerimeter );
     
    //Navigate through the blobs
    for (i=0;i<blobs.GetNumBlobs();i++)
    {
    	//get next blob
    	blobs.GetNthBlob( CBlobGetArea(), i, currentBlob );
    	//find position of next blob
    	x=currentBlob.MinX()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	//find y position
    	y=currentBlob.MinY()+(currentBlob.MaxY()-currentBlob.MinY())/2;
    	//Draw a circle at that x and y point
    	cvCircle(display, cvPoint(x,y), 10, cvScalar(0,255,0),-1);
    }
    //show the display image. This can be changed to view different stages of the output
    cvShowImage( "FingerPaint", display );
    
    //wait for key input
    key=cvWaitKey(10);
    if (key=='b')//capture background image
    {
    	 data      = (uchar *)frame->imageData;
    	backgroundData      = (uchar *)background->imageData;
        for(i=0;i<height;i++) for(j=0;j<width;j++) for(k=0;k<3;k++)
              backgroundData[i*step+j*channels+k]=data[i*step+j*channels+k];
    }
    else if (key=='c') //clear background
    {
    		     displayStep      = display->widthStep;
    		     displayChannels  = display->nChannels;
    		     displayData      = (uchar *)display->imageData;
    		
    		//initalize display to white backgound (255)
    	    for(i=0;i<displayHeight;i++) for(j=0;j<displayWidth;j++) for(k=0;k<displayChannels;k++)
    	      displayData[i*displayStep+j*displayChannels+k]=255;
    }
    if(frame)
    	delete frame;
    
    //If ESC key pressed, Key=0x10001B under OpenCV 0.9.7(linux version),
    //remove higher bits using AND operator
    if( (cvWaitKey(10) & 255) == 27 ) break;
  }

  // Release the capture device housekeeping
  cvReleaseCapture( &capture );
  cvDestroyWindow( "FingerPaint" );
  return 0;
}
