#include "osCamerahandler.h"

// Default constructor. All real setup (buffers, grabber, thresholds)
// is deferred to init(), so nothing happens here.
osCamerahandler::osCamerahandler()
{
}

// Destructor. Members (images, tracker, grabber) clean themselves up,
// so there is nothing to release explicitly.
osCamerahandler::~osCamerahandler()
{
}
// One-time setup: working resolution, capture device, CV image buffers,
// detection thresholds, calibration state, and the default (uncalibrated)
// warp quad.
//
// @param origsize  native output/screen resolution; tracked blob centers are
//                  rescaled from the working size up to this in update().
void osCamerahandler::init(CvSize origsize)
{
	// FIX: removed the original "this->vidGrabber = vidGrabber;" — it was a
	// self-assignment with no effect.
	this->origsize = origsize;
	this->size = cvSize(320,240);       // working resolution for all CV buffers

	gaussblur = 1;                      // gaussian blur kernel amount used in update()
	#ifdef USEWEBCAM
		this->vidGrabber.initGrabber( size.width,size.height );
	#endif
	// The grabber may have negotiated a different capture size; follow it so
	// the buffers below match the actual frames.
	// NOTE(review): without USEWEBCAM the grabber is never initialized here,
	// so width/height could be 0 — confirm the non-webcam build path.
	size.width = vidGrabber.width;
	size.height = vidGrabber.height;

	grayImage.allocate( size.width,size.height );
	grayBg.allocate( size.width,size.height );
	grayDiff.allocate( size.width,size.height );
	colorImg.allocate( size.width,size.height );
	bufferImage.allocate( size.width,size.height );

	// FIX: "chessboardoffsetx = 50;" appeared twice; the duplicate was dead.
	// TODO(review): the second assignment was probably meant to be
	// chessboardoffsety — confirm against the header and restore if so.
	chessboardoffsetx = 50;

	bLearnBakground = false;
	threshold = defaultThreshold = 31;  // binarization threshold for grayDiff
	isCalibrating = false;
	isManualCalibrating = false;
	calib.allocate( this->size, 7, 7 ); // 7x7 chessboard calibration helper

	warpLivefeed = false;

	blobTTL = 5;                        // seconds a blob must exist before it is reported
	blobTracker.setListener( this );    // receive blobOn/blobMoved/blobOff callbacks
	blobTracker.ghost_frames = 30;

	isLearningBackground = false;
	//backgroundTexture.allocate(size.width,size.height,true);
	//backgroundTexture.allocate(size.width,size.height,GL_TEXTURE_2D);

	minBlobSize = 300;                  // contour area bounds, in pixels
	maxBlobSize = 15000;

	// Default warp target: the working frame minus a small safety border.
	// [0]=top-left, [1]=top-right, [2]=bottom-right, [3]=bottom-left.
	int offset = 5;
	screenpoints[0].set(offset,offset,0);
	screenpoints[1].set(size.width-offset,offset,0);
	screenpoints[2].set(size.width-offset,size.height-offset,0);
	screenpoints[3].set(offset,size.height-offset,0);
}
// Per-frame pipeline: grab a camera frame, optionally warp it into the
// calibrated quad, background-subtract + threshold, find contours, feed the
// blob tracker, and publish sufficiently old blobs into trackedBlobs (scaled
// up to the original output resolution). Also runs the automatic calibration
// point collection while isCalibrating is set.
void osCamerahandler::update()
{

	ofBackground(0,0,0);
	trackedBlobs.clear();   // rebuilt from scratch every frame
    vidGrabber.grabFrame();
	if( vidGrabber.isFrameNew() ) {
		
        colorImg.setFromPixels(vidGrabber.getPixels(), size.width,size.height );
		
		if(warpLivefeed && !isCalibrating) {

			// Lens undistortion pass — currently disabled, kept for reference:
            /*undistortedImg = colorImg;
            undistortedImg.undistort( calib.distortionCoeffs[0], calib.distortionCoeffs[1],
                                      calib.distortionCoeffs[2], calib.distortionCoeffs[3],
                                      calib.camIntrinsics[0], calib.camIntrinsics[4],
                                      calib.camIntrinsics[2], calib.camIntrinsics[5] ); 

			

			grayImage = undistortedImg;*/
			grayImage = colorImg;
			
			// Perspective-correct the feed: map the detected camera quad
			// (worldpoints, from findwarppoints) onto the screen quad.
			bufferImage.warpIntoMe(grayImage,worldpoints,screenpoints);
			grayImage = bufferImage;
			
			
        }
		else
		{
			grayImage = colorImg;
		}
		
		
		// Snapshot the current frame as the reference background when asked.
		if (bLearnBakground == true){
			grayBg = grayImage;		// the = sign copys the pixels from grayImage into grayBg (operator overloading)
			isLearningBackground = true;
			bLearnBakground = false;
		}
	

		// take the abs value of the difference between background and incoming and then threshold:
		grayDiff.absDiff(grayBg,grayImage);
		grayImage.blurGaussian(gaussblur); //TODO: before or after absDiff?
		grayDiff.threshold(threshold);
		

		// Find foreground contours with area in [minBlobSize, maxBlobSize],
		// at most 10 of them, without interior holes (last arg false).
		contourFinder.findContours(grayDiff, minBlobSize, maxBlobSize, 10, false);	// find 
		// While learning the background, keep re-snapshotting each frame
		// until one comes through with no foreground contours (empty scene).
		if(isLearningBackground && contourFinder.blobs.size()>0)
			bLearnBakground = true;
		else
			isLearningBackground = false;
		
		
		//Callibrating
		if(isCalibrating)
		{
			
			cout << "calibrating";
			if(!isManualCalibrating)
			{
				// Automatic calibration: gather the centers of blobs that
				// have been visible for at least 2 seconds.
				calibPoints.clear();
				
				// Re-run differencing with a tighter threshold and a size
				// range tuned for calibration point detection.
				grayDiff.absDiff(grayBg,grayImage);
				grayDiff.threshold(10);
				contourFinder.findContours(grayDiff, 500, 10000, 10, false);	// find 
				// NOTE(review): this iterates blobTracker.blobs, which at this
				// point still holds the PREVIOUS frame's tracks (trackBlobs
				// only runs further down) — confirm the one-frame lag is
				// intentional.
				for(int i = 0; i<blobTracker.blobs.size();i++)
				{
					if(blobTracker.blobs[i].firstseen < time(NULL)-2)
					{
						calibPoints.push_back(blobTracker.blobs[i].center);
					}
				}
				
				

			}
			// Once four points exist (auto-collected or manually supplied),
			// derive the warp quad and coordinate matrix, then re-learn the
			// background.
			if(calibPoints.size()==4)
			{
				
				isCalibrating = false;
				isManualCalibrating = false;
				bLearnBakground = true;
				isLearningBackground = true;
				findwarppoints(calibPoints);
				// NOTE(review): warping is switched OFF right after calibration
				// completes — presumably re-enabled elsewhere once the
				// background has been re-learned; verify against callers.
				warpLivefeed = false;
				coordwarper.calculateMatrix(worldpoints,screenpoints);
				
			}
			
		
		}

		blobTracker.trackBlobs(contourFinder.blobs);

		// Publish blobs older than blobTTL seconds, with centers warped via
		// coordwarper and rescaled from the working size to origsize.
		for(int i = 0; i<blobTracker.blobs.size();i++)
		{
			if(blobTracker.blobs[i].firstseen < time(NULL)-blobTTL)
			{
		
				ofCvTrackedBlob n = ofCvTrackedBlob(ofCvBlob());
				n.area = blobTracker.blobs[i].area;
				n.box = blobTracker.blobs[i].box;
				// Uses a point 70% down the bounding box instead of the true
				// center — presumably to approximate a ground-contact point;
				// TODO confirm the intent.
				float y = n.box.y+blobTracker.blobs[i].box.height*0.7f; //TODO: revert y?
				float x = blobTracker.blobs[i].center.x;
				n.center = coordwarper.transform(x,y);
				
				//n.center = ofPoint(x,y); 
				n.id = blobTracker.blobs[i].id;

				//fix to screen resolution
				n.center.x *= (float)origsize.width/(float)size.width;
				n.center.y *= (float)origsize.height/(float)size.height;
				trackedBlobs.push_back(n);
			}
		}
		

		
    }


}
// Orders the four detected calibration points into the worldpoints quad:
// [0]=top-left, [1]=top-right, [2]=bottom-right, [3]=bottom-left.
//
// @param points  expected to contain exactly 4 points (caller checks
//                calibPoints.size()==4 before invoking).
//
// NOTE(review): the selection below relies on exact float ==/!= comparisons
// between coordinates; points that share an x or y value can leave p3/p4
// stuck at their points[0] default — fragile for near-degenerate input.
void osCamerahandler::findwarppoints(vector<ofPoint> points){
			ofPoint p1;
			ofPoint p2;
			ofPoint p3;
			ofPoint p4;
			p1 = points[0];
			p2 = points[0];
			p3 = points[0];
			p4 = points[0];
			

			// p1: top-most point (smallest y; ties go to the later point).
			for(int i = 0; i<points.size();i++)
			{
				if(points[i].y<=p1.y)
					p1 = points[i];
			}
			// p2: the other top point — top-most among points whose x differs from p1's.
			for(int i = 0; i<points.size();i++)
			{
				if(points[i].y<=p2.y && points[i].x != p1.x)
					p2 = points[i];
			}
			// p3: a bottom point — any point whose y differs from both top points.
			for(int i = 0; i<points.size();i++)
			{
				if(points[i].y != p2.y && points[i].y != p1.y)
					p3 = points[i];
			}
			// p4: the remaining bottom point (x differs from p3's).
			for(int i = 0; i<points.size();i++)
			{
				if(points[i].y != p2.y && points[i].y != p1.y && points[i].x != p3.x)
					p4 = points[i];
			}
				
				
			
			
			// Write into worldpoints, ordering each top/bottom pair by x so
			// the quad winds TL -> TR -> BR -> BL.
			ofPoint (&a)[4] = worldpoints;
			if(p1.x<p2.x)
			{
				a[0].set(p1.x,p1.y,0);
				a[1].set(p2.x,p2.y,0);
			}
			else
			{
				a[0].set(p2.x,p2.y,0);
				a[1].set(p1.x,p1.y,0);
			}
			if(p3.x>p4.x)
			{
				a[2].set(p3.x,p3.y,0);
				a[3].set(p4.x,p4.y,0);
			}
			else
			{
				a[3].set(p3.x,p3.y,0);
				a[2].set(p4.x,p4.y,0);
			}
			

			
}
// Begin automatic calibration: the background is re-learned first, after
// which update() collects the four calibration points on its own.
void osCamerahandler::callibrate()
{
	isCalibrating = true;
	bLearnBakground = true;
	isLearningBackground = true;
}
// Begin manual calibration: same background re-learn as callibrate(), but
// the four points are supplied externally into calibPoints (cleared here).
void osCamerahandler::startManualCallibrate()
{
	calibPoints.clear();
	isCalibrating = true;
	isManualCalibrating = true;
	bLearnBakground = true;
	isLearningBackground = true;
}
// Clears the camera view area to black. The calibration overlay is
// currently disabled (both draw calls commented out).
void osCamerahandler::draw()
{
	ofFill();
	ofSetColor(0x000000);
	ofRect(0,0,size.width,size.height);

	if(isCalibrating && !isLearningBackground)
	{
		//drawCalibPoints();
		//drawCheckerboard();
	}
}

// Paints a white backdrop, then an 8x8 black/white checkerboard at a fixed
// (50,50) offset. The board edge is derived from the working frame, with the
// height reduced by 120 px to leave a margin.
void osCamerahandler::drawCheckerboard()
{
	ofSetColor(0xffffff);
	ofFill();
	ofRect(0,0,size.width,size.height);

	// Local board edge length; note this intentionally shadows nothing —
	// the member is accessed explicitly via this->size above.
	int boardSize = min(this->size.width,this->size.height-120);
	int cell = boardSize/8;

	for(int col = 0; col<8; col++)
	{
		for(int row = 0; row<8; row++)
		{
			// Alternate colors per cell: (col*9+row)%2 == (col+row)%2.
			ofSetColor( ((col*9)+row)%2 ? 0x000000 : 0xffffff );
			ofFill();
			ofRect(50+cell*col, 50+cell*row, cell, cell);
		}
	}

	/* Intrinsic calibration hook (disabled):
	if( calib.addImage( colorImg.getCvImage() ))
	{
		calib.calibrate();
		calib.undistort();
	}*/
}



// Tracker callback: a new blob appeared. Intentionally a no-op — blobs are
// consumed from trackedBlobs in update() instead of via these callbacks.
void osCamerahandler::blobOn( int x, int y, int id, int order ) {
}
// Tracker callback: an existing blob moved. Intentionally a no-op; if per-move
// access is ever needed, a reference can be fetched with
// blobTracker.getById( id ).
void osCamerahandler::blobMoved( int x, int y, int id, int order) {
}
// Tracker callback: a blob disappeared. Intentionally a no-op — see blobOn().
void osCamerahandler::blobOff( int x, int y, int id, int order ) {
}