// DissertationProject.cpp : Defines the entry point for the console application.
//

#include "stdafx.h"
#include <cv.h>
#include <cxcore.h>
#include <highgui.h>
#include <stdio.h>
#include <math.h>
#include <fstream>

#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/photo/photo.hpp"

#include "RGBColour.h"
#include "ImageStitcher.h"
#include "BackgroundSegmentation.h"
#include "OpticalFlow.h"
#include "ResourceLibrary.h"

class ImageStitcher;
class ImageMethods;
class BgSegmenter;
class OpticalFlow;
class ResourceLibrary;

using namespace cv;
using namespace std;

typedef   IplImage * ImgPtr;   //convenience alias for the legacy OpenCV C-API image type
typedef unsigned char     uint8_t;   //NOTE(review): shadows the standard <stdint.h> uint8_t — confirm this is intentional

ImgPtr clean;   //clean back plate
ImgPtr front;   //front plate
ImgPtr diffImage;     //binary mask of misclassified pixels, (re)created by _compare()
ImgPtr patchImage;     //composite built by _compare(): image1's pixel where mismatched, image2's elsewhere
Mat averageBackgroundImage;     //running-average background, updated in place by runningAverage()

double mu[3];         //the Gaussian means mu
double sigma[3];      //the Gaussian standard deviations sigma
double gaussians[3];  //per-channel Gaussian values (not used in this file — presumably consumed elsewhere; verify)


//Reads the first channel of the pixel at (x,y) and returns it as a value in [0,255].
unsigned char _getPixel(IplImage* image,    //the image
                        int x,              //abscissa
                        int y               //ordinate
                        )
{
  unsigned char* row = (unsigned char*)(image->imageData + y * image->widthStep);
  return row[x * image->nChannels];
}

//Reads the pixel at (x,y) into an RGBColour. OpenCV stores interleaved
//channels in B,G,R order, hence the 0/1/2 offsets below.
RGBColour _getColourPixel(IplImage* image, int x, int y)
{
   unsigned char* px =
      (unsigned char*)(image->imageData + y * image->widthStep) + x * image->nChannels;

   RGBColour rgb;
   rgb.b = px[0];
   rgb.g = px[1];
   rgb.r = px[2];
   return rgb;
}

//Writes a single-channel value to the pixel at (x,y).
void _setPixel(IplImage* image,     //the image
               int x,               //abscissa
               int y,               //ordinate
               unsigned char value  //new pixel value
               )
{
  unsigned char* row = (unsigned char*)(image->imageData + y * image->widthStep);
  row[x * image->nChannels] = value;
}

//sets pixel at image position (x,y)
void _setColourPixel(IplImage* image, int x, int y, RGBColour& rgb)
{
   ((unsigned char*)(image->imageData + image->widthStep*y))[x*image->nChannels + 0] = rgb.b;
   ((unsigned char*)(image->imageData + image->widthStep*y))[x*image->nChannels + 1] = rgb.g;
   ((unsigned char*)(image->imageData + image->widthStep*y))[x*image->nChannels + 2] = rgb.r;
}

//Compares two images via their blurred versions: a pixel is "incorrect" when
//any channel of blur1 differs from the corresponding channel of blur2 by more
//than a fixed tolerance.
//Side effects: (re)creates the global diffImage (white = mismatch mask) and
//patchImage (image1's pixel where mismatched, image2's elsewhere) and shows
//both in HighGUI windows.
//Returns the percentage of mismatching pixels.
double _compare(ImgPtr image1, ImgPtr image2, ImgPtr blur1, ImgPtr blur2)
{
	double incorrect = 0;
	const int tolerance = 255 / 15;   //max per-channel difference before a pixel counts as wrong

	//release images left over from a previous call so repeated calls do not leak
	if (diffImage)  cvReleaseImage(&diffImage);
	if (patchImage) cvReleaseImage(&patchImage);
	diffImage  = cvCreateImage(cvSize(image1->width,image1->height),IPL_DEPTH_8U,1);
	patchImage = cvCreateImage(cvSize(image1->width,image1->height),IPL_DEPTH_8U,3);

	for(int x = 0; x < image1->width; x++)
	{
		for(int y = 0; y < image1->height; y++)
		{
			RGBColour px1 = _getColourPixel(image1,x,y);
			RGBColour px2 = _getColourPixel(image2,x,y);
			RGBColour b1 = _getColourPixel(blur1,x,y);
			RGBColour b2 = _getColourPixel(blur2,x,y);

			bool mismatch = b1.r > b2.r + tolerance || b1.r < b2.r - tolerance
			             || b1.g > b2.g + tolerance || b1.g < b2.g - tolerance
			             || b1.b > b2.b + tolerance || b1.b < b2.b - tolerance;

			if (mismatch)
			{
				incorrect++;
				_setPixel(diffImage,x,y,255);          //white pixel marks the mismatch
				_setColourPixel(patchImage,x,y,px1);   //keep image1's pixel
			}
			else
			{
				_setPixel(diffImage,x,y,0);            //black pixel marks agreement
				_setColourPixel(patchImage,x,y,px2);   //keep image2's pixel
			}
		}
	}

	cvNamedWindow("Error Image", CV_WINDOW_AUTOSIZE);
	cvShowImage("Error Image", diffImage);
	cvNamedWindow("Patched Image", CV_WINDOW_AUTOSIZE);
	cvShowImage("Patched Image", patchImage);

	printf("Incorrect: %f \n",incorrect);   //fixed typo ("Inorrect")
	printf("Total: %d \n",image1->width*image1->height);

	return (incorrect/(image1->width*image1->height))*100;
}

//Builds an averaged background plate from a sequence of frames and shows it.
//Fixes: alpha was read uninitialized (undefined behaviour) and the
//accumulator was passed empty to accumulateWeighted(), which requires an
//initialised floating-point destination.
void createBackPlate(vector<Mat> frames)
{
	if (frames.empty())   //nothing to average
		return;

	const double alpha = 0.05;   //learning rate: weight given to each new frame

	//seed the accumulator from the first frame; keep it in CV_32F so the
	//running average does not round to 8 bits on every step
	Mat accumulator;
	frames[0].convertTo(accumulator, CV_32F);

	for (size_t i = 1; i < frames.size(); i++)
		accumulateWeighted(frames[i], accumulator, alpha);

	Mat background;
	accumulator.convertTo(background, frames[0].type());   //back to a displayable depth

	namedWindow("AverageBG", CV_WINDOW_AUTOSIZE);
	imshow("AverageBG", background);
}

void processVideo()
{
	IplImage* frameArr[420];
	//CvCapture* capture = cvCaptureFromFile("images/Ball.MOV");
	VideoCapture vidCap;
	vidCap.open("images/Ball.MOV");
	//int noframes = (int)capture.get(CV_CAP_PROP_FRAME_COUNT);
	int noframes = (int)vidCap.get(CV_CAP_PROP_FRAME_COUNT);
	printf("/n %i /n", noframes);
	Mat frame;
	vector<Mat> vidFrames; 
    //IplImage* frame = NULL;
    for (int f = 1; f<100;f++)
    {
        vidCap >> frame;
		//vidFrames[f] = frame.clone();
		vidFrames.push_back(frame.clone());
        waitKey(10);
    }

	//createBackPlate(vidFrames);

	cvNamedWindow("Frame1", CV_WINDOW_AUTOSIZE);
	imshow("Frame1", vidFrames[75]);

	IplImage* img1 = new IplImage(vidFrames[0]);
	IplImage* img2 = new IplImage(vidFrames[75]);

	printf("segmentation difference: %f percent",_compare(img1, img2, img1, img2));

	cvWaitKey();
    //cvDestroyWindow("frame");
    //cvReleaseImage(&frame);

}

//Folds one new frame into the global averageBackgroundImage with an
//exponential running average: avg = (1-alpha)*avg + alpha*frame.
//Uses cv::addWeighted, which blends in floating point and saturates once per
//pixel; the old per-pixel Vec3b arithmetic rounded (1-alpha)*avg and
//alpha*frame to 8 bits separately before summing, accumulating error.
void runningAverage(Mat f)
{
	const double alpha = 0.1;   //weight of the incoming frame

	if (f.empty())
		return;

	//(re)seed the accumulator if it does not yet match the frame's geometry;
	//the old loop indexed it unchecked and crashed on a size mismatch
	if (averageBackgroundImage.empty() ||
	    averageBackgroundImage.size() != f.size() ||
	    averageBackgroundImage.type() != f.type())
	{
		averageBackgroundImage = f.clone();
		return;
	}

	addWeighted(averageBackgroundImage, 1.0 - alpha, f, alpha, 0.0, averageBackgroundImage);
}

void averageBackground()
{
	 VideoCapture capture;

    // capture.open(argv[1]);  // Open file

    capture.open("images/Ball.MOV");        // Open camera device

    if (!capture.isOpened())
    {
        cout << "Cannot open video device or file!" << endl;
    }

    Mat frame;
	Mat startFrame;
	Mat* tempBackground;
    namedWindow("video", CV_WINDOW_AUTOSIZE);
	namedWindow("BG", CV_WINDOW_AUTOSIZE);
	double alpha = 0.05;
	//Initialising averageBackgroundImage
	//capture >> averageBackgroundImage;
	//int count = 0;
	CvCapture* firstFrame = cvCaptureFromFile("images/Ball.MOV");
	startFrame = cvQueryFrame(firstFrame);
	averageBackgroundImage = startFrame;
    while(true)
    {
        capture >> frame;
		//if(count==0)
		//{
			//printf("THIS == 0 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
			//averageBackgroundImage = frame;
			
		//}
        /*if (frame.empty())
            break; */
        //imshow("video", frame);
		imshow("BG", averageBackgroundImage);
        if (waitKey(30) == 'q')
            break;
		//count++;
		runningAverage(frame);
    }
	//imshow("BG", averageBackgroundImage);
}


//Persists the difference mask and the patched composite to disk as PNG files.
void saveImages(IplImage* diff, IplImage* patch)
{
	cvSaveImage("patch.png", patch);
	cvSaveImage("diff.png", diff);
}


int _tmain(int argc, _TCHAR* argv[])
{
	//load images
	clean   = cvLoadImage("images/table1.png");   //class 0
	front   = cvLoadImage("images/table2.png");   //class 1

	Mat image1;
    image1 = imread("images/table2.png", CV_LOAD_IMAGE_COLOR);   // Read the file
	Mat test(clean->width,clean->height,1);
	Mat diff(diffImage);
	Mat out;
	resize(image1, test, cvSize(clean->width,clean->height));

	

	IplImage* blurClean = cvCreateImage(cvSize(clean->width,clean->height),IPL_DEPTH_8U,3);
	cvSmooth(clean,blurClean,CV_GAUSSIAN,3,3);

	IplImage* blurFront = cvCreateImage(cvSize(front->width,front->height),IPL_DEPTH_8U,3);
	cvSmooth(front,blurFront,CV_GAUSSIAN,3,3);

	//cvNamedWindow("Blur Clean", CV_WINDOW_AUTOSIZE);
	//cvShowImage("Blur Clean", blurClean);
	//cvNamedWindow("Blur Front", CV_WINDOW_AUTOSIZE);
	//cvShowImage("Blur Front", blurFront);


	//cvNamedWindow("Clean Plate", CV_WINDOW_AUTOSIZE);
	//cvShowImage("Clean Plate", clean);
	//cvNamedWindow("Front Plate", CV_WINDOW_AUTOSIZE);
	//cvShowImage("Front Plate", front);

	//printf("segmentation difference: %f percent",_compare(clean,front));
	//printf("segmentation difference: %f percent",_compare(clean, front, blurClean,blurFront));

	//IplImage* inpaintImage = cvCreateImage(cvSize(clean->width,clean->height),IPL_DEPTH_8U,3);
	//Mat img(Size(1920, 1080), CV_8UC3); /* EXAMPLE OF IMAGE SIZE USING MAT */


	//cvInpaint(front, diffImage, inpaintImage, 5,INPAINT_NS);
	//inpaint(cv::noArray(), cv::noArray(), out, 9,INPAINT_TELEA);
	//Mat _tmp1;
	//cv::inpaint(image1,diff,_tmp1,5.0,INPAINT_TELEA);

	//Mat img = imread("image.jpg");
	//namedWindow("image", CV_WINDOW_AUTOSIZE);
	//imshow("image", img);
	
	//cvNamedWindow("Inpainted", CV_WINDOW_AUTOSIZE);
	//cvShowImage( "Inpainted", inpaintImage );
	
	//saveImages(diffImage, patchImage);
	//Mat img1;
	//Mat img2;
	//img1 = imread("images/lynx.png", CV_LOAD_IMAGE_COLOR);
    //img2 = imread("images/lynx_table.png", CV_LOAD_IMAGE_COLOR);

	BgSegmenter* bgseg = new BgSegmenter();
	bgseg->showImages();
	bgseg->segmentBackground(2,"images/obj_cam_move.MOV",0);
	//ImageStitcher* stitcher = new ImageStitcher();
	//stitcher->stitchImages(img1,img2);
	 
    cvWaitKey();
    cvDestroyWindow("Clean Plate");
    cvReleaseImage(&clean);
	cvDestroyWindow("Blur Clean");
    cvReleaseImage(&blurClean);
	cvDestroyWindow("Blur Front");
    cvReleaseImage(&blurFront);
	cvDestroyWindow("Front Plate");
    cvReleaseImage(&front);

	//processVideo();
	//averageBackground();

    return 0;
}

