#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/nonfree/features2d.hpp"
#include "opencv2/core/core.hpp"

#include "../../Common/BEViewer.h"
#include "../../Common/Geometry/Line2D.h"
#include "../../Common/RoadLane.h"
#include "../../Common/Geometry/BSpline.h"
#include "../../Common/Utility/Measure.h"
#include "../../Common/HsvImage.h"
#include "../../Common/defines.h"

#include <iostream>

#pragma comment(lib, "opencv_core242d")
#pragma comment(lib, "opencv_imgproc242d")
#pragma comment(lib, "opencv_highgui242d")
#pragma comment(lib, "opencv_calib3d242d")
#pragma comment(lib, "opencv_features2d242d")
#pragma comment(lib, "opencv_nonfree242d")

using namespace cv;
using namespace std;

// Forward declarations (correctHomography/UpdateKalman bodies are currently disabled).
void plotParams(vector<Line2D> v, Scalar color, Mat & acc);
void padTo(std::string &str, const size_t num, const char paddingChar = ' ');
void correctHomography(int key, vector<Point2f> & points);
void UpdateKalman(RoadLane & left, RoadLane & right, Mat & img);
void supressOuterRegion(Mat & src, Line2D & r, Point2d & p);

// Region highlighted in the "HL-Accumulator" window (see plotParams).
Rect sampleArea(0,0,100, 100);

clock_t startplot = 0;
// 3x3 rectangular kernel used to erode the thresholded bird's-eye image.
// NOTE: the original passed MORPH_ERODE here, which is a morphology
// *operation* code, not a kernel *shape*; it only worked by coincidence
// because MORPH_ERODE == MORPH_RECT == 0 in OpenCV 2.4.
Mat erodeElement = getStructuringElement(MORPH_RECT,  Size(3, 3), Point(1, 1));

// Region of interest inside each input frame; set per input source in main().
Rect ROIRect;

// Source trapezoid (currPts) and destination rectangle (reqPts) that define
// the bird's-eye-view homography; filled once per input source in main().
vector<Point2f> currPts = vector<Point2f>(4);
vector<Point2f> reqPts	= vector<Point2f>(4);
bool HomographyInitialized = false;

int main(int argc, char** argv)
{
	int swt = 0;

	string filename = "D:/Bildverarbeitung/TestBilder/Autobahn/Highway_BEV_3.png";
	string folderName = "../../InputFiles/Video/Autobahn/KIT_2011_09_26_drive_0029/image_02/data/";

	 string videoFile = "../../InputFiles/Video/Autobahn/B10_nachtmp4_cut_3.mp4";
	// string videoFile = "../../InputFiles/Video/Autobahn/B10_nacht.mov";
	// string videoFile = "../../InputFiles/Video/Autobahn/B10_fahrt_2/video_2_cut_3.mp4";
	//string videoFile = "../../InputFiles/Video/Autobahn/B10_fahrt_1/Videoaufnahme 5.mov";
	// string videoFile = "../../InputFiles/Video/Autobahn/B10_fahrt_1/Videoaufnahme 3.mov";

	string homografyMatFile		= "../data/BEVHomography.yml";
	string homografyMatIndex	= "VideoProbe1";
	BEViewer beViewer(homografyMatFile, homografyMatIndex);

	Measure* meas = Measure::GetInstance();
	Timer * TotalTimer = meas->CreateNewTimer("TotalTimer");
	Timer * cvtColorTimer = meas->CreateNewTimer("cvtColor");
	Timer * transfTimer = meas->CreateNewTimer("warpPerspective");
	Timer * gaussTimer = meas->CreateNewTimer("gauss filter");
	Timer * CannyTimer = meas->CreateNewTimer("Canny");
	Timer * FindLanesROI = meas->CreateNewTimer("find lanes ROI");
	Timer * FindLanesBPoints = meas->CreateNewTimer("find base points");
	Timer * copyImgTimer = meas->CreateNewTimer("copyImgTimer");
	Timer * polygonTimer = meas->CreateNewTimer("polygonTimer");

	// Load video
	VideoCapture cap;
	cap.open(videoFile);

	if( !cap.isOpened() )
	{
		printf("can not open camera or video file/n");
		return -1;
	}

	Mat videoFrame, videoFrameGray, binaryImg, binImgCannyColor, trsfRoiImg, CannyImg;

	int framesPerSecond	= (int)cap.get(CV_CAP_PROP_FPS);
	int DelayBeforeNextFrame = 1000 / framesPerSecond;
	int restTimeBeforeNextFrame = 1;

	clock_t start, duration;

	cap >> videoFrame;

	/*********************************/
	// Mat leftLaneTemplate = imread("MatchSamples/LeftLane.png", CV_LOAD_IMAGE_GRAYSCALE);
	// Mat rightLaneTemplate = imread("MatchSamples/RightLane.png", CV_LOAD_IMAGE_GRAYSCALE);
	Mat commonLaneTemplate = imread("MatchSamples/commonLane.png", CV_LOAD_IMAGE_GRAYSCALE);
	/*********************************/
	RoadLane leftLane(LEFT_LANE);
	RoadLane rightLane(RIGHT_LANE);

	int frameCounter = 0;

	int cnt = 1;

	while(true)
	{
		start = clock();
		TotalTimer->Start();

		/****************************************************/
		/************		Bild holen		*****************/
		/****************************************************/
		if(swt == 0)
		{
			cap >> videoFrame;

			ROIRect = Rect(20,280,620,200);

			if(!HomographyInitialized)
			{
				currPts[0] = Point2f(195, 0);
				currPts[1] = Point2f(460, 0);
				currPts[2] = Point2f(640, 70);
				currPts[3] = Point2f(10,  70);

				reqPts[0] = Point2f(0,   0);
				reqPts[1] = Point2f(285, 0);
				reqPts[2] = Point2f(285, 330);
				reqPts[3] = Point2f(0,   330);
				HomographyInitialized = true;
			}
		}

		if(swt == 1)
		{
			ostringstream text, filenameStream;
			string filename;
			filenameStream << frameCounter++;
			filename = filenameStream.str();
			padTo(filename, 10, '0');

			text << folderName << filename << ".png";
			//text << folderName << "0000000148.png";
			videoFrame = imread(text.str(), CV_LOAD_IMAGE_COLOR);

			ROIRect = Rect(330,205,550,170);

			if(!HomographyInitialized)
			{
				currPts[0] = Point2f(195, 0);
				currPts[1] = Point2f(460, 0);
				currPts[2] = Point2f(640, 70);
				currPts[3] = Point2f(10,  70);

				reqPts[0] = Point2f(-10,   0);
				reqPts[1] = Point2f(515, 0);
				reqPts[2] = Point2f(415, 285);
				reqPts[3] = Point2f(-40,   285);
				HomographyInitialized = true;
			}

			DelayBeforeNextFrame = 100;
		}

		if (videoFrame.empty())
		{
			break;
		}

		/****************************************************/
		/************		Bild Ausschnitt  ****************/
		/****************************************************/
		Mat RoiImg = videoFrame(ROIRect);

		cvtColorTimer->Start();
		cvtColor(RoiImg, RoiImg, CV_BGR2GRAY);
		cvtColorTimer->Stop();

		// imshow("RoiImg", RoiImg);

		/****************************************************/
		/************		Transformation   ****************/
		/****************************************************/

		Mat homography = findHomography(currPts, reqPts, CV_RANSAC);
		//FileStorage fs("../data/BEVHomography.yml", FileStorage::WRITE);
		//fs << "VideoProbe1" << homography;
		//fs.release();
		beViewer = BEViewer(homography);

		transfTimer->Start();
		beViewer.Transform(RoiImg, trsfRoiImg);
		transfTimer->Stop();

		// imshow("trsfRoiImg", trsfRoiImg);

		copyImgTimer->Start();
		Mat test = trsfRoiImg.clone();
		copyImgTimer->Stop();

		gaussTimer->Start();
		GaussianBlur(trsfRoiImg, CannyImg, Size(3,3), 1.2, 1.2);
		gaussTimer->Stop();

		Mat forCannyImg;
		equalizeHist(CannyImg, forCannyImg);
		threshold(forCannyImg, forCannyImg, 240, 255, CV_THRESH_BINARY);
		erode(forCannyImg, CannyImg, erodeElement);

		/****************************************************/
		/************ Kantendetektion mit Canny *************/
		/****************************************************/
		CannyTimer->Start();
		Canny(CannyImg, CannyImg, 60, 100);
		CannyTimer->Stop();

		cvtColor(CannyImg, binImgCannyColor, CV_GRAY2BGR);
		Mat acc = Mat::zeros(400, 400, CV_8UC3);

		Rect leftHalf(0,0,trsfRoiImg.cols / 2, trsfRoiImg.rows);
		rectangle(binImgCannyColor, leftHalf, COLOR_GREEN, 2);

		Rect rightHalf(leftHalf.width, 0, trsfRoiImg.cols / 2, trsfRoiImg.rows);
		rectangle(binImgCannyColor, rightHalf, COLOR_YELLOW, 2);

		/****************************************************/
		/****************** Bild splitten  ******************/
		/****************************************************/

		// ROI
		Mat leftImage  = CannyImg(leftHalf);
		Mat rightImage = CannyImg(rightHalf);
		FindLanesROI->Start();
		leftLane.Find(leftImage);
		rightLane.Find(rightImage);
		FindLanesROI->Stop();
		
		// Base points
		FindLanesBPoints->Start();
		leftImage  = forCannyImg(leftHalf);
		rightImage = forCannyImg(rightHalf);

		/************* Determine ROI for template matching ******************/
		double xOffset = 25;
		polygonTimer->Start();
		supressOuterRegion(leftImage, leftLane.GetCenterLine(), Point2d(20,0));
		supressOuterRegion(rightImage, rightLane.GetCenterLine(), Point2d(20,0));
		polygonTimer->Stop();

		// Show image with supressed regions
		 Mat filteredImage = Mat::zeros(trsfRoiImg.rows, trsfRoiImg.cols, trsfRoiImg.type());
		leftImage.copyTo(filteredImage(leftHalf));
		rightImage.copyTo(filteredImage(rightHalf));
		imshow("filteredImage", filteredImage); 

		leftLane.Match(leftImage, commonLaneTemplate);
		rightLane.Match(rightImage, commonLaneTemplate);
		FindLanesBPoints->Stop();

		//int leftLaneType = leftLane.DetermineLaneType(leftImage);
		int rightLaneType = rightLane.DetermineLaneType(rightImage);

		vector<Point2d> leftLaneBasePoints = leftLane.GetBasePoints();
		vector<Point2d> rightLaneBasePoints = rightLane.GetBasePoints();

		SplinePoint * leftBSplinePoints  = new SplinePoint[leftLaneBasePoints.size()];
		SplinePoint * rightBSplinePoints = new SplinePoint[rightLaneBasePoints.size()];

		for(unsigned int i = 0; i < leftLaneBasePoints.size(); i++)
		{
			leftBSplinePoints[i].x = leftLaneBasePoints[i].x;
			leftBSplinePoints[i].y = leftLaneBasePoints[i].y;
			rightBSplinePoints[i].x = rightLaneBasePoints[i].x;
			rightBSplinePoints[i].y = rightLaneBasePoints[i].y;
		}

		int bSplinePointCnt = 20;

		int leftMaxErrorInd = -1, rightMaxErrorInd = -1;
		double leftError = 0.0, rightError = 0.0;

		BSpline leftSpline(4, leftBSplinePoints, leftLaneBasePoints.size());
		SplinePoint * LeftResultBsplinePoints = leftSpline.CalcPoints(bSplinePointCnt);
		leftError = leftSpline.FindCtrPointWithMaxError(leftMaxErrorInd);

		BSpline rightSpline(4, rightBSplinePoints, rightLaneBasePoints.size());
		SplinePoint * RightResultBsplinePoints = rightSpline.CalcPoints(bSplinePointCnt);
		rightError = rightSpline.FindCtrPointWithMaxError(rightMaxErrorInd);

		// PLOT BASE POINTS
		for(unsigned int i = 0; i < leftLaneBasePoints.size(); i++)
		{
			Point2d p = leftLaneBasePoints[i];
			circle(binImgCannyColor, p, 2, COLOR_GREEN, 2);
			if(i == leftMaxErrorInd && leftError > 10)
			{
				circle(binImgCannyColor, p, 5, COLOR_PURPLE, 3);
			}

			Line2D::TransformPoint(beViewer.GetInvHomography(), p);
			p.x += ROIRect.x;
			p.y += ROIRect.y;
			circle(videoFrame, p, 2, COLOR_GREEN, 2);
			if(i == leftMaxErrorInd && leftError > 10)
			{
				circle(videoFrame, p, 5, COLOR_PURPLE, 3);
			}
		}

		for(unsigned int i = 0; i < rightLaneBasePoints.size(); i++)
		{
			Point2d p = rightLaneBasePoints[i];
			p.x += rightHalf.x;
			p.y += rightHalf.y;
			circle(binImgCannyColor, p, 2, COLOR_YELLOW, 2);

			Line2D::TransformPoint(beViewer.GetInvHomography(), p);
			p.x += ROIRect.x;
			p.y += ROIRect.y;
			circle(videoFrame, p, 2, COLOR_YELLOW, 2);
		}

		// PLOT BSPLINE POINTS
		for(int i = 0; i < bSplinePointCnt; i++)
		{
			Point2d p(LeftResultBsplinePoints[i].x, LeftResultBsplinePoints[i].y);
			circle(binImgCannyColor, p, 1, COLOR_RED, 1);
			Line2D::TransformPoint(beViewer.GetInvHomography(), p);
			p.x += ROIRect.x;
			p.y += ROIRect.y;
			circle(videoFrame, p, 1, COLOR_RED, 1);
		}

		for(int i = 0; i < bSplinePointCnt; i++)
		{
			Point2d p(RightResultBsplinePoints[i].x, RightResultBsplinePoints[i].y);
			p.x += rightHalf.x;
			p.y += rightHalf.y;
			circle(binImgCannyColor, p, 1, COLOR_TUERKIS, 1);

			Line2D::TransformPoint(beViewer.GetInvHomography(), p);
			p.x += ROIRect.x;
			p.y += ROIRect.y;
			circle(videoFrame, p, 1, COLOR_TUERKIS, 1);
		}

		delete LeftResultBsplinePoints;
		delete RightResultBsplinePoints;

		/***********************************************************************************/
		/******************** Left Lane ****************************************************/
		/***********************************************************************************/

		RoadLane leftLaneCopy(leftLane);
		leftLane.GetCenterLine().PlotParameters(acc, COLOR_GREEN, Point(100, 100));

		/* leftLaneCopy.PlotLineParts(binImgCannyColor, COLOR_GREEN, true);
		leftLaneCopy.Transform(beViewer.GetInvHomography());
		leftLaneCopy.Move(Point2d(ROIRect.x, ROIRect.y));
		leftLaneCopy.PlotLineParts(videoFrame, COLOR_GREEN, true); */
		
		/***********************************************************************************/
		/***************** Right Lane ******************************************************/
		/***********************************************************************************/
		
		RoadLane rightLaneCopy(rightLane);
		rightLane.GetCenterLine().PlotParameters(acc, COLOR_YELLOW, Point(100, 100 + rightHalf.x));
		/* rightLaneCopy.Move(Point2d(rightHalf.x, rightHalf.y));
		rightLaneCopy.PlotLineParts(binImgCannyColor, COLOR_YELLOW, true);
		rightLaneCopy.Transform(beViewer.GetInvHomography());
		rightLaneCopy.Move(Point2d(ROIRect.x, ROIRect.y));
		rightLaneCopy.PlotLineParts(videoFrame, COLOR_YELLOW, true); */


		/***********************************************************************************/
		/***************** Show results ****************************************************/
		/***********************************************************************************/

		imshow("Found lanes", binImgCannyColor);
		imshow("back transformed lines", videoFrame);
		imshow("acc", acc);

		duration = clock() - start;
		restTimeBeforeNextFrame = DelayBeforeNextFrame > duration ? DelayBeforeNextFrame - duration: 1;
		TotalTimer->Stop();

		int key = waitKey(restTimeBeforeNextFrame);

		if(key == 27)
			break;

		if(key == 104)
		{
			ostringstream name;
			name << "../frame_" << cnt++ << ".jpg";
			imwrite(name.str(), videoFrame);
		}

		while(key == 32)
		{
			key = waitKey();
		}

		meas->PlotMeasure(200);
	}
	return 0;
}

/// Plots each line's (Rho, Theta) parameter pair as a dot in the accumulator
/// image `acc`, frames the sample area, and shows the result in the
/// "HL-Accumulator" window. Does nothing for an empty input.
///
/// Note: the original also sorted the rho/theta values and computed their
/// min/max, but never used any of it — that dead O(n log n) work is removed.
void plotParams(vector<Line2D> v, Scalar color, Mat & acc)
{
	if(v.size() == 0)
	{
		return;
	}

	for(unsigned int i = 0; i < v.size(); i++)
	{
		int x = (int)v[i].Rho;
		int y = (int)v[i].Theta;
		circle( acc, Point(x, y), 5, color, 3, 8);
	}

	rectangle(acc, sampleArea, CV_RGB(255,255,0));
	imshow("HL-Accumulator", acc);
}

/// Left-pads `str` with `paddingChar` until it is `num` characters long.
/// Strings that are already `num` characters or longer are left untouched.
void padTo(std::string &str, const size_t num, const char paddingChar)
{
	const size_t len = str.size();
	if(len >= num)
		return;
	str.insert(str.begin(), num - len, paddingChar);
}

/// Zeroes every pixel of `src` lying outside the corridor around line `r`.
/// The corridor is the quadrilateral spanned by the line's start/end points,
/// each shifted by -p and +p. `src` is modified in place.
/// Assumes `src` is a single-channel 8-bit image (it is called on the
/// thresholded binary image) — TODO confirm for other callers.
void supressOuterRegion(Mat & src, Line2D & r, Point2d & p)
{
	Point points[4];
	points[0] = r.Start - p;
	points[1] = r.End - p;
	points[2] = r.End + p;
	points[3] = r.Start + p;

	// Fill the corridor with 255 in a mask and keep only masked pixels.
	// Scalar::all(255) replaces the original CV_RGB(0,0,255), which only
	// produced 255 on this single-channel image by coincidence (CV_RGB puts
	// blue into the first channel). bitwise_and replaces the allocating
	// src.mul(temp/255), which built two temporary matrices per call.
	Mat mask = Mat::zeros(src.rows, src.cols, src.type());
	fillConvexPoly(mask, points, 4, Scalar::all(255));
	bitwise_and(src, mask, src);
}

/*
void correctHomography(int key, vector<Point2f> & points)
{
	int step = 30;
	switch(key)
	{

	case 2490368:  // UP
		points[currentCorner].y -= step;
		break;	

	case 2621440:  // DOWN
		points[currentCorner].y += step;
		break;	

	case 2424832:  // LEFT
		points[currentCorner].x -= step;
		break;	

	case 2555904:  // RIGHT
		points[currentCorner].x += step;
		break;	

	case 49: 
		currentCorner = UP_LEFT;
		cout << "Left upper corner is selected" << endl;
		break;

	case 50: 
		currentCorner = UP_RIGHT; 
		cout << "Right upper corner is selected" << endl;
		break;

	case 51: 
		currentCorner = DOWN_RIGHT; 
		cout << "Right lower corner is selected" << endl;
		break;

	case 52: 
		currentCorner = DOWN_LEFT; 
		cout << "Left lower corner is selected" << endl;
		break;
	}
}
*/