#include "stdafx.h"

#include <opencv2/legacy/legacy.hpp>
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/photo/photo.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/nonfree/features2d.hpp"
#include "opencv2/calib3d/calib3d.hpp"

#include "ImageStitcher.h"
#include "RGBColour.h"

using namespace cv;
using namespace std;

/*
*	Construct the stitcher around a shared resource library.
*	@param rl  resource library used to store warped corner sets (not owned).
*/
ImageStitcher::ImageStitcher(ResourceLibrary* rl)
{
	resources = rl;

	//-- Track the largest warp canvas seen so far, so successive stitches
	//   can reuse a canvas big enough for every result (see warpImages).
	largestX = 0;
	largestY = 0;

	// NOTE(review): the original listed each member (grayscale1, grayscale2,
	// descriptors_*, keypoints_*, good_matches, obj, scene, homographies,
	// warped_corners) as bare expression statements. Those had no effect and
	// were removed; all cv::Mat / std::vector members default-construct empty.
}

ImageStitcher::~ImageStitcher(void)
{
	// Nothing to release: 'resources' is not owned by this class and all
	// other members (cv::Mat, std::vector) clean themselves up via RAII.
}

/*
*	Image stitching pipeline overview — implemented by stitchImages() below.
*	stitchImages() takes the image pair, checks the images are valid, and
*	calls the necessary methods of the stitching pipeline in order
*	(detect -> extract -> match -> homography -> warp).
*	Skeleton code based on an online tutorial.
*	Source: http://docs.opencv.org/opencv_tutorials.pdf Section 6.9
*/

//-- Calculate descriptors (feature vectors)
void ImageStitcher::extractFeatures()
{
	SiftDescriptorExtractor extractor;
	extractor.compute( grayscale1, keypoints_object, descriptors_object );
	extractor.compute( grayscale2, keypoints_scene, descriptors_scene );
}

/*
*	Match SIFT descriptors between the object (image 1) and the scene
*	(image 2) with a FLANN-based matcher, then keep only the "good"
*	matches whose distance is below 3 * (minimum observed distance).
*	Surviving matches are appended to the 'good_matches' member.
*/
void ImageStitcher::matchFeatures()
{
	//-- Matching descriptor vectors using FLANN matcher
	FlannBasedMatcher matcher;
	vector< DMatch > matches;
	matcher.match( descriptors_object, descriptors_scene, matches );

	//-- Guard: nothing to filter if matching produced no candidates
	//   (the original indexed matches[i] unconditionally).
	if( matches.empty() )
	{
		printf("-- No matches found \n");
		return;
	}

	double max_dist = 0; 
	double min_dist = 100; // NOTE(review): 100 caps min_dist from above; kept for behaviour parity

	//-- Quick calculation of max and min distances between keypoints.
	//   Iterate over 'matches' itself rather than descriptors_object.rows so
	//   an unexpected size mismatch can never index out of range.
	for( size_t i = 0; i < matches.size(); i++ )
	{	
		double dist = matches[i].distance;
		if( dist < min_dist ) min_dist = dist;
		if( dist > max_dist ) max_dist = dist;
	}

	printf("-- Max dist : %f \n", max_dist );
	printf("-- Min dist : %f \n", min_dist );

	//-- Use only "good" matches (i.e. whose distance is less than 3*min_dist)
	for( size_t i = 0; i < matches.size(); i++ )
	{ 
		if( matches[i].distance < 3*min_dist )
		{ 
			good_matches.push_back( matches[i] );
		}
	}
}

/*
*	Render the detected keypoint sets of both input images and display
*	them in their own windows ("Keypoints 1" / "Keypoints 2").
*/
void ImageStitcher::drawAllKeypoints(Mat image1, Mat image2)
{
	Mat rendered1;
	Mat rendered2;

	//-- Overlay each keypoint set onto a copy of its source image
	drawKeypoints( image1, keypoints_object, rendered1, Scalar::all(-1), DrawMatchesFlags::DEFAULT );
	drawKeypoints( image2, keypoints_scene, rendered2, Scalar::all(-1), DrawMatchesFlags::DEFAULT );

	//-- Show detected (drawn) keypoints
	imshow("Keypoints 1", rendered1 );
	imshow("Keypoints 2", rendered2 );
}

/*
*	Print every corner point in the supplied list to stdout, one per line.
*	Generalized from the original hard-coded corners[0]..corners[3], which
*	read out of bounds whenever fewer than four corners were supplied.
*/
void ImageStitcher::printCorners(vector<Point2f> corners)
{
	for( size_t i = 0; i < corners.size(); i++ )
	{
		cout << corners[i] << endl;
	}
}

/*
*	Reset all per-stitch state so a fresh image pair can be processed.
*	Invoked at the start of every stitchImages() run.
*/
void ImageStitcher::clearVectors()
{
	keypoints_object.clear();
	keypoints_scene.clear();
	obj.clear();
	scene.clear();
	good_matches.clear();
}

/*
*	Hand the warped corner coordinates of the latest stitch over to the
*	shared resource library. The local 'warped_corners' member is no longer
*	used for this (see the commented line below).
*/
void ImageStitcher::storeWarpedCorners(vector<Point2f> corners)
{
	resources->addWarpedCorners(corners);
	//warped_corners.push_back(corners);
}


/*
*	Image stitching main function. Converts the pair to grayscale, detects
*	SIFT keypoints, extracts and matches descriptors, estimates the
*	homography from the good matches, and warps image1 into image2's frame.
*	@param image1  the "object" image to be warped.
*	@param image2  the "scene" image providing the reference frame.
*	@return the warped/stitched result, or an empty Mat when the inputs are
*	        invalid or too few good matches exist to estimate a homography.
*	Skeleton code based on an online tutorial.
*	Source: http://docs.opencv.org/opencv_tutorials.pdf Section 6.9
*/
Mat ImageStitcher::stitchImages(Mat image1, Mat image2)
{
	//-- Reset all per-stitch state from any previous run
	clearVectors();

	//-- Convert to grayscale values to check that the images have been read in correctly
	//   NOTE(review): OpenCV loads images as BGR, so CV_RGB2GRAY swaps the
	//   channel weights — kept as-is; confirm whether exact grayscale matters.
	cvtColor( image1, grayscale1, CV_RGB2GRAY );
	cvtColor( image2, grayscale2, CV_RGB2GRAY );

	showImage1and2(image2,image1);

	if( !grayscale1.data || !grayscale2.data )
	{ 
		cout<< " --(!) Error reading images " << std::endl; 
		return Mat(); // fixed: the original fell through here and crashed downstream
	}

	//-- Detect the keypoints using SIFT Detector
	int minHessian = 500;
	SiftFeatureDetector detector( minHessian );
	detector.detect( grayscale1, keypoints_object ); //-- keypoints in image1 (the "object")
	detector.detect( grayscale2, keypoints_scene );  //-- keypoints in image2 (the "scene")
	
	drawAllKeypoints(image1, image2);
	extractFeatures();
	matchFeatures();
	
	//-- Draw only "good" matches
	Mat img_matches;
	drawMatches( image1, keypoints_object, image2, keypoints_scene, good_matches, img_matches, Scalar::all(-1), Scalar::all(-1), vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

	//-- findHomography needs at least 4 point correspondences to solve for H
	if( good_matches.size() < 4 )
	{
		cout << " --(!) Not enough good matches to estimate a homography " << endl;
		return Mat();
	}

	//-- Get the keypoints from the good matches
	for( size_t i = 0; i < good_matches.size(); i++ )
	{
		obj.push_back( keypoints_object[ good_matches[i].queryIdx ].pt );
		scene.push_back( keypoints_scene[ good_matches[i].trainIdx ].pt );
	}

	//-- Find the Homography Matrix (tranformation between the two images)
	Mat H = findHomography( obj, scene, CV_RANSAC, 10); // ## obj, scene for corners |||| scene, obj for correct warping and layering
	printf("%i \n",H.type());
	cout<<H<<endl;

	//-- Get the corners from the image_1 ( the object to be "detected" )
	std::vector<Point2f> obj_corners(4);
	obj_corners[0] = cvPoint( 0, 0);
	obj_corners[1] = cvPoint( image1.cols, 0 );
	obj_corners[2] = cvPoint( image1.cols, image1.rows );
	obj_corners[3] = cvPoint( 0, image1.rows );
	printCorners(obj_corners);

	//-- Project the corners through H to find where image1 lands in scene space
	vector<Point2f> test_corners(4);
	perspectiveTransform( obj_corners, test_corners, H);

	//-- Record the most-negative warped X/Y and which corner produced each;
	//   5000 acts as a sentinel meaning "no corner went negative on this axis"
	Point2f corn;
	double minX = 0;
	double minY = 0;
	double countX = 5000;
	double countY = 5000;
	for (int i=0;i<4;i++)
	{
		corn = test_corners[i];
		if( corn.x < minX )
		{
			minX = corn.x;
			countX=i;
		}
		if( corn.y < minY )
		{
			minY = corn.y;
			countY=i;
		}
	}
		
	//-- Translation matrix that would shift the warp fully into view
	//   (currently only printed; the H*trans composition is disabled below)
	double data[3][3] = {{1.0, 0.0, abs(minX)},{ 0.0, 1.0, abs(minY)},{ 0.0, 0.0, 1.0}};
	Mat trans(3,3,CV_64FC1,data);
	//H=H*trans;
	cout<<trans<<endl;

	vector<Point2f> scene_corners(4);
	perspectiveTransform( obj_corners, scene_corners, H);
	printCorners(scene_corners);

	storeWarpedCorners(scene_corners);

	//-- Draw lines between the corners (the mapped object in the scene - image_2 )
	line( img_matches, scene_corners[0] + Point2f( image1.cols, 0), scene_corners[1] + Point2f( image1.cols, 0), Scalar( 0, 0, 255), 4 );
	line( img_matches, scene_corners[1] + Point2f( image1.cols, 0), scene_corners[2] + Point2f( image1.cols, 0), Scalar( 0, 0, 255), 4 );
	line( img_matches, scene_corners[2] + Point2f( image1.cols, 0), scene_corners[3] + Point2f( image1.cols, 0), Scalar( 0, 0, 255), 4 );
	line( img_matches, scene_corners[3] + Point2f( image1.cols, 0), scene_corners[0] + Point2f( image1.cols, 0), Scalar( 0, 0, 255), 4 );

	//-- Use the Homography Matrix to warp the images
	Mat result = warpImages(image1,image2,H,obj_corners,scene_corners, minX, minY, countX, countY);

	//-- Show detected matches and save the visualisation
	imshow( "Good Matches & Object detection", img_matches );
	imwrite( "Matches.png", img_matches ); // fixed: replaces the legacy CvMat/cvSaveImage pair

	return result;
}

 //-- Display the two input frames in their own named windows
 //-- ("first image" / "second image") for visual inspection.
 void ImageStitcher::showImage1and2(Mat firstImage, Mat secondImage)
 {
	 imshow("first image", firstImage);
	 imshow("second image", secondImage);
 }

 /*
 *	Estimate the homography mapping img1 points onto img2 points using
 *	RANSAC (reprojection threshold 4) and return the 3x3 matrix.
 *	Fixed: the original printed the cv::Mat via printf("%f", H), which is
 *	undefined behaviour (a Mat passed through varargs); stream insertion
 *	prints the matrix correctly.
 */
 Mat ImageStitcher::getHomographyMatrix(Mat img1, Mat img2)
 {
	 Mat H = findHomography( img1, img2, CV_RANSAC, 4);
	 cout << "-- Homography: " << H << endl;
	 return H;
 }

 /*
 *	Print a 3x3 homography matrix (CV_64F elements) to stdout, one row per
 *	line. Fixed: the original passed doubles to printf with "%d" (undefined
 *	behaviour — garbage output), printed the matrix transposed (it walked
 *	at(0,y), at(1,y), at(2,y), i.e. one column per line), and declared an
 *	unused local 'int g'.
 */
 void ImageStitcher::printHomographyMatrix(Mat H)
 {
	 printf("\n");
	 for(int row=0;row<3;row++)
	 {
		 printf("%f %f %f \n",H.at<double>(row,0),H.at<double>(row,1),H.at<double>(row,2));
	 }
	 printf("\n");
 }


/*
*	Warp img1 into img2's frame using homography H and display the result.
*	@param img1          image being warped (the "object").
*	@param img2          reference image; its size drives the (currently
*	                     disabled) copy-on-top step.
*	@param H             3x3 homography mapping img1 -> img2 coordinates.
*	@param obj_corners   corners of img1 before warping.
*	@param scene_corners corners of img1 after warping through H.
*	@param minXshift/minYshift  most-negative warped X/Y from the caller
*	                     (only referenced in commented-out positioning code).
*	@param indexX/indexY index of the corner that produced each minimum, or
*	                     5000 as a sentinel for "no corner went negative".
*	@return the warped canvas ('result').
*	NOTE(review): this function is visibly work-in-progress — several
*	positioning/compositing alternatives are deliberately commented out;
*	they are left untouched here.
*/
Mat ImageStitcher::warpImages(Mat img1, Mat img2, Mat H, vector<Point2f> obj_corners, vector<Point2f> scene_corners, double minXshift, double minYshift, int indexX, int indexY)
{
	//-- Use the Homography Matrix to warp the images
	//-- Bounding box of both corner sets: min/max over all 8 points.
	double minX = 100000;
	double minY = 100000;
	double maxX = 0;
	double maxY = 0;
	Point2f corner;
	for (int i=0;i<4;i++)
	{
		//-- Identify smallest X and Y values in the warping image
		corner = obj_corners[i];
		if(corner.x < minX)
		{minX = corner.x;}
		if(corner.y < minY)
		{minY = corner.y;}
		if(corner.x > maxX)
		{maxX = corner.x;}
		if(corner.y > maxY)
		{maxY = corner.y;}
		corner = scene_corners[i];
		if(corner.x < minX)
		{minX = corner.x;}
		if(corner.y < minY)
		{minY = corner.y;}
		if(corner.x > maxX)
		{maxX = corner.x;}
		if(corner.y > maxY)
		{maxY = corner.y;}
	}

	//-- Correction offsets from the corners flagged by the caller; 5000 is
	//   the sentinel for "no negative corner on this axis". Currently these
	//   only feed the commented-out positioning code and the debug prints.
	float correctionX = 0;
	float correctionY = 0;
	if (indexX != 5000)
	{
		correctionX = scene_corners[indexX].x;
		printf("Please x %f \n",scene_corners[indexX].x);
	}
	if (indexY != 5000)
	{
		correctionY = scene_corners[indexY].y;
		printf("Please y %f \n",scene_corners[indexY].y);
	}

	Mat result;
	//-- Round the bounding-box extent to the nearest integer canvas size.
	int sizeX = floor(maxX+0.5);
	int sizeY = floor(maxY+0.5);
	printf("sizeX %i \n",sizeX);
	printf("sizeY %i \n",sizeY);
	printf("cols %i \n",img2.cols);
	printf("rows %i \n",img2.rows);

	//-- Grow the persistent canvas size so it always fits the largest warp
	//   seen so far (largestX/largestY are members, retained across calls).
	if (sizeX > largestX)
	{
		largestX = sizeX;
	}
	if (sizeY > largestY)
	{
		largestY = sizeY;
	}

	warpPerspective(img1,result,H,cv::Size(/*maxX+abs(correctionX),maxY+abs(correctionY)*/largestX,largestY));

	
	//-- Copies the original image on top of the warped image
	//Rect location(abs(minXshift)+abs(correctionX)+abs(minX),abs(minYshift)+abs(correctionY)+abs(minY),img2.cols,img2.rows);
	//cout<<location<<endl;
	// NOTE(review): 'half' has no effect while the copyTo below stays
	// commented out — presumably the in-progress compositing step; confirm
	// before removing.
	Mat half(result,cv::Rect(/*abs(minXshift)+abs(correctionX)+abs(minX)*/0,/*abs(minYshift)+abs(correctionY)+abs(minY)*/0,img2.cols,img2.rows));
	//img2.copyTo(half);
	imshow( "Result", result );

	return result;
 }

 