/***************************************************************************\
 * Copyright (C) by Keio University
 * RGBDRegistration.cpp created in 10 2012.
 * Mail : fdesorbi@hvrl.ics.keio.ac.jp
 *
 * RGBDRegistration.cpp is part of the HVRL Engine Project.
 *
 * The HVRL Engine Project is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * The HVRL Engine Project is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 \***************************************************************************/

#include "hvrl/common/Common.hpp"
#include "hvrl/datatypes/PointCloud.hpp"
#include "hvrl/datatypes/RGBDImage.hpp"
#include "hvrl/tools/RigidTransformationEstimation.hpp"
#include "hvrl/tools/OpenCVOperators.hpp"
#include "hvrl/tools/RGBDRegistration.hpp"

namespace hvrl {

namespace tools {

/*
 * Detects 2D keypoints in a color image with the named OpenCV detector.
 * Returns false (after logging an error) when `method` names no detector.
 */
static bool getKeypoints(const cv::Mat3b& colorimage, const std::string& method,
		std::vector<cv::KeyPoint>& keypoints) {

	const cv::Ptr<cv::FeatureDetector> detector =
			cv::FeatureDetector::create(method);
	if (!detector.empty()) {
		detector->detect(colorimage, keypoints);
		return true;
	}

	hvrl::Log::add().error("getKeypoints",
			"No corresponding feature detector found (" + method + ")");
	return false;
}

/*
 * Computes descriptors for the supplied keypoints with the named OpenCV
 * descriptor extractor. Returns false (after logging an error) when
 * `method` names no extractor.
 */
static bool getDescriptors(const cv::Mat3b& colorimage,
		const std::string& method, std::vector<cv::KeyPoint>& keypoints,
		cv::Mat& descriptors) {

	const cv::Ptr<cv::DescriptorExtractor> extractor =
			cv::DescriptorExtractor::create(method);
	if (!extractor.empty()) {
		extractor->compute(colorimage, keypoints, descriptors);
		return true;
	}

	hvrl::Log::add().error("getDescriptors",
			"No corresponding descriptor extractor found (" + method + ")");
	return false;
}

// Code obtained from the OpenCV sample descriptor_extractor_matcher.cpp
// Code obtained from the OpenCV sample descriptor_extractor_matcher.cpp.
// Keeps only mutual matches: a forward match (1 -> 2) is accepted when one
// of the backward matches of its train descriptor points back at its query.
// At most one match per query descriptor is emitted.
static void crossCheckMatching(
		cv::Ptr<cv::DescriptorMatcher>& descriptorMatcher,
		const cv::Mat& descriptors1, const cv::Mat& descriptors2,
		std::vector<cv::DMatch>& filteredMatches12, int knn = 1) {
	std::vector<std::vector<cv::DMatch> > forwardKnn, backwardKnn;
	descriptorMatcher->knnMatch(descriptors1, descriptors2, forwardKnn, knn);
	descriptorMatcher->knnMatch(descriptors2, descriptors1, backwardKnn, knn);

	for (size_t q = 0; q < forwardKnn.size(); ++q) {
		bool accepted = false;
		for (size_t f = 0; f < forwardKnn[q].size() && !accepted; ++f) {
			const cv::DMatch& forward = forwardKnn[q][f];
			const std::vector<cv::DMatch>& backCandidates =
					backwardKnn[forward.trainIdx];
			for (size_t b = 0; b < backCandidates.size(); ++b) {
				if (backCandidates[b].trainIdx == forward.queryIdx) {
					filteredMatches12.push_back(forward);
					accepted = true;
					break;
				}
			}
		}
	}
}

/*
 * Matches two descriptor sets with the named OpenCV matcher.
 *
 * When useCCF is true, matches are cross-check filtered (mutual nearest
 * neighbours only); otherwise a plain one-way match is performed.
 * Returns false (after logging an error) when `methodmatcher` names no
 * known matcher.
 *
 * Cleanup: removed the unused local `matches` vector and the dead
 * commented-out min/max-distance filter that referenced it.
 */
static bool getMatches(const cv::Mat& descriptorsA, const cv::Mat& descriptorsB,
		const std::string& methodmatcher, const bool& useCCF,
		std::vector<cv::DMatch>& matchesfinal) {

	cv::Ptr<cv::DescriptorMatcher> descriptorMatcher =
			cv::DescriptorMatcher::create(methodmatcher);

	if (descriptorMatcher.empty()) {
		hvrl::Log::add().error("getMatches",
				"No corresponding descriptor matcher found (" + methodmatcher
						+ ")");
		return false;
	}

	if (useCCF) {
		crossCheckMatching(descriptorMatcher, descriptorsA, descriptorsB,
				matchesfinal);
	} else {
		descriptorMatcher->match(descriptorsA, descriptorsB, matchesfinal);
	}

	return true;
}

/*
 * Full 2D correspondence pipeline between two color images: keypoint
 * detection, descriptor extraction, then descriptor matching (optionally
 * cross-check filtered). Returns false as soon as any stage fails.
 */
bool getCorrespondences(const cv::Mat3b& colorimageA,
		const cv::Mat3b& colorimageB, std::vector<cv::DMatch>& matches,
		std::vector<cv::KeyPoint>& keypointsA,
		std::vector<cv::KeyPoint>& keypointsB, const std::string& method, const std::string& methodextractor,
		const std::string& methodmatcher, const bool& useCCF) {

	if (!getKeypoints(colorimageA, method, keypointsA)
			|| !getKeypoints(colorimageB, method, keypointsB)) {
		return false;
	}

	cv::Mat descriptorsA, descriptorsB;
	if (!getDescriptors(colorimageA, methodextractor, keypointsA, descriptorsA)
			|| !getDescriptors(colorimageB, methodextractor, keypointsB,
					descriptorsB)) {
		return false;
	}

	return getMatches(descriptorsA, descriptorsB, methodmatcher, useCCF,
			matches);
}

/*
 * Matches one color image against a pre-computed descriptor set.
 *
 * Detects keypoints in colorimageA, extracts their descriptors, then
 * matches them against descriptorsB. Returns false when any stage fails.
 *
 * Bug fix: descriptors are now extracted with `methodextractor` — the
 * previous code passed `method` (the detector name), leaving the
 * `methodextractor` parameter unused, which is inconsistent with
 * getCorrespondences().
 *
 * NOTE(review): descriptorsB is typed cv::Mat3b although it holds
 * descriptors, not pixels — presumably it should be cv::Mat; the signature
 * is kept unchanged for callers. TODO confirm.
 */
bool getCorrespondencesWithDescriptors(const cv::Mat3b& colorimageA,
		const cv::Mat3b& descriptorsB, std::vector<cv::DMatch>& matches,
		std::vector<cv::KeyPoint>& keypointsA, const std::string& method, const std::string& methodextractor,
		const std::string& methodmatcher, const bool& useCCF) {

	if (!getKeypoints(colorimageA, method, keypointsA)) {
		return false;
	}

	cv::Mat descriptorsA;
	if (!getDescriptors(colorimageA, methodextractor, keypointsA,
			descriptorsA)) {
		return false;
	}

	if (!getMatches(descriptorsA, descriptorsB, methodmatcher, useCCF,
			matches)) {
		return false;
	}

	return true;

}


// File-scope side channel: convertMatchesTo3D() fills this with the DMatch
// entries that produced valid 3D pairs, and filterMatches() prunes it in
// lockstep so the 2D matches stay aligned with the filtered 3D matches.
// NOTE(review): this global makes the pipeline non-reentrant and assumes
// filterMatches() is called with the exact `matches` vector built by the
// preceding convertMatchesTo3D() call — verify against callers.
std::vector<cv::DMatch> matchesbis;
/*
 * This function filter the matches based on the 3D distance
 *
 * Clusters the per-pair 3D distances (greedy 1D clustering with a 0.1
 * tolerance and a running average per cluster), then keeps only the pairs
 * whose distance lies within 0.1 of the most populated cluster's mean.
 * Entries of the global `matchesbis` are erased in lockstep, so its
 * elements keep corresponding to `matchesfiltered`.
 *
 * Returns false when the dominant cluster has 3 or fewer members (or a
 * zero mean distance) — too few matches to trust.
 */
bool filterMatches(const std::vector<std::pair<cv::Vec3f, cv::Vec3f> >& matches,
		std::vector<std::pair<cv::Vec3f, cv::Vec3f> >& matchesfiltered) {

	// Each entry is (running mean distance, member count) for one cluster.
	std::vector<std::pair<float, unsigned int> > norms;

	// Samples the norms from the different matches
	std::vector<std::pair<cv::Vec3f, cv::Vec3f> >::const_iterator it =
			matches.begin();
	while (it != matches.end()) {

		float norm = cv::norm(it->first - it->second);
		std::vector<std::pair<float, unsigned int> >::iterator itnorms =
				norms.begin();
		bool found = false;
		while (itnorms != norms.end() && false == found) {
			// This norm belongs to a sample already added
			if (std::fabs(norm - itnorms->first) < 0.1f) {
				// Fold the new distance into the cluster mean.
				// NOTE(review): (mean + norm) / 2 weights the newest sample
				// as much as all previous ones combined — presumably
				// intentional smoothing; confirm before changing.
				itnorms->first += norm;
				itnorms->first *= 0.5f;
				itnorms->second++;
				found = true;
			}
			++itnorms;
		}
		// Find a new sample of norm
		if (false == found) {
			norms.push_back(std::pair<float, unsigned int>(norm, 1));
		}

		++it;
	}

	// Find the best sample
	unsigned int maxnorms = 0;
	float finalnorm = 0.0f;
	std::vector<std::pair<float, unsigned int> >::iterator itnorms =
			norms.begin();
	while (itnorms != norms.end()) {
		if (itnorms->second > maxnorms) {
			maxnorms = itnorms->second;
			finalnorm = itnorms->first;
		}
		++itnorms;
	}

	if (finalnorm == 0.0f || maxnorms <= 3) {
		hvrl::Log::add().error("filterMatches",
				"The number of filtered matches is too small");
		return false;
	}

	// Create the new set of filtered matches
	// NOTE(review): assumes matchesbis.size() == matches.size() (as built
	// by convertMatchesTo3D); if they differ, the erase below walks past
	// the end of matchesbis — TODO confirm no other caller exists.
	it = matches.begin();
	std::vector<cv::DMatch>::iterator it2 =  matchesbis.begin();
	while (it != matches.end()) {

		float norm = cv::norm(it->first - it->second);
		if (std::fabs(norm - finalnorm) < 0.1f) {
			// Keep this pair and the corresponding 2D match.
			matchesfiltered.push_back(*it);
			++it2;
		}else{
			// Drop the corresponding 2D match; erase() returns the next one.
			it2 = matchesbis.erase(it2);
		}

		++it;
	}

	return true;

}

/*
 * Compute the 3D rigid transformation based on
 */
/*
 * Estimates the rigid transformation (R, t) mapping the first point of each
 * pair in `matches` onto the second, with RANSAC over minimal 3-point
 * samples.
 *
 * @param matches       3D point correspondences (source, target).
 * @param nbiterations  RANSAC iteration count; 0 selects matches.size()^3.
 * @param R             output 3x3 rotation (CV_32F).
 * @param t             output translation.
 * @return false when fewer than 4 matches are given, or when no sample
 *         yielded a transformation with more inliers than outliers.
 *
 * A correspondence is an inlier when the transformed source point lies
 * within 0.05 (scene units — presumably meters, TODO confirm) of its target.
 *
 * Fixes relative to the previous version:
 *  - removed the out-of-bounds writes to pairs[3] and pairs[4] (the array
 *    has 3 elements; the extra draws were never read),
 *  - guarded the division by `inliers` against zero,
 *  - removed the `alignment_error = 0.0f` reset that made the final
 *    failure check unreachable (and the debug prints of possibly-unset
 *    R/t on that path).
 */
bool ransacAlignment(
		const std::vector<std::pair<cv::Vec3f, cv::Vec3f> >& matches,
		const unsigned int& nbiterations, cv::Mat& R, cv::Vec3f& t) {

	if (matches.size() <= 3) {
		hvrl::Log::add().error("ransacAlignment",
				"The number of matches is too small (should be higher than 3)");
		return false;
	}

	srand(time(NULL));

	unsigned int pairs[3];

	cv::Mat srcA(3, 3, CV_32F);
	cv::Mat srcB(3, 3, CV_32F);

	float alignment_error = -1.0f; // < 0 means "no candidate accepted yet"
	unsigned int nbi = nbiterations;
	if (nbiterations == 0) {
		// Default budget: one iteration per ordered triple of matches.
		const unsigned int n = static_cast<unsigned int>(matches.size());
		nbi = n * n * n;
	}

	for (unsigned int i = 0; i < nbi; ++i) {

		// Select three distinct correspondences at random.
		pairs[0] = rand() % matches.size();
		do {
			pairs[1] = rand() % matches.size();
		} while (pairs[1] == pairs[0]);
		do {
			pairs[2] = rand() % matches.size();
		} while (pairs[2] == pairs[0] || pairs[2] == pairs[1]);

		// Pack the sample into 3x3 matrices (one point per column).
		for (int k = 0; k < 3; ++k) {
			for (int l = 0; l < 3; ++l) {
				srcA.at<float>(l, k) = matches[pairs[k]].first[l];
				srcB.at<float>(l, k) = matches[pairs[k]].second[l];
			}
		}

		// Compute the rigid transformation from the minimal sample.
		cv::Mat Rtmp(3, 3, CV_32F);
		cv::Vec3f ttmp;
		if (hvrl::RigidTransformationEstimation::leastSquaresEstimation(srcA,
				srcB, Rtmp, ttmp) == true) {

			// Score the candidate against every correspondence.
			int outlierCount = 0;
			int inliers = 0;
			float error = 0.0f;
			std::vector<std::pair<cv::Vec3f, cv::Vec3f> >::const_iterator it =
					matches.begin();
			while (it != matches.end()) {
				cv::Vec3f res = Rtmp * it->first + ttmp;
				float n = cv::norm(it->second - res);
				if (n > 0.05) {
					outlierCount++;
				} else {
					error += n * n;
					inliers++;
				}
				++it;
			}

			// Keep the candidate with the lowest mean squared inlier error,
			// provided it explains more matches than it rejects.
			if (inliers > 0) {
				error /= inliers;
				if ((alignment_error < 0.0f || error < alignment_error)
						&& error > 0.0 && inliers > outlierCount) {
					alignment_error = error;
					R = Rtmp;
					t = ttmp;
				}
			}
		}

	}

	// Still negative: no sample produced an acceptable transformation.
	if (alignment_error < 0.0f) {
		hvrl::Log::add().warning("ransacAlignment",
				"Not correct alignment found between both point clouds");
		return false;
	}
	return true;

}

/*
 * The function converts pairs of keypoints into pairs of 3D points
 */
void convertMatchesTo3D(const PointCloud& source, const PointCloud& target,
		const std::vector<cv::DMatch>& matches,
		const std::vector<cv::KeyPoint>& keypointsA,
		const std::vector<cv::KeyPoint>& keypointsB,
		std::vector<std::pair<cv::Vec3f, cv::Vec3f> >& matches3d) {
	matchesbis.clear();
	Size sizesource = source.getSize();
	const float* pcsource = source.getData();
	Size sizetarget = target.getSize();
	const float* pctarget = target.getData();

	std::vector<cv::DMatch>::const_iterator it(matches.begin());
	while (it != matches.end()) {
		std::pair<cv::Vec3f, cv::Vec3f> p;
		cv::Point2f kp(keypointsA[it->queryIdx].pt);
		bool valid =
				pcsource[(int(kp.y) * sizesource.width + int(kp.x)) * 4 + 3]
						> 0;
		p.first = cv::Vec3f(
				pcsource[(int(kp.y) * sizesource.width + int(kp.x)) * 4],
				pcsource[(int(kp.y) * sizesource.width + int(kp.x)) * 4 + 1],
				pcsource[(int(kp.y) * sizesource.width + int(kp.x)) * 4 + 2]);

		kp = cv::Point2f(keypointsB[it->trainIdx].pt);
		valid &= pctarget[(int(kp.y) * sizetarget.width + int(kp.x)) * 4 + 3]
				> 0;
		p.second = cv::Vec3f(
				pctarget[(int(kp.y) * sizetarget.width + int(kp.x)) * 4],
				pctarget[(int(kp.y) * sizetarget.width + int(kp.x)) * 4 + 1],
				pctarget[(int(kp.y) * sizetarget.width + int(kp.x)) * 4 + 2]);

		bool exist = false;
		std::vector<std::pair<cv::Vec3f, cv::Vec3f> >::iterator itpair = matches3d.begin();
		while (itpair != matches3d.end() && false == exist) {
			if(p.first == itpair->first){
				exist = true;
			}
			++itpair;
		}

		// If the 3D coordinate is correct
		if (true == valid && false == exist) {
			matchesbis.push_back(*it);
			matches3d.push_back(p);
		}

		++it;
	}

}

bool computeRigidTransformationWithRANSAC(const RGBDImage* source,
		const RGBDImage* target, const PointCloud *sourcepc,
		const PointCloud *targetpc, cv::Mat& R, cv::Vec3f& t,
		const std::string& method, const std::string& methodextractor,
		const std::string& methodmatcher,
		const bool& useCCF, const unsigned int& iterations) {

	assert(source.getSize() == sourcepc.getSize());
	assert(target.getSize() == targetpc.getSize());

	/*
	 * Convert input images into opencv structures
	 */
	cv::Mat3b sourceColor(source->color.getSize().height,source->color.getSize().width);
	cv::Mat3b targetColor(target->color.getSize().height,target->color.getSize().width);
	memcpy(sourceColor.data,source->color.getData(),3*source->color.getSize().height*source->color.getSize().width*sizeof(unsigned char));
	memcpy(targetColor.data,target->color.getData(),3*target->color.getSize().height*target->color.getSize().width*sizeof(unsigned char));

	/*
	 * Convert to BGR opencv format
	 */
	cv::Mat targetcvt, sourcecvt;
	cv::cvtColor(sourceColor, sourcecvt, CV_RGB2BGR);
	cv::cvtColor(targetColor, targetcvt, CV_RGB2BGR);

	/*
	 * Create the data structures from input parameters
	 */
	std::vector<cv::DMatch> matches;
	std::vector<cv::KeyPoint> keypointsA, keypointsB;
	if (getCorrespondences(sourcecvt, targetcvt, matches,
			keypointsA, keypointsB, method, methodextractor, methodmatcher, useCCF) == false) {
		matches.clear();
		return false;
	}

	if (matches.size() < 3) {
		hvrl::Log::add().error("computeRigidTransformationWithRANSAC",
				"Not enough matches for computing the rigid transformation");
		matches.clear();
		return false;
	}


	/*cv::Mat3b s;
	cv::drawMatches(sourcecvt, keypointsA, targetcvt, keypointsB, matches, s);
	cv::imwrite("source.png",s);*/

	//cv::Mat img_matches;

	std::vector<std::pair<cv::Vec3f, cv::Vec3f> > matches3d;
	convertMatchesTo3D(*sourcepc, *targetpc, matches, keypointsA, keypointsB,
			matches3d);

	std::vector<std::pair<cv::Vec3f, cv::Vec3f> > matches3dfiltered;
	if (filterMatches(matches3d, matches3dfiltered) == false) {
		matches.clear();
		matches3d.clear();
		matches3dfiltered.clear();
		return false;
	}

	if (matches3dfiltered.size() < 3) {
		hvrl::Log::add().error("computeRigidTransformationWithRANSAC",
				"Not enough matches for computing the rigid transformation after the filtering");
		matches.clear();
		matches3d.clear();
		matches3dfiltered.clear();
		return false;
	}


	/*cv::drawMatches(sourcecvt, keypointsA, targetcvt, keypointsB, matchesbis, s);
	cv::imwrite("target.png",s);*/

	if (ransacAlignment(matches3dfiltered, iterations, R, t) == false) {
		matches.clear();
		matches3d.clear();
		matches3dfiltered.clear();
		return false;
	}
	matches.clear();
	matches3d.clear();
	matches3dfiltered.clear();

	/*for(int i = 0; i < 3 ; ++i){
		for(int j = 0; j < 3 ; ++j){
			tab[i*3+j] = R.at<float>(i,j);
		}
	}*/
	return true;

}

}

}
