#ifndef NMI_HU_H
#define NMI_HU_H

#include <stdexcept> //std::runtime_error (thrown by compute_NMI / compute_HU)

namespace nmihu{

	using namespace std;
	using namespace cv;
	using namespace boost::lambda;
	namespace fs = boost::filesystem;

	//gesture class labels used when none are supplied on the command line
	static const array<string,6> default_gestures= {"A","B","C","Five","Point","V"};
	//accepted image file extensions; membership is tested with a substring
	//search (note: an extension such as ".jpe" would also pass because it
	//occurs inside ".jpeg")
	static const string image_extensions=".jpg,.tiff,.pnm,.ppm,.png,.jpeg";
	//number of Hu-style invariant moments kept (only the first 4 of the classic 7)
	static const size_t hu_size = 4;
	
	class DataNormalizer{
	public:

		DataNormalizer(){}
		DataNormalizer(const Mat_<float>& mat){
			this->setup(mat);
		}
		//normalization (re-scaling) is done in place for a
		//single row vector or for a matrix (a stack of row vectors)
		void operator()( Mat_<float>& x) const{
			for(int i = 0 ; i < x.rows ; ++i)
				for(int j = 0 ; j < x.cols ; ++j)
					if(u[j]!=l[j])
						x(i,j) = 2*(x(i,j) -a[j] - l[j])/(u[j]-l[j]) - 1;
					else
						x(i,j) = 0; //I'm not sure this is the proper thing to do ?!?
		}

		//needs to be called once for many calls of the operator()
		//sets up the info for normalization (average,upper boundary, lower boundary)
		void setup(const Mat_<float>& mat){
			
			Mat_<float> mata,matu,matl,temp;

			reduce(mat,mata,0,CV_REDUCE_AVG);
			temp = mat.clone();
			for(int i = 0 ; i < mat.rows; ++i)
				temp.row(i) = mat.row(i) - mata;

			reduce(temp, matu, 0, CV_REDUCE_MAX);
			reduce(temp, matl, 0, CV_REDUCE_MIN);

			a = vector<float>(mata.begin(),mata.end());
			u = vector<float>(matu.begin(),matu.end());
			l = vector<float>(matl.begin(),matl.end());

			assert(u.size() == mat.cols);
			assert(l.size() == mat.cols);

		}

	private:
		vector<float> a,u,l;
	};

	//Trains 'classifier' on images found under traindirectory/<gesture>/ and
	//primes 'normalizer' with the statistics of the training feature matrix.
	void train(const string& traindirectory, const vector<string>& gestures, SVM& classifier, DataNormalizer& normalizer);
	//Evaluates 'classifier' on the test set; 'normalizerCallable' must be the
	//normalizer previously set up by train().
	void test(const SVM& classifier, const DataNormalizer& normalizerCallable, const string& testdirectory, const vector<string>& gestures);
	
	//expects a single channel input image
	float compute_NMI(const Mat& frame){
		
		if(frame.channels()>1)
			throw exception("compute_NMI is being called with a multichannel image.");

		float nmi = 0.f;
		
		if(!frame.empty())
		{
			float cx, cy, m;
			cx = cy = m = 0;

			for(int i = 0 ; i < frame.rows ; ++i)
			{
				for(int j = 0 ; j < frame.cols ; ++j)
				{
					float pixval = (float)frame.at<byte>(i,j);
					cx += j*pixval;
					cy += i*pixval;
					m += pixval;
				}
			}

			if(!m)
				throw exception("blank segmented image.");

			cx /= m;
			cy /= m;

			for(int i = 0 ; i < frame.rows ; ++i)
			{
				for(int j = 0 ; j < frame.cols ; ++j)
				{
					float pixval = (float)frame.at<byte>(i,j);
					nmi += (pow(j-cx,2) + pow(i-cy,2))*pixval;
				}
			}

			assert(nmi==nmi); //check if it's a NaN
			assert(m>0);
			nmi = pow( nmi*1., 0.5 )/m;
			
			return nmi;

		}else
		{
			throw Exception(2,"Tried to compute NMI for empty frame.",
				__FUNCTION__,__FILE__,__LINE__);
		}
		
		return -1.f;
	}

	//Normalized central moment eta_{p,q} of a single-channel 8-bit image:
	//sum over pixels of (x-cx)^p * (y-cy)^q * I(y,x), divided by
	//mu00^(1+(p+q)/2). (cx,cy) is the intensity centroid and mu00 the total
	//intensity mass, both precomputed by the caller.
	float nu(const Mat& frame, int p, int q, float cx, float cy, float mu00){

		float mupq = 0;
		for(int row = 0 ; row < frame.rows ; ++row)
			for(int col = 0 ; col < frame.cols ; ++col)
				mupq += pow(col-cx,p) * pow(row-cy,q) * (float)frame.at<byte>(row,col);

		assert(mu00>0);

		//normalization by the mass makes the moment scale invariant
		return mupq / pow( mu00*1., 1+(p+q)/2. );
	}

	//Computes the first four Hu invariant moments of a single-channel 8-bit
	//image, each reported as |log10(|phi|)| for numeric compression.
	//Throws std::runtime_error for multichannel or blank input and
	//cv::Exception for an empty frame (both derive from std::exception,
	//so existing catch(exception&) callers are unaffected).
	array<float,hu_size> compute_HU(const Mat& frame){

		if(frame.channels()>1) //fixed: std::exception(const char*) is a non-standard MSVC extension
			throw runtime_error("compute_HU is being called with a multichannel image.");

		if(frame.empty())
			throw Exception(2,"Tried to compute HU for empty frame.",
				__FUNCTION__, __FILE__, __LINE__);

		//first pass: intensity centroid (cx,cy) and total mass m
		float cx = 0, cy = 0, m = 0;
		for(int i = 0 ; i < frame.rows ; ++i)
		{
			for(int j = 0 ; j < frame.cols ; ++j)
			{
				float pixval = (float)frame.at<uchar>(i,j);
				cx += j*pixval;
				cy += i*pixval;
				m += pixval;
			}
		}

		if(!m)
			throw runtime_error("Blank segmented image");

		cx /= m;
		cy /= m;

		//normalized central moments required by the first four invariants
		//(nu10/nu01 are zero by construction and were dropped as dead code)
		float nu20 = nu(frame,2,0,cx,cy,m);
		float nu02 = nu(frame,0,2,cx,cy,m);
		float nu30 = nu(frame,3,0,cx,cy,m);
		float nu03 = nu(frame,0,3,cx,cy,m);
		float nu21 = nu(frame,2,1,cx,cy,m);
		float nu12 = nu(frame,1,2,cx,cy,m);
		float nu11 = nu(frame,1,1,cx,cy,m);

		array<float,hu_size> hu;
		hu[0] = abs(log10(abs(nu20 + nu02)));
		//fixed: phi2 = (nu20-nu02)^2 + 4*nu11^2; the old 4*pow(2*nu11,2)
		//computed 16*nu11^2 instead of 4*nu11^2
		hu[1] = abs(log10(abs(pow(nu20-nu02,2) + 4*pow(nu11,2))));
		hu[2] = abs(log10(abs(pow(nu30-3*nu12,2) + pow(3*nu21-nu03,2))));
		hu[3] = abs(log10(abs(pow(nu30+nu12,2) + pow(nu21+nu03,2))));

		return hu; //unreachable duplicate return removed
	}

	//Assembles the 1x(hu_size+1) feature row vector [NMI, hu0..hu3] for a
	//segmented single-channel frame; propagates any exception thrown by the
	//feature functions (NMI is computed first, as before).
	Mat_<float> nmihuVector(const Mat& frame){

		Mat_<float> fvec(1, hu_size+1);
		fvec(0,0) = compute_NMI(frame);

		const auto hu = compute_HU(frame);
		for(size_t k = 0; k < hu_size ; ++k)
			fvec(0,(int)k+1) = hu[k];

		return fvec;
	}

	//Segments likely skin pixels: blurs, converts BGR->HSV and ORs three
	//inRange masks covering red/orange hues. Shows two debug windows
	//("blurred", "mask") as a side effect. Returns a single-channel 8-bit
	//mask (255 = skin candidate).
	//NOTE(review): the HSV thresholds below are hand-tuned; the saturation
	//bounds in the original comments did not match the code and have been
	//corrected here.
	Mat skin_mask(Mat img){
		
		Mat temp, temp1, temp2, temp3;
		blur(img,temp,Size(3,3));
		cvtColor(temp, temp, CV_BGR2HSV);
		blur(temp, temp, Size(3,3));

		imshow( "blurred", temp );
		waitKey( 3 );
		Mat lb( temp.size(), temp.type() );
		Mat lu( temp.size(), temp.type() );

		// Red to orange hue, medium-to-high saturation:
		// H in [0,14] (~0-28 degrees), S in [75,200], any V
		
		lb = Scalar(0,75,0);
		lu = Scalar(14,200,255);
		inRange(temp, lb, lu, temp1);

		// Red hue, low saturation:
		// H in [0,13] (~0-26 degrees), S in [20,90], any V

		lb = Scalar(0,20,0);
		lu = Scalar(13,90,255);
		inRange(temp, lb, lu, temp2);

		// Red-to-pink hue, low saturation:
		// H in [170,180] (~340-360 degrees), S in [15,90], any V

		lb = Scalar(170,15,0);
		lu = Scalar(180,90,255);
		inRange(temp, lb, lu, temp3);

		//union of the three hue/saturation bands
		Mat mask = temp1 | temp2 | temp3;
		imshow("mask",mask);
		waitKey(3);

		return mask;
	}

	//Walks dir_string/<label>/ for every label in 'labels', computes the
	//NMI+HU feature vector of each accepted (skin-masked) image and stacks
	//the results.
	//Outputs: featureVecs — one feature row per accepted image;
	//         responses   — column vector of label indices, parallel rows.
	//Frames whose feature extraction throws are reported on stdout and dropped.
	void computeFeatureMatrices( const string& dir_string, const vector<string>& labels,
								Mat_<float>& featureVecs, Mat_<int>& responses ){
		
		list<Mat_<float>> vecs;
		vector<int> resps;

		for(int gcode = 0; gcode < labels.size() ; ++gcode)
		{

			//build path to current gesture directory
			fs::path dir_path( dir_string);
			dir_path /= labels[gcode];

			//default directory_iterator ctor initializes to post last file symbol
			fs::directory_iterator end_itr;
			for(fs::directory_iterator itr(dir_path); itr != end_itr ; ++itr){
			
				if(!fs::is_directory(itr->status())){
				
					//check if it is an acceptable image file (if not jump to the next)
					//NOTE(review): substring test — an extension like ".jpe" would
					//also pass because it occurs inside ".jpeg"
					string ext = fs::extension(itr->leaf());
					if( image_extensions.find(ext)==string::npos ) 
						continue;					

					//presumably the datasets contain small (<=100x100) samples and
					//larger files are non-sample images — TODO confirm intent
					Mat img = imread(itr->string());
					if( img.rows > 100 || img.cols > 100 )
						continue;
					img = skin_mask(img);

					if(!img.empty()){
					
						//imshow( "Test", img );
						//waitKey(2);
						Mat vec;
						try{
							vec = nmihuVector(img);
						}catch(exception& e){
							cout << "Frame " << itr->string() << " dropped because it yielded: " << e.what() << endl;
							continue;
						}

						vecs.push_back(vec);
						resps.push_back(gcode);

					}
				}
			}
		}

		//prepare outputs: copy the accumulated rows into one contiguous matrix
		featureVecs = Mat_<float>(vecs.size(),hu_size+1);
		int i = 0;
		for(auto itr = vecs.begin() ; itr!=vecs.end() ; ++itr)
			(*itr).copyTo(featureVecs.row(i++));

		cout << "resps.size(): " << resps.size() << endl;
		responses = Mat_<int>( resps, true);

		vecs.clear();
		resps.clear();

	}

	//Builds the training feature matrix from traindirectory/<gesture>/,
	//fits 'normalizer' on it, normalizes the matrix in place and trains the
	//SVM 'c' with auto-tuned parameters.
	void train(const string& traindirectory, const vector<string>& gestures, SVM& c, DataNormalizer& normalizer){

		Mat_<float> trainVecs;
		Mat_<int> responses;

		cout << "Computing training vectors..";
		size_t t0 = clock();
		computeFeatureMatrices(traindirectory, gestures, trainVecs, responses);
		cout << "...completed in " << (1.0*clock() - t0)/CLOCKS_PER_SEC << " seconds." << endl;

		//re-scale the features to [-1,1] using training-set statistics
		normalizer.setup(trainVecs);
		normalizer(trainVecs);
		cout << "Train vecs: \n" << trainVecs << endl;

		//C-support vector classification with a radial basis kernel;
		//train_auto cross-validates the remaining hyper-parameters
		SVMParams params;
		params.svm_type = SVM::C_SVC;
		params.kernel_type = SVM::RBF;

		cout << "Training ..";
		t0 = clock();
		c.train_auto(trainVecs, responses, Mat(), Mat(), params);
		cout << "....completed in " << (1.0*clock() - t0)/CLOCKS_PER_SEC << " seconds." << endl;
	}

	//Evaluates 'classifier' on testdirectory/<gesture>/complex/ images.
	//'normalize' must be the DataNormalizer set up during training, so test
	//vectors are re-scaled with the training statistics. Prints per-gesture
	//and total misclassification counts to stdout.
	void test(const SVM& classifier, const DataNormalizer& normalize,
				const string& testdirectory, const vector<string>& gestures){
	
		Mat_<float> testVecs;
		Mat_<int> responses;

		//only the "complex background" variant of the test set is used here
		string subdir("/complex");
		cout << "Testing with " << subdir << " background...\n";
		auto subdirGestures = gestures;

		//directory path correction using boost lambda functions:
		//appends "/complex" to every label, then echoes each corrected label
		for_each(subdirGestures.begin(), subdirGestures.end(), _1 = _1 + subdir );
		for_each(subdirGestures.begin(), subdirGestures.end(), cout << _1 + "\n");

		cout << "Computing test vectors.."; size_t to = clock();
		computeFeatureMatrices(testdirectory, subdirGestures, testVecs, responses);
		normalize(testVecs);
		cout <<"...completed in " << (1.0*clock() - to)/CLOCKS_PER_SEC << " seconds." << endl;  
		cout << "Testing vecs: " << testVecs << endl;

		//responses is expected to be a single column of label indices
		assert(responses.rows > responses.cols);
		assert(responses.cols==1);

		Mat_<int> predictions(responses.size());

		cout << "Testing .."; to = clock();
		vector<int> err( gestures.size(), 0); //misclassifications per gesture
		vector<int> respsum(err);             //sample count per gesture

		for(int i = 0 ; i < responses.rows ; ++i){
			predictions(i,0) = (int)classifier.predict(testVecs.row(i));
			cout << responses(i,0) << " X " << predictions(i,0) << endl; //actual X predicted
			err[responses(i,0)] += responses(i,0)!= predictions(i,0)?1:0;
			respsum[responses(i,0)]++;
		}

		//boost lambda accumulation: sums the per-gesture error counts
		int total_err = 0;
		for_each(err.begin(),err.end(), total_err+=_1);

		cout << "Total error: " << total_err << "/"<< responses.rows << endl; 
		for(int i = 0 ; i < err.size() ; ++i){
			cout <<"Error for " << gestures[i] << ": " << err[i] << "/" << respsum[i] << endl;
		}
		cout << ".... completed in " << (1.0*clock() - to)/CLOCKS_PER_SEC <<
			" seconds." << endl;
	}

	//
	//Tests
	//
	//Smoke test: iterates over the image files of a fixed dataset directory,
	//displays each image and prints its four HU features. Throws cv::Exception
	//when a file cannot be read; returns true if the whole directory is processed.
	bool test_compute_HU( ){

		string dir_string("C:/Development/datasets/Triesch");
		fs::path dir_path( dir_string);
		fs::directory_iterator end_itr; //default ctor == "after the last entry"

		for(fs::directory_iterator itr(dir_path); itr != end_itr ; ++itr)
		{
			if(!fs::is_directory(itr->status())){

				//skip anything that is not a recognized image file
				string ext = fs::extension(itr->leaf());
				cout << "ext: " << ext << "   ";
				if( image_extensions.find(ext)==string::npos ) 
					continue;

				cout << "HU for " << itr->leaf();
				Mat img = imread(itr->string());		
				if(!img.empty()){
					
					imshow( "Test", img );
					waitKey(2);
					auto hu = compute_HU( img );
					for( int i = 0 ; i < hu_size; ++i ){
						cout << hu[i] << ", ";
					}
					cout << endl;

				}
				else{
					throw Exception( 1, "Can't read file",
						__FUNCTION__, __FILE__, __LINE__);
				}
			}
		}
		return true;
		
	}

	//Smoke test: iterates over the image files of a fixed dataset directory,
	//displays each image and prints its NMI value. Throws cv::Exception when
	//a file cannot be read; returns true if the whole directory is processed.
	bool test_compute_NMI( ){

		string dir_string("C:/Development/datasets/Triesch");
		fs::path dir_path( dir_string);

		fs::directory_iterator end_itr; //default ctor == "after the last entry"
		for(fs::directory_iterator itr(dir_path); itr != end_itr ; ++itr)
		{
			if(fs::is_directory(itr->status()))
				continue;

			//skip anything that is not a recognized image file
			string ext = fs::extension(itr->leaf());
			if( image_extensions.find(ext)==string::npos )
				continue;

			cout << "NMI for " << itr->leaf();
			Mat img = imread(itr->string());
			if(img.empty())
				throw Exception( 1, "Can't read file",
					__FUNCTION__, __FILE__, __LINE__);

			imshow( "Test", img );
			cv::waitKey(2);
			std::cout << ": " << compute_NMI(img) << endl;
		}

		return true;
	}

	//Entry point of the NMI+HU gesture recognition experiment.
	//Parses the command line (log file, training/test data paths, gesture
	//labels), trains an SVM on the training set and evaluates it on the
	//test set. Returns false on argument-parsing or processing failure.
	bool test_nmi_hu( int argc, char **argv ){

		TCLAP::CmdLine cmd( "Gesture recognition using NMI and HU + SVM.", ' ', "1.0");
		
		TCLAP::ValueArg<string> log_file( "l", "log_file", "Logfile definition.",
									false, "grid_exp.txt", "string", cmd);
		
		TCLAP::ValueArg<string> trainpath( "r", "training_data_path","Directory for the set of training images and labels.", 
									false, "C:/Development/datasets/Marcel-Train", "string", cmd);
		
		TCLAP::ValueArg<string> testpath( "e", "test_data_path", "Directory for the test data path."
			"In case it is empty, the system will try to read frames from the webcam.",
			false, "C:/Development/datasets/Marcel-Test", "string", cmd);
		
		TCLAP::MultiArg<string> gestures("g", "gesture", "Gesture Label", false, "string", cmd);

		try{
			cmd.parse( argc, argv );
		}
		catch( TCLAP::ArgException &ae ){
			cout << "Argument parsing exception: " << ae.what() << endl;
			return false; //fixed: previously fell through and ran with unparsed arguments
		}

		try{

			vector<string> glist = gestures.getValue();

			//If fewer than two gestures were provided fall back to the default
			//set (presumably because the classifier needs at least two classes
			//to discriminate — TODO confirm a single -g label should be ignored)
			if(glist.size()<=1){
				glist.assign( default_gestures.begin(), default_gestures.end() );
			}

			SVM classifier;
			DataNormalizer normalizer;

			train(trainpath.getValue(), glist, classifier, normalizer);
			test(classifier, normalizer, testpath.getValue(), glist);

		}catch(cv::Exception &cve){

			cout << cve.what() << endl;
			return false; //fixed: was inconsistently reporting success after a cv failure
		}catch(exception &e){

			cout << e.what() << endl;
			return false;
		}

		return true;
	}
}
#endif