#include <cmath>
#include <cstdio>
#include <cstring>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <sys/types.h>

#include <opencv2/opencv.hpp>
#include <opencv2/flann/config.h>
#include <opencv2/flann/flann.hpp>
#include <opencv2/legacy/legacy.hpp>		// EM
#include <opencv2/contrib/contrib.hpp>		// colormap
#include <opencv2/nonfree/nonfree.hpp>		// SIFT

using namespace cv;
using namespace std;

#include "score_curve.h"

class LC_HEX
{
	public:
		bool VERBOSE;

		CvSVM svm;

		std::vector< Mat >  global_feature;
		std::vector< Mat >  pixel_feature;
		std::vector< Mat >  pixel_label;

		std::string m_score_path;
		std::vector< Mat > feature_array;
		std::vector< Mat > label_array;

		vector <CvDTree> dtree;
		vector<CvRTrees> random_trees;
		vector<CvSVM> svms;
		vector <CvBoost> adaboost;

		vector <EM> em[2];


		Lc_Score_Curve * lc_score_curve;

		String * p_model_code;

		
		Mat classifier_label;

		int frame_n;
		int global_feature_n;

		string root;
		string METHOD;
		int K;

		Mat centers;


		Mat MoveResponsetoimage(Size sz, vector<KeyPoint> keypt,Mat & hx);



		void work()
		{
			unsupervise_learning();
			supervise_learning();
		}

		void init_test(std::string str);

		void trainModel(Mat &data, Mat &lab, string model_filename, string method);

		void init(string n_root, string n_method, string * n_p_model_code, int n_K = 3);



		

		LC_HEX()
		{
			global_feature.clear();
			pixel_feature.clear();
			pixel_label.clear();

			feature_array.clear();
			label_array.clear();


			VERBOSE = true;
			METHOD = "dt";

			lc_score_curve = new(Lc_Score_Curve);

		}

		void predict_frame_with_score( 
										Mat & m_global_feature,
										Mat & m_pixel_feature,
										Mat & m_predict,
										Mat & label)
		{
			predict_frame(m_global_feature,m_pixel_feature,m_predict);

			compare_label( m_predict,label);

		}

		void compare_label(	Mat & m_predict,
							Mat & label)
		{
			int sz = label.size().height * label.size().width;
			for(int m=0;m<sz;m++)
			{
				lc_score_curve->Add(m_predict.at<float>(m),floor(0.5 + label.at<float>(m)));
			}
		}

		void OutputResult()
		{
			string curve_filename =m_score_path + METHOD +"_" + (*p_model_code) + "_score_curve.txt";
			FILE * out = fopen(curve_filename.data(),"w");//
			lc_score_curve->Output(out);
			fclose(out);
		}

		float m_distance(Mat & a, Mat b)
		{
			int sz1 = a.size().width * a.size().height;
			int sz2 = b.size().width * b.size().height;
			if(sz1>sz2) sz1 = sz2;
			float ans = 0;
			for(int i=0;i<sz1;i++)
			{
				float dif = a.at<float>(i) - b.at<float>(i);
				ans += dif * dif;
			}
			return ans;
		}


		void predict_frame(	Mat & m_global_feature,
							Mat & m_pixel_feature,
							Mat & m_predict)
		{
			
			double min_dis;
			

			int min_id = 0;

			if(K>1)
			{
				min_dis = m_distance(m_global_feature, centers.row(0));
				for(int i=1;i<K;i++)
				{
					double dis =  m_distance(m_global_feature, centers.row(i));
					if(dis < min_dis) 
					{
						min_dis = dis;
						min_id = i;
					}
				}
			}
			else min_id = 0;

			printf("it belongs to %d th classifier \n",min_id);

			SinglePredict(m_pixel_feature, METHOD , m_predict ,min_id);

			printf("done\n");			
			
		}

		

		void add_frame(	Mat & m_global_feature,
						Mat & m_pixel_feature,
						Mat & m_pixel_label)
		{

			global_feature.push_back(m_global_feature);
			pixel_feature.push_back(m_pixel_feature);
			pixel_label.push_back(m_pixel_label);
			classifier_label = Mat(0,0,5);

		}

		void supervise_learning()
		{
			for(int i= 0;i<feature_array.size();i++)
			{
				stringstream filename;
				
				filename<< root+"models/"+"LC_HEX" + (*p_model_code) + METHOD+"_" << i ;

				cout << "try to save model " << filename.str().c_str() <<endl;
				trainModel(feature_array[i],label_array[i],filename.str(),METHOD);				

			}
		}

		void unsupervise_learning()
		{

			frame_n = global_feature.size();

			if(frame_n<1) return;

			global_feature_n = global_feature[0].size().width;

			
			//Mat all_global_feature(0,0,5);
			Mat all_global_feature(frame_n,global_feature_n,5);

			for(int i=0;i<frame_n;i++)
			{
				global_feature[i].row(0).copyTo(all_global_feature.row(i));	
				
			}
			
			if(1)
			{


				cvflann::KMeansIndexParams params;

				
				Mat kmeans_labs;
				int attempts = 10;
				
				TermCriteria crit(TermCriteria::MAX_ITER +TermCriteria::EPS,30,0.01);

				if( K > frame_n) K = frame_n;

				printf("K is %d\n",K);
				//system("pause");


				if(K>1)
				{

					kmeans(all_global_feature,K,kmeans_labs,crit,attempts,KMEANS_PP_CENTERS,centers);

					string filename = root+"models/"+"LC_HEX" + (*p_model_code) + "main" ;

					FileStorage fs(filename, FileStorage::WRITE);

					fs << "kmean" << centers;

					fs.release();
				}

				//for(int i,0,frame_n) printf("%d ", kmeans_labs.at<int>(i));

				feature_array.clear();
				label_array.clear();

				Mat TempEmpty(0,0,5);

				for(int i=0;i<K;i++)
				{
					feature_array.push_back(TempEmpty);
					label_array.push_back(TempEmpty);
				}


				for(int i=0;i<frame_n;i++)
				{
					printf("ok");
					int id;
					if(K>1) id = kmeans_labs.at<int>(i);
					else id = 0;


					feature_array[id].push_back(pixel_feature[i]);
					label_array[id].push_back  (pixel_label[i]  );
				}
			}

		}

	void SinglePredict(Mat &desc, string method, Mat &h_x, int classifier_id);

	private:
};


void LC_HEX :: trainModel(Mat &data, Mat &lab, string model_filename, string method){
	
	double t;
	// === train 2nd models === //
	if(method=="svm")
	{
		CvSVM svm;
		
		CvSVMParams sv_params;
		sv_params.term_crit=cvTermCriteria(CV_TERMCRIT_ITER,100,0.000001);
		sv_params.kernel_type=CvSVM::LINEAR;
		sv_params.svm_type=CvSVM::EPS_SVR;
		sv_params.p = 1;
		
		if(VERBOSE) cout << "Train SVM ...";
		t = getTickCount();
		svm.train_auto(data,lab,Mat(),Mat(),sv_params);
		t = (getTickCount()-t)/getTickFrequency();
		if(VERBOSE) cout << " time:" << t << " secs." << endl;

		string model = model_filename + ".yml";
		svm.save(model.c_str());
		
	}	
	else if(method=="em")
	{

		EM my_em[2];

		int n_pos = 0;
		int n_neg = 0;
		for(int i=0;i<data.rows;i++){
			if(lab.at<float>(i)>0.1)
				n_pos++;
			else
				n_neg++;

		}
		
		Mat pos(n_pos,data.cols,5);
		Mat neg(n_neg,data.cols,5);
		

		n_pos = 0;
		n_neg = 0;

		if(VERBOSE) cout << "Spliting data ..." << endl;
		for(int i=0;i<data.rows;i++){
			if(lab.at<float>(i)>0){
				/*if(!pos.data) data.row(i).copyTo(pos);
				else pos.push_back(data.row(i));*/
				//pos.push_back(data.row(i));
				data.row(i).copyTo(pos.row(n_pos));
				n_pos++;
			}
			else {
				/*if(!neg.data) data.row(i).copyTo(neg);
				else neg.push_back(data.row(i));*/
				
				data.row(i).copyTo(neg.row(n_neg));
				n_neg++;
			}
			
		}
		
		
		// preprocessing with PCA should help (since assuming diagonal covariance)?
		int nclusters[2];
		nclusters[0] = 16;
		nclusters[1] = 16;
		//CvTermCriteria term_crit=cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, FLT_EPSILON);
		//CvEMParams param[2];
		//param[0] = CvEMParams(nclusters[0]);
		//param[1] = CvEMParams(nclusters[1]);
		
		
		FileStorage fs;
		
		if(VERBOSE) cout << "Training with "<< pos.rows <<" positive EM model...";
		t = getTickCount();
		my_em[0] = EM::EM(nclusters[0]);
		my_em[0].train(pos);
		//em[0].train(pos, Mat(),param[0]);
		t = (getTickCount()-t)/getTickFrequency();
		cout << " time:" << t << " secs." << endl;
		
		string posmodel = model_filename + "_pos.yml";
		fs = FileStorage(posmodel.c_str(), FileStorage::WRITE);
		my_em[0].write(fs);
        //em[0].write(fs.fs, "EM");
		
		
		if(VERBOSE) cout << "Training  with " << neg.rows << " negative EM model...";
		t = getTickCount();
		my_em[1] = EM::EM(nclusters[1]);
		my_em[1].train(neg);
		//em[1].train(neg, Mat(),param[1]);
		t = (getTickCount()-t)/getTickFrequency();
		cout << " time:" << t << " secs." << endl;
		

		string negmodel = model_filename + "_neg.yml";
		fs = FileStorage(negmodel.c_str(), FileStorage::WRITE);
        //em[1].write(fs.fs, "EM");
		my_em[1].write(fs);
		
	}
	else if(method =="ada")
	{
		CvBoost atr;
		CvBoostParams params;
		int boost_type = CvBoost::GENTLE; //CvBoost::REAL; //CvBoost::GENTLE;
		int weak_count = 100;
		double weight_trim_rate = 0.95;
		int max_depth = 1;
		bool use_surrogates = false;
		const float* priors = NULL;
		params = CvBoostParams(boost_type, weak_count,weight_trim_rate,max_depth,use_surrogates,priors);
		
		Mat varType = Mat::ones(data.cols+1,1,CV_8UC1) * CV_VAR_NUMERICAL; // all floats
		varType.at<uchar>(data.cols,0) = CV_VAR_CATEGORICAL;
		
		//lab = lab*2-1;
		//cout << lab << endl;
		//lab.convertTo(lab,CV_8UC1);
		
		double t = getTickCount();
		if(VERBOSE) cout << "Train (Gentle) AdaBoost model ...";
		atr.train(data,CV_ROW_SAMPLE,lab,Mat(),Mat(),varType,Mat(),params,false);
		t = (getTickCount()-t)/getTickFrequency();
		if(VERBOSE) cout << " time:" << t << " secs." << endl;
		
		if(VERBOSE) cout << "Saving model to: " << model_filename << endl;
		string model = model_filename + ".yml";
		atr.save(model.c_str());

	}
	else if(method=="rdt")
	{

		CvRTrees  m_random_tree;
		CvRTParams params;

		//params.max_depth = 5;
		

		Mat varType = Mat::ones(data.cols+1,1,CV_8UC1) * CV_VAR_NUMERICAL; // all floats
		varType.at<uchar>(data.cols,0) = CV_VAR_CATEGORICAL;

		params.max_depth = 6;
		params.regression_accuracy = 0.1;
		params.use_1se_rule = true;
		params.use_surrogates = true;
		params.truncate_pruned_tree = true;

		double t = getTickCount();
		if(VERBOSE) cout << "Train Random Trees model ...";

		m_random_tree.train(data, CV_ROW_SAMPLE , lab,Mat(),Mat(),varType,Mat(),params);


		t = (getTickCount()-t)/getTickFrequency();
		if(VERBOSE) cout << " time:" << t << " secs." << endl;

		//system("pause");

		if(VERBOSE) cout << "Saving model to: " << model_filename << endl;
		string model = model_filename + ".yml";

		m_random_tree.save(model.c_str());

	}
	else if(method=="dt")
	{
		CvDTree dtr;
		CvDTreeParams dt_params = CvDTreeParams();
		//		dt_params = CvDTreeParams(12, // 9, max depth
		//								  10, // min sample count
		//								  0.0, // regression accuracy (non-zero value makes it regression?)
		//								  true, // compute surrogate split, as we have missing data
		//								  12, //9, (not used for regression) max number of categories (use sub-optimal algorithm for larger numbers)
		//								  4, // the number of cross-validation folds
		//								  true, // true: harsh pruning
		//								  true, // true: truncate tree
		//								  0 // the array of priors, the bigger p_weight, the more attention
		//								  );

		int TREE_DEPTH = 10;
		
		dt_params = CvDTreeParams(TREE_DEPTH,10,0.0,true,TREE_DEPTH,4,true,true,0);
		//dt_params = CvDTreeParams(12,10,0.0, true,12,4,true,true,0);
		//dt_params = CvDTreeParams(9,10,0.0, true,9,4,true,true,0);
		//dt_params = CvDTreeParams(6,10,0.0, true,9,4,true,true,0);
		//dt_params = CvDTreeParams(4,10,0.0, true,10,4,true,true,0);
		
		if(VERBOSE) cout << "num data:" << data.rows << endl;
		
		Mat varType = Mat::ones(data.cols+1,1,CV_8UC1) * CV_VAR_NUMERICAL; // all floats
		
		double t = getTickCount();
		if(VERBOSE) cout << "Train decision tree model ...";
		//

		/*Mat newmat(data.size().height,data.size().width,5);
		printf("%d %d %d\n", data.size().width, data.size().height,newmat.size().width);
		system("pause");*/

		dtr.train(data,CV_ROW_SAMPLE,lab,Mat(),Mat(),varType,Mat(),dt_params);
		t = (getTickCount()-t)/getTickFrequency();
		if(VERBOSE) cout << " time:" << t << " secs." << endl;
		
		if(VERBOSE) cout << "Saving model to: " << model_filename << endl;
		string dtmodel = model_filename + ".yml";
		dtr.save(dtmodel.c_str());
		
	}
	else if(method=="ada")
	{
		CvBoost atr;
		CvBoostParams params;
		int boost_type = CvBoost::GENTLE; //CvBoost::REAL; //CvBoost::GENTLE;
		int weak_count = 100;
		double weight_trim_rate = 0.95;
		int max_depth = 1;
		bool use_surrogates = false;
		const float* priors = NULL;
		params = CvBoostParams(boost_type, weak_count,weight_trim_rate,max_depth,use_surrogates,priors);
		
		Mat varType = Mat::ones(data.cols+1,1,CV_8UC1) * CV_VAR_NUMERICAL; // all floats
		varType.at<uchar>(data.cols,0) = CV_VAR_CATEGORICAL;
		
		//lab = lab*2-1;
		//cout << lab << endl;
		//lab.convertTo(lab,CV_8UC1);
		
		double t = getTickCount();
		if(VERBOSE) cout << "Train (Gentle) AdaBoost model ...";
		atr.train(data,CV_ROW_SAMPLE,lab,Mat(),Mat(),varType,Mat(),params,false);
		t = (getTickCount()-t)/getTickFrequency();
		if(VERBOSE) cout << " time:" << t << " secs." << endl;
		
		if(VERBOSE) cout << "Saving model to: " << model_filename << endl;
		string model = model_filename + ".yml";
		atr.save(model.c_str());
		cout << "Done saving model." << endl;
		
	}
	
}


void  LC_HEX :: SinglePredict(Mat &desc, string method, Mat &h_x,int classifier_id){
	
	
	Mat hx = Mat::zeros((int)desc.rows,1,CV_32FC1);
	
	for(int k=0;k<(int)desc.rows;k++)
	{
		Mat row = desc.row(k);					// passing reference
		
		if(method=="ada")
		{
			hx.at<float>(k,0) = adaboost[classifier_id].predict(row);
		}
		else if(method == "svm")
		{
			hx.at<float>(k,0) = svms[classifier_id].predict(row);
		}
		else if(method=="dt")
		{
			CvDTreeNode *node;
			node = dtree[classifier_id].predict(row,Mat(),false);
			hx.at<float>(k,0) = node->value;
		}
		else if(method=="em"){
			
			
			double log_p_pos = em[0][classifier_id].predict(row)(0);
			double log_p_neg = em[1][classifier_id].predict(row)(0);

			double diff = log_p_pos - log_p_neg;

			if(diff>0) diff = 0; // acts as ceiling function skin with p=1


			hx.at<float>(k,0)  = (float)exp(diff);





		}
		else if(method=="rdt")
		{
			hx.at<float>(k,0) = random_trees[classifier_id].predict(row,Mat());

		}
		else exit(1);
		
	}
	h_x = hx;

	hx.release();
	//cout << "done" << endl;
	return;
}

Mat LC_HEX ::MoveResponsetoimage(Size sz, std::vector<KeyPoint> keypts , Mat & hx)
{
	Mat res = Mat::zeros(sz,CV_32FC1);
	for(int k=0;k<(int)hx.rows;k++)
	{
		int _r = floor(keypts[k].pt.y+.5);			// center
		int _c = floor(keypts[k].pt.x+.5);					
		
		res.at<float>(_r,_c) = hx.at<float>(k,0);


	}
	return res;
}

//root+"output/loss/"
void LC_HEX::init_test(std::string score_path)
{
	m_score_path = score_path;
	if(K>1)
	{
		string filename = root+"models/"+"LC_HEX" + (*p_model_code) + "main" ;

		FileStorage fs(filename, FileStorage::READ);

		fs["kmean"] >> centers;

		printf("dim of centers %d by %d\n",centers.size().height,centers.size().width);

		K = centers.size().height;
	}

	printf("K = %d\n",K);

	

	if(METHOD =="dt")
	{

		dtree = vector<CvDTree>(K);

		for(int i=0;i<K;i++)
		{
			stringstream filename;
			
			filename<< root+"models/"+"LC_HEX" + (*p_model_code) + METHOD+"_" << i <<".yml";

			cout << " try to load " << filename.str()<<endl;

			dtree[i].load(filename.str().c_str());
		}
	}
	else if(METHOD == "svm")
	{

		svms = vector<CvSVM>(K);

		for(int i=0;i<K;i++)
		{
			stringstream filename;
			
			filename<< root+"models/"+"LC_HEX" + (*p_model_code) + METHOD+"_" << i <<".yml";

			cout << " try to load " << filename.str()<<endl;

			svms[i].load(filename.str().c_str());
		}
	}
	else if(METHOD == "ada")
	{
		adaboost = vector<CvBoost>(K);

		for(int i=0;i<K;i++)
		{
			stringstream filename;
			
			filename<< root+"models/"+"LC_HEX" + (*p_model_code) + METHOD+"_" << i <<".yml";

			cout << " try to load " << filename.str()<<endl;

			adaboost[i].load(filename.str().c_str());
		}

	}
	else if(METHOD == "rdt")
	{
		random_trees = vector<CvRTrees>(K);

		for(int i=0;i<K;i++)
		{
			stringstream filename;
			
			filename<< root+"models/"+"LC_HEX" + (*p_model_code) + METHOD+"_" << i <<".yml";

			cout << " try to load " << filename.str()<<endl;

			random_trees[i].load(filename.str().c_str());


		}
	}
	else if(METHOD =="em")
	{
		em[0] = vector<EM>(K);
		em[1] = vector<EM>(K);

		for(int i=0;i<K;i++)
		{
			FileStorage fs;
			FileNode fileNode;

			stringstream filename;
			
			
			filename<< root+"models/"+"LC_HEX" + (*p_model_code) + METHOD+"_" << i <<"_pos.yml";

			cout << "Opening trained model: " << filename.str() << endl;


		
			fs = FileStorage(filename.str().c_str(), FileStorage::READ);
			fileNode = fs["EM"];
			em[0][i].read(fileNode);
		
		
			stringstream filename_neg;
			
			filename_neg<< root+"models/"+"LC_HEX" + (*p_model_code) + METHOD+"_" << i <<"_neg.yml";
			cout << "Opening trained model: " << filename_neg.str() << endl;
			
			fs = FileStorage(filename_neg.str().c_str(), FileStorage::READ);
			fileNode = fs["EM"];
			em[1][i].read(fileNode);
		}
	}

	

}

void LC_HEX::init(string n_root, string n_method,string * n_p_model_code,int n_K)
{
	root = n_root;
	METHOD = n_method;

	K = n_K;

	p_model_code = n_p_model_code;
}

