#include "Classifier.h"

//void testPredictOnFrame()
//{
//	LcVideoReadExt reader;
//
//	reader.f_start = 300;
//
//	reader.t_win = 2;
//
//	reader.f_rate = 50;
//
//	//LcRandomTrees my_classifier;
//	LcKNN my_classifier;
//
//	Mat frm_train;
//	Mat gt_train;
//	int j = reader.readNext(frm_train);
//	reader.getLabel( gt_train );
//
//	LcFeatureExtractor extractor;
//
//	extractor.set_extractor("l");
//
//	Mat feat_train;
//
//	Mat lab_train;
//
//	extractor.work(frm_train, feat_train ,gt_train , lab_train);
//
//	my_classifier.train( feat_train, lab_train);
//
//	Mat frm_test;
//
//	Mat feat_test;
//
//	reader.readNext( frm_test);
//
//	Mat gt_test, lab_test;
//
//	reader.getLabel( gt_test );
//
//	extractor.work(frm_test, feat_test, gt_test, lab_test);
//
//	Mat res_test;
//
//	LcValidator report = my_classifier.predict( feat_test,res_test, lab_test);
//	//LcValidator report = my_classifier.predict( feat_train,res_test, lab_train); // test on train
//
//	report.display();
//
//	Mat mask;
//
//	getLabelMask( res_test, mask, frm_test.size() , extractor.bound_setting);
//
//	imshow("debug mask", mask);
//
//	cv::waitKey(0);
//
//}
//
//void testClassifiers()
//{
//	LcVideoReadExt reader;
//
//	reader.t_win = 2;
//
//	vector< LcClassifier * > classifiers;
//
//	classifiers.push_back( new LcRandomTrees);
//
//	classifiers.push_back( new LcDecisionTree);
//
//	//classifiers.push_back( new LcAdaBoosting);
//
//
//		
//	Mat frm;
//	Mat gt;
//	int j = reader.readNext(frm);
//	reader.getLabel( gt );
//	if(j>=0 && gt.data)
//	{
//
//		Mat dsp; gt.copyTo(dsp);
//		dsp = dsp * 255.0;
//		dsp.convertTo( dsp, CV_8U);
//
//		cvtColor(dsp,dsp,CV_GRAY2BGR);
//
//		addWeighted(frm,0.5,dsp,0.5,0, dsp);
//
//		imshow("show",dsp);
//
//		cv::waitKey(1);
//
//		LcFeatureExtractor extractor;
//
//		Mat desc;
//
//		Mat lab;
//
//		extractor.work(frm, desc,gt , lab);
//
//		for(int i = 0;i< (int) classifiers.size();i++)
//			classifiers[i]->train( desc, lab);
//
//		Mat res;
//
//		for(int i = 0;i< (int) classifiers.size();i++)
//		{
//			LcValidator temp_validator = classifiers[i]->predict( desc, res,lab );
//		}
//
//
//	}
//
//	cv::destroyAllWindows();
//
//	system("pause");
//}

// Estimate the confusion matrix of a weighted-majority-vote ensemble built
// from the classifiers whose individual validation results are in `h`,
// assuming their errors are independent.
// Row layout of `a`: col 0 = expected #positives reaching that vote pattern,
// col 1 = accumulated positive-vote weight, col 2 = expected #negatives.
LcValidator estimateWeightedAvgPredict( vector< LcValidator > & h, vector<float> & weight )
{
	int n = (int) h.size();

	LcValidator sum_vali(0,0,0,0);
	for(int i = 0 ; i < n; i++) sum_vali = sum_vali + h[i];

	float prior_p = sum_vali.tp + sum_vali.fn;	// total ground-truth positives
	float prior_n = sum_vali.fp + sum_vali.tn;	// total ground-truth negatives

	// Each classifier doubles the number of distinct vote patterns, so exactly
	// 2^n rows are ever written (the original over-allocated 2^(n+1); the
	// extra rows stayed all-zero and contributed nothing to the tally below).
	int hp_num = 1 << n;

	Mat a = Mat::zeros( hp_num, 3 , CV_32F );	// CV_32F replaces magic constant 5

	a.at<float>( 0 , 0 ) = prior_p;
	a.at<float>( 0 , 1 ) = 0.0f;
	a.at<float>( 0 , 2 ) = prior_n;

	float sum_weight = 0;

	for(int i = 0; i < n ; i++) sum_weight += weight[i];

	// Split every existing pattern into "classifier i votes positive"
	// (row k+j: weight added, counts scaled by estimated TP / FP rates)
	// and "classifier i votes negative" (row k: counts scaled by FN / TN
	// rates). The 1e-6 terms guard against empty confusion matrices.
	for(int i = 0, j = 1; i < n ; i++, j*=2) 
	{
		float _w = weight[i];

		for(int k = 0 ; k<j ; k++)
		{
			a.at<float>( k +j , 0 ) = a.at<float>( k , 0 ) 
				* (h[i].tp +1e-6f) / ( h[i].fn + h[i].tp + 2e-6f);

			a.at<float>( k +j , 1 ) = a.at<float>( k , 1 ) + _w;

			a.at<float>( k +j , 2 ) = a.at<float>( k , 2 ) 
				* (h[i].fp +1e-6f) / ( h[i].fp + h[i].tn + 2e-6f);

			a.at<float>( k , 0 ) = a.at<float>( k , 0 ) 
				* (h[i].fn +1e-6f) / ( h[i].fn + h[i].tp + 2e-6f);

			a.at<float>( k , 2 ) = a.at<float>( k , 2 ) 
				* (h[i].tn +1e-6f) / ( h[i].fp + h[i].tn + 2e-6f);
		}
	}

	// Majority threshold: strictly more than half the total weight.
	sum_weight /= 2.0f;

	float tp = 0, fp = 0, tn = 0, fn =0;	

	for(int i = 0 ; i < hp_num;i++)
	{
		if( sum_weight + 1e-5f < a.at<float>( i , 1 ) )
		{	//predict as positive
			tp += a.at<float>( i , 0 );
			fp += a.at<float>( i , 2 );			
		}
		else if( sum_weight - 1e-5f > a.at<float>( i , 1 ) )
		{	//predict as negative
			fn += a.at<float>( i , 0 );
			tn += a.at<float>( i , 2 );			
		}
		else
		{	// exact tie: split the mass evenly between both predictions
			tp += a.at<float>( i , 0 )/2.0f;
			fp += a.at<float>( i , 2 )/2.0f;	
			fn += a.at<float>( i , 0 )/2.0f;
			tn += a.at<float>( i , 2 )/2.0f;
		}
	}

	return LcValidator(tp, fp, fn , tn );
}

// Estimate the confusion matrix of an unweighted majority-vote ensemble,
// treating the member classifiers' errors as independent.
// cdp_p[j] = (scaled) probability that a true-positive example collects
// exactly j positive votes; cdp_n[j] is the same for a true-negative one.
LcValidator estimateAvgPredict( vector< LcValidator> & h)
{
	int n = (int)h.size();

	//so the res could from 0 to n
	vector< double > cdp_p( n+1, 0); cdp_p[0] = 1.0;
	//predict a positive example into score
	vector< double > cdp_n( n+1, 0); cdp_n[0] = 1.0;
	//predict a negative example into score

	// Fold in one classifier at a time: each pass convolves the current
	// vote-count distribution with classifier i's per-class hit/miss rates
	// (estimated from its own confusion matrix). j runs DOWNWARD so that
	// cdp[j-1] still holds the previous iteration's value when read.
	// The 1e-6 / 2e-6 terms guard against empty confusion matrices.
	for(int i = 0 ; i < n; i ++ )
	{
		double sum_p = 0, sum_n = 0;
		for(int j = i+1 ; j >=0; j--)
		{
			cdp_p[j] = cdp_p[j] * double(h[i].fn + 1e-6) / double( h[i].fn + h[i].tp + 2e-6);
			if(j >0) cdp_p[j] += cdp_p[j-1] * double(h[i].tp +1e-6) / double( h[i].fn + h[i].tp + 2e-6);
			sum_p += cdp_p[j];

			cdp_n[j] = cdp_n[j] * double(h[i].tn + 1e-6) / double( h[i].tn + h[i].fp + 2e-6);
			if(j >0) cdp_n[j] += cdp_n[j-1] * double(h[i].fp +1e-6) / double( h[i].tn + h[i].fp + 2e-6);
			sum_n += cdp_n[j];
		}

		// NOTE(review): this renormalizes each distribution to sum to 10,
		// not 1, so the returned counts are inflated by a constant factor.
		// Precision/recall/F1 are scale-invariant so downstream metrics
		// are unaffected, but confirm 10.0 is intentional, not a typo for 1.0.
		sum_p = 10.0/ sum_p; sum_n = 10.0/ sum_n;
		for(int j = 0; j<=i+1;j++)
		{
			cdp_p[j] *= sum_p; cdp_n[j] *= sum_n;
		}

	}	


	// Class priors: total ground-truth positive / negative example counts.
	LcValidator sum_vali(0,0,0,0);
	for(int i = 0 ; i < n; i++) sum_vali = sum_vali + h[i];

	double prior_p = sum_vali.tp + sum_vali.fn;
	double prior_n = sum_vali.fp + sum_vali.tn;

	for(int j = 0; j<n+1;j++)
	{
		cdp_p[j] *= prior_p; cdp_n[j] *= prior_n;
	}

	//for(int i = 0 ; i < n+1; i++) cout << cdp_p[i] << " "; cout << endl;
	//for(int i = 0 ; i < n+1; i++) cout << cdp_n[i] << " "; cout << endl;

	LcValidator ans_vali(0,0,0,0);	

	// Majority decision on i votes out of n: i+i < n -> predicted negative,
	// i+i == n -> exact tie (mass split evenly), otherwise predicted positive.
	for(int i = 0 ; i < n+1;i++)
	{
		if(i +i < n)
		{
			//predict as false
			ans_vali = ans_vali + LcValidator(0, 0, (float) cdp_p[i],(float)  cdp_n[i]);
		}
		else if( i+i == n)
		{
			ans_vali = ans_vali + LcValidator((float)cdp_p[i]/2.0f, (float )cdp_n[i]/2.0f,
				(float)cdp_p[i]/2.0f, (float)cdp_n[i]/2.0f);
		}
		else
		{
			ans_vali = ans_vali + LcValidator((float)cdp_p[i], (float)cdp_n[i],0,0);
		}
	}

	//cout << ans_vali.tp << " " << ans_vali.fn << " " << ans_vali.fp << " " << ans_vali.tn << endl;

	return ans_vali;


}
//==============================

// Construct a validator directly from the four confusion-matrix counts.
LcValidator::LcValidator( float _tp, float _fp, float _fn , float _tn)
{
	tp = _tp;
	fp = _fp;
	fn = _fn;
	tn = _tn;
}

// Element-wise sum of two confusion matrices (combine validation results).
LcValidator LcValidator::operator +(const LcValidator & a)
{
	LcValidator combined( tp + a.tp, fp + a.fp, fn + a.fn, tn + a.tn );
	return combined;
}
// Print precision/recall for the foreground (1) and background (0) classes,
// followed by the F1 score and zero-one accuracy.
void LcValidator::display()
{
	const float prec_fg = getPrecision(1);
	const float prec_bg = getPrecision(0);
	const float rec_fg  = getRecall(1);
	const float rec_bg  = getRecall(0);

	cout << "Precision:" << prec_fg << " " << prec_bg << "(back) " << endl;
	cout << "  Recall :" << rec_fg << " " << rec_bg << "(back) " << endl;
	cout << "F:" << getF1() << " 0-1:" << getZeroOne() << endl;
}

// Zero-one accuracy: fraction of correctly classified samples.
// The 1e-5f term matches the guard used in getPrecision/getRecall and
// prevents a 0/0 NaN when the validator holds no samples at all.
float LcValidator::getZeroOne()
{
	return (tp+tn)/(1e-5f+tp+tn+fp+fn);
}

// Precision for the requested class: i != 0 selects the foreground class,
// i == 0 the background class (tn over predicted negatives).
// The 1e-5f term guards against division by zero.
float LcValidator::getPrecision(int i)
{
	return i ? tp/(1e-5f+tp+fp)
	         : tn/(1e-5f+tn+fn);
}


// Recall for the requested class: i != 0 selects the foreground class,
// i == 0 the background class (tn over actual negatives).
// The 1e-5f term guards against division by zero.
float LcValidator::getRecall(int i)
{
	return i ? tp/(1e-5f+tp+fn)
	         : tn/(1e-5f+fp+tn);
}

float LcValidator::getF1(int i)
{
	float p = getPrecision(i);
	float r = getRecall(i);
	return 2*p*r/(1e-5f+p+r);
}

// Build a confusion matrix by comparing predicted scores `res` against
// ground-truth labels `lab`, both thresholded at 0.5 (see count()).
LcValidator::LcValidator( Mat & res, Mat & lab)
{
	count( res, lab, 0.5, tp, fp, tn, fn);
}

// Threshold predictions `res` and labels `lab` at `th` and accumulate the
// four confusion-matrix counts, normalized to fractions of the sample count.
// res, lab: single-column float matrices with one row per sample.
void LcValidator::count( Mat & res, Mat & lab, float th, float & tp, float & fp, float & tn, float & fn)
{
	// Zero the outputs up front so callers (e.g. the LcValidator(res,lab)
	// constructor) never observe uninitialized counts: the original zeroed
	// them only AFTER the size check, leaving garbage on early return.
	tp = fp = tn = fn = 0.0f;

	if( res.rows != lab.rows){ cout << " size unmatch while predicting " << endl; return;}

	if( res.rows == 0 ) return;	// avoid 0/0 in the normalization below

	for(int i = 0; i <  (int) res.rows ; i++ )
	{
		if(res.at<float>(i, 0) > th)
		{
			if(lab.at<float>(i,0)>th) tp += 1.0f;	// predicted +, actual +
			else fp += 1.0f;						// predicted +, actual -
		}
		else
		{
			if(lab.at<float>(i,0)>th) fn += 1.0f;	// predicted -, actual +
			else tn += 1.0f;						// predicted -, actual -
		}
	}

	{
		// Normalize to fractions so results are comparable across sample sizes.
		float n = float( res.rows);
		fp/=n; tp/=n; tn/=n; fn/=n;
	}
}

//==============================

// Train the random-forest model.
// feature: one row per sample (float descriptors); label: one column of
// class labels. Prints timing when the `veb` (verbose) flag is set.
void LcRandomTrees::train(Mat & feature, Mat & label)
{
	// Forest hyper-parameters (stored in the _params member).
	_params.max_depth = 10;
	_params.regression_accuracy = 0.1f;
	_params.use_1se_rule = true;
	_params.use_surrogates = true;
	_params.truncate_pruned_tree = false;
	_params.min_sample_count = 10;

	double t = double(getTickCount());

	if( veb ) cout << "Train Random Trees model ...";

	// All input variables are numerical; the response (extra last entry) is
	// categorical, so the forest performs classification, not regression.
	Mat varType = Mat::ones(feature.cols+1,1,CV_8UC1) * CV_VAR_NUMERICAL; // all floats
	varType.at<uchar>(feature.cols,0) = CV_VAR_CATEGORICAL;

	// CV_ROW_SAMPLE: each training sample occupies one row of `feature`.
	_random_tree.train(feature , CV_ROW_SAMPLE , label,Mat(),Mat(),varType,Mat(), _params);

	// Report wall-clock training time in seconds.
	t = (getTickCount()-t)/getTickFrequency();
	if( veb ) cout << " time:" << t << " secs." << endl;
	
}

// Run the trained forest on each sample row of `feature`, writing one float
// score per row into `res`. Returns validation metrics when a ground-truth
// `label` of matching size is supplied, otherwise a default LcValidator.
LcValidator LcRandomTrees::predict( Mat & feature, Mat & res, Mat & label)
{
	int n = feature.rows;
	res = Mat::zeros( n, 1, CV_32F );	// CV_32F replaces the magic constant 5
	for(int i = 0; i< n ; i++)
	{
		res.at<float>(i,0) =  _random_tree.predict( feature.row(i) );
		//res.at<float>(i,0) =  _random_tree.predict_prob( feature.row(i) );
	}

	if( label.rows == feature.rows ) return LcValidator( res, label);
	else return LcValidator();
}

// Serialize the trained forest to "<prefix>_rdt.yml".
void LcRandomTrees::save( string filename_prefix ){
	filename_prefix += "_rdt.yml";
	_random_tree.save( filename_prefix.c_str() );
}

// Restore a previously saved forest from "<prefix>_rdt.yml".
void LcRandomTrees::load( string filename_prefix ){
	filename_prefix += "_rdt.yml";
	_random_tree.load( filename_prefix.c_str() );
}

//==============================

// Train a single decision tree.
// feature: one row per sample (float descriptors); label: one column of
// class labels. Prints timing when the `veb` (verbose) flag is set.
void LcDecisionTree::train(Mat & feature, Mat & label)
{
	int TREE_DEPTH = 10;
			
	// CvDTreeParams(max_depth, min_sample_count, regression_accuracy,
	//               use_surrogates, max_categories, cv_folds,
	//               use_1se_rule, truncate_pruned_tree, priors)
	_params = CvDTreeParams(TREE_DEPTH,10,0.0,true,TREE_DEPTH,4,true,true,0);

	double t = double(getTickCount());

	if( veb ) cout << "Train decision tree model ...";

	// All input variables are numerical; the response (extra last entry) is
	// categorical, so the tree performs classification, not regression.
	Mat varType = Mat::ones(feature.cols+1,1,CV_8UC1) * CV_VAR_NUMERICAL; // all floats
	varType.at<uchar>(feature.cols,0) = CV_VAR_CATEGORICAL;

	// CV_ROW_SAMPLE: each training sample occupies one row of `feature`.
	_tree.train(feature,CV_ROW_SAMPLE,label,Mat(),Mat(),varType,Mat(),_params);

	// Report wall-clock training time in seconds.
	t = (getTickCount()-t)/getTickFrequency();
	if( veb ) cout << " time:" << t << " secs." << endl;
	
}

// Run the trained tree on each sample row of `feature`, writing the leaf
// node's value (one float per row) into `res`. Returns validation metrics
// when a ground-truth `label` of matching size is supplied.
LcValidator LcDecisionTree::predict( Mat & feature, Mat & res, Mat & label)
{
	int n = feature.rows;
	res = Mat::zeros( n, 1, CV_32F );	// CV_32F replaces the magic constant 5
	for(int i = 0; i< n ; i++)
	{
		// predict(..., false): the sample is raw data, not preprocessed indices.
		CvDTreeNode *node = _tree.predict( feature.row(i) ,Mat(),false);
		res.at<float>(i,0) =  float(node->value);
	}

	if( label.rows == feature.rows ) return LcValidator( res, label);
	else return LcValidator();
}

// Serialize the trained tree to "<prefix>_dt.yml".
void LcDecisionTree::save( string filename_prefix ){
	filename_prefix += "_dt.yml";
	_tree.save( filename_prefix.c_str() );
}

// Restore a previously saved tree from "<prefix>_dt.yml".
void LcDecisionTree::load( string filename_prefix ){
	filename_prefix += "_dt.yml";
	_tree.load( filename_prefix.c_str() );
}

//==============================

// Train a (Gentle) AdaBoost ensemble of 100 depth-1 stumps.
// feature: one row per sample (float descriptors); label: one column of
// class labels. Prints timing when the `veb` (verbose) flag is set.
void LcAdaBoosting::train(Mat & feature, Mat & label)
{
	// Boosting hyper-parameters.
	int boost_type = CvBoost::GENTLE; //CvBoost::REAL; //CvBoost::GENTLE;
	int weak_count = 100;					// number of weak learners
	double weight_trim_rate = 0.95;			// drop lowest-weight samples per round
	int max_depth = 1;						// decision stumps
	bool use_surrogates = false;
	const float* priors = NULL;
	_params = CvBoostParams(boost_type, weak_count,weight_trim_rate,max_depth,use_surrogates,priors);
	
	// All input variables are numerical; the response (extra last entry) is
	// categorical, as required for boosted classification.
	Mat varType = Mat::ones(feature.cols+1,1,CV_8UC1) * CV_VAR_NUMERICAL; // all floats
	varType.at<uchar>(feature.cols,0) = CV_VAR_CATEGORICAL;
	
	//lab = lab*2-1;
	//cout << lab << endl;
	//lab.convertTo(lab,CV_8UC1);
	
	double t = (double)getTickCount();
	if(veb) cout << "Train (Gentle) AdaBoost model ...";
	// CV_ROW_SAMPLE: each training sample occupies one row; final `false`
	// means train from scratch rather than update an existing model.
	_boost.train(feature,CV_ROW_SAMPLE,label,Mat(),Mat(),varType,Mat(),_params,false);
	// Report wall-clock training time in seconds.
	t = (getTickCount()-t)/getTickFrequency();
	if(veb) cout << " time:" << t << " secs." << endl;
}

// Run the boosted ensemble on each sample row of `feature`, writing one
// float prediction per row into `res`. Returns validation metrics when a
// ground-truth `label` of matching size is supplied.
LcValidator LcAdaBoosting::predict( Mat & feature, Mat & res, Mat & label)
{
	int n = feature.rows;
	res = Mat::zeros( n, 1, CV_32F );	// CV_32F replaces the magic constant 5

	for(int i = 0; i< n ; i++)
	{
		res.at<float>(i,0) =  _boost.predict( feature.row(i) );
	}

	if( label.rows == feature.rows ) return LcValidator( res, label);
	else return LcValidator();
}

// Serialize the boosted model to "<prefix>_ada.yml".
void LcAdaBoosting::save( string filename_prefix ){
	filename_prefix += "_ada.yml";
	_boost.save( filename_prefix.c_str() );
}

// Restore a previously saved boosted model from "<prefix>_ada.yml".
void LcAdaBoosting::load( string filename_prefix ){
	filename_prefix += "_ada.yml";
	_boost.load( filename_prefix.c_str() );
}

//==============================

LcKNN::LcKNN()
{
	// Default neighbour count. train() also sets knn = 5, but load() does
	// not, so without this default a predict() after load() would read an
	// uninitialized member.
	knn = 5;

	rotation_kernel = Mat();
}

// k-nearest-neighbour prediction: each query row's score is the average of
// its knn nearest training labels. Returns validation metrics when a
// ground-truth `label` of matching size is supplied.
LcValidator LcKNN::predict(Mat & feature, Mat & res, Mat & label)
{
	// Build a kd-tree index over the stored training features. NOTE: the
	// index is rebuilt on every predict() call.
	cv::flann::Index _flann(_feat, cv::flann::KDTreeIndexParams(4));

	int n = feature.rows;
	res = Mat::zeros( n, 1, CV_32F );	// CV_32F replaces the magic constant 5
	
	Mat inds; Mat dists;

	// One batched search: row i of inds/dists holds query i's knn neighbours.
	_flann.knnSearch(feature, inds, dists,knn,cv::flann::SearchParams(64));

	for(int i = 0; i< n ; i++)
	{	
		float sum_weight = 0.0f;

		float sum_ans = 0.0f;

		for(int k = 0;k< knn ;k++)
		{
			// Uniform weights; a distance-based kernel such as
			// exp(-dist/scale) was left disabled in the original.
			float m_weight = 1;
			int & id = inds.at<int>(i,k);
			sum_weight += m_weight;
			sum_ans += m_weight * _lab.at<float>(id,0);
		}

		res.at<float>( i,0) = float( sum_ans/sum_weight);
	}

	if( label.rows == feature.rows ) return LcValidator( res, label);
	else return LcValidator();
}

// "Training" for this lazy learner just keeps deep copies of the data;
// the kd-tree index is built later, inside predict().
void LcKNN::train(Mat & feature, Mat & label)
{

	// Number of neighbours consulted at prediction time.
	knn = 5;

	feature.copyTo(_feat);
	label.copyTo(_lab);

}

// Dump the stored training matrices to "<prefix>_knn_feat.bin" and
// "<prefix>_knn_lab.bin" via the project's binary Mat serializer.
void LcKNN::save( string filename_prefix ){
	lc::LcMat2Bin( (filename_prefix + "_knn_feat.bin").c_str(), _feat);
	lc::LcMat2Bin( (filename_prefix + "_knn_lab.bin").c_str(), _lab);
}

// Restore the training matrices from "<prefix>_knn_feat.bin" and
// "<prefix>_knn_lab.bin" via the project's binary Mat deserializer.
void LcKNN::load( string filename_prefix ){
	lc::LcBin2Mat( (filename_prefix + "_knn_feat.bin").c_str(), _feat);
	lc::LcBin2Mat( (filename_prefix + "_knn_lab.bin").c_str(), _lab);
}
}