#include "TrainMethod.h"

#include <sys/stat.h>

#include <cmath>
#include <vector>
// Nothing to initialize — let the compiler generate the constructor.
BTMethod::BTMethod(/* args */) = default;

// No owned resources — defaulted destructor.
BTMethod::~BTMethod() = default;


/*Class LDA*/
// Nothing to initialize — let the compiler generate the constructor.
LDA::LDA(/* args */) = default;

// No owned resources — defaulted destructor.
LDA::~LDA() = default;

int LDA::create_model(arma::mat data, arma::mat label){
    // Placeholder: LDA training is not implemented yet; announce the call
    // and report success so callers can exercise the interface.
    (void)data;
    (void)label;
    std::cout << "LDA model" << std::endl;

    return 0;
}
int LDA::data_predict(arma::mat data){
	// Placeholder: LDA prediction is not implemented yet; announce the call
	// and return a dummy label of 0.
	(void)data;
	std::cout << "LDA data_predict" << std::endl;
	return 0;
}


/*Class SVM*/
#define Malloc(type,n) (type *)malloc((n)*sizeof(type))
// 交叉验证输出使能
#define CV_PRINT
// No-op sink handed to svm_set_print_string_function() to silence libsvm.
void print_null(const char *s) { (void)s; }

// Construct the SVM wrapper: set default libsvm parameters, sanity-check
// them, and make sure the model file at model_path exists (creating an
// empty one if not) so later svm_save_model/svm_load_model have a target.
SVM::SVM(std::string model_path)
		:modelPath(model_path)
{
	set_parameter();

	// NOTE(review): prob has not been filled yet (that happens in
	// create_model), so this mostly validates the kernel/solver settings —
	// confirm prob's members are zero-initialized before relying on it.
	const char *error_msg = svm_check_parameter(&prob, &param);
	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		// exit(1);
	}

	// If the model file does not exist yet, create an empty one.
	struct stat buffer;
	if (stat(model_path.c_str(), &buffer) != 0){
		std::ofstream file(model_path);
		if (file.is_open()) {
			#ifdef DEBUG
			// was `fileName` (undeclared in this scope), which broke DEBUG builds
			std::cout << "Create file:\"" << model_path << "\"" << std::endl;
			#endif // DEBUG
			file.close();
		}
		else {
			#ifdef DEBUG
			std::cerr << "Failed to create file:\"" << model_path << "\"" << std::endl;
			#endif // DEBUG
		}
	}
	std::cout << "using SVM model..." << std::endl;
}


// All libsvm buffers are released inside create_model/data_predict;
// nothing left to clean up here.
SVM::~SVM() = default;

// Grid search for the best (C, gamma) pair by nr_fold cross validation.
// The grid is expressed in log2 space, i.e. C = 2^c for c in [cmin, cmax]
// and gamma = 2^g for g in [gmin, gmax], as the notes below document.
// On return, param->C and param->gamma hold the best values found.
int SVM::Cg_optimization_gridSearch(const svm_problem *prob, svm_parameter *param){

//  train_label: training-set labels, laid out as libsvm expects.
//  train: training set, laid out as libsvm expects.
//  cmin: log2 of the smallest penalty C tried, i.e. c_min = 2^(cmin). Default -5.
//  cmax: log2 of the largest penalty C tried, i.e. c_max = 2^(cmax). Default 5.
//  gmin: log2 of the smallest gamma tried, i.e. g_min = 2^(gmin). Default -5.
//  gmax: log2 of the largest gamma tried, i.e. g_max = 2^(gmax). Default 5.
//
//  nr_fold: number of folds for cross validation. Default 10.
//  cstep: step size of the C exponent. Default 1.
//  gstep: step size of the gamma exponent. Default 1.

	const int nr_fold = 10;

	const int gmin = -5;
	const int gmax = 5;
	const int gstep = 1;
	const int cmin = -5;
	const int cmax = 5;
	const int cstep = 1;

	double maxAcc = 0;
	// C and gamma are doubles in svm_parameter; storing the best values in
	// ints (as before) truncated them.
	double best_C = param->C;
	double best_g = param->gamma;

	for (int i=gmin; i<=gmax; i+=gstep){
		// Search in powers of two as documented above. The previous code used
		// the raw exponent, which produced invalid C <= 0 and gamma < 0.
		param->gamma = std::pow(2.0, i);
		for (int j=cmin; j<=cmax; j+=cstep){
			param->C = std::pow(2.0, j);
			// was called without nr_fold, so the local setting was ignored
			double acc = do_cross_validation(prob, param, nr_fold);

			// Keep the (C, g) with the highest CV accuracy; on an exact tie
			// prefer the smaller C (a simpler model).
			if (acc > maxAcc || (acc == maxAcc && param->C < best_C)){
				maxAcc = acc;
				best_C = param->C;
				best_g = param->gamma;
			}
			#ifdef CV_PRINT
			static int times = 0;
			std::cout << "==== do_cross_validation ====" << std::endl;
			std::cout << "times:" << times << std::endl;
			std::cout << "acc:" << acc << "\tC:" << param->C << "\tg:" << param->gamma << std::endl;
			++times;
			#endif // CV_PRINT
		}
	}

	param->C = best_C;
	param->gamma = best_g;

	std::cout << "==== result ====" << std::endl;
	std::cout << "maxAcc:" << maxAcc << "\tbest_C:" << best_C << "\tbest_g:" << best_g << std::endl;
	return 0;
}


// Fill `param` with the training hyper-parameters for libsvm
// (C-SVC with an RBF kernel) and reset the prediction flags.
void SVM::set_parameter()
{
	param.svm_type     = C_SVC;
	param.kernel_type  = RBF;
	param.degree       = 5;       // only used by polynomial kernels
	param.gamma        = 3;       // RBF width; refined later by the grid search
	// param.gamma = 10;	// 1/num_features
	param.coef0        = 0;
	param.nu           = 0.5;
	param.cache_size   = 100;     // kernel cache size in MB
	// param.C = 1;
	param.C            = 4;       // penalty; refined later by the grid search
	param.eps          = 1e-3;    // stopping tolerance
	param.p            = 0.1;     // epsilon in the EPSILON_SVR loss
	param.shrinking    = 1;
	param.probability  = 0;
	param.nr_weight    = 0;
	param.weight_label = nullptr;
	param.weight       = nullptr;

	cross_validation    = 0;
	// nr_fold = 0;
	predict_probability = 0;

	// nullptr selects libsvm's default logging to stdout.
	svm_set_print_string_function(nullptr);
}

// Run nr_fold cross validation over `prob` with `param` and return the
// classification accuracy in [0, 1]. libsvm's internal logging is silenced
// for the duration of the run and restored afterwards.
// NOTE: for regression models (EPSILON_SVR / NU_SVR) only the MSE and
// squared correlation are printed; the returned value stays 0.
double SVM::do_cross_validation(const svm_problem *prob, const svm_parameter *param, int nr_fold)
{
	int i;
	int total_correct = 0;
	double total_error = 0;
	double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
	// RAII buffer for the predicted targets (was a raw malloc/free pair)
	std::vector<double> target(prob->l);
	// silence libsvm's per-iteration output
	svm_set_print_string_function(&print_null);

	svm_cross_validation(prob,param,nr_fold,target.data());
	if(param->svm_type == EPSILON_SVR ||
	   param->svm_type == NU_SVR)
	{
		for(i=0;i<prob->l;i++)
		{
			double y = prob->y[i];
			double v = target[i];
			total_error += (v-y)*(v-y);
			sumv += v;
			sumy += y;
			sumvv += v*v;
			sumyy += y*y;
			sumvy += v*y;
		}
		printf("Cross Validation Mean squared error = %g\n",total_error/prob->l);
		printf("Cross Validation Squared correlation coefficient = %g\n",
			((prob->l*sumvy-sumv*sumy)*(prob->l*sumvy-sumv*sumy))/
			((prob->l*sumvv-sumv*sumv)*(prob->l*sumyy-sumy*sumy))
			);
	}
	else
	{
		for(i=0;i<prob->l;i++)
			if(target[i] == prob->y[i])
				++total_correct;
		printf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob->l);
	}

	double accuracy = (double)total_correct/prob->l;

	// restore libsvm's default logging to stdout
	svm_set_print_string_function(nullptr);
	return accuracy;
}

// Train an SVM: build libsvm's svm_problem from the Armadillo matrices
// (`data` holds one sample per row, one feature per column; `label` holds
// the matching per-sample labels), grid-search C/gamma, train, save the
// model to modelPath, then release every buffer that was allocated here.
// Returns 0 even when saving fails — the failure is only logged.
int SVM::create_model(arma::mat data, arma::mat label){
	// SVM_Stru::TrainParaStru *paraPtr = reinterpret_cast< SVM_Stru::TrainParaStru *>(data);
	// paraPtr->data.print("paraPtr->data:");
	// paraPtr->label.print("paraPtr->label:");
	
	int sampleSum = label.n_elem;
	int data_rows = data.n_rows;
	int data_cols = data.n_cols;

	// Create a double array
    // double* data_buff = new double[data_rows * data_cols];
	// Copy the matrix data into the double array
    // // std::memcpy(data_buff, paraPtr->data.memptr(), data_rows * data_cols * sizeof(double));
	// std::vector<double> trainFeature_vector((paraPtr->data.memptr()), (paraPtr->data.memptr()) + paraPtr->data.n_elem);
	// std::vector<double> trainLabel_vector((paraPtr->label.memptr()), (paraPtr->label.memptr()) + paraPtr->label.n_elem);
	// std::vector<double> trainFeature_vector((data.memptr()), (data.memptr()) + data.n_elem);
	// std::vector<double> trainLabel_vector((label.memptr()), (label.memptr()) + label.n_elem);


	prob.l = sampleSum;
	prob.x = new svm_node*[sampleSum];   // feature matrix: one svm_node row per sample
    prob.y = new double[sampleSum];      // label array, one entry per sample
	// NOTE(review): plain `new` throws std::bad_alloc instead of returning
	// nullptr, so these two checks are effectively dead code.
	if(prob.x == nullptr){
		std::cout << "In creat model, prob.x error!" << std::endl;
	}

	if(prob.y == nullptr){
		std::cout << "In creat model, prob.y error!" << std::endl;
	}

	// if(trainLabel_vector.size() != sampleSum){
	// 	std::cout << "sampleSum: " << sampleSum << " cols: " << data_cols << std::endl;
	// 	std::cout << "label:" << trainLabel_vector.size() << std::endl;
	// 	std::cout << "\033[31m"  << "label dimension error..." << "\033[0m" << std::endl;
	// 	return -1;
	// }

	for (int i=0; i<sampleSum; ++i) 
    {
        prob.x[i] = new svm_node[data_cols+1]; // +1 for libsvm's end-of-vector sentinel
        for(int j=0; j<data_cols; ++j)
        {
            prob.x[i][j].index = j + 1;   // libsvm feature indices are 1-based
            // prob.x[i][j].value = trainFeature_vector[j + data_cols*i];
            prob.x[i][j].value = data(i,j);
			// printf("prob.x[%d][%d].value=%lf  ",i,j,prob.x[i][j].value);
        }
        prob.x[i][data_cols].index = -1;  // sentinel: marks the end of this sample
        prob.y[i] = label[i];
    }

	// Optimize C and gamma with a grid search before training.
	Cg_optimization_gridSearch(&prob, &param);

	model = svm_train(&prob,&param);

	// svm_save_model returns non-zero on failure.
	if (svm_save_model(modelPath.c_str(), model))
	{
		std::cerr << "Save SVM to [" << modelPath << "] FAILED" << std::endl;
	} 
	else
	{
		std::cout << "Save SVM to [" << modelPath << "] SUCCEED." << std::endl;
	}

	// The model returned by svm_train references prob's buffers, so free the
	// model first, then the problem arrays.
	svm_free_and_destroy_model(&model);
	


	svm_destroy_param(&param);
	delete[] prob.y;
	prob.y = nullptr;

	for(int i=0; i<sampleSum; ++i) 
		delete[] prob.x[i];
	delete[] prob.x;	
	prob.x = nullptr;

    return 0;
}

/**********************************
 * 函数名：data_predict
 * 功能：预测一帧数据的标签
 * 输入：数据
 * 输出：对应分类标签
 * 备注：无
**********************************/
int SVM::data_predict(arma::mat data){
	// Predict the label of one feature vector (`data` is a single sample,
	// read element-wise across its n_cols entries).
	// Returns the predicted class as an int, or -1 if the model file at
	// modelPath cannot be loaded.

	// 1. load the trained model from disk
	svm_model *model = svm_load_model(modelPath.c_str());
	if(model == nullptr){
		fprintf(stderr,"can't open model file %s\n",modelPath.c_str());
		return -1;
	}

	int svm_type = svm_get_svm_type(model);
	int nr_class = svm_get_nr_class(model);
	int j;

	// Report mismatches between the probability flag and the model's support.
	if(predict_probability)
	{
		if(svm_check_probability_model(model)==0)
		{
			fprintf(stderr,"Model does not support probabiliy estimates\n");
			predict_probability = 0;
		}
	}
	else
	{
		if(svm_check_probability_model(model)!=0)
			printf("Model supports probability estimates, but disabled in prediction.\n");
	}

	if(predict_probability)
	{
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else if(svm_type==ONE_CLASS)
		{
			// nr_class = 2 for ONE_CLASS
			fprintf(stdout,"label normal outlier\n");
		}
		else
		{
			// print the class labels in the model's internal order
			std::vector<int> labels(nr_class);
			svm_get_labels(model,labels.data());
			fprintf(stdout,"labels");
			for(j=0;j<nr_class;j++)
				fprintf(stdout," %d",labels[j]);
			fprintf(stdout,"\n");
		}
	}

	// libsvm needs a terminating node with index = -1, so allocate
	// n_cols + 1 nodes (the old `new svm_node[n_cols]` wrote one element
	// past the end of the array). std::vector also replaces the previous
	// undefined-behavior `free()` of memory obtained with new[].
	std::vector<svm_node> sample(data.n_cols + 1);
	for (size_t i = 0; i < data.n_cols; i++)
	{
		sample[i].index = (int)i + 1;   // libsvm feature indices are 1-based
		sample[i].value = data(i);
		// std::cout << "sample["  << i << "].value=" << sample[i].value << std::endl;
	}
	sample[data.n_cols].index = -1;

	// Size the probability buffer by the model's class count instead of the
	// old hard-coded 18-entry array, which overflowed for nr_class > 18.
	std::vector<double> prob_estimates(nr_class > 0 ? nr_class : 1);
	// libsvm falls back to plain svm_predict when the model carries no
	// probability information.
	double resultLabel = svm_predict_probability(model, sample.data(), prob_estimates.data());
	//double resultLabel=svm_predict(model,sample.data());

	// std::cout<<"labels: "<<resultLabel<<" "<<std::endl;
	// std::cout<<"probresut: "<<prob_estimates[0]*100 <<"% "<<std::endl;

	svm_free_and_destroy_model(&model);
	return int(resultLabel);
}




/**********************************
 * 函数名：data_predict_test
 * 功能：从测试集验证模型分类准确性
 * 输入：
 * 输出：
 * 备注：无
**********************************/
int SVM::data_predict_test(const arma::mat &data, const arma::mat &label){
	// Evaluate the saved model on a labelled test set: `data` holds one
	// sample per row, `label` the expected class per row. Prints the
	// per-sample predictions plus overall accuracy (classification) or
	// MSE / squared correlation (regression).
	// Returns 0 on success, -1 if the model file cannot be loaded.
	svm_model *model = svm_load_model(modelPath.c_str());
	if(model == nullptr){
		fprintf(stderr,"can't open model file %s\n",modelPath.c_str());
		return -1;
	}
	
	const int data_rows = data.n_rows;
	const int data_cols = data.n_cols;

	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type = svm_get_svm_type(model);
	int nr_class = svm_get_nr_class(model);
	int j;
	double target_label, predict_label;

	// probability buffer sized by the model's class count; RAII replaces the
	// previous malloc/free pair (which leaked on some paths)
	std::vector<double> prob_estimates;

	if(predict_probability)
	{
		if(svm_check_probability_model(model)==0)
		{
			fprintf(stderr,"Model does not support probabiliy estimates\n");
			// predict_probability = 0;
		}
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else if(svm_type==ONE_CLASS)
		{
			// nr_class = 2 for ONE_CLASS
			prob_estimates.resize(nr_class);
			fprintf(stdout,"label normal outlier\n");
		}
		else
		{
			std::vector<int> labels(nr_class);
			svm_get_labels(model,labels.data());
			prob_estimates.resize(nr_class);
			fprintf(stdout,"labels");
			for(j=0;j<nr_class;j++)
				fprintf(stdout," %d",labels[j]);
			fprintf(stdout,"\n");
		}
	}
	else
	{
		if(svm_check_probability_model(model)!=0)
			printf("Model supports probability estimates, but disabled in prediction.\n");
	}

	// One reusable feature vector; +1 node for libsvm's index = -1
	// terminator (the old `new svm_node[data_cols]` wrote one element past
	// the end, and was later released with free() — undefined behavior).
	std::vector<svm_node> sample(data_cols + 1);
	for(int dataIndex = 0; dataIndex < data_rows; ++dataIndex){
		target_label = label(dataIndex);

		for (int i = 0; i < data_cols; i++)
		{
			sample[i].index = i+1;   // libsvm feature indices are 1-based
			sample[i].value = data(dataIndex,i);
			// std::cout << "sample["  << i << "].value=" << sample[i].value << std::endl;
		}
		sample[data_cols].index = -1;

		if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC || svm_type==ONE_CLASS))
		{
			predict_label = svm_predict_probability(model,sample.data(),prob_estimates.data());
			fprintf(stdout,"%g",predict_label);
			for(j=0;j<nr_class;j++)
				fprintf(stdout," %g",prob_estimates[j]);
			fprintf(stdout,"\n");
		}
		else
		{
			predict_label = svm_predict(model,sample.data());
			fprintf(stdout,"%.17g\n",predict_label);
		}

		// running statistics for the summary below
		if(predict_label == target_label)
			++correct;
		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;
	}

	if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		printf("Mean squared error = %g (regression)\n",error/total);
		printf("Squared correlation coefficient = %g (regression)\n",
			((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
			((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
			);
	}
	else
	{
		printf("Accuracy = %g%% (%d/%d) (classification)\n",
			(double)correct/total*100,correct,total);
	}

	svm_free_and_destroy_model(&model);
	return 0;
	
}