#include "libsvm_classifier.h"
#include "svm.h"

#define Malloc(type,n) (type *)malloc((n)*sizeof(type))

// L1-normalize a sparse feature vector in place: divide every value by
// the sum of all values.  Skipped when the sum is non-positive or the
// vector is already normalized (sum == 1).
static void normalization(svm_node* x) {
	double total = 0;
	for(svm_node* p = x; p->index != -1; ++p)
		total += p->value;

	// Nothing to do for empty/non-positive or already-normalized vectors.
	if(total <= 0 || total == 1)
		return;

	for(svm_node* p = x; p->index != -1; ++p)
		p->value /= total;
}

// Apply L1 normalization to every training vector of the problem.
static void normalization(svm_problem &prob) {
	for(int row = 0; row < prob.l; ++row)
		normalization(prob.x[row]);
}

// Start with no model; one is attached later via load() or train().
libsvm_classifier::libsvm_classifier() : model(NULL) {
}

// Score one example: returns the estimated probability that it belongs
// to the positive class (label 1).
// NOTE(review): assumes boosting_node is layout-compatible with
// svm_node, and that the model is binary (two probability slots).
double libsvm_classifier::test(boosting_node * x) {
	svm_node* features = (svm_node*)x;

	// The custom x^2 kernel (type 4) expects L1-normalized inputs.
	if(svm_get_kernel_type(model) == 4)
		normalization(features);

	// forced to use probability estimation
	double estimates[2];
	svm_predict_probability(model, features, estimates);

	return estimates[positive_index];
}

// Load a previously saved model from `filename` and cache the index of
// the positive class.  svm_load_model returns NULL when the file is
// missing or corrupt; fail fast here instead of dereferencing NULL in
// find_positive_index() (error style matches parse_command_line).
void libsvm_classifier::load(string filename) {
	model = svm_load_model(filename.c_str());
	if(model == NULL) {
		fprintf(stderr,"ERROR: cannot load model file %s\n",filename.c_str());
		exit(1);
	}
	find_positive_index();
}

// Save the current model to `filename`.  svm_save_model returns non-zero
// on I/O failure; report it instead of silently leaving a missing or
// truncated model file (error style matches parse_command_line).
void libsvm_classifier::dump(string filename) {
	if(svm_save_model(filename.c_str(), model) != 0) {
		fprintf(stderr,"ERROR: cannot save model file %s\n",filename.c_str());
		exit(1);
	}
}

// Release the model if one was ever loaded or trained.
libsvm_classifier::~libsvm_classifier() {
	if(model)
		svm_destroy_model(model);
}

void print_null(const char *s) {}

// Print the svm-train usage text (including this project's extra x^2
// kernel, type 4, and the commented-out cascade/CV options) and
// terminate the process with exit status 1.
void exit_with_help()
{
	printf(
	"Usage: svm-train [options] training_set_file [model_file]\n"
	"options:\n"
	"-s svm_type : set type of SVM (default 0)\n"
	"	0 -- C-SVC\n"
	"	1 -- nu-SVC\n"
	"	2 -- one-class SVM\n"
	"	3 -- epsilon-SVR\n"
	"	4 -- nu-SVR\n"
	"-t kernel_type : set type of kernel function (default 2)\n"
	"	0 -- linear: u'*v\n"
	"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
	"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
	"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
	"	4 -- x^2 kernel: exp(-gamma*x^2-distance)\n"
	"	5 -- precomputed kernel (kernel values in training_set_file)\n"
	"-d degree : set degree in kernel function (default 3)\n"
	"-g gamma : set gamma in kernel function (default 1/k)\n"
	"-r coef0 : set coef0 in kernel function (default 0)\n"
	"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
	"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
	"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
	"-m cachesize : set cache memory size in MB (default 100)\n"
	"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
	"-h shrinking: whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
	"-b probability_estimates: whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
	"-wi weight: set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
	"-v n: n-fold cross validation mode\n"
	"-l n: n layers cascade\n"
	);
	exit(1);
}

// Fill `param` with svm-train defaults, then override them from the
// command line (same option grammar as libsvm's svm-train; see
// exit_with_help()).  The positional arguments are copied into
// input_file_name and model_file_name; when the model name is omitted
// it is derived from the training file's basename.  Any parse error
// prints usage and exits via exit_with_help().
// NOTE(review): input_file_name/model_file_name are caller-supplied
// buffers of unknown size -- the strcpy/sprintf calls below assume they
// are large enough for the arguments.
void parse_command_line(int argc, char **argv, char *input_file_name, char *model_file_name, svm_parameter & param)
{
	int i;

	// default values
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;
	param.gamma = 0;	// 1/k
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	//cross_validation = 0;

	// parse options
	for(i=1;i<argc;i++)
	{
		// stop at the first non-option token (the training file name)
		if(argv[i][0] != '-') break;
		// every option consumes exactly one value; advance to it
		if(++i>=argc)
			exit_with_help();
		// argv[i-1][1] is the option letter, argv[i] its value
		switch(argv[i-1][1])
		{
			case 's':
				param.svm_type = atoi(argv[i]);
				break;
			case 't':
				param.kernel_type = atoi(argv[i]);
				break;
			case 'd':
				param.degree = atoi(argv[i]);
				break;
			case 'g':
				param.gamma = atof(argv[i]);
				break;
			case 'r':
				param.coef0 = atof(argv[i]);
				break;
			case 'n':
				param.nu = atof(argv[i]);
				break;
			case 'm':
				param.cache_size = atof(argv[i]);
				break;
			case 'c':
				param.C = atof(argv[i]);
				break;
			case 'e':
				param.eps = atof(argv[i]);
				break;
			case 'p':
				param.p = atof(argv[i]);
				break;
			case 'h':
				param.shrinking = atoi(argv[i]);
				break;
			case 'b':
				param.probability = atoi(argv[i]);
				break;
			//case 'v':
			//	cross_validation = 1;
			//	nr_fold = atoi(argv[i]);
			//	if(nr_fold < 2)
			//	{
			//		fprintf(stderr,"n-fold cross validation: n must >= 2\n");
			//		exit_with_help();
			//	}
			//	break;
			case 'w':
				// -wN <weight>: grow the per-class weight arrays; the
				// class label N is parsed out of the option token itself
				++param.nr_weight;
				param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
				param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
				param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
				param.weight[param.nr_weight-1] = atof(argv[i]);
				break;
			//case 'l':
			//	cascade_mode = 1;
			//	n_layer = atoi(argv[i]);
			//	break;
			default:
				fprintf(stderr,"unknown option\n");
				exit_with_help();
		}
	}

	// determine filenames

	// at least the training file must remain after the options
	if(i>=argc)
		exit_with_help();

	strcpy(input_file_name, argv[i]);

	if(i<argc-1)
		strcpy(model_file_name,argv[i+1]);
	else
	{
		// default model name: "<basename of training file>.model"
		char *p = strrchr(argv[i],'/');
		if(p==NULL)
			p = argv[i];
		else
			++p;
		sprintf(model_file_name,"%s.model",p);
	}
}

// Train an SVM on the boosting problem.  `argv` carries svm-train style
// options (without a program name or file arguments); they are wrapped
// into a synthetic command line so parse_command_line can be reused.
void libsvm_classifier::train(boosting_problem & o_prob, int argc, char ** argv) {
	svm_problem svm_prob;
	svm_prob.l = o_prob.l;

	// Map boosting labels to {1, 0}; find_positive_index() later
	// recovers which libsvm class slot holds label 1.
	svm_prob.y = new double[svm_prob.l];
	for(int i = 0; i < svm_prob.l; i++) {
		svm_prob.y[i] = o_prob.y[i] == 1 ? 1 : 0;
	}

	// NOTE(review): assumes boosting_node arrays are layout-compatible
	// with svm_node -- confirm against boosting_problem's definition.
	svm_prob.x = (svm_node**)o_prob.x;

	// Build an svm-train command line: program name, the caller's
	// options, then the positional file argument parse_command_line
	// requires (the parsed file names themselves are unused here).
	// Static buffers instead of literals: binding a string literal to a
	// non-const char* is ill-formed in C++11 and later.
	static char prog_name[] = "svm-train";
	static char dummy_file[] = "training_set_file";
	int argc_t = argc + 2;
	char ** argv_t = new char*[argc_t];
	argv_t[0] = prog_name;
	for(int i = 0; i < argc; i++) {
		argv_t[i+1] = argv[i];
	}
	argv_t[argc_t - 1] = dummy_file;

	svm_parameter svm_param;
	char input_file_name[1024]; // not used.
	char model_file_name[1024]; // not used.
	parse_command_line(argc_t, argv_t, input_file_name, model_file_name, svm_param);

	// set non-zero gamma: default is 1/k with k the feature dimension
	if(svm_param.gamma == 0) {
		svm_param.gamma = 1.0/o_prob.dim;
	}

	// forced to use probability estimation (test() relies on it)
	svm_param.probability = 1;

	// The custom x^2 kernel (type 4) expects L1-normalized inputs.
	if(svm_param.kernel_type == 4) {
		normalization(svm_prob);
	}

	const char *error_msg = svm_check_parameter(&svm_prob, &svm_param);
	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}

	model = svm_train(&svm_prob, &svm_param);
	find_positive_index();

	// svm_destroy_param frees the -w weight arrays realloc'd during
	// parsing.  svm_prob.x is owned by o_prob (and referenced by the
	// trained model's SVs), so only y and the argv wrapper are freed.
	svm_destroy_param(&svm_param);
	delete [] argv_t;
	delete [] svm_prob.y;
}


// Cache which of the model's two class slots corresponds to the
// positive label (1), so test() can index the probability array.
void libsvm_classifier::find_positive_index() {
	int labels[2];
	svm_get_labels(model, labels);
	positive_index = (labels[0] == 1) ? 0 : 1;
}