# include "stdlib.h"
# include "stdio.h"
# include "memory.h"
# include "string.h"
# include "string"
# include "map"
# include "vector"
# include "set"
# include "data_struct.h"
# include "math.h"
# include "IO.h"

// Logistic (sigmoid) function: 1 / (1 + e^-s).
// Written in the numerically stable form: the original exp(s)/(1+exp(s))
// overflows for large positive s (exp(s) -> inf, inf/inf -> NaN), whereas
// exp(-s) merely overflows to inf for large negative s, giving a clean 0.0.
double sigma(double s)
{
	return 1.0/(1.0+exp(-s));
}

int main(int argn,char **argv)
{
	//parsing parameter
	char train_file[1024];
	char model_file[1024];
	int iter_number = 0;
	//usage ./train -d data -m model -n iter_number 
	if(argn != 7)
	{
		fprintf(stderr,"[ERROR] usage:./train -d data_file -m model_file -n iter_number\n");
		return -1;
	}
	for(int i = 1;i < 7 ;i += 2)
	{
		if(0  == strcmp(argv[i],"-d"))
		{
			strcpy(train_file,argv[i+1]);
		}
		else if(0 == strcmp(argv[i],"-m"))
		{
			strcpy(model_file,argv[i+1]);
		}
		else if(0 == strcmp(argv[i],"-n"))
		{
			iter_number = atoi(argv[i+1]);
		}
		else
		{
			fprintf(stderr,"[ERROR] usage:./train -d data_file -m model_file -n iter_number\n");
			return -1;
		}
	}
	
	//read data
	printf("start to  read data now\n");
	vector<Instance> * dataset = new vector<Instance>();
	map<string,int> * feature_name_id = new map<string,int>();
	if(!read_data(train_file,feature_name_id,dataset))
		return -1;
	vector<double> *weight = new vector<double>(feature_name_id->size(),0.0);
	printf("reading data finished\n");
	printf("instance number: %d\n",dataset->size());
	printf("feature  number: %d\n",feature_name_id->size());

	/*train*/
	printf("start to train\n");
	int num_positive = 0;
	for(int i = 0 ; i < dataset->size();i++)
	{
		if((*dataset)[i].label == 1)
			num_positive ++;
	}
	int num_total = dataset->size();
	double beta0 = log(1.0*num_positive/(num_total-num_positive));	

	printf("[train] start to caculate the const_var and s\n");
	//caculate the const_var and s
	double s = 0.0;
	int num_feature = feature_name_id->size();
	double * const_var = new double[num_feature];
		//the constant variable in formulate 57 in paper 
		//<<algorithms for maxinum-likelihood logistic regression>>
	double *tmp = new double[num_feature];
	memset(const_var,0,num_feature*sizeof(double));
	memset(tmp,0,num_feature*sizeof(double));
	for(int i = 0 ; i < dataset->size(); i ++)
	{
		if(s < 1.0*(*dataset)[i].feature.size())
		{
			s = 1.0 *(*dataset)[i].feature.size();
		}

		double *array = (*dataset)[i].label == 1 ? const_var: tmp;
		for(set<int>::iterator iter = (*dataset)[i].feature.begin(); iter != (*dataset)[i].feature.end(); iter ++)
		{
			array[*iter] += 1.0;
		}
	}
	for(int i = 0 ; i < num_feature; i ++)
	{
		if(tmp[i] < 0.0001 )
			tmp[i] = 0.0001;
		if(const_var[i] < 0.0001)
			const_var[i] =	0.0001;	

		const_var[i] /= tmp[i];
	}
	delete(tmp);
	printf("[train] caculating const_var and s finished\n");
	printf("[train] s:%lf\n",s);
	
	printf("[train] start to iter\n");
	//iterate
	printf("iter\tlikelihood\n");
	double * numerator = new double[num_feature];
	double * denominator = new double[num_feature];
	for(int i = 0 ; i < iter_number; i ++)
	{
		for(int j = 0 ; j < num_feature; j ++)
		{
			numerator[j] = denominator[j] = 0.0;
		}
		for(int data = 0 ; data < dataset->size(); data++)
		{
			double w0_plus_xi = 0.0;
			for(set<int>::iterator iter = (*dataset)[data].feature.begin(); iter != (*dataset)[data].feature.end(); iter++)
			{
				w0_plus_xi += (*weight)[*iter];
			}
			double sigma_value = sigma(w0_plus_xi);

			for(set<int>::iterator iter = (*dataset)[data].feature.begin(); iter != (*dataset)[data].feature.end();iter++)
			{
				numerator[*iter] += 1-sigma_value;
				denominator[*iter] += sigma_value;
			}	
				
		}
		double numerator_k = 0.0,denominator_k = 0.0;
		for(int k = 0 ; k < num_feature; k ++)
		{
			numerator_k = numerator[k];
			denominator_k = denominator[k];
			(*weight)[k] = (*weight)[k]+(1/s)*log(const_var[k]*(numerator_k)/(denominator_k));
		}

		//caculate log-likelihood
		double log_likelihood = 0.0;
		for(int data = 0 ; data < dataset->size(); data++)
		{
			double sum_w = 0.0;
			for(set<int>::iterator iter = (*dataset)[data].feature.begin();iter != (*dataset)[data].feature.end();iter ++)
			{
				sum_w += (*weight)[*iter];
			}
			if((*dataset)[data].label == 1)
				sum_w *= -1;
			log_likelihood += log(1/(1+exp(sum_w)));
		}
		printf("%d\t%lf\t",i,log_likelihood);
		printf("\n");
	}
	delete[](numerator);
	delete[](denominator);
	printf("[train] iterate finished\n");
	printf("train finished\n");
	
	printf("start to write model\n");
	/*write model*/
	if(!write_model(model_file,feature_name_id,weight,beta0))
		return -1;
	printf("writing model finished\n");	

	delete(feature_name_id);
	delete(weight);
	return 0;
	
}
