#include <cmath>
#include <vector>
#include <iomanip>
#include <iostream>
#include <fstream>
#include <string>
#include <cstring>
#include "lbfgs.h"
#include "CRFTagger.h"
#include "feature.h"

namespace MRFPP
{

// Evaluates the trained model on a tagged test file and prints token-level
// precision. The file alternates token lines ("w1 w2 ...") and gold-tag
// lines ("t1/t2/...").
// Returns 0 on success, -1 if the file cannot be opened.
int crf_test(const char* filename, FeatureIndex& feature_index)
{
	std::ifstream in(filename);
	if (!in)
	{
		std::cerr << "cannot open test file: " << filename << std::endl;
		return -1;
	}
	std::string line;
	double right = 0.0, cnt = 0.0;
	while (std::getline(in, line))
	{
		// Split the token line on runs of spaces.
		std::vector<std::string> tokenlist;
		std::string token;
		for (size_t i = 0; i < line.size(); i++)
		{
			if (line[i] == ' ')
			{
				if (!token.empty())
				{
					tokenlist.push_back(token);
					token.clear();
				}
			}
			else
				token += line[i];
		}
		tokenlist.push_back(token);
		// The next line carries the gold tags separated by '/'.
		if (!std::getline(in, line))
			break; // malformed file: token line without a tag line
		std::string tag;
		std::vector<int> answer;
		for (size_t i = 0; i < line.size(); i++)
		{
			if (line[i] == '/')
			{
				answer.push_back(feature_index.TagID(tag));
				tag.clear();
			}
			else
				tag += line[i];
		}
		answer.push_back(feature_index.TagID(tag));
		// Stack allocation instead of new/delete: no leak even if
		// getFeatures/open/viterbi throws.
		CRFTagger tagger(feature_index.get_alpha());
		feature_index.getFeatures(&tagger, tokenlist, answer);
		tagger.open(&feature_index, (int)answer.size(), false);
		right += tagger.viterbi(true); // count of correctly predicted tags
		cnt += tagger.get_xsize();     // total number of tags seen
	}
	if (cnt > 0.0)
		std::cout << "total Precision:" << right / cnt << std::endl;
	else
		std::cout << "total Precision: n/a (empty test set)" << std::endl;
	return 0;
}
int crfpp_learn(const char* trainfile,const char* testfile,const char* train_type)
{
	LBFGS lbfgs;
	int maxitr = 1000;
	double oldlogSum = 0.0;
	//std::string train_type ="VP";
	//std::string train_type ="LBFGS";
	bool orthant = false;
	double C = 1.0;//0.5;
	std::vector <Tagger*> x;
	std::cout.setf(std::ios::fixed, std::ios::floatfield);
	std::cout.precision(5);
	std::vector <double> alpha;
	FeatureIndex feature_index(alpha);
	feature_index.open(trainfile,true);
	feature_index.openFeatures(true,x);
	std::cout << "hello world!" <<std::endl;
	alpha.resize(feature_index.size());
	std::fill(alpha.begin(), alpha.end(), 0.0);
	std::vector <double> sum_alpha(feature_index.size());
	std::fill(sum_alpha.begin(),sum_alpha.end(),0.0);//for voted perceptron
	std::vector <double> expected(feature_index.size());
	feature_index.set_alpha(alpha);
	std::cout << "begin to train..." << std::endl;
	if(std::strcmp(train_type ,"LBFGS") == 0)
	{
	for(size_t itr = 0; itr < maxitr; itr++)
	{
		std::fill(expected.begin(), expected.end(), 0.0);
		double logSum = 0.0;
		for(size_t i = 0; i < x.size(); i++)
		{
			x[i] -> set_alpha(alpha);
			x[i] -> open(&feature_index,i,true);
			logSum += x[i] -> gradient(&expected[0]);
		}

		if(orthant)
		{
			for(size_t k = 0; k < feature_index.size(); k++)
				logSum += std::abs(alpha[k]/C);
		}
		else
		{
			for(size_t k = 0; k < feature_index.size(); k++)
			{
				logSum += alpha[k] * alpha[k] / (2.0 * C);
				expected[k] += alpha[k] / C;
			}
		}
		std::cout << "lfbgs..." << std::endl;
		std::cout << "feature_num " << feature_index.size() << std::endl;
		std::cout << "objecive " << logSum << std::endl;
		double diff = (itr == 0 ? 1.0 : std::abs(oldlogSum - logSum) / oldlogSum);
		oldlogSum = logSum;		
		if (lbfgs.optimize(feature_index.size(),
				  		  &alpha[0],
				          logSum,
				          &expected[0],
				          orthant,C) <= 0)
		;
		if(diff < 0.0001)
			break;
	}
	}
	else if(std::strcmp(train_type,"VP") == 0)//voted perceptron
	{
		for(size_t itr = 0; itr < maxitr; itr++)
		{
			for(size_t i = 0; i < x.size(); i++)
			{
				x[i] -> set_alpha(alpha);
				x[i] -> open(&feature_index,i,true);
				x[i] -> viterbi(false);
				feature_index.update(x[i]);
				for(size_t j = 0; j < alpha.size(); j++)
				{
					sum_alpha[j] += alpha[j];
				}
			}
		}

		for(size_t i = 0; i < alpha.size(); i++)
			alpha[i] = sum_alpha[i] / (maxitr * x.size());
		feature_index.set_alpha(alpha);
	}
	else
	{
		std::cerr << "The training type should define either VP or LBFGS" << std::endl;
		return -1;
	}
	std::cout << "============================" << std::endl;
	std::cout << "training compelete!" << std::endl;
	crf_test(testfile,feature_index);
	return 0;
}

}
