/*
 * trainer.h
 *
 *  Created on: Feb 2, 2012
 *      Author: kkb110
 */

#ifndef TRAINER_H_
#define TRAINER_H_

#include "common_includes.h"

#include "maximum_likelihood_trainer.h"

#define NUM_VARIABLES 14
class Trainer{
public:
	// Training data: shooters[i] is the label sequence paired with the
	// observation sequence shots[i].
	vector<vector<Shooter>> shooters;
	vector<vector<Model<double, 2, 2>::Shot>> shots;
	// Sharpness parameter forwarded to the forward-backward pass.
	// NOTE(review): exact semantics live in Model::forward_backward — confirm there.
	double eta;

	Trainer(){
		eta = 2;
	}
	Trainer(double eta){
		this->eta = eta;
	}

	// L-BFGS progress callback. Returning 0 tells the optimizer to continue;
	// a non-zero value would terminate the run early.
	int progress(
			void *instance,
			const lbfgsfloatval_t *x,
			const lbfgsfloatval_t *g,
			const lbfgsfloatval_t fx,
			const lbfgsfloatval_t xnorm,
			const lbfgsfloatval_t gnorm,
			const lbfgsfloatval_t step,
			int n,
			int k,
			int ls){
		return 0;
	}

	// L-BFGS objective: the negated total framewise score of the model built
	// from parameter vector x, summed over all training sequences. The
	// gradient is obtained by forward-mode automatic differentiation — the
	// whole model is evaluated over Number<double, NUM_VARIABLES> dual
	// numbers, and the derivative components are copied into g.
	lbfgsfloatval_t evaluate(
			void* instance,
			const lbfgsfloatval_t* x,
			lbfgsfloatval_t* g,
			const int n,
			const lbfgsfloatval_t step){
		Model<Number<double, NUM_VARIABLES> , 2, 2> current_guess;

		// Seed each parameter as an independent differentiation direction
		// (variable i carries derivative 1 in slot i, 0 elsewhere).
		Number<double, NUM_VARIABLES> xx[NUM_VARIABLES];
		for (size_t i = 0; i < NUM_VARIABLES; ++i){
			xx[i] = Number<double, NUM_VARIABLES>(i, x[i]);
		}

		// Transition parameters are unconstrained logits; a per-row softmax
		// keeps each row of the transition matrix a valid distribution for
		// any real-valued x.
		current_guess.transition_probabilities(0, 0) = exp(xx[0])
				/ (exp(xx[0]) + exp(xx[1]));
		current_guess.transition_probabilities(0, 1) = exp(xx[1])
				/ (exp(xx[0]) + exp(xx[1]));
		current_guess.transition_probabilities(1, 0) = exp(xx[2])
				/ (exp(xx[2]) + exp(xx[3]));
		current_guess.transition_probabilities(1, 1) = exp(xx[3])
				/ (exp(xx[2]) + exp(xx[3]));

		// Per-state 2-D Gaussians: mean (2 params) plus symmetric covariance
		// (3 unique params, the off-diagonal entry is shared).
		current_guess.shooting_distributions[0].mean << xx[4], xx[5];
		current_guess.shooting_distributions[0].covariance << xx[6], xx[7], xx[7], xx[8];

		current_guess.shooting_distributions[1].mean << xx[9], xx[10];
		current_guess.shooting_distributions[1].covariance << xx[11], xx[12], xx[12], xx[13];

		Number<double, NUM_VARIABLES> score_total = 0;
		for (size_t i = 0; i < shooters.size(); ++i){
			// Re-cast the plain-double observations into dual numbers so the
			// whole scoring pipeline propagates derivatives.
			vector<Model<Number<double, NUM_VARIABLES> , 2, 2>::Shot> shots_d(
					shots[i].size());
			for (size_t j = 0; j < shots[i].size(); ++j){
				shots_d[j] = shots[i][j].cast<Number<double, NUM_VARIABLES>>();
			}
			auto forward_backward = current_guess.forward_backward(shots_d, eta);
			auto score = current_guess.score_framewise(forward_backward, {
					shooters[i] })[0];
			score_total += score;
		}

		// L-BFGS minimizes, so negate the score; the derivative slots of the
		// negated total are exactly the gradient of the objective.
		score_total = -score_total;
		for (size_t i = 0; i < NUM_VARIABLES; ++i){
			g[i] = score_total[i];
		}
		return score_total.var;
	}

	// Trains the model: warm-starts from a maximum-likelihood fit, refines
	// the parameters with up to three consecutive L-BFGS runs (each resuming
	// from the previous solution), writes the result to "./plot/test2", and
	// returns the trained model. On allocation failure the MLE model is
	// returned unrefined.
	Model<double, 2, 2> run(){
		int ret = -2;
		lbfgsfloatval_t fx;
		lbfgsfloatval_t *x = lbfgs_malloc(NUM_VARIABLES);
		lbfgs_parameter_t param;
		lbfgs_parameter_init(&param);
		Model<double, 2, 2> model;

		// Warm start: initialize every parameter from the closed-form
		// maximum-likelihood estimate.
		MaximumLikelihoodTrainer trainer_MLE;
		trainer_MLE.shooters = shooters;
		trainer_MLE.shots = shots;
		model = trainer_MLE.run();

		// lbfgs_malloc can fail; fall back to the MLE model rather than
		// writing through a null pointer.
		if (x == NULL){
			return model;
		}

		// Store transition probabilities as logits (inverse of the softmax
		// applied in evaluate()).
		x[0] = log(model.transition_probabilities(0, 0));
		x[1] = log(model.transition_probabilities(0, 1));
		x[2] = log(model.transition_probabilities(1, 0));
		x[3] = log(model.transition_probabilities(1, 1));

		x[4] = model.shooting_distributions[0].mean(0);
		x[5] = model.shooting_distributions[0].mean(1);
		x[6] = model.shooting_distributions[0].covariance(0,0);
		x[7] = model.shooting_distributions[0].covariance(0,1);
		x[8] = model.shooting_distributions[0].covariance(1,1);

		x[9] = model.shooting_distributions[1].mean(0);
		x[10] = model.shooting_distributions[1].mean(1);
		x[11] = model.shooting_distributions[1].covariance(0,0);
		x[12] = model.shooting_distributions[1].covariance(0,1);
		x[13] = model.shooting_distributions[1].covariance(1,1);

		// Restart the optimizer a few times; each call resumes from the x
		// left by the previous one, which can escape premature termination
		// (e.g. line-search/rounding failures).
		for (int i = 0; i < 3; ++i){
			ret = lbfgs(NUM_VARIABLES, x, &fx, evaluate_wrapper,
					progress_wrapper, this, &param);
		}
		(void)ret;

		// Map the optimized parameters back into model space (same softmax /
		// symmetric-covariance layout as evaluate()).
		model.transition_probabilities(0, 0) = exp(x[0])
				/ (exp(x[0]) + exp(x[1]));
		model.transition_probabilities(0, 1) = exp(x[1])
				/ (exp(x[0]) + exp(x[1]));
		model.transition_probabilities(1, 0) = exp(x[2])
				/ (exp(x[2]) + exp(x[3]));
		model.transition_probabilities(1, 1) = exp(x[3])
				/ (exp(x[2]) + exp(x[3]));
		model.shooting_distributions[0].mean << x[4], x[5];
		model.shooting_distributions[0].covariance << x[6], x[7], x[7], x[8];
		model.shooting_distributions[1].mean << x[9], x[10];
		model.shooting_distributions[1].covariance << x[11], x[12], x[12], x[13];

		// Release the buffer obtained from lbfgs_malloc (was leaked before).
		lbfgs_free(x);

		model.save("./plot/test2");

		return model;
	}

	// Static trampoline passed to lbfgs(); forwards to the member progress()
	// of the Trainer supplied as the callback instance.
	static int progress_wrapper(
			void *instance,
			const lbfgsfloatval_t *x,
			const lbfgsfloatval_t *g,
			const lbfgsfloatval_t fx,
			const lbfgsfloatval_t xnorm,
			const lbfgsfloatval_t gnorm,
			const lbfgsfloatval_t step,
			int n,
			int k,
			int ls){
		return static_cast<Trainer*>(instance)->progress(instance, x, g, fx,
				xnorm, gnorm, step, n, k, ls);
	}

	// Static trampoline passed to lbfgs(); forwards to the member evaluate().
	static lbfgsfloatval_t evaluate_wrapper(
			void* instance,
			const lbfgsfloatval_t* x,
			lbfgsfloatval_t* g,
			const int n,
			const lbfgsfloatval_t step){
		return static_cast<Trainer*>(instance)->evaluate(instance, x, g, n,
				step);
	}
};

#endif /* TRAINER_H_ */
