#include "boosting_manager.h"

#include "adaboost.h"
#include "base_classifier.h"
#include "boosting_algorithm.h"
#include "factory.h"

#include <fstream>
#include <iostream>
#include <sstream>
#include <strstream>

// local tool
// Computes accuracy / precision / recall of thresholded predictions and
// logs them (no trailing newline; caller adds it), returning the error
// rate (1 - accuracy).
// @param y labels, expected to be +1 / -1 (see change_labels)
// @param p raw prediction scores; thresholded at 0.5 to get a +1/-1 vote
// @param d sample distribution — currently unused, kept for interface
//          compatibility with callers that pass it
// @param l number of samples
double local_assess(double * y, double * p, double * d, int l) {
	(void)d; // intentionally unused

	// Guard against an empty sample set (avoids 0/0 below).
	if(l <= 0) {
		std::cout << "accuracy=0\tprecision=0\trecall=0";
		return 0;
	}

	int tp = 0, tn = 0, fp = 0, fn = 0;
	for(int i = 0; i < l; i++) {
		// scores above 0.5 count as a positive vote
		double h = p[i] > 0.5 ? 1 : -1;

		if(y[i] == 1) {
			if(h == 1) {
				tp ++;
			} else {
				fn ++;
			}
		} else {
			if(h == 1) {
				fp ++;
			} else {
				tn ++;
			}
		}
	}

	double accuracy = (tp + tn) / (double)l;
	double precision = (tp + fp) == 0 ? 0 : tp / (double)(tp + fp);
	// BUG FIX: the zero-guard for recall previously tested (tp + fp),
	// so an all-negative-prediction run produced 0/0 (NaN) recall.
	double recall = (tp + fn) == 0 ? 0 : tp / (double)(tp + fn);

	std::cout << "accuracy=" << accuracy;
	std::cout << "\tprecision=" << precision;
	std::cout << "\trecall=" << recall;

	return 1 - accuracy;
}

// preprocessing

// Binarizes the label vector in place: anything that is not the
// positive class (+1) becomes -1, so downstream code can rely on a
// strict +1 / -1 labeling.
void change_labels(boosting_problem & prob) {
	for(int idx = 0; idx < prob.l; idx++) {
		if(prob.y[idx] == 1) {
			continue; // already the positive label
		}
		prob.y[idx] = -1;
	}
}

// Shuffles samples and labels together (forward Fisher–Yates using
// rand()): each position i receives a uniformly chosen element from
// the not-yet-fixed tail [i, l).
void randomize(boosting_problem & prob) {
	for(int i = 0; i < prob.l; i++) {
		int pick = i + rand() % (prob.l - i);
		swap(prob.y[i], prob.y[pick]);
		swap(prob.x[i], prob.x[pick]);
	}
}

// Reorders the samples in place so that every positive (+1) sample
// precedes every negative (-1) one.  Two-cursor sweep: 'a' scans for a
// misplaced negative, 'b' scans ahead for a positive to pull forward;
// 'b' never moves backwards, so the whole pass is O(l).
// train_prediction_partial relies on this positives-first layout.
void re_organize_problem(boosting_problem & prob) {
	int a = 0;
	int b = 0;
	while(a < prob.l) {
		if(prob.y[a] == -1) {
			// keep the search cursor strictly ahead of 'a'
			if(b < a) {
				b = a + 1;
			}

			// find the next positive sample and swap it into slot 'a'
			while(b < prob.l) {
				if(prob.y[b] == 1) {
					swap(prob.y[a], prob.y[b]);
					swap(prob.x[a], prob.x[b]);
					break;
				} else {
					b++;
				}
			}

			// no positive sample remains ahead: partition complete
			if(b == prob.l) {
				break;
			}
		} else {
			a++; // already positive, leave in place
		}
	}
}

// Allocates the ensemble before training: one weight slot and one fresh
// base classifier per layer, plus the boosting algorithm implementation
// selected by name through the factory.
void boosting_manager::train_initialize(int layer, string algorithm_type, string base_type) {
	n_layer = layer;

	alpha = new double[n_layer];
	classifiers = new base_classifier*[n_layer];

	factory fact;
	a = fact.get_algorithm(algorithm_type);

	// one base classifier per boosting layer
	int i = 0;
	while(i < n_layer) {
		classifiers[i] = fact.get_classifier(base_type);
		i++;
	}
}

// Runs the boosting loop: trains one base classifier per layer on the
// current sample distribution, computes its ensemble weight (alpha) and
// updates the distribution for the next layer.  If a base classifier
// fails to train, the ensemble is truncated at that layer.
// @param prob training problem; preprocessed in place (labels mapped to
//             +/-1, shuffled, positives moved to the front)
// @param argc/argv forwarded to the algorithm and base-classifier trainers
void boosting_manager::train(boosting_problem & prob, int argc, char ** argv) {
	preprocess(prob);

	a->train_initialization(argc, argv);

	int l = prob.l;
	double * d_t = new double[l];   // distribution for the current round
	double * d_t1 = new double[l];  // distribution for the next round
	double * p = new double[l];     // current classifier's per-sample predictions

	a->initialize_distribution(d_t, prob.y, l);

	actual_layer = n_layer;
	for(int i = 0; i < n_layer; i++) {
		cout << "i=" << i << "\t";
		base_classifier * c = classifiers[i];

		// a failed base training truncates the ensemble at this layer
		if(!c->train(prob, d_t, argc, argv)) {
			actual_layer = i;
			cout << "changes to " << actual_layer << " layers." << endl;
			break;
		}

		// get prediction (full: score every sample; partial: only a subset)
		if(a->full_updating()) {
			train_prediction_full(c, p, prob);
		} else {
			train_prediction_partial(c, p, d_t, prob);
		}

		// get alpha and update distribution
		alpha[i] = a->get_alpha(prob.y, p, d_t, l);
		cout << "alpha=" << alpha[i] << "\t"; // BUG FIX: log read "alpa="
		a->update_distribution(alpha[i], prob.y, p, l, d_t, d_t1);

		// log this layer's training metrics
		local_assess(prob.y, p, d_t, l);
		cout << endl;

		// swap buffers so d_t holds the updated distribution next round
		swap(d_t, d_t1);
	}

	delete [] d_t;
	delete [] d_t1;
	delete [] p;
}

// Scores sample x with every base classifier, then combines the scores
// with the layer weights through the boosting algorithm's predictor.
double boosting_manager::test(boosting_node * x) {
	double * scores = new double[n_layer];
	for(int layer = 0; layer < n_layer; layer++) {
		scores[layer] = classifiers[layer]->test(x);
	}
	const double result = a->predict(alpha, scores, n_layer);
	delete [] scores;

	return result;
}

// Evaluates sample x with every prefix of the ensemble: results[k]
// holds the prediction using only the first k+1 layers, which lets the
// caller study accuracy as a function of ensemble depth.
void boosting_manager::batch_test(boosting_node * x, vector<double> & results) {
	results.resize(n_layer);
	double * scores = new double[n_layer];
	for(int layer = 0; layer < n_layer; layer++) {
		scores[layer] = classifiers[layer]->test(x);
		// predict over the prefix [0, layer]
		results[layer] = a->predict(alpha, scores, layer + 1);
	}
	delete [] scores;
}

// Loads a boosting model written by dump(): the first line holds the
// layer count and algorithm type; each following line holds one layer's
// weight and base-classifier type, with the classifier's own state in a
// companion file named by get_filename().
void boosting_manager::load(string filename) {
	ifstream fin(filename.c_str());
	if(fin.fail()) {
		// BUG FIX: previously only logged ("Connot open") and fell
		// through, reading from the failed stream and allocating arrays
		// from an indeterminate layer count.  Bail out instead.
		cout << "Cannot open " << filename << endl;
		return;
	}

	string algorithm_type;
	fin >> n_layer >> algorithm_type;
	actual_layer = n_layer;

	alpha = new double[n_layer];
	classifiers = new base_classifier*[n_layer];

	factory fact;
	a = fact.get_algorithm(algorithm_type);

	// base classifiers: one "<alpha>\t<type>" line per layer
	for(int i = 0; i < n_layer; i++) {
		string base_type;
		fin >> alpha[i] >> base_type;
		classifiers[i] = fact.get_classifier(base_type);
		classifiers[i]->load(get_filename(filename, i));
	}

	fin.close();
}

// Persists the trained model: a header line with the effective layer
// count and algorithm type, then one "<alpha>\t<type>" line per layer.
// Each base classifier additionally dumps itself to its own file.
void boosting_manager::dump(string filename) {
	ofstream fout(filename.c_str());
	fout << actual_layer << "\t" << a->get_type() << endl;
	int lyr = 0;
	while(lyr < actual_layer) {
		fout << alpha[lyr] << "\t" << classifiers[lyr]->get_type() << endl;
		classifiers[lyr]->dump(get_filename(filename, lyr));
		lyr++;
	}
	fout.close();
}

// Builds the per-layer companion filename: "<filename>.<lyr>.txt".
// BUG FIX: the old implementation used the deprecated strstream, whose
// internal buffer was never freed here (memory leak per call); the
// ostringstream replacement owns its buffer and needs no getline().
string boosting_manager::get_filename(string filename, int lyr) {
	std::ostringstream sstr;
	sstr << filename << "." << lyr << ".txt";
	return sstr.str();
}

// Releases everything the manager owns: each base classifier, the
// classifier array, the weight array and the boosting algorithm.
boosting_manager::~boosting_manager() {
	for(int idx = 0; idx < n_layer; idx++) {
		delete classifiers[idx];
	}
	delete [] classifiers;
	delete [] alpha;
	delete a;
}

// Prepares the training problem in place.  Order matters: binarize
// labels first, shuffle, then move all positives to the front (the
// layout train_prediction_partial depends on).
void boosting_manager::preprocess(boosting_problem & prob) {
	change_labels(prob);       // map non-positive labels to -1
	randomize(prob);           // shuffle samples
	re_organize_problem(prob); // positives first
}

// Scores every training sample with classifier c, writing the raw
// prediction for sample j into p[j].
void boosting_manager::train_prediction_full(base_classifier * c, double * p, boosting_problem & prob) {
	int j = 0;
	while(j < prob.l) {
		p[j] = c->test(prob.x[j]);
		j++;
	}
}

// Fills p[] with predictions for a weighted subset of samples.
// Assumes re_organize_problem has placed all positive samples first:
// positives get p=1 without being scored; negatives with non-zero
// weight are scored by c until as many scores >= 0.5 have been seen as
// there are positives; the remaining samples default to p=1.
// NOTE(review): trailing negatives are deliberately marked p=1 (i.e.
// "misclassified") — presumably so the boosting update keeps weight on
// them; confirm against the algorithm's update_distribution.
void boosting_manager::train_prediction_partial(base_classifier * c, double * p, double * d, boosting_problem & prob) {
	int index = 0;
	// BUG FIX: the bounds check must precede the array access, otherwise
	// prob.y[prob.l] is read out of bounds when every label is +1.
	while(index < prob.l && prob.y[index] == 1) {
		p[index] = 1;
		index ++;
	}

	int pos = index;   // number of positive samples
	int count = 0;     // negatives scored >= 0.5 so far
	while(count < pos && index < prob.l) {
		if(d[index] == 0) {
			// BUG FIX: zero-weight samples were skipped with p[index]
			// left uninitialized; use the same default as the tail below.
			p[index] = 1;
			index ++;
			continue;
		}

		double r = c->test(prob.x[index]);
		p[index] = r;
		index ++;

		if(r >= 0.5) {
			count ++;
		}
	}

	// samples never reached by the scoring loop get the default
	while(index < prob.l) {
		p[index] = 1;
		index++;
	}
}
