package com.googlecode.adaboost.trainer;

import java.io.FileNotFoundException;
import java.util.Arrays;

import com.googlecode.adaboost.AdaBoostEngine;
import com.googlecode.adaboost.classifier.BinaryWeakClassifier;
import com.googlecode.adaboost.classifier.RealWeakClassifier;
import com.googlecode.adaboost.classifier.WeakClassifier;
import com.googlecode.adaboost.classifier.util.WeakClassifierEvaluator;
import com.googlecode.adaboost.exception.AdaBoostException;
import com.googlecode.adaboost.module.ModuleGenerator;

/**
 * Drives AdaBoost training: loads a training set and repeatedly fits weak
 * classifiers, adding each to the boosting engine.
 *
 * @version Created on 2012-11-11
 * @author Yinzi Chen
 */

public class Trainer {

	protected AdaBoostEngine adaBoost;
	protected TrainingData trainingData;
	protected AdaBoostConfiguration configuration;
	protected ModuleGenerator moduleGenerator;

	/**
	 * Creates a trainer that feeds trained weak classifiers into the given
	 * engine.
	 *
	 * @param adaBoost the engine that accumulates the boosted ensemble
	 * @param configuration training parameters (training type, max epochs,
	 *            epsilon, desired error)
	 */
	public Trainer(AdaBoostEngine adaBoost, AdaBoostConfiguration configuration) {
		this.adaBoost = adaBoost;
		this.trainingData = new TrainingData();
		this.configuration = configuration;
		this.moduleGenerator = new ModuleGenerator(configuration);
	}

	/**
	 * Loads the training data from the given file and runs the boosting loop
	 * selected by {@link AdaBoostConfiguration#getTrainingType()}.
	 *
	 * @param trainingFile path of the training-data file
	 * @throws AdaBoostException if the training type is unknown or a weak
	 *             classifier is degenerate (error rate 0 or &gt;= 0.5)
	 * @throws FileNotFoundException if the training file cannot be opened
	 */
	public void train(String trainingFile) throws AdaBoostException,
			FileNotFoundException {
		trainingData.load(trainingFile);
		switch (configuration.getTrainingType()) {
		case BINARY_ADABOOST:
			binaryTrain();
			break;
		case REAL_ADABOOST:
			realTrain();
			break;
		default:
			throw new AdaBoostException("Unknown training type: "
					+ configuration.getTrainingType());
		}
	}

	/** Returns a fresh uniform sample distribution over the training set. */
	private double[] createUniformDistribution() {
		double[] probability = new double[trainingData.getDataNum()];
		Arrays.fill(probability, 1.0 / trainingData.getDataNum());
		return probability;
	}

	/**
	 * Rejects degenerate weak classifiers: an error rate &gt;= 0.5 carries no
	 * usable signal, and an error rate of exactly 0 would produce an unbounded
	 * classifier weight.
	 *
	 * @throws AdaBoostException if the error rate is degenerate
	 */
	private void checkErrorRate(double errorRate) throws AdaBoostException {
		if (errorRate >= 0.5 || errorRate == 0) {
			throw new AdaBoostException(
					"The weak classifier is too weak or too strong with errorRate: "
							+ errorRate);
		}
	}

	/** Divides every entry of probability by sum so it is a distribution again. */
	private void normalize(double[] probability, double sum) {
		for (int j = 0; j < probability.length; ++j) {
			probability[j] /= sum;
		}
	}

	/**
	 * Discrete (binary) AdaBoost loop: each round, correctly classified samples
	 * are scaled down by e^-alpha and misclassified ones up by e^+alpha, then
	 * the weights are renormalized.
	 */
	private void binaryTrain() throws AdaBoostException {
		double[] probability = createUniformDistribution();
		double bound = 1.0;
		for (int i = 0; i < configuration.getMaxEpochs(); ++i) {
			BinaryWeakClassifier classifier = moduleGenerator
					.generateBinaryWeakClassifier();
			classifier.initClassifier(trainingData, probability);
			double errorRate = WeakClassifierEvaluator.evaluateBinaryClassifier(
					classifier, trainingData, probability);
			checkErrorRate(errorRate);
			double alpha = classifier.getAlpha();
			double q1 = Math.exp(-alpha); // factor for correct decisions
			double q2 = 1 / q1; // factor for wrong decisions
			double sum = 0.0;
			for (int j = 0; j < trainingData.getDataNum(); ++j) {
				probability[j] *= (classifier.makeDecision(trainingData
						.getTrainingData().get(j)) == trainingData
						.getTrainingData().get(j).getOutput() ? q1 : q2);
				sum += probability[j];
			}
			normalize(probability, sum);
			// The product of the per-round normalizers Z_t bounds the
			// training error of the combined classifier.
			bound *= sum;

			adaBoost.addWeakClassifier(classifier);

			if (isTrainingEnd(i + 1, bound, errorRate, sum, probability,
					classifier)) {
				break;
			}
		}
	}

	/**
	 * Real AdaBoost loop: sample weights are rescaled with the classifier's
	 * confidence-rated outputs (c+ for positive decisions, c- for negative
	 * ones), then renormalized.
	 */
	private void realTrain() throws AdaBoostException {
		double[] probability = createUniformDistribution();
		double bound = 1.0;
		for (int i = 0; i < configuration.getMaxEpochs(); ++i) {
			RealWeakClassifier classifier = moduleGenerator
					.generateRealWeakClassifier();
			classifier.initClassifier(trainingData, probability);
			double errorRate = WeakClassifierEvaluator.evaluateRealClassifier(
					classifier, trainingData, probability,
					configuration.getEpsilon());
			checkErrorRate(errorRate);
			double q1 = Math.exp(-classifier.getCplus());
			double q2 = Math.exp(-classifier.getCminus());
			double sum = 0.0;
			for (int j = 0; j < trainingData.getDataNum(); ++j) {
				if (classifier.makeDecision(trainingData.getTrainingData().get(
						j)) == 1) {
					// Positive decision: shrink weight if the label agrees,
					// grow it otherwise.
					probability[j] *= (trainingData.getTrainingData().get(j)
							.getOutput() == 1 ? q1 : 1 / q1);
				} else {
					probability[j] *= (trainingData.getTrainingData().get(j)
							.getOutput() == 1 ? q2 : 1 / q2);
				}
				sum += probability[j];
			}
			normalize(probability, sum);
			// The product of the per-round normalizers Z_t bounds the
			// training error of the combined classifier.
			bound *= sum;

			adaBoost.addWeakClassifier(classifier);

			if (isTrainingEnd(i + 1, bound, errorRate, sum, probability,
					classifier)) {
				break;
			}
		}
	}

	/**
	 * Dumps the per-iteration state to stdout and decides whether training can
	 * stop.
	 *
	 * @param iterationNum 1-based round number, for logging only
	 * @param bound running product of the normalizers Z_t
	 * @param error the current weak classifier's weighted error
	 * @param sum this round's normalizer Z_t
	 * @param probability the (renormalized) sample distribution
	 * @param classifier the weak classifier trained this round
	 * @return true if the combined classifier's error on the training set has
	 *         reached the configured desired error
	 */
	private boolean isTrainingEnd(int iterationNum, double bound, double error,
			double sum, double[] probability, WeakClassifier classifier) {
		System.out.println("Iteration " + iterationNum);
		classifier.dumpClassifier();
		System.out.printf("error = %f\n", error);
		System.out.printf("Z = %f\n", sum);
		System.out.printf("pi = {");
		for (int j = 0; j < trainingData.getDataNum(); ++j) {
			System.out.printf(" %.3f%s", probability[j],
					j == trainingData.getDataNum() - 1 ? "" : ",");
		}
		System.out.println("}");

		// runAll fills outputs with the ensemble's decisions and returns its
		// error rate on the full training set.
		double[] outputs = new double[trainingData.getDataNum()];
		double errorRate = adaBoost.runAll(trainingData, outputs);
		System.out.printf("Bound = %f\n\n", bound);
		return errorRate <= configuration.getDesiredError();
	}

}
