package org.estela.classifiers;

import java.util.Arrays;

import org.estela.evaluation.ConfusionMatrix;
import org.estela.evaluation.CostMatrix;
import org.estela.ml.dataset.Attribute;
import org.estela.ml.dataset.DataSet;
import org.estela.ml.dataset.DiscreteAttribute;
import org.estela.ml.dataset.Instance;
import org.estela.stats.KernelEstimator;
import org.estela.stats.ProbabilityEstimator;
import org.estela.util.Math2;
import org.estela.util.MinMax;


/**
 * Accumulates evaluation statistics for a classifier over a test set:
 * confusion matrix, error sums, entropies, margins, cost and the
 * Kononenko &amp; Bratko information score. Supports both discrete and
 * numeric class attributes.
 *
 * @version $Revision$
 * @date Apr 26, 2006
 * @author <a href="mailto:akantek@nyse.com">Antonio Kantek</a>
 */
public class ClassifierPerformance {
	/** The attribute to predict */
	protected Attribute _clazz;

	/** The prior probabilities of the categories (DiscreteAttribute clazz only) */
	protected double[] _clazzPriors;

	/** The sum of counts for priors */
	protected double _clazzPriorsSum;

	/** Number of non-missing class training instances seen */
	protected int _numTrainClazzValues;

	/** Array containing all numeric training class values seen */
	protected double[] _trainClazzValues;

	/** Array containing all numeric training class weights */
	protected double[] _trainClazzWeights;

	/** The Confusion Matrix */
	private ConfusionMatrix _confusionMatrix;

	/** The weight of all instances that had a class assigned to them. */
	private double _withClass;

	/** The weight of all unclassified instances. */
	private double _unclassified;

	/** Sum of class values. */
	private double _sumClass;

	/** Sum of squared class values. */
	private double _sumSqrClass;

	/** Sum of predicted * class values. */
	private double _sumClassPredicted;

	/** Sum of predicted values. */
	private double _sumPredicted;

	/** Sum of squared predicted values. */
	private double _sumSqrPredicted;

	/** Numeric class error estimator for scheme */
	private ProbabilityEstimator _errorEstimator;

	/** Numeric class error estimator for prior */	
	private ProbabilityEstimator _priorErrorEstimator;

	/** Total entropy of scheme predictions */	
	private double _sumSchemeEntropy;

	/** Total entropy of prior predictions */
	private double _sumPriorEntropy;

	/** The number of classes (1 when the class attribute is numeric). */	
	private int _numClasses;

	/** Sum of errors. */	
	private double _sumErr;

	/** Sum of absolute errors. */	
	private double _sumAbsErr;

	/** Sum of squared errors. */	
	private double _sumSqrErr;

	/** Sum of absolute errors of the prior */	
	private double _sumPriorAbsErr;

	/** Sum of squared errors of the prior */	
	private double _sumPriorSqrErr;

	/** The weight of all instances that had no class assigned to them. */	
	private double _missingClass;

	/** Resolution of the margin histogram */	
	private int _marginResolution = 500;

	/** Cumulative margin distribution */
	private double[] _marginCounts;

	/** Optional misclassification cost matrix (may be null). */
	private CostMatrix _costMatrix;

	/** The total cost of predictions (includes instance weights) */	
	private double _totalCost;

	/** Total Kononenko & Bratko Information */	
	private double _sumKBInfo;
	
	/** The weight of all incorrectly classified instances. */
	private double _incorrect;

	/** The weight of all correctly classified instances. */	
	private double _correct;
	

	
	
	/**
	 * The minimum probability accepted from an estimator to avoid taking
	 * log(0) in Sf calculations.
	 */
	protected static final double MIN_SF_PROB = Double.MIN_VALUE;	

	/**
	 * Creates a performance tracker for the given class attribute.
	 *
	 * @param clazz
	 *            The attribute to predict.
	 */
	public ClassifierPerformance(Attribute clazz) {
		_clazz = clazz;
		if (_clazz.isDiscrete()) {
			// One class per category; the confusion matrix is only meaningful
			// for a discrete class attribute.
			String[] categories = ((DiscreteAttribute) _clazz).getCategories();
			_numClasses = categories.length;
			_confusionMatrix = new ConfusionMatrix(_numClasses);
		} else {
			// Numeric class: a single pseudo-class holds the running sums.
			_numClasses = 1;
		}
		// BUG FIX: _clazzPriors used to be allocated twice (once per branch
		// and then again here); allocate it exactly once.
		_clazzPriors = new double[_numClasses];
		_marginCounts = new double[_marginResolution + 1];
	}

	/**
	 * Sets the class prior probabilities (discrete class) or buffers the
	 * training class values and weights (numeric class) from the given
	 * training data.
	 *
	 * @param priors
	 *            the training data used to estimate the priors
	 */
	public void setPriors(DataSet priors) {
		this.checkDataSet(priors);

		int priorsSize = priors.size();
		int clazzIdx = _clazz.getIndex();

		if (_clazz.isDiscrete()) {
			// Laplace-style initialisation: every category starts at 1.
			String[] categories = ((DiscreteAttribute) _clazz).getCategories();
			for (int i = 0; i < categories.length; i++) {
				_clazzPriors[i] = 1d;
			}
			_clazzPriorsSum = categories.length;

			Instance instance;
			double[] instanceValues;
			for (int i = 0; i < priorsSize; i++) {
				instance = priors.getInstance(i);
				instanceValues = instance.getValues();
				// BUG FIX: 'x != Double.NaN' is ALWAYS true (NaN compares
				// unequal to everything, including itself), so missing class
				// values were never filtered. Use Double.isNaN instead.
				if (!Double.isNaN(instanceValues[clazzIdx])) {
					_clazzPriors[(int) instanceValues[clazzIdx]] += instance
							.getWeight();
					_clazzPriorsSum += instance.getWeight();
				}
			}

		} else {
			// Numeric class: buffer every non-missing class value and its
			// weight in a growable pair of parallel arrays.
			_numTrainClazzValues = 0;
			Instance instance;
			double[] instanceValues;
			for (int i = 0; i < priorsSize; i++) {
				instance = priors.getInstance(i);
				instanceValues = instance.getValues();
				// BUG FIX: same always-true NaN comparison as above.
				if (!Double.isNaN(instanceValues[clazzIdx])) {
					if (_trainClazzValues == null) {
						_trainClazzValues = new double[10];
						_trainClazzWeights = new double[10];
					}

					// Grow both buffers by 10 slots when full.
					if (_numTrainClazzValues == _trainClazzValues.length) {
						double[] temp = new double[_trainClazzValues.length + 10];
						System.arraycopy(_trainClazzValues, 0, temp, 0,
								_trainClazzValues.length);
						_trainClazzValues = temp;

						temp = new double[_trainClazzWeights.length + 10];
						System.arraycopy(_trainClazzWeights, 0, temp, 0,
								_trainClazzWeights.length);
						_trainClazzWeights = temp;
					}

					_trainClazzValues[_numTrainClazzValues] = instanceValues[clazzIdx];
					_trainClazzWeights[_numTrainClazzValues] = instance
							.getWeight();
					_numTrainClazzValues++;
				}
			}
		}
	}

	/**
	 * Verifies that the given data set is compatible with the class attribute
	 * being evaluated. Currently a no-op: the original compatibility check
	 * (binary search of the class attribute in the data set's attributes) is
	 * disabled. TODO: re-enable once Attribute supports the required ordering.
	 *
	 * @param dataSet
	 *            the data set to validate
	 */
	private void checkDataSet(DataSet dataSet) {
		// Intentionally empty — see TODO above.
	}

	/**
	 * Evaluates the classifier over every instance in the test set, updating
	 * the accumulated statistics (confusion matrix, error sums, entropy, ...)
	 * as a side effect of each prediction.
	 *
	 * @param classifier
	 *            the trained classifier to evaluate
	 * @param testSet
	 *            the labelled instances to score
	 */
	public void evaluate(Classifier classifier, DataSet testSet) {
		int dataSetSize = testSet.size();
		try {
			for (int i = 0; i < dataSetSize; i++) {
				// getPrediction updates the statistics; the returned values
				// themselves were collected into an array that was never
				// used, so the array has been removed.
				this.getPrediction(classifier, testSet.getInstance(i));
			}
		} catch (Exception ex) {
			// NOTE(review): swallowing keeps evaluation best-effort, but the
			// caller gets no signal that the run was incomplete — consider
			// propagating.
			ex.printStackTrace();
		}
	}

	/**
	 * Obtains the classifier's prediction for one instance and updates the
	 * evaluation statistics for it.
	 *
	 * @param classifier
	 *            the classifier producing the prediction
	 * @param instance
	 *            the labelled instance being evaluated
	 * @return the predicted class index (discrete) or value (numeric);
	 *         Double.NaN when no class received any probability mass
	 * @throws Exception
	 *             if the classifier or a statistics update fails
	 */
	private double getPrediction(Classifier classifier, Instance instance)
			throws Exception {
		// Mask the true class so the classifier cannot see it.
		Instance classMissing = instance.clone();
		classMissing.setMissing(_clazz.getIndex());
		double prediction = 0;
		if (_clazz.isDiscrete()) {
			double[] dist = classifier.distribution(classMissing);
			prediction = MinMax.maxIndex(dist);
			if (dist[(int) prediction] <= 0) {
				// No category got any probability: treat as unclassified.
				prediction = Double.NaN;
			}
			updateStatsForClassifier(dist, instance);
		} else {
			// BUG FIX: classify the masked copy, not the original instance —
			// the discrete branch already did so; passing the original let
			// the classifier see the true class value.
			prediction = classifier.classify(classMissing);
			updateStatsForPredictor(prediction, instance);
		}
		return prediction;
	}

	/**
	 * Updates the numeric-class statistics for one prediction: running sums
	 * for the correlation coefficient, scheme/prior entropies and the error
	 * scores.
	 *
	 * @param predictedValue
	 *            the value predicted by the classifier (may be NaN)
	 * @param instance
	 *            the instance that was classified (carries the actual class)
	 * @throws Exception
	 *             if a downstream statistics update fails
	 */
	protected void updateStatsForPredictor(double predictedValue,
			Instance instance) throws Exception {

		double[] values = instance.getValues();
		double weight = instance.getWeight();
		int clazzIndex = _clazz.getIndex();

		// BUG FIX: instances with a missing class value used to fall through
		// to the entropy/error updates below, feeding NaN into the
		// estimators and throwing NPE when _errorEstimator was still null.
		// Count them as missing and stop.
		if (Double.isNaN(values[clazzIndex])) {
			_missingClass += weight;
			return;
		}

		_withClass += weight;
		if (Double.isNaN(predictedValue)) {
			_unclassified += weight;
			return;
		}

		// Running sums used by correlationCoefficient().
		_sumClass += weight * values[clazzIndex];
		_sumSqrClass += weight * values[clazzIndex] * values[clazzIndex];
		_sumClassPredicted += weight * values[clazzIndex] * predictedValue;
		_sumPredicted += weight * predictedValue;
		_sumSqrPredicted += weight * predictedValue * predictedValue;

		// Lazily build the kernel estimators from the buffered training
		// class values the first time a prediction arrives.
		if (_errorEstimator == null) {
			this.setNumericPriorsFromBuffer();
		}

		// Clamp probabilities away from zero so log_2 stays finite.
		double predictedProb = Math.max(_errorEstimator
				.getProbability(predictedValue - values[clazzIndex]),
				MIN_SF_PROB);
		double priorProb = Math.max(_priorErrorEstimator
				.getProbability(values[clazzIndex]), MIN_SF_PROB);

		_sumSchemeEntropy -= Math2.log_2(predictedProb) * weight;
		_sumPriorEntropy -= Math2.log_2(priorProb) * weight;
		_errorEstimator.addValue(predictedValue - values[clazzIndex], weight);

		updateNumericScores(makeDistribution(predictedValue),
				makeDistribution(values[clazzIndex]), weight);
	}

	/**
	 * Accumulates the weighted per-class error sums — scheme error (signed,
	 * absolute, squared) and prior error (absolute, squared) — for one
	 * instance.
	 *
	 * @param predicted the predicted distribution (or value vector)
	 * @param actual    the actual distribution (or value vector)
	 * @param weight    the instance weight
	 */
	private void updateNumericScores(double[] predicted, double[] actual, double weight) {
		double errSum = 0;
		double absErrSum = 0;
		double sqrErrSum = 0;
		double priorAbsErrSum = 0;
		double priorSqrErrSum = 0;

		for (int c = 0; c < _numClasses; c++) {
			// Error of the scheme's prediction for class c.
			double schemeDiff = predicted[c] - actual[c];
			errSum += schemeDiff;
			absErrSum += Math.abs(schemeDiff);
			sqrErrSum += schemeDiff * schemeDiff;

			// Error of the prior (class frequency) for class c.
			double priorDiff = (_clazzPriors[c] / _clazzPriorsSum) - actual[c];
			priorAbsErrSum += Math.abs(priorDiff);
			priorSqrErrSum += priorDiff * priorDiff;
		}

		// Average over the classes and weight by the instance.
		_sumErr += weight * errSum / _numClasses;
		_sumAbsErr += weight * absErrSum / _numClasses;
		_sumSqrErr += weight * sqrErrSum / _numClasses;
		_sumPriorAbsErr += weight * priorAbsErrSum / _numClasses;
		_sumPriorSqrErr += weight * priorSqrErrSum / _numClasses;
	}

	/**
	 * Converts a scalar prediction into a distribution vector: all zeros for
	 * a missing value, a one-hot vector for a discrete class, or the raw
	 * value in slot 0 for a numeric class.
	 *
	 * @param predictedClass the predicted class value (may be NaN)
	 * @return a vector of length {@code _numClasses}
	 */
	private double[] makeDistribution(double predictedClass) {
		double[] dist = new double[_numClasses];
		if (!Double.isNaN(predictedClass)) {
			if (_clazz.isDiscrete()) {
				dist[(int) predictedClass] = 1.0;
			} else {
				dist[0] = predictedClass;
			}
		}
		return dist;
	}

	/**
	 * Returns an index permutation that sorts the given array ascending.
	 * NaN entries are mapped to Double.MAX_VALUE and therefore sort last.
	 * The caller's array is not modified (a private copy is sorted).
	 *
	 * @param array the values to rank
	 * @return indices i0..iN such that array[i0] &lt;= array[i1] &lt;= ...
	 */
	public static/* @pure@ */int[] sort(/* @non_null@ */double[] array) {
		double[] copy = (double[]) array.clone();
		int[] index = new int[copy.length];
		for (int i = 0; i < copy.length; i++) {
			index[i] = i;
			if (Double.isNaN(copy[i])) {
				// Push NaNs to the end of the ordering.
				copy[i] = Double.MAX_VALUE;
			}
		}
		quickSort(copy, index, 0, copy.length - 1);
		return index;
	}

	/**
	 * Recursively sorts the index permutation over array[left..right] using
	 * quicksort; only {@code index} is rearranged, {@code array} is read-only.
	 */
	private static void quickSort(/* @non_null@ */double[] array, /* @non_null@ */
			int[] index, int left, int right) {
		if (left >= right) {
			return; // zero or one element: already sorted
		}
		int middle = partition(array, index, left, right);
		quickSort(array, index, left, middle);
		quickSort(array, index, middle + 1, right);
	}

	/**
	 * Partitions array[index[l..r]] (through the index permutation) around a
	 * pivot taken from the middle position: entries with values below the
	 * pivot end up left of the returned split point, entries above it to the
	 * right. Only {@code index} is mutated; {@code array} is read-only.
	 *
	 * @return the last position of the left partition
	 */
	private static int partition(double[] array, int[] index, int l, int r) {

		double pivot = array[index[(l + r) / 2]];
		int help;

		while (l < r) {
			// Advance l past values already on the correct (left) side.
			while ((array[index[l]] < pivot) && (l < r)) {
				l++;
			}
			// Retreat r past values already on the correct (right) side.
			while ((array[index[r]] > pivot) && (l < r)) {
				r--;
			}
			if (l < r) {
				// Both cursors stopped on misplaced entries: swap them.
				help = index[l];
				index[l] = index[r];
				index[r] = help;
				l++;
				r--;
			}
		}
		// If the cursors met on a value greater than the pivot, it belongs
		// to the right partition; step back so the split point is valid.
		if ((l == r) && (array[index[r]] > pivot)) {
			r--;
		}

		return r;
	}

	/**
	 * Initialises the numeric-class kernel error estimators from the buffered
	 * training class values. The kernel precision is the average gap between
	 * consecutive distinct class values (0.01 when fewer than two values are
	 * buffered), and the single-slot numeric prior is rebuilt as the
	 * weight-weighted sum of the class values.
	 */
	protected void setNumericPriorsFromBuffer() {
		double precision = 0.01; // fallback when no gap can be estimated
		if (_numTrainClazzValues > 1) {
			double[] buffered = new double[_numTrainClazzValues];
			System.arraycopy(_trainClazzValues, 0, buffered, 0,
					_numTrainClazzValues);
			int[] order = sort(buffered);

			// Average the gaps between consecutive distinct values.
			double previous = buffered[order[0]];
			double gapTotal = 0;
			int gapCount = 0;
			for (int i = 1; i < buffered.length; i++) {
				double value = buffered[order[i]];
				if (value != previous) {
					gapTotal += value - previous;
					previous = value;
					gapCount++;
				}
			}
			if (gapCount > 0) {
				precision = gapTotal / gapCount;
			}
		}

		_priorErrorEstimator = new KernelEstimator(precision);
		_errorEstimator = new KernelEstimator(precision);

		_clazzPriors[0] = 0;
		_clazzPriorsSum = 0;
		for (int i = 0; i < _numTrainClazzValues; i++) {
			_clazzPriors[0] += _trainClazzValues[i] * _trainClazzWeights[i];
			_clazzPriorsSum += _trainClazzWeights[i];
			_priorErrorEstimator.addValue(_trainClazzValues[i],
					_trainClazzWeights[i]);
		}
	}

	/**
	 * Updates all discrete-class statistics for one prediction: margins,
	 * misclassification cost, Kononenko &amp; Bratko information, entropies,
	 * error scores and the confusion matrix.
	 *
	 * @param predictedDistribution
	 *            the class probability distribution from the classifier
	 * @param instance
	 *            the instance that was classified (carries the actual class)
	 * @throws Exception
	 *             if a downstream statistics update fails
	 */
	protected void updateStatsForClassifier(double[] predictedDistribution,
			Instance instance) throws Exception {

		double[] values = instance.getValues();
		int clazzIndex = _clazz.getIndex();
		double weight = instance.getWeight();
		if (Double.isNaN(values[clazzIndex])) {
			_missingClass += weight;
			return;
		}
		int actualClass = (int) values[clazzIndex];
		this.updateMargins(predictedDistribution, actualClass, weight);

		// Predicted class = argmax of the distribution; -1 if all mass is 0.
		int predictedClass = -1;
		double bestProb = 0.0;
		for (int i = 0; i < _numClasses; i++) {
			if (predictedDistribution[i] > bestProb) {
				predictedClass = i;
				bestProb = predictedDistribution[i];
			}
		}

		_withClass += weight;
		if (_costMatrix != null) {
			if (predictedClass < 0) {
				// For missing predictions, we assume the worst possible
				// cost. This is pretty harsh. Perhaps we could take the
				// negative of the cost of a correct prediction
				// (-costMatrix.getElement(actualClass, actualClass)),
				// although often this will be zero.
				_totalCost += weight * _costMatrix.getMaxCost(actualClass);
			} else {
				_totalCost += weight
						* _costMatrix.getElement(actualClass, predictedClass);
			}
		}

		// Update counts when no class was predicted
		if (predictedClass < 0) {
			_unclassified += weight;
			return;
		}

		// Clamp probabilities away from zero so log_2 below stays finite.
		// (BUG FIX: leftover System.out.println debug statements removed —
		// library code must not write to stdout.)
		double predictedProb = Math.max(MIN_SF_PROB,
				predictedDistribution[actualClass]);
		double priorProb = Math.max(MIN_SF_PROB, _clazzPriors[actualClass]
				/ _clazzPriorsSum);

		// Kononenko & Bratko information score.
		if (predictedProb >= priorProb) {
			_sumKBInfo += (Math2.log_2(predictedProb) - Math2.log_2(priorProb))
					* weight;
		} else {
			_sumKBInfo -= (Math2.log_2(1.0 - predictedProb) - Math2
					.log_2(1.0 - priorProb)) * weight;
		}

		_sumSchemeEntropy -= Math2.log_2(predictedProb) * weight;
		_sumPriorEntropy -= Math2.log_2(priorProb) * weight;

		updateNumericScores(predictedDistribution,
				makeDistribution(values[clazzIndex]), weight);

		_confusionMatrix.addValue(actualClass, predictedClass, weight);
		if (predictedClass != actualClass) {
			_incorrect += weight;
		} else {
			_correct += weight;
		}
	}
	
	/**
	 * Records the classification margin — the probability assigned to the
	 * actual class minus the highest probability assigned to any other
	 * class — in the cumulative margin histogram.
	 *
	 * @param predictedDistribution the predicted class distribution
	 * @param actualClass           index of the true class
	 * @param weight                the instance weight
	 */
	private void updateMargins(double[] predictedDistribution, int actualClass, double weight) {
		double actualProb = predictedDistribution[actualClass];

		// Highest probability among the *other* classes.
		double bestOther = 0;
		for (int c = 0; c < _numClasses; c++) {
			if (c != actualClass && predictedDistribution[c] > bestOther) {
				bestOther = predictedDistribution[c];
			}
		}

		// Margin lies in [-1, 1]; map it linearly onto [0, _marginResolution].
		double margin = actualProb - bestOther;
		int bin = (int) ((margin + 1.0) / 2.0 * _marginResolution);
		_marginCounts[bin] += weight;
	}

	/**
	 * Builds a human-readable summary of the evaluation statistics.
	 *
	 * @param title
	 *            currently unused — NOTE(review): consider prepending it
	 * @param printComplexityStatistics
	 *            currently unused
	 * @return the formatted multi-line summary
	 */
	public String toSummaryString(String title,
			boolean printComplexityStatistics) {

		StringBuilder buff = new StringBuilder();
		if (_clazz.isDiscrete()) {
			buff.append("\nCorrectly Classified Instances\tcorrect:");
			buff.append(_correct).append("\t").append(this.pctCorrect());
			buff.append("%");

			buff.append("\nIncorrectly Classified Instances   ");
			buff.append(_incorrect).append("\t").append(this.pctIncorrect());
			// BUG FIX: the percent sign was missing on this line while the
			// "correctly classified" line above had one.
			buff.append("%");

			buff.append("\nKappa statistic\t").append(kappa());

		} else {
			buff.append("\nCorrelation coefficient\n");
			try {
				buff.append(correlationCoefficient());
			} catch (Exception e) {
				// Best-effort: keep building the summary even if the
				// correlation cannot be computed.
				e.printStackTrace();
			}
		}

		buff.append("\nMean absolute error\t");
		buff.append(meanAbsoluteError());
		buff.append("\nRoot mean squared error\t");
		buff.append(rootMeanSquaredError());
		buff.append("\nRelative absolute error\t");
		buff.append(relativeAbsoluteError());
		buff.append("\nRoot relative squared error\t");
		buff.append(rootRelativeSquaredError());

		return buff.toString();
	}

	/** Root relative squared error: scheme RMSE as a % of the prior's RMSE. */
	private double rootRelativeSquaredError() {
		return 100.0 * rootMeanSquaredError() / rootMeanPriorSquaredError();
	}

	/** Root mean squared error of the prior over all classified weight. */
	private double rootMeanPriorSquaredError() {
		return Math.sqrt(_sumPriorSqrErr / _withClass);
	}

	/** Relative absolute error: scheme MAE as a % of the prior's MAE. */
	private double relativeAbsoluteError() {
		return 100 * meanAbsoluteError() / meanPriorAbsoluteError();
	}

	/** Mean absolute error of the prior over all classified weight. */
	private double meanPriorAbsoluteError() {
		return _sumPriorAbsErr / _withClass;
	}

	/** Root mean squared error of the scheme over all classified weight. */
	private double rootMeanSquaredError() {
		return Math.sqrt(_sumSqrErr / _withClass);
	}

	/** Mean absolute error of the scheme over all classified weight. */
	private double meanAbsoluteError() {
		return _sumAbsErr / _withClass;
	}

	/**
	 * Returns the correlation coefficient if the class is numeric.
	 *
	 * @return the correlation coefficient
	 * @exception Exception
	 *                if class is not numeric
	 */
	public final double correlationCoefficient() throws Exception {
		// (Co)variances of actual and predicted class values, computed from
		// the running sums accumulated in updateStatsForPredictor.
		double varActual = _sumSqrClass - _sumClass * _sumClass
				/ _withClass;
		double varPredicted = _sumSqrPredicted - _sumPredicted
				* _sumPredicted / _withClass;
		double covariance = _sumClassPredicted - _sumClass * _sumPredicted
				/ _withClass;

		// Undefined (zero or negative variance product): report 0.
		if (this.smOrEq(varActual * varPredicted, 0.0)) {
			return 0.0;
		}
		return covariance / Math.sqrt(varActual * varPredicted);
	}

	/** Returns true when d is smaller than, or within 1e-6 of, e. */
	private boolean smOrEq(double d, double e) {
		double diff = d - e;
		return diff < 1e-6;
	}

	/**
	 * Returns value of kappa statistic if class is nominal.
	 *
	 * @return the value of the kappa statistic
	 */
	public final double kappa() {
		int size = _confusionMatrix.size();
		double[] rowTotals = new double[size];
		double[] colTotals = new double[size];
		double totalWeight = 0;

		// Marginal totals and the grand total of the confusion matrix.
		for (int row = 0; row < size; row++) {
			for (int col = 0; col < size; col++) {
				double cell = _confusionMatrix.get(row, col);
				rowTotals[row] += cell;
				colTotals[col] += cell;
				totalWeight += cell;
			}
		}

		// Observed agreement (diagonal) and chance-expected agreement.
		double observedAgreement = 0;
		double chanceAgreement = 0;
		for (int i = 0; i < size; i++) {
			chanceAgreement += rowTotals[i] * colTotals[i];
			observedAgreement += _confusionMatrix.get(i, i);
		}
		chanceAgreement /= (totalWeight * totalWeight);
		observedAgreement /= totalWeight;

		// Kappa is defined as 1 when chance agreement is already perfect.
		if (chanceAgreement < 1) {
			return (observedAgreement - chanceAgreement)
					/ (1 - chanceAgreement);
		}
		return 1;
	}

	/** Percentage (0-100) of the classified weight that was misclassified. */
	private double pctIncorrect() {
		return 100 * _incorrect / _withClass;
	}

	/** Percentage (0-100) of the classified weight classified correctly. */
	private double pctCorrect() {
		return 100 * _correct / _withClass;
	}

	/**
	 * Returns a multi-line dump of every accumulated statistic, intended for
	 * logging and debugging.
	 */
	@Override
	public String toString() {
		StringBuilder buff = new StringBuilder();
		buff.append("Class: ").append(_clazz);
		buff.append("\nClass priors sum: ").append(_clazzPriorsSum);
		buff.append("\nClass priors: ").append(Arrays.toString(_clazzPriors));
		buff.append("\nConfusion matrix: ").append(_confusionMatrix);
		buff.append("\nNumber of non-missing train class values: ").append(
				_numTrainClazzValues);
		buff.append("\nTraining class values: ").append(
				Arrays.toString(_trainClazzValues));
		buff.append("\nTraining class weights: ").append(
				Arrays.toString(_trainClazzWeights));
		buff.append("\nWith class: ").append(_withClass);
		buff.append("\nUnclassified: ").append(_unclassified);
		buff.append("\nSum class: ").append(_sumClass);
		buff.append("\nSum sqr class: ").append(_sumSqrClass);
		buff.append("\nSum class predicted: ").append(_sumClassPredicted);
		buff.append("\nSum predicted: ").append(_sumPredicted);
		buff.append("\nSum sqr predicted: ").append(_sumSqrPredicted);
		buff.append("\nError estimator: ").append(_errorEstimator);
		buff.append("\nPrior error estimator: ").append(_priorErrorEstimator);
		buff.append("\nSum scheme entropy: ").append(_sumSchemeEntropy);
		buff.append("\nSum prior entropy: ").append(_sumPriorEntropy);
		buff.append("\nNumber of classes: ").append(_numClasses);
		buff.append("\nSum of errors: ").append(_sumErr);
		buff.append("\nSum absolute error: ").append(_sumAbsErr);
		buff.append("\nSum square errors: ").append(_sumSqrErr);
		buff.append("\nSum prior absolute error: ").append(_sumPriorAbsErr);
		// BUG FIX: previously printed _sumSqrErr under the prior label.
		buff.append("\nSum prior square error: ").append(_sumPriorSqrErr);
		// BUG FIX: the separator used to be glued onto the previous line.
		buff.append("\n--------------------------------------------------------");
		buff.append("\nMissing class: ").append(_missingClass);
		buff.append("\nMargin resolution: ").append(_marginResolution);
		// BUG FIX: print the histogram contents, not the array identity.
		buff.append("\nMargin counts: ").append(Arrays.toString(_marginCounts));
		buff.append("\nCost matrix: ").append(_costMatrix);
		buff.append("\nTotal cost: ").append(_totalCost);
		buff.append("\nTotal Kononenko & Bratko Information (sum kb info): ")
				.append(_sumKBInfo);
		buff.append("\nThe weight of all incorrectly classified instances (incorrect): ")
				.append(_incorrect);
		buff.append("\nThe weight of all correctly classified instances: ")
				.append(_correct);
		return buff.toString();
	}
}
