/**
 * 
 */
package weka.classifiers.meta;

import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.SingleClassifierEnhancer;
import weka.classifiers.trees.RandomTree;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.Utils;
import weka.core.neighboursearch.LinearNNSearch;
import weka.core.neighboursearch.NearestNeighbourSearch;

/**
 * Ensemble classifier that trains a bagged ensemble of random trees and, for
 * each test instance, selects a sub-ensemble based on performance in the
 * instance's local neighbourhood (its k nearest training neighbours).
 *
 * Algorithmic description: train L diverse models to form ensemble M; for each
 * test instance find its nearest neighbours and choose models locally, either
 * greedily (by accuracy or by a vote-based contribution measure) or "globally"
 * by training the base classifier on the neighbourhood itself.
 *
 * @author TUL
 */
public class LocallySelectedEnsemble extends SingleClassifierEnhancer implements
		OptionHandler {

	/** For serialization. */
	private static final long serialVersionUID = 8510814946404379027L;

	/** Fraction of the training set used as the local neighbourhood size. */
	private double kPercent = 0.10;

	/** The available member-selection strategies. */
	public static final Tag[] TAGS_eval = {
		new Tag(0, "Greedy, Accuracy"),
		new Tag(1, "Greedy, Contribution"),
		new Tag(2, "Global") };

	/** ID (into TAGS_eval) of the selected strategy. */
	public int m_eval = 0;

	/** Training data with missing-class instances removed. */
	private Instances m_Train;

	/** Search structure for finding a test instance's neighbourhood. */
	private NearestNeighbourSearch m_NNSearch = new LinearNNSearch();

	/** Number of neighbours, derived from kPercent in buildClassifier(). */
	private int m_kNN;

	/** Members of the bagged ensemble. */
	private List<Classifier> m_BaseClassifiers;

	/** Working array holding the currently selected sub-ensemble. */
	private Classifier[] m_Classifiers;

	/** The bagger. */
	protected Bagging bagging = null;

	/** Number of trees grown by the bagger (-I option). */
	private int m_numTrees;

	/**
	 * Accumulated votes of the already-selected members:
	 * numInstances x numClasses.
	 */
	private double[][] numPredictionsPerClass;

	/**
	 * Cached base-classifier predictions on the neighbourhood:
	 * numInstances x numBaseClassifiers.
	 */
	private double[][] predictions;

	/** Neighbourhood of the test instance currently being classified. */
	private Instances neighbours;

	/** Indices (into m_BaseClassifiers) of the greedily selected members. */
	private int[] ensembleIndeces;

	/**
	 * Builds the ensemble: trains a bagged ensemble of random trees on the
	 * training data and prepares the nearest neighbour search used at
	 * prediction time. Member selection itself happens per test instance in
	 * distributionForInstance().
	 *
	 * @param instances
	 *            set of instances serving as training data
	 * @throws Exception
	 *             if the classifier has not been generated successfully
	 */
	public void buildClassifier(Instances instances) throws Exception {

		// can classifier handle the data?
		getCapabilities().testWithFail(instances);

		// remove instances with missing class
		m_Train = new Instances(instances);
		m_Train.deleteWithMissingClass();

		// set up the bagger and build the forest of random trees
		bagging = new Bagging();
		bagging.setClassifier(new RandomTree());
		bagging.setNumIterations(m_numTrees);
		bagging.setCalcOutOfBag(false);
		bagging.buildClassifier(m_Train);
		m_BaseClassifiers = bagging.getEnsembleMembers();

		m_NNSearch.setInstances(m_Train);

		// neighbourhood size as a fraction of the training set, at least 1
		m_kNN = (int) (m_Train.numInstances() * kPercent);
		if (m_kNN == 0)
			m_kNN = 1;
	}

	/**
	 * Calculates the class membership probabilities for the given test
	 * instance, after selecting a sub-ensemble on the instance's
	 * neighbourhood according to the configured strategy.
	 *
	 * @param instance
	 *            the instance to be classified
	 * @return predicted class probability distribution
	 * @throws Exception
	 *             if an error occurred during the prediction
	 */
	public double[] distributionForInstance(Instance instance) throws Exception {

		if (m_Train.numInstances() == 0) {
			throw new Exception("No training instances!");
		}
		m_NNSearch.addInstanceInfo(instance);
		neighbours = m_NNSearch.kNearestNeighbours(instance, m_kNN);
		initiatePredictions();

		switch (m_eval) {
		case 0:
		case 1:
			// greedily grow the sub-ensemble, then keep only its best prefix
			int size = useGreedySelection(instance);
			Classifier[] best = new Classifier[size];
			System.arraycopy(m_Classifiers, 0, best, 0, size);
			m_Classifiers = best;
			bagging.setClassifiers(m_Classifiers);
			return bagging.distributionForInstance(instance);
		case 2:
			// NOTE(review): trains the base classifier on a single nearest
			// neighbour; confirm this is intended rather than the full
			// k-neighbourhood already computed above.
			neighbours = m_NNSearch.kNearestNeighbours(instance, 1);
			getClassifier().buildClassifier(neighbours);
			return getClassifier().distributionForInstance(instance);
		}
		throw new Exception(
				"LocalSelectedEnsemble:distributionForInstance: Impossible point to reach");
	}

	/**
	 * Greedy forward selection of ensemble members on the current
	 * neighbourhood. Starts from the single most accurate base classifier and
	 * repeatedly appends the member chosen by the configured criterion
	 * (accuracy or contribution), tracking the prefix size that scored best.
	 * The selected members are left in m_Classifiers in selection order.
	 *
	 * Note: a fresh Evaluation is created per measurement because Weka's
	 * Evaluation.evaluateModel() accumulates statistics across calls; the
	 * previous code reused one instance and scored candidates on cumulative
	 * error.
	 *
	 * @param instance
	 *            the instance being classified (the neighbourhood has already
	 *            been computed from it)
	 * @return the size of the best-performing prefix of m_Classifiers
	 * @throws Exception
	 *             if evaluating a candidate ensemble fails
	 */
	private int useGreedySelection(Instance instance)
			throws Exception {
		double maxContribution = -1;
		int selected = 0;
		// find the single most accurate base classifier on the neighbourhood
		for (int j = 0; j < m_BaseClassifiers.size(); j++) {
			Evaluation evaluation = new Evaluation(neighbours);
			evaluation.setPriors(m_Train);
			evaluation.evaluateModel(m_BaseClassifiers.get(j), neighbours);
			if (maxContribution < 1 - evaluation.errorRate()) {
				maxContribution = 1 - evaluation.errorRate();
				selected = j;
			}
		}

		Classifier[] tmpClassifiers = new Classifier[1];
		ensembleIndeces = new int[m_BaseClassifiers.size()];
		tmpClassifiers[0] = m_BaseClassifiers.get(selected);
		// keep m_Classifiers valid even if there is only one base classifier
		m_Classifiers = tmpClassifiers;
		List<Integer> unselectedClassifiers = new ArrayList<Integer>();
		for (int i = 0; i < m_BaseClassifiers.size(); i++)
			unselectedClassifiers.add(i);
		unselectedClassifiers.remove(Integer.valueOf(selected));
		initiatePredictionsPerClass(selected);
		ensembleIndeces[0] = selected;

		// the singleton ensemble is the initial best; larger prefixes must
		// beat its accuracy (the previous code reset the best score to 0 and
		// could return 0, i.e. an empty ensemble)
		int maxIdx = 1;
		for (int i = 1; i < m_BaseClassifiers.size(); i++) {
			m_Classifiers = new Classifier[i + 1];
			System.arraycopy(tmpClassifiers, 0, m_Classifiers, 0, i);

			if (m_eval == 0)
				selected = selectOnAccuracy(unselectedClassifiers);
			else
				selected = selectOnContribution(unselectedClassifiers);
			m_Classifiers[i] = m_BaseClassifiers.get(selected);
			ensembleIndeces[i] = selected;
			unselectedClassifiers.remove(Integer.valueOf(selected));
			bagging.setClassifiers(m_Classifiers);
			Evaluation evaluation = new Evaluation(neighbours);
			evaluation.setPriors(m_Train);
			evaluation.evaluateModel(bagging, neighbours);
			if (maxContribution < 1 - evaluation.errorRate()) {
				maxContribution = 1 - evaluation.errorRate();
				maxIdx = i + 1;
			}

			updatePredictionsPerClass(selected);
			tmpClassifiers = m_Classifiers;
		}
		return maxIdx;
	}

	/**
	 * Caches every base classifier's prediction for every neighbourhood
	 * instance in predictions[instance][classifier].
	 *
	 * @throws Exception
	 *             if a base classifier fails to classify a neighbour
	 */
	private void initiatePredictions() throws Exception {
		predictions = new double[neighbours.numInstances()][m_BaseClassifiers.size()];
		for (int i = 0; i < neighbours.numInstances(); i++)
			for (int j = 0; j < m_BaseClassifiers.size(); j++)
				predictions[i][j] = m_BaseClassifiers.get(j).classifyInstance(neighbours.get(i));
	}

	/**
	 * Resets the per-instance vote counts and records the first selected
	 * classifier's votes.
	 *
	 * @param classifier
	 *            index of the first selected base classifier
	 * @throws Exception
	 *             if updating the counts fails
	 */
	private void initiatePredictionsPerClass(int classifier) throws Exception {
		numPredictionsPerClass = new double[neighbours.numInstances()][neighbours.numClasses()];
		updatePredictionsPerClass(classifier);
	}

	/**
	 * Adds the given classifier's cached predictions to the per-instance vote
	 * counts.
	 *
	 * @param classifier
	 *            index of the newly selected base classifier
	 * @throws Exception
	 *             if the cached predictions are unavailable
	 */
	private void updatePredictionsPerClass(int classifier) throws Exception {
		for (int j = 0; j < neighbours.numInstances(); j++) {
			numPredictionsPerClass[j][(int) predictions[j][classifier]]++;
		}
	}

	/**
	 * Picks, from the unselected classifiers, the candidate whose simulated
	 * vote adds the largest total margin-based contribution over the
	 * neighbourhood. For each neighbour the candidate's cached prediction is
	 * added to a copy of the current vote counts, and a local contribution is
	 * scored from the majority / runner-up counts relative to the true class.
	 *
	 * @param unselectedClassifiers
	 *            indices of the not-yet-selected members
	 * @return the index of the candidate with the highest total contribution
	 * @throws Exception
	 *             if the cached predictions are unavailable
	 */
	private int selectOnContribution(List<Integer> unselectedClassifiers) throws Exception {
		double maxContribution = -1;

		int selected = 0;
		for (int classifier : unselectedClassifiers) {
			double contribution = 0;
			for (int j = 0; j < neighbours.numInstances(); j++) {
				int pred = (int) predictions[j][classifier],
					cl = (int) neighbours.get(j).classValue(),
					majCl = 0, secCl = 0;
				// simulate this candidate's vote on a COPY of the shared vote
				// counts; the previous code aliased numPredictionsPerClass[j]
				// and the increment corrupted the counts for later candidates
				double[] localPredPerClass = numPredictionsPerClass[j].clone();
				localPredPerClass[pred]++;
				// locate majority and runner-up classes under the simulated vote
				double majPredPerClass = 0, secPredPerClass = 0;
				for (int i = 0; i < neighbours.numClasses(); i++) {
					if (localPredPerClass[i] > majPredPerClass) {
						secPredPerClass = majPredPerClass;
						secCl = majCl;
						majPredPerClass = localPredPerClass[i];
						majCl = i;
					} else if (localPredPerClass[i] > secPredPerClass) {
						secPredPerClass = localPredPerClass[i];
						secCl = i;
					}
				}

				double localCont;
				if (pred == cl) {
					if (pred == majCl)
						localCont = localPredPerClass[secCl];
					else
						localCont = 2 * localPredPerClass[majCl] - localPredPerClass[pred];
				} else {
					localCont = localPredPerClass[cl] - localPredPerClass[pred]
							- localPredPerClass[majCl];
				}
				contribution += localCont;
			}
			if (maxContribution < contribution) {
				maxContribution = contribution;
				selected = classifier;
			}
		}
		return selected;
	}

	/**
	 * Picks, from the unselected classifiers, the candidate whose addition to
	 * the current sub-ensemble yields the highest accuracy on the
	 * neighbourhood. Candidates are trialled in the last slot of
	 * m_Classifiers.
	 *
	 * @param unselectedClassifiers
	 *            indices of the not-yet-selected members
	 * @return the index of the most accurate candidate
	 * @throws Exception
	 *             if evaluating a candidate ensemble fails
	 */
	private int selectOnAccuracy(List<Integer> unselectedClassifiers) throws Exception {
		double maxAccuracy = -1;

		int selected = 0;
		for (int classifier : unselectedClassifiers) {
			m_Classifiers[m_Classifiers.length - 1] = m_BaseClassifiers.get(classifier);
			bagging.setClassifiers(m_Classifiers);
			// fresh Evaluation per candidate: evaluateModel() accumulates
			// statistics across calls and would bias later candidates
			Evaluation evaluation = new Evaluation(neighbours);
			evaluation.setPriors(m_Train);
			evaluation.evaluateModel(bagging, neighbours);
			if (maxAccuracy < 1 - evaluation.errorRate()) {
				maxAccuracy = 1 - evaluation.errorRate();
				selected = classifier;
			}
		}
		return selected;
	}

	/**
	 * Returns an enumeration describing the available options
	 *
	 * @return an enumeration of all the available options
	 */
	public Enumeration<Option> listOptions() {

		Vector<Option> newVector = new Vector<Option>();

		newVector.addElement(new Option(
				"\tNumber of trees in the bagged ensemble.\n"
						+ "\t(Default = 10)", "I", 1,
				"-I <number of trees>"));

		newVector.addElement(new Option(
				"\tPercent of nearest neighbours (k) used in classification.\n"
						+ "\t(Default = 0.1)", "P", 1,
				"-P <percent of neighbors>"));

		newVector.addElement(new Option(
				"\tDetermines if and how a local search for an optimal k is performed.\n"
						+ "\t(Default Basic)", "S", 1, "-S"));

		Enumeration<Option> en = super.listOptions();
		while (en.hasMoreElements()) {
			newVector.addElement(en.nextElement());
		}

		return newVector.elements();
	}

	/**
	 * Parses a given list of options. Valid options are:
	 * <p>
	 *
	 * -I &lt;number of trees&gt; <br>
	 * Number of trees in the bagged ensemble (default 10).
	 * <p>
	 *
	 * -P &lt;percent of neighbors&gt; <br>
	 * Fraction of the training set used as the local neighbourhood
	 * (default 0.1).
	 * <p>
	 *
	 * -S &lt;strategy&gt; <br>
	 * Member-selection strategy: 0 = greedy by accuracy, 1 = greedy by
	 * contribution, 2 = global (default 0).
	 * <p>
	 *
	 * @param options
	 *            the list of options as an array of strings
	 * @exception Exception
	 *                if an option is not supported
	 */
	public void setOptions(String[] options) throws Exception {

		String tmpStr = Utils.getOption('I', options);
		if (tmpStr.length() != 0) {
			m_numTrees = Integer.parseInt(tmpStr);
		} else {
			m_numTrees = 10;
		}

		String knnString = Utils.getOption('P', options);
		if (knnString.length() != 0) {
			setKPercent(Double.parseDouble(knnString));
		} else {
			setKPercent(0.10);
		}

		String tmp = Utils.getOption("S", options);
		if (tmp.length() != 0)
			setEvaluation(new SelectedTag(Integer.parseInt(tmp), TAGS_eval));
		else
			setEvaluation(new SelectedTag(m_eval, TAGS_eval));

		super.setOptions(options);
	}

	/**
	 * Gets the current settings of the Classifier.
	 *
	 * @return an array of strings suitable for passing to setOptions
	 */
	public String[] getOptions() {

		Vector<String> result = new Vector<String>();

		result.add("-I");
		result.add("" + getNumTrees());

		result.add("-P");
		result.add("" + getKPercent());

		result.add("-S");
		result.add("" + m_eval);

		for (String option : super.getOptions()) {
			result.add(option);
		}

		return result.toArray(new String[result.size()]);
	}

	/**
	 * Returns the tip text for this property
	 *
	 * @return tip text for this property suitable for displaying in the
	 *         explorer/experimenter gui
	 */
	public String numTreesTipText() {
		return "The number of trees to be generated.";
	}

	/**
	 * Get the value of numTrees.
	 *
	 * @return Value of numTrees.
	 */
	public int getNumTrees() {

		return m_numTrees;
	}

	/**
	 * Set the value of numTrees.
	 *
	 * @param newNumTrees
	 *            Value to assign to numTrees.
	 */
	public void setNumTrees(int newNumTrees) {

		m_numTrees = newNumTrees;
	}

	/**
	 * Returns the tip text for this property
	 *
	 * @return tip text for this property suitable for displaying in the
	 *         explorer/experimenter gui
	 */
	public String evaluationTipText() {
		return "The strategy used to select ensemble members locally: "
				+ "greedy by accuracy, greedy by contribution, or global.";
	}

	/**
	 * Get the member-selection strategy.
	 *
	 * @return the selected strategy as a SelectedTag over TAGS_eval.
	 */
	public SelectedTag getEvaluation() {

		return new SelectedTag(m_eval, TAGS_eval);
	}

	/**
	 * Set the member-selection strategy.
	 *
	 * @param value
	 *            the strategy to use; ignored unless it is a tag from
	 *            TAGS_eval.
	 */
	public void setEvaluation(SelectedTag value) {
		if (value.getTags() == TAGS_eval)
			m_eval = value.getSelectedTag().getID();
	}

	/**
	 * Returns the tip text for this property.
	 *
	 * @return tip text for this property suitable for displaying in the
	 *         explorer/experimenter gui
	 */
	public String KPercentTipText() {
		return "The percent of neighbours to use.";
	}

	/**
	 * Set the percent of neighbours the learner is to use.
	 *
	 * @param k
	 *            the percent of neighbours.
	 */
	public void setKPercent(double k) {
		kPercent = k;
	}

	/**
	 * Gets the percent of neighbours the learner will use.
	 *
	 * @return the percent of neighbours.
	 */
	public double getKPercent() {

		return kPercent;
	}

}
