package weka.classifiers.trees.j48;

import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import weka.classifiers.lazy.IBk;

import java.util.Collections;
import java.util.Vector;

public class C45MeetsKNN extends AugmentedClassifierTree {

  /** For serialization. */
  private static final long serialVersionUID = -1671663682003366020L;

  /** kNN classifier trained on the instances that reach this node (set on leaves only). */
  private IBk leafClassifier;

  /**
   * Indices of the attributes used in splits on the path from the root down to
   * this node. When {@link #deleteAttributes} is true, these attributes are
   * removed before the leaf kNN is trained and before it is queried.
   */
  private Vector<Integer> splitsVector;

  /** True if the tree is to be pruned. */
  boolean m_pruneTheTree = false;

  /** The confidence factor for pruning. */
  float m_CF = 0.25f;

  /** Is subtree raising to be performed? */
  boolean m_subtreeRaising = true;

  /** Cleanup after the tree has been built. */
  boolean m_cleanup = true;

  /** If true, split attributes are deleted before the leaf kNN is built/queried. */
  private boolean deleteAttributes = true;

  /**
   * Constructor.
   *
   * @param toSelectLocModel selection method for local splitting model
   * @param pruneTree true if the tree is to be pruned
   * @param cf the confidence factor for pruning
   * @param raiseTree true if subtree raising is to be performed
   * @param cleanup true if cleanup is to be done after the tree has been built
   * @throws Exception if something goes wrong
   */
  public C45MeetsKNN(ModelSelection toSelectLocModel, boolean pruneTree,
      float cf, boolean raiseTree, boolean cleanup) throws Exception {

    super(toSelectLocModel);
    m_pruneTheTree = pruneTree;
    m_CF = cf;
    m_subtreeRaising = raiseTree;
    m_cleanup = cleanup;
  }

  /**
   * Builds the C4.5 tree (collapsing and optionally pruning it), then trains
   * an IBk classifier at every leaf on that leaf's training instances with the
   * split attributes removed.
   *
   * @param data the training data
   * @throws Exception if the classifier can't be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    // can classifier tree handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    buildTree(data, m_subtreeRaising);
    collapse();
    if (m_pruneTheTree) {
      prune();
    }

    // kNN insertion point: attach an IBk classifier to every leaf
    splitsVector = new Vector<Integer>();
    knnRun(splitsVector);

    if (m_cleanup) {
      cleanup(new Instances(data, 0));
    }
  }

  /**
   * Collapses a tree to a node if training error doesn't increase.
   */
  public final void collapse() {

    double errorsOfSubtree;
    double errorsOfTree;
    int i;

    if (!m_isLeaf) {
      errorsOfSubtree = getTrainingErrors();
      errorsOfTree = localModel().distribution().numIncorrect();
      // small tolerance so ties (equal error) also collapse
      if (errorsOfSubtree >= errorsOfTree - 1E-3) {

        // Free adjacent trees
        m_sons = null;
        m_isLeaf = true;

        // Get NoSplit Model for tree.
        m_localModel = new NoSplit(localModel().distribution());
      } else {
        for (i = 0; i < m_sons.length; i++) {
          son(i).collapse();
        }
      }
    }
  }

  /**
   * Computes errors of tree on training data.
   *
   * @return the training errors
   */
  private double getTrainingErrors() {

    double errors = 0;
    int i;

    if (m_isLeaf) {
      return localModel().distribution().numIncorrect();
    } else {
      for (i = 0; i < m_sons.length; i++) {
        errors = errors + son(i).getTrainingErrors();
      }
      return errors;
    }
  }

  /**
   * Returns the training instances stored at this node.
   *
   * @return the training data
   */
  public Instances getInstances() {
    return m_train;
  }

  /**
   * Returns the IBk classifier attached to this node, or null if this node is
   * not a leaf (knnRun only builds classifiers at leaves).
   *
   * @return the leaf's kNN classifier
   */
  public IBk getIBkClassifier() {
    return leafClassifier;
  }

  /**
   * Recursively walks the tree. At each internal node the split-attribute
   * index is recorded; at each leaf an IBk (1-NN) classifier is trained on the
   * node's training instances, optionally with all recorded split attributes
   * deleted first.
   *
   * NOTE(review): when deleteAttributes is set, the deletions below modify
   * m_train in place, so the leaf's stored training data is permanently
   * altered.
   *
   * @param splitVector split-attribute indices collected on the path so far
   * @throws Exception if attribute deletion or IBk training fails
   */
  public void knnRun(Vector<Integer> splitVector) throws Exception {

    // private copy so additions in this branch don't leak to siblings
    splitsVector = new Vector<Integer>(splitVector);
    if (m_isLeaf) {
      // single nearest neighbour at each leaf
      leafClassifier = new IBk(1);

      if (deleteAttributes) {
        // delete in descending index order so earlier deletions don't shift
        // the indices of later ones
        Collections.sort(splitsVector);
        for (int i = splitsVector.size() - 1; i >= 0; i--) {
          int elm = splitsVector.elementAt(i);
          try {
            m_train.deleteAttributeAt(elm);
          } catch (Exception e) {
            System.err.println("Failed to delete attribute at index: " + elm);
            throw e;
          }
        }
      }
      leafClassifier.buildClassifier(m_train);
    } else {
      int cl = ((C45Split) localModel()).attIndex();
      if (!splitsVector.contains(cl)) {
        splitsVector.add(cl);
      }
      for (int i = 0; i < m_sons.length; i++) {
        ((C45MeetsKNN) m_sons[i]).knnRun(splitsVector);
      }
    }
  }

  /**
   * Prunes a tree using C4.5's pruning procedure.
   *
   * @throws Exception if something goes wrong
   */
  public void prune() throws Exception {

    double errorsLargestBranch;
    double errorsLeaf;
    double errorsTree;
    int indexOfLargestBranch;
    C45MeetsKNN largestBranch;
    int i;

    if (!m_isLeaf) {

      // Prune all subtrees.
      for (i = 0; i < m_sons.length; i++) {
        son(i).prune();
      }

      // Compute error for largest branch
      indexOfLargestBranch = localModel().distribution().maxBag();
      if (m_subtreeRaising) {
        errorsLargestBranch = son(indexOfLargestBranch)
            .getEstimatedErrorsForBranch((Instances) m_train);
      } else {
        errorsLargestBranch = Double.MAX_VALUE;
      }

      // Compute error if this Tree would be leaf
      errorsLeaf =
          getEstimatedErrorsForDistribution(localModel().distribution());

      // Compute error for the whole subtree
      errorsTree = getEstimatedErrors();

      // Decide if leaf is best choice.
      if (Utils.smOrEq(errorsLeaf, errorsTree + 0.1)
          && Utils.smOrEq(errorsLeaf, errorsLargestBranch + 0.1)) {

        // Free son Trees
        m_sons = null;
        m_isLeaf = true;

        // Get NoSplit Model for node.
        m_localModel = new NoSplit(localModel().distribution());
        return;
      }

      // Decide if largest branch is better choice
      // than whole subtree.
      if (Utils.smOrEq(errorsLargestBranch, errorsTree + 0.1)) {
        largestBranch = son(indexOfLargestBranch);
        m_sons = largestBranch.m_sons;
        m_localModel = largestBranch.localModel();
        m_isLeaf = largestBranch.m_isLeaf;
        newDistribution(m_train);
        prune();
      }
    }
  }

  /**
   * Returns a newly created tree sharing this tree's configuration.
   *
   * @param data the training data
   * @return the new tree
   * @throws Exception if something goes wrong
   */
  protected AugmentedClassifierTree getNewTree(Instances data) throws Exception {

    C45MeetsKNN newTree =
        new C45MeetsKNN(m_toSelectModel, m_pruneTheTree, m_CF,
            m_subtreeRaising, m_cleanup);
    newTree.buildTree((Instances) data, m_subtreeRaising);

    return newTree;
  }

  /**
   * Computes estimated errors for tree.
   *
   * @return the estimated errors
   */
  private double getEstimatedErrors() {

    double errors = 0;
    int i;

    if (m_isLeaf) {
      return getEstimatedErrorsForDistribution(localModel().distribution());
    } else {
      for (i = 0; i < m_sons.length; i++) {
        errors = errors + son(i).getEstimatedErrors();
      }
      return errors;
    }
  }

  /**
   * Computes estimated errors for one branch.
   *
   * @param data the data to work with
   * @return the estimated errors
   * @throws Exception if something goes wrong
   */
  private double getEstimatedErrorsForBranch(Instances data)
      throws Exception {

    Instances[] localInstances;
    double errors = 0;
    int i;

    if (m_isLeaf) {
      return getEstimatedErrorsForDistribution(new Distribution(data));
    } else {
      // temporarily redistribute the data over this node, then restore the
      // original distribution
      Distribution savedDist = localModel().m_distribution;
      localModel().resetDistribution(data);
      localInstances = (Instances[]) localModel().split(data);
      localModel().m_distribution = savedDist;
      for (i = 0; i < m_sons.length; i++) {
        errors = errors
            + son(i).getEstimatedErrorsForBranch(localInstances[i]);
      }
      return errors;
    }
  }

  /**
   * Computes estimated errors for leaf.
   *
   * @param theDistribution the distribution to use
   * @return the estimated errors
   */
  private double getEstimatedErrorsForDistribution(Distribution
      theDistribution) {

    if (Utils.eq(theDistribution.total(), 0)) {
      return 0;
    } else {
      return theDistribution.numIncorrect()
          + Stats.addErrs(theDistribution.total(),
              theDistribution.numIncorrect(), m_CF);
    }
  }

  /**
   * Method just exists to make program easier to read.
   *
   * @return the local split model
   */
  private ClassifierSplitModel localModel() {

    return (ClassifierSplitModel) m_localModel;
  }

  /**
   * Computes new distributions of instances for nodes
   * in tree.
   *
   * @param data the data to compute the distributions for
   * @throws Exception if something goes wrong
   */
  private void newDistribution(Instances data) throws Exception {

    Instances[] localInstances;

    localModel().resetDistribution(data);
    m_train = data;
    if (!m_isLeaf) {
      localInstances =
          (Instances[]) localModel().split(data);
      for (int i = 0; i < m_sons.length; i++) {
        son(i).newDistribution(localInstances[i]);
      }
    } else {

      // Check whether there are some instances at the leaf now!
      if (!Utils.eq(data.sumOfWeights(), 0)) {
        m_isEmpty = false;
      }
    }
  }

  /**
   * Method just exists to make program easier to read.
   */
  private C45MeetsKNN son(int index) {

    return (C45MeetsKNN) m_sons[index];
  }

  /**
   * Classifies an instance by picking the class with the highest probability
   * as computed by {@link #getProbs}, which delegates to the per-leaf IBk
   * classifiers.
   *
   * NOTE(review): getProbs never reassigns its leaf parameter (the assignment
   * inside it is commented out, and Java passes object references by value),
   * so leaf == this always holds below and the IBk-delegation branch is
   * effectively dead code. Kept for interface/behavior compatibility.
   *
   * @param instance the instance to classify
   * @return the predicted class index
   * @throws Exception if something goes wrong
   */
  public double classifyInstance(Instance instance)
      throws Exception {

    double maxProb = -1;
    double currentProb;
    int maxIndex = 0;
    int j;
    C45MeetsKNN leaf = this;

    for (j = 0; j < instance.numClasses(); j++) {
      currentProb = getProbs(j, instance, 1, leaf);
      if (Utils.gr(currentProb, maxProb)) {
        maxIndex = j;
        maxProb = currentProb;
      }
    }

    if (leaf == this) {
      return (double) maxIndex;
    } else {
      return leaf.getIBkClassifier().classifyInstance(instance);
    }
  }

  /**
   * Help method for computing class probabilities of
   * a given instance. At a leaf the probability comes from the leaf's IBk
   * classifier, queried on a copy of the instance with the same attributes
   * deleted as were removed from the leaf's training data.
   *
   * @param classIndex the class index
   * @param instance the instance to compute the probabilities for
   * @param weight the weight to use
   * @param leaf tracking parameter (never reassigned; see classifyInstance)
   * @return the probs
   * @throws Exception if something goes wrong
   */
  private double getProbs(int classIndex, Instance instance, double weight, C45MeetsKNN leaf)
      throws Exception {
    double prob = 0;
    if (m_isLeaf) {
      // copy so attribute deletion doesn't mutate the caller's instance
      Instance inst = new Instance(instance);
      if (deleteAttributes) {
        // delete in descending index order to keep remaining indices valid
        Collections.sort(splitsVector);
        for (int i = splitsVector.size() - 1; i >= 0; i--) {
          int elm = splitsVector.elementAt(i);
          inst.deleteAttributeAt(elm);
        }
      }
      double[] dist = getIBkClassifier().distributionForInstance(inst);
      return weight * dist[classIndex];
    } else {
      int treeIndex = localModel().whichSubset(instance);
      if (treeIndex == -1) {
        // instance can't be assigned to a single branch (e.g. missing split
        // value): spread its weight over all non-empty branches
        double[] weights = localModel().weights(instance);
        for (int i = 0; i < m_sons.length; i++) {
          if (!son(i).m_isEmpty) {
            prob += son(i).getProbs(classIndex, instance,
                weights[i] * weight, leaf);
          }
        }
        return prob;
      } else {
        if (son(treeIndex).m_isEmpty) {
          // empty branch: fall back to this node's local class distribution
          return weight * localModel().classProb(classIndex, instance,
              treeIndex);
        } else {
          return son(treeIndex).getProbs(classIndex, instance, weight, leaf);
        }
      }
    }
  }

  /**
   * Returns class probabilities for an instance, computed via
   * {@link #getProbs} (which at the leaves delegates to the per-leaf IBk
   * classifier).
   *
   * NOTE(review): unlike standard J48, the resulting array is not normalized
   * here, so the entries may not sum to exactly 1 when an instance's weight
   * is split over several branches — confirm whether callers rely on a
   * normalized distribution.
   *
   * @param instance the instance to get the distribution for
   * @param useLaplace not used by this implementation
   * @return the class distribution
   * @throws Exception if something goes wrong
   */
  public double[] distributionForInstance(Instance instance,
      boolean useLaplace)
      throws Exception {

    double[] doubles = new double[instance.numClasses()];
    C45MeetsKNN leaf = this;
    for (int i = 0; i < doubles.length; i++) {
      doubles[i] = getProbs(i, instance, 1, leaf);
    }

    return doubles;
  }

}
