package weka.classifiers.trees;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;

import weka.classifiers.Classifier;
import weka.classifiers.RandomizableClassifier;
import weka.core.AdditionalMeasureProducer;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.NoSupportForMissingValuesException;
import weka.core.Option;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

/**
<!-- globalinfo-start -->
* Class for constructing a decision tree based on the LSID3 algorithm. Can only deal with nominal attributes. No missing values allowed. Empty leaves may result in unclassified instances. For more information see: <br/>
* <br/>
* S. Esmeir (2006). Anytime learning of decision trees.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* &#64;article{Esmeir2006,
*    author = {S. Esmeir},
*    journal = {Machine Learning},
*    number = {1},
*    pages = {897-900},
*    title = {Anytime learning of decision trees.},
*    volume = {1},
*    year = {2006}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
* Valid options are: <p/>
* 
* <pre> -nts
*  If set, classifier is run in LSID3 mode, 
*  otherwise classifier is run in ID3 mode. 
* </pre>
* 
<!-- options-end -->
*
* @author Semion Arest (semjon.arest@gmail.com)
*/
public class LSID3 extends Classifier
		implements AdditionalMeasureProducer, TechnicalInformationHandler
{
	private Attribute m_Attribute;				//Attribute this node splits on; null when the node is a leaf.
	private int m_AttIdx;						//Index of the chosen split attribute (-1 until one is chosen).
	
	private Attribute m_ClassAttribute;			//Class attribute of the training data (set on leaves only).
	private double m_ClassValue;				//Majority class value stored at a leaf (missing for empty leaves).
	private double[] m_Distribution;			//Normalized class distribution at a leaf.
	
	private LSID3[] m_Successors;				//One child subtree per value of m_Attribute; null for leaves.

	private int nts;							//Sample size: lookahead trees built per candidate split (<= 0 means plain ID3).
	private int numLeaves;						//Cached result of measureNumLeaves().
	
	private boolean isLSID3TreeChanged;			//Marks the cached leaf count as stale so it gets recomputed.
	
	private List<String> m_excAttributeNames;	//Attribute names already used on the path from the root; skipped when splitting.
	
	/** Creates an untrained node that behaves like ID3 (nts = 0) until setNts is called. */
	public LSID3()
	{
		m_Attribute = null;
		m_AttIdx = -1;
		m_ClassAttribute = null;
		m_ClassValue = -1.0;
		m_Distribution = null;
		m_Successors = null;
		
		nts = 0;
		numLeaves = 0;
		
		isLSID3TreeChanged = true;
		m_excAttributeNames = new ArrayList<String>();
	}
	/**
	   * Returns a string describing the classifier for display in the GUI.
	   * @return a description suitable for the GUI.
	   */
	public String globalInfo() 
	{
		StringBuilder info = new StringBuilder();
		info.append("Class for constructing an unpruned decision tree based on the LSID3 ");
		info.append("algorithm. Can only deal with nominal attributes. No missing values ");
		info.append("allowed. Empty leaves may result in unclassified instances. For more ");
		info.append("information see: \n\n");
		info.append(getTechnicalInformation().toString());
		return info.toString();
	}
	  
	/**
	   * Returns an instance of a TechnicalInformation object, containing 
	   * detailed information about the technical background of this class,
	   * e.g., paper reference or book this class is based on.
	   * 
	   * @return the technical information about this class
	   */
	@Override
	public TechnicalInformation getTechnicalInformation()
	{
		TechnicalInformation info = new TechnicalInformation(Type.ARTICLE);
		
		info.setValue(Field.AUTHOR, "S. Esmeir");
		info.setValue(Field.YEAR, "2006");
		info.setValue(Field.TITLE, "Anytime learning of decision trees");
		info.setValue(Field.JOURNAL, "Machine Learning");
		info.setValue(Field.VOLUME, "1");
		info.setValue(Field.NUMBER, "1");
		info.setValue(Field.PAGES, "897-900");
		
		return info;
	}

	/**
	   * Returns default capabilities of the classifier: nominal attributes,
	   * a nominal class, and no minimum number of training instances.
	   *
	   * @return      the capabilities of this classifier
	   */
	  public Capabilities getCapabilities() 
	  {
	    Capabilities capabilities = super.getCapabilities();
	    capabilities.disableAll();

	    // Only nominal attributes and a nominal class are supported.
	    capabilities.enable(Capability.NOMINAL_ATTRIBUTES);
	    capabilities.enable(Capability.NOMINAL_CLASS);

	    // Even an empty dataset is acceptable.
	    capabilities.setMinimumNumberInstances(0);

	    return capabilities;
	  }

	  
	 /**
	   * Returns an enumeration of the additional measure names
	   * (only "measureNumLeaves" is provided).
	   *
	   * @return an enumeration of the measure names
	   */
	@Override
	public Enumeration<String> enumerateMeasures()
	{
		Vector<String> measures = new Vector<String>(1);
		measures.add("measureNumLeaves");
		
		return measures.elements();
	}

	/**
	   * Returns the value of the named measure.
	   *
	   * @param measureName the name of the measure to query for its value
	   * @return the value of the named measure
	   * @exception IllegalArgumentException if the named measure is not supported
	   */
	@Override
	public double getMeasure(String measureName)
	{
		// Guard clause: reject anything but the single supported measure.
		if (!measureName.equalsIgnoreCase("measureNumLeaves"))
		{
			throw new IllegalArgumentException(measureName + " not supported (LSID3)");
		}
		return measureNumLeaves();
	}

	/**
	   * Builds LSID3 decision tree classifier.
	   *
	   * @param data the training data
	   * @exception Exception if classifier can't be built successfully
	   */
	@Override
	public void buildClassifier(Instances data) throws Exception
	{
		//Can classifier handle the data?
	    getCapabilities().testWithFail(data);

	    //Remove instances with missing class (work on a copy of the data).
	    data = new Instances(data);
	    data.deleteWithMissingClass();
	    
	    //BUGFIX: reset the cached leaf count so that retraining the same
	    //classifier instance does not report a stale measureNumLeaves value.
	    numLeaves = 0;
	    isLSID3TreeChanged = true;
	    
	    makeTree(data);
	    
	}
	
	/***
	 * Returns the cached size measure of the built tree.
	 * NOTE(review): each non-null child contributes 1 + its own count, so
	 * internal descendants are counted as well - this measures tree size
	 * rather than a strict leaf count, and a tree whose root is itself a
	 * leaf (m_Successors == null) reports 0. Confirm against the intended
	 * "measureNumLeaves" semantics before changing.
	 * The result is accumulated into numLeaves only while
	 * isLSID3TreeChanged is true; afterwards the cached value is returned.
	 * @return	number of leaves.
	 */
	private double measureNumLeaves()
	{
		if (isLSID3TreeChanged)
		{
			if (m_Successors != null)
			{
				for (LSID3 t : m_Successors)
				{
					if (t != null)
					{
						numLeaves += 1 + t.measureNumLeaves();
					}
				}
			}
		}
		
		isLSID3TreeChanged = false;
		return numLeaves;
	}
	
	 /**
	   * Recursively builds an LSID3 (sub)tree: empty data becomes an
	   * unclassified leaf, pure or attribute-exhausted nodes become class
	   * leaves, and every other node splits on the attribute chosen by
	   * chooseAtrribute (lookahead sampling when nts &gt; 0, else ID3).
	   *
	   * @param data the training data reaching this node
	   */
	private void makeTree(Instances data)
	{
		
		// Check if no instances have reached this node.
	    if (data.numInstances() == 0) 
	    {
	      // Unclassified leaf: missing class value, all-zero distribution.
	      m_Attribute = null;
	      m_ClassValue = Instance.missingValue();
	      m_Distribution = new double[data.numClasses()];
	      return;
	    }

	    //Choose attribute with minimum tree size in sample.
	    m_Attribute = chooseAtrribute(data);
	    if (isLeaf(data, m_Attribute))
	    {
	    	// Leaf: record the empirical class distribution and majority class.
	    	m_Attribute = null;
	    	m_Distribution = new double[data.numClasses()];
	 
	    	Enumeration instEnum = data.enumerateInstances();
	    	while (instEnum.hasMoreElements()) 
	    	{
		        Instance inst = (Instance) instEnum.nextElement();
		        m_Distribution[(int) inst.classValue()]++;
	    	}
	    	
	      
	      Utils.normalize(m_Distribution);
	      
	      m_ClassValue = Utils.maxIndex(m_Distribution);
	      m_ClassAttribute = data.classAttribute();	  
	    }
	    else 
	    {
	      // Internal node: one successor per value of the chosen attribute.
	      Instances[] splitData = LSID3Utils.splitData(data, m_Attribute);
	      
	      m_Successors = new LSID3[m_Attribute.numValues()];
	      
	      // Children must not reuse attributes already consumed on this path.
	      List<String> exc = addExcludedAttribute(m_Attribute);
	      for (int j = 0; j < m_Attribute.numValues(); j++) 
	      {
	        m_Successors[j] = new LSID3();
	        m_Successors[j].setNts(this.nts);
	        m_Successors[j].setExcludedAttributes(exc);
	        m_Successors[j].makeTree(splitData[j]);
	      }	      
	    }
	}
	
	/**
	 * Parses a given list of options. Recognizes -nts (number of trees in the
	 * sample); all remaining options are handled by the superclass.
	 *
	 * @param options the list of options as an array of strings
	 * @throws Exception if an option is not supported
	 */
	public void setOptions(String[] options) throws Exception
	{
		String ntsString = Utils.getOption("nts", options);
		setNts(ntsString.length() == 0 ? 0 : Integer.parseInt(ntsString));
		super.setOptions(options);
	}
	
	/***
	 * Chooses the split attribute: the one whose sampled lookahead trees are
	 * smallest (LSID3 mode, nts &gt; 0), or the ID3 information-gain choice
	 * when nts &lt;= 0. Attributes already used on the path are skipped.
	 * @param data	- given data
	 * @return	chosen attribute
	 */
	private Attribute chooseAtrribute(Instances data)
	{		
		Attribute attr = null;
		
		//If sample size is 0 then choose attribute like Id3.
		if (nts <= 0)
		{
			attr = id3ChooseAttribute(data);
		}
		//Else for each attribute generate a sample of nts subtrees, then
		//choose the attribute x with the minimal tree size rooted at x.
		else
		{
			//BUGFIX: sized over ALL attributes. Attribute.index() can be any
			//value in [0, numAttributes) when the class attribute is not the
			//last one, so the old size of numAttributes - 1 could overflow.
			//The class attribute's slot simply stays at MAX_VALUE, exactly
			//like excluded attributes, and is never selected by minIndex
			//while any candidate remains.
			int[] sampleTreesMinSizes = new int[data.numAttributes()];
			Arrays.fill(sampleTreesMinSizes, Integer.MAX_VALUE);
			
			Enumeration attributes = data.enumerateAttributes();
			while (attributes.hasMoreElements())
			{
				Attribute a = (Attribute) attributes.nextElement();
				//Skip attributes already consumed on the path from the root.
				if (!m_excAttributeNames.contains(a.name()))
				{
					sampleTreesMinSizes[a.index()] = 0;
					Instances[] subsets = LSID3Utils.splitData(data, a);
					for (Instances subset : subsets)
					{
						//Tree size is computed by summing up sizes of sons' subtrees.
						sampleTreesMinSizes[a.index()] += computeMinSizeTree(subset);
					}
				}
			}
			
			//Choose attribute for which tree size is minimal.
			m_AttIdx = Utils.minIndex(sampleTreesMinSizes);
			attr = data.attribute(m_AttIdx);
			
		}
		
		return attr;
	}
	
	/**
	 * Decides whether this node must become a leaf: either the chosen split
	 * yields full information gain (pure node), or every non-class attribute
	 * has already been used on the path from the root.
	 */
	private boolean isLeaf(Instances data, Attribute attr)
	{
		boolean pureSplit =
				LSID3Utils.computeInfoGain(data, attr) == LSID3Utils.FULL_GAIN;
		boolean noAttributesLeft =
				m_excAttributeNames.size() == data.numAttributes() - 1;
		return pureSplit || noAttributesLeft;
	}
	
	
	
	/** Returns the sample size: number of lookahead trees built per candidate split. */
	public int getNts()
	{
		return nts;
	}
	
	/** Sets the sample size; a value of 0 or less makes the classifier behave like ID3. */
	public void setNts(int nts)
	{
		this.nts = nts;
	}
	
	/**
	   * Returns an enumeration describing the available options.
	   *
	   * Valid options are: <p>
	   *
	   * -nts <br>
	   * Number of trees in the sample.<p>
	   *
	   * @return an enumeration of all the available options.
	   */
	  public Enumeration<Option> listOptions() 
	  {
	    Vector<Option> options = new Vector<Option>(1);
	    
	    options.add(new Option("\tNumber of trees in the sample.",
	                           "nts", 0, "-nts"));
	    
	    return options.elements();
	  }
	  
	  /**
	   * Gets the current settings of the Classifier.
	   *
	   * @return an array of strings suitable for passing to setOptions
	   */
	  public String [] getOptions() 
	  {
		Vector<String> options = new Vector<String>();
		options.add("-nts");
		options.add("" + getNts());
		
		//BUGFIX: include the options handled by the superclass (e.g. the
		//debug flag) so that getOptions/setOptions round-trip; setOptions
		//already delegates the remaining options to super.setOptions.
		Collections.addAll(options, super.getOptions());
		
		return options.toArray(new String[options.size()]);
	  }
	  
	  /**
	   * Lookahead helper: a biased random sample tree used to estimate how
	   * small a decision tree rooted at a candidate attribute could become.
	   * Each node's attribute is drawn at random, biased towards higher
	   * (normalized) information gain.
	   */
	  class BiasedRandomSampleTree
	  {
		  private int numLeaves;						//Number of leaves in tree in the sample.
		  private BiasedRandomSampleTree[] children;	//Children of the current root.
		  private Attribute root;						//Current root.
		  
		  private boolean isTreeChanged;				//Set to true whenever the tree changes;
		  												//needed for recalculating the number of leaves.
		  
		  public BiasedRandomSampleTree()
		  {
			numLeaves = 0;
			root = null;
			isTreeChanged = true;
			children = null;
		  }
		  
		  /***
		   * Builds a sample tree for the given dataset.
		   * @param data	- given dataset
		   */
		  public void buildTree(Instances data)
		  {
			  root = chooseRootAttribute(data);
			  
			  Instances[] splitData = LSID3Utils.splitData(data, root);
			 
			  //Recurse unless the node is pure or the split is degenerate
			  //(all instances fall into a single branch).
			  if (!isLeaf(data, root) 
					  && !(countNonEmptySubsets(splitData) == 1))
			  {
				  children = new BiasedRandomSampleTree[root.numValues()];
				  for (int i = 0; i < children.length; i++)
				  {
					  children[i] = new BiasedRandomSampleTree();
					  children[i].buildTree(splitData[i]);
				  }
			  }
			  else
			  {
				  //BUGFIX: this was a bare block (missing 'else'), so root
				  //was unconditionally cleared even for internal nodes.
				  root = null;
			  }
			  
		  }
		  
		  /***
		   * Computes number of leaves in the tree; a node without children
		   * counts as a single leaf. The count is accumulated into numLeaves
		   * only while isTreeChanged is true, then cached.
		   * @return	number of leaves.
		   */
		  public int computeNumLeaves()
		  {
			  if (isTreeChanged)
			  {
				  if (children != null)
				  {
					  for (BiasedRandomSampleTree t : children)
					  {
						  if (t != null)
						  {
							  numLeaves += t.computeNumLeaves();
						  }
					  }
				  }
				  else
				  {
					  return 1;
				  }
			  }
			  
			  isTreeChanged = false;
			  return numLeaves;
		  }
		  
		  /***
		   * Chooses the attribute to be root, using biased random choosing
		   * over the per-attribute information gains.
		   * @return	chosen attribute
		   */
		  private Attribute chooseRootAttribute(Instances data)
		  {
		    double[] infoGains = new double[data.numAttributes()];
		    
		    Enumeration attEnum = data.enumerateAttributes();
		    while (attEnum.hasMoreElements()) 
		    {
		      Attribute att = (Attribute) attEnum.nextElement();
		      
		      infoGains[att.index()] = LSID3Utils.computeInfoGain(data, att);
		    }		    
		    return data.attribute(computeAttributeIndex(infoGains));
		  }
		  
		  /**
		   * Checks if splitting on the given attribute makes this node pure.
		   * @param attr	candidate split attribute
		   * @return	true when the split yields full information gain
		   */
		  private boolean isLeaf(Instances data, Attribute attr)
		  {
			  return LSID3Utils.computeInfoGain(data, attr) == LSID3Utils.FULL_GAIN;
		  }
		  
		  /***
		   * Computes the index of the attribute to split on, biased by the
		   * normalized information gains.
		   * @param infoGains	information gain per attribute index
		   * @return	chosen attribute index
		   */
		  private int computeAttributeIndex(double[] infoGains)
		  {
			  //Prefer an attribute that separates the classes completely.
			  int attIdx = -1;
			  for (int j = 0; j < infoGains.length; j++)
			  {
				  if (infoGains[j] == LSID3Utils.FULL_GAIN)
				  {
					  attIdx = j;
				  }
			  }
			  //BUGFIX: the original fell into the fallback branch whenever
			  //attIdx != -1, overwriting a found FULL_GAIN index with 0.
			  if (attIdx != -1)
			  {
				  return attIdx;
			  }
			  
			  if (!isValidGains(infoGains))
			  {
				  //All gains are zero - nothing to bias on.
				  return 0;
			  }
			  
			  //Convert gain values to be in (0, 1).
			  Utils.normalize(infoGains);
			  
			  //NOTE(review): Math.random() is unseeded, so runs are not
			  //reproducible; consider a seeded java.util.Random.
			  double rnd = Math.random();
	
			  double[] sortedGains = infoGains.clone();
			  Arrays.sort(sortedGains);
			  //Walk the sorted gains until one exceeds the random threshold
			  //(bounds check first, so the array is never read past its end).
			  int j = 0;
			  while (j < sortedGains.length - 1 && rnd > sortedGains[j])
			  {
				  j++;
			  }
			  
			  //Find index in original gains array.
			  //Assumption: the value always exists in infoGains.
			  return indexOf(infoGains, sortedGains[j]);
		  }
		  
		  /**
		   * Cleans the tree so the same instance can be reused for the next sample.
		   */
		  public void cleanTree()
		  {
			numLeaves = 0;
			isTreeChanged = true;
			if (children != null)
			{
				for (BiasedRandomSampleTree t : children)
				{
					if (t != null)
					{
						t.cleanTree();
					}
				}
			}
			root = null;
			children = null;
		  }
		  
		  /** Returns the index of the first occurrence of v in infoGains, or -1. */
		  private int indexOf(double[] infoGains, double v)
		  {
			  int idx = -1;
			  for (int i = 0; i < infoGains.length; i++)
			  {
				  if (infoGains[i] == v)
				  {
					  idx = i;
					  break;
				  }
			  }
			  return idx;
		  }
		  
		  /** Returns true when at least one gain is non-zero. */
		  private boolean isValidGains(double[] infoGains)
		  {
			  boolean isValidGains = false;
			  for (double d : infoGains)
			  {
				  if (d != 0.0) isValidGains = true;
			  }
			  
			  return isValidGains;
		  }
		  
		  /** Counts how many of the split subsets actually contain instances. */
		  private int countNonEmptySubsets(Instances[] subsets)
		  {
			int countNonEmptySubsets = 0;
			for (Instances subset : subsets)
			{
				if (subset.numInstances() > 0) countNonEmptySubsets++;
			}
			
			return countNonEmptySubsets;
		  }
	  }
	  
	  /***
	   * Chooses the attribute with maximum information gain (plain ID3 rule).
	   * @param data	- given data
	   * @return	attribute with maximal gain
	   */
	  private Attribute id3ChooseAttribute(Instances data)
	  {
	    double[] gains = new double[data.numAttributes()];
	    
	    Enumeration attEnum = data.enumerateAttributes();
	    while (attEnum.hasMoreElements())
	    {
	      Attribute candidate = (Attribute) attEnum.nextElement();
	      gains[candidate.index()] = LSID3Utils.computeInfoGain(data, candidate);
	    }
	    
	    return data.attribute(Utils.maxIndex(gains));
	  }
	  
	  /***
	   * Builds nts biased random sample trees on the given data and returns
	   * the smallest number of leaves observed; at least one tree is always
	   * built, so the result is well-defined even for nts == 1.
	   * @param data	subset to estimate
	   * @return	minimal sampled tree size
	   */
	  private int computeMinSizeTree(Instances data)
	  {
		  //A single helper tree is reused across samples via cleanTree().
		  BiasedRandomSampleTree sample = new BiasedRandomSampleTree();
		  
		  sample.buildTree(data);
		  int minSize = sample.computeNumLeaves();
		  sample.cleanTree();
		  
		  for (int trial = 1; trial < nts; trial++)
		  {
			  sample.buildTree(data);
			  minSize = Math.min(minSize, sample.computeNumLeaves());
			  sample.cleanTree();
		  }
		  
		  return minSize;
	  }
	  
	  /**
	   * Classifies a given test instance using the decision tree.
	   *
	   * @param instance the instance to be classified
	   * @return the classification
	   * @throws NoSupportForMissingValuesException if instance has missing values
	   */
	  public double classifyInstance(Instance instance) 
	    throws NoSupportForMissingValuesException 
	  {
	    if (instance.hasMissingValue()) 
	    {
	      throw new NoSupportForMissingValuesException("LSID3: no missing values, "
	                                                   + "please.");
	    }
	    
	    // Internal nodes delegate to the successor matching the instance's
	    // attribute value; leaves return the stored class value.
	    if (m_Attribute != null) 
	    {
	      int branch = (int) instance.value(m_Attribute);
	      return m_Successors[branch].classifyInstance(instance);
	    }
	    return m_ClassValue;
	  }
	  
	  /**
	   * Main method for running this classifier from the command line.
	   *
	   * @param args the options for the classifier
	   */
	  public static void main(String[] args) 
	  {
	    LSID3 classifier = new LSID3();
	    runClassifier(classifier, args);
	  }
	  
	  /** Returns a short textual summary of the built tree. */
	  public String toString()
	  {
		  StringBuilder summary = new StringBuilder("Number of leaves: ");
		  summary.append(measureNumLeaves());
		  return summary.toString();
	  }
	  
	  /**
	   * Computes class distribution for instance using decision tree.
	   *
	   * @param instance the instance for which distribution is to be computed
	   * @return the class distribution for the given instance
	   * @throws NoSupportForMissingValuesException if instance has missing values
	   */
	  public double[] distributionForInstance(Instance instance) 
	    throws NoSupportForMissingValuesException {

	    if (instance.hasMissingValue()) {
	      //BUGFIX: the message said "Id3" (copied from weka's Id3) although
	      //this classifier is LSID3; classifyInstance already says "LSID3".
	      throw new NoSupportForMissingValuesException("LSID3: no missing values, "
	                                                   + "please.");
	    }
	    if (m_Attribute == null) {
	      return m_Distribution;
	    } else { 
	      return m_Successors[(int) instance.value(m_Attribute)].
	        distributionForInstance(instance);
	    }
	  }
	  
	  /***
	   * Returns a copy of the current exclusion list with the given
	   * attribute's name appended; passed down to successor nodes so they
	   * do not reuse attributes consumed higher up the tree.
	   * @param attribute	attribute consumed by the current split
	   * @return	new exclusion list for the successors
	   */
	  private List<String> addExcludedAttribute(Attribute attribute)
	  {
		List<String> excluded = new ArrayList<String>(m_excAttributeNames);
		excluded.add(attribute.name());
		
		return excluded;
	  }
	  
	  /***
	   * Replaces the exclusion list: names of attributes already used on the
	   * path from the root, which this subtree must skip when splitting.
	   * @param exc	attribute names to exclude
	   */
	  private void setExcludedAttributes(List<String> exc)
	  {
		  m_excAttributeNames = exc;
	  }
}
