/*
 *    MyId3.java
 *    Copyright (C) 2012 Institut Teknologi Bandung, Bandung, Indonesia
 *
 */
package main;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Enumeration;
import weka.classifiers.Classifier;
import weka.classifiers.Sourcable;
import weka.core.Capabilities.Capability;
import weka.core.*;

public class MyId3 
  extends Classifier {

  //Untuk serialization
  static final long serialVersionUID = -2693678647096322562L;
  
  //Array of node anak
  private MyId3[] successors; //!!! tiap node kelasnya MyId3, jadi suksesor alias anak2nya MyId3 juga

  //Attribute pada node sekarang
  private Attribute attribute; 

  //Nilai klasifikasi data, ke node leaf
  private double classValue; //!!! value dari kelas kalo udah sampe daun, ini dia yang jadi hasil penelusuran tree

  //Probabilitas untuk masing-masing kelas
  //Digunakan untuk klasifikasi
  private double[] distribution; //!!! katanya sih probabilitas dari masing2 kelas untuk suatu instance. buat apa yak (?)

  //Atribut dari kelas yang diklasifikasikan
  //Digunakan jika sudah terminasi
  private Attribute m_ClassAttribute; //!!! atribut dari kelas (?)

  
  /**
   * hanya bisa menangani kasus atribut nominal, kelas nominal dan tidak ada missing value
   * @return 
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();
    result.disableAll();

    result.enable(Capability.NOMINAL_ATTRIBUTES); 
    result.enable(Capability.NOMINAL_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES); 

    // instances
    result.setMinimumNumberInstances(0);
    return result;
  }

  /**
   * Builds MyId3 decision tree classifier.
   *
   * @param data data training
   * @exception Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances data) throws Exception { //!!! ini tahap preprosesor, sebelum bisa ngeklasifikasiin, perlu di build dulu

    //Mengetes apakah id3 capabilitynya sesuai dengan data
    getCapabilities().testWithFail(data);
    
    //Menghilangkan instances dengan missing value
    data = new Instances(data); 
    data.deleteWithMissingClass();
    
    //Mulai learning
    DecisionTreeLearning(data);
  }

  /**
   * Mengetes apakah semua instances mempunyai kelas yang sama
   * True jika benar, false jika tidak
   * @param instances
   * @return 
   */
  public boolean IsInstancesHaveSameClassification(Instances instances){
    int i;
    String initialclass = instances.instance(0).stringValue(instances.numAttributes()-1);
    for(i=1;i<instances.numInstances();++i){
        if (initialclass!=instances.instance(i).stringValue(instances.numAttributes()-1)){
            return false;
        }
    }
    return true;
  }
  
  /**
   * Method for building an MyId3 tree.
   *
   * @param data the training data
   * @exception Exception if decision tree can't be built successfully
   */
  private void DecisionTreeLearning(Instances data) throws Exception { //!!! prosesnya kita implementasi masing2 :3
    //Kasus 1, examples tidak ada 0, tapi masih ada attribute
    //Return value ke orang tua
    if (data.numInstances() == 0) {
      attribute = null;
      classValue = Instance.missingValue();
      distribution = new double[data.numClasses()];
      return;
    }else if (IsInstancesHaveSameClassification(data)){
//        System.out.println("SAME ALL");
        attribute = null;
        distribution = new double[data.numClasses()];
        Enumeration instEnum = data.enumerateInstances();
        while (instEnum.hasMoreElements()) {
            Instance inst = (Instance) instEnum.nextElement();
            distribution[(int) inst.classValue()]++;
        }
        Utils.normalize(distribution);
        classValue = Utils.maxIndex(distribution);
        m_ClassAttribute = data.classAttribute();
        return;
        
    }
    
    // Compute attribute with maximum information gain.
    double[] infoGains = new double[data.numAttributes()];
    double infoGain = entropy(data);
    Enumeration attEnum = data.enumerateAttributes();
    while (attEnum.hasMoreElements()) {
      Attribute att = (Attribute) attEnum.nextElement();
      infoGains[att.index()] = infoGain + remainder(data, att);
//      System.out.println("Attributenya : " + att);
//      System.out.println("Gain : " + infoGains[att.index()]);
    }
//    System.out.println("Break");
    attribute = data.attribute(Utils.maxIndex(infoGains));
//    System.out.println("Maximum : " + attribute);
    
//    Kasus all false or true
//    if (Utils.eq(infoGains[attribute.index()], 1)) {
//        System.out.println("SATU");
//    }
    
    // Make leaf if information gain is zero. 
    // Otherwise create successors.
    //Kasus 3, dimana examples masih ada, tapi attribute tidak ada lagi
    //Solusi ambil kelas 
    if (Utils.eq(infoGains[attribute.index()], 0)) {
//      System.out.println("attribute henti : " + attribute);
      attribute = null;
      distribution = new double[data.numClasses()];
      Enumeration instEnum = data.enumerateInstances();
      while (instEnum.hasMoreElements()) {
        Instance inst = (Instance) instEnum.nextElement();
        distribution[(int) inst.classValue()]++;
      }
      Utils.normalize(distribution);
      classValue = Utils.maxIndex(distribution);
      m_ClassAttribute = data.classAttribute();
    }
    //Kasus 4, examples dan attribute masih ada, tapi tidak semuanya kelas klasifikasinya sama
    else {
      Instances[] splitData = splitData(data, attribute);
      successors = new MyId3[attribute.numValues()];
      for (int j = 0; j < attribute.numValues(); j++) {
//        System.out.println("Attribute chosen : " + attribute.value(j));
        successors[j] = new MyId3();
        successors[j].DecisionTreeLearning(splitData[j]);
      }
    }
  }

  /**
   * Mengklasifikasikan decision tree
   *
   * @param instance the instance to be classified
   * @return the classification
   * @throws NoSupportForMissingValuesException if instance has missing values
   */
  public double classifyInstance(Instance instance) //!!! disini instance diklasifikasikan pake tree yang udah di build
    throws NoSupportForMissingValuesException {

    if (instance.hasMissingValue()) {
      throw new NoSupportForMissingValuesException("MyId3 : no missing values, "
                                                   + "please.");
    }
    if (attribute == null) {
      return classValue;
    } else {
      return successors[(int) instance.value(attribute)].classifyInstance(instance);
    }
  }

  /**
   * Computes class distribution for instance using decision tree.
   *
   * @param instance the instance for which distribution is to be computed
   * @return the class distribution for the given instance
   * @throws NoSupportForMissingValuesException if instance has missing values
   */
  public double[] distributionForInstance(Instance instance) //!!! melihat probabilitas distribusi kelas untuk certain instance
    throws NoSupportForMissingValuesException {

    if (instance.hasMissingValue()) {
      throw new NoSupportForMissingValuesException("MyId3 : no missing values, "
                                                   + "please.");
    }
    if (attribute == null) {
      return distribution;
    } else { 
      return successors[(int) instance.value(attribute)].distributionForInstance(instance);
    }
  }

  /**
   * Prints the decision tree using the private toString method from below.
   *
   * @return a textual description of the classifier
   */
  public String toString() { //!!! ini tree yang dimunculin di output yang diGUI. kenapa ada toString(0)? Itu maksudnya rekursif dari node yang merupakan level ke 0. (jadi kalo mau print dari level ke 1, taro 1. Ini tergantung implementasi tree masing-masing

    if ((distribution == null) && (successors == null)) {
      return "Belum ada model MyId3.";
    }
    return "MyId3\n\n" + toString(0);
  }

  
  /**
   * Menghitung Remainder dari fungsi
   *
   * @param data the data for which info gain is to be computed
   * @param att the attribute
   * @return the information gain for the given attribute and data
   * @throws Exception if computation fails
   */
  private double remainder(Instances data, Attribute att)
    throws Exception {

    double remainder = 0.0;
    Instances[] splitData = splitData(data, att);
    for (int j = 0; j < att.numValues(); j++) {
      if (splitData[j].numInstances() > 0) {
        remainder -= ((double) splitData[j].numInstances() /
                     (double) data.numInstances()) *
          entropy(splitData[j]);
      }
    }
    return remainder;
  }

  /**
   * Computes the entropy of a dataset.
   * 
   * @param data the data for which entropy is to be computed
   * @return the entropy of the data's class distribution
   * @throws Exception if computation fails
   */
  private double entropy(Instances data) throws Exception {

    double [] classCounts = new double[data.numClasses()];
    
    
    Enumeration instEnum = data.enumerateInstances();

//    System.out.println("break");
    while (instEnum.hasMoreElements()) {
      Instance inst = (Instance) instEnum.nextElement();
      classCounts[(int) inst.classValue()]++;
    }
    double entropy = 0;
    for (int j = 0; j < data.numClasses(); j++) {
      if (classCounts[j] > 0) {
        entropy -= classCounts[j] * Utils.log2(classCounts[j]);
      }
    }
    entropy /= (double) data.numInstances();
    return entropy + Utils.log2(data.numInstances());
  }

  /**
   * Splits a dataset according to the values of a nominal attribute.
   *
   * @param data the data which is to be split
   * @param att the attribute to be used for splitting
   * @return the sets of instances produced by the split
   */
  private Instances[] splitData(Instances data, Attribute att) {

    Instances[] splitData = new Instances[att.numValues()];
    for (int j = 0; j < att.numValues(); j++) {
      splitData[j] = new Instances(data, data.numInstances());
    }
    Enumeration instEnum = data.enumerateInstances();
    while (instEnum.hasMoreElements()) {
      Instance inst = (Instance) instEnum.nextElement();
      splitData[(int) inst.value(att)].add(inst);
    }
    for (int i = 0; i < splitData.length; i++) {
      splitData[i].compactify();
    }
    return splitData;
  }

  /**
   * Mengeluarkan string pohon mulai dari yang level "level"
   *
   * @param level adalah level dari pohon yang ingin diprint
   * @return the tree as string at the given level
   */
  private String toString(int level) {

    StringBuffer text = new StringBuffer();
    
    if (attribute == null) {
      if (Instance.isMissingValue(classValue)) {
        text.append(": null");
      } else {
        text.append(": " + m_ClassAttribute.value((int) classValue));
      } 
    } else {
      for (int j = 0; j < attribute.numValues(); j++) {
        text.append("\n");
        for (int i = 0; i < level; i++) {
          text.append("|  ");
        }
        text.append(attribute.name() + " = " + attribute.value(j));
        text.append(successors[j].toString(level + 1));
      }
    }
    return text.toString();
  }
  

  /**
   * Testing MyId3
   *
   * @param args
   */
  public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
//      Instances instances = new Instances(new BufferedReader(new FileReader("src/java/data_set/kuisib.arff")));
      Instances instances = new Instances(new BufferedReader(new FileReader("src/java/data_set/segment_discretize_code.arff")));
      MyId3 myid3 = new MyId3();
      instances.setClassIndex(instances.numAttributes()-1);
      myid3.buildClassifier(instances);
      System.out.println("Final result : " + myid3.toString());
//      double[] val = new double[0];
//      val[0]=0.0;
//      val[1]=5.0;
//      val[2]=2.0;
//      int idxmax = Utils.maxIndex(val);
//      System.out.println("Nilai max : " + idxmax);
//      System.out.println("Nilai val[0] : " + val[0]);
//      if (Utils.eq(val[0], 0)){
//          System.out.println("EAA");
//      }
  }
}
