package a3;

import java.text.ParseException;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.lang.Math;

import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;


public class ID3 {
	/**
	 * Sums the elements of an array.
	 * @param a the array containing the values.
	 * @return the sum of all elements (0 for an empty array).
	 */
	public static int sumArray(int[] a){
		int sum = 0;
		for(int value : a){
			sum += value;
		}
		return sum;
	}
	
	/**
	 * Calculates which class has the highest occurrence within the data set.
	 * Ties are broken in favour of the class value with the lowest index.
	 * @param data the current/remaining attributes and examples.
	 * @return the value/name of the most prevalent class.
	 */
	public static String pluralityValue(Instances data){
		int[] classCounts = data.attributeStats(data.classIndex()).nominalCounts;
		int indexOfMax = 0;
		for(int i = 1; i < classCounts.length; i++){
			if(classCounts[i] > classCounts[indexOfMax]){
				indexOfMax = i;
			}
		}
		return data.classAttribute().value(indexOfMax);
	}
	
	/**
	 * Calculates the entropy (in bits) for a problem of an arbitrary number of classes.
	 * @param classCounts an array containing the number of examples resulting in each respective class.
	 * @return the entropy value; 0.0 for an empty distribution.
	 */
	public static double entropy(int[] classCounts){
		int nbrOfExamples = sumArray(classCounts);
		if(nbrOfExamples == 0){
			// Guard against 0/0 -> NaN when a split contains no examples.
			return 0.0;
		}
		double ent = 0.0;
		for(int count : classCounts){
			double c = (double)count/(double)nbrOfExamples;
			// Both c*log2(c) terms vanish at c == 0 (limit) and c == 1 (log 1 = 0),
			// so skip them to avoid calling log(0).
			if(c > 0.0 && c < 1.0){
				ent -= c*Math.log(c)/Math.log(2);
			}
		}
		return ent;
	}
	
	/**
	 * Determines which of the attributes left in data yields the highest information gain.
	 * Assumes the class attribute is the last attribute (as set up in main).
	 * @param data the current/remaining attributes and examples.
	 * @return index of the resulting attribute in data (0 if no attribute has positive gain).
	 */
	public static int importance(Instances data){
		int indexOfAttribute = 0;
		double maxGain = 0.0;
		int[] classCountsBefore = data.attributeStats(data.classIndex()).nominalCounts;
		double entropyBefore = entropy(classCountsBefore);
		// Total example count is invariant over attributes; hoist it out of the loops.
		int exBefore = sumArray(classCountsBefore);
		for(int i = 0; i < data.numAttributes()-1; i++){
			Attribute a = data.attribute(i);
			Instances tempData = new Instances(data);
			double remainder = 0.0;
			for(int j = 0; j < a.numValues(); j++){
				tempData.delete();
				for(int k = 0; k < data.numInstances(); k++){
					// Compare the nominal value index rather than Strings with '=='
					// (reference equality), which only worked because Weka interns
					// attribute value strings.
					if((int)data.instance(k).value(a) == j){
						tempData.add(data.instance(k));
					}
				}
				int[] classCountsAfter = tempData.attributeStats(tempData.classIndex()).nominalCounts;
				// Weight each branch's entropy by the fraction of examples it receives.
				double weight = (double)sumArray(classCountsAfter)/(double)exBefore;
				remainder += weight*entropy(classCountsAfter);
			}
			double thisGain = entropyBefore - remainder;
			if(thisGain > maxGain){
				maxGain = thisGain;
				indexOfAttribute = i;
			}
		}
		return indexOfAttribute;
	}
	
	/**
	 * Induces a decision tree according to the ID3 algorithm.
	 * @param data the current/remaining attributes and examples.
	 * @param pData the attributes and examples of the parent node.
	 * @return a decision tree in simple string format.
	 */
	public static String decisionTreeLearning(Instances data, Instances pData){
		if(data.numInstances() == 0){
			// No examples left: fall back to the parent's majority class.
			return ": " + pluralityValue(pData);
		}else if(data.attributeStats(data.classIndex()).distinctCount == 1){
			// All remaining examples share a single class.
			return ": " + data.firstInstance().stringValue(data.classIndex());
		}else if(data.numAttributes()-1 == 0){
			// Attributes exhausted: majority vote among remaining examples.
			return ": " + pluralityValue(data);
		}else{
			int attrIndex = importance(data);
			Attribute a = data.attribute(attrIndex);
			StringBuilder tree = new StringBuilder();
			Instances newData = new Instances(data);
			newData.deleteAttributeAt(attrIndex);
			for(int i = 0; i < a.numValues(); i++){
				tree.append("\n").append(a.name()).append(" = ").append(a.value(i));
				newData.delete();
				for(int j = 0; j < data.numInstances(); j++){
					// Compare the nominal value index rather than Strings with '=='
					// (reference equality) — see importance().
					if((int)data.instance(j).value(a) == i){
						// Copy the example and drop the split attribute before recursing.
						Instance ex = new Instance(data.instance(j));
						ex.deleteAttributeAt(attrIndex);
						newData.add(ex);
					}
				}
				String subTree = decisionTreeLearning(newData, data);
				// Indent the subtree two spaces per level of depth.
				subTree = subTree.replaceAll("\n", "\n  ");
				tree.append(" ").append(subTree);
			}
			return tree.toString();
		}
	}

	/**
	 * Loads a nominal ARFF data set, learns an ID3 tree and prints it.
	 * The path may be supplied as the first command-line argument; the
	 * original hard-coded Weka sample path is kept as the default.
	 * @param args optional: args[0] is the path to the ARFF file.
	 */
	public static void main(String[] args) throws ParseException, IOException {
		String path = args.length > 0 ? args[0]
				: "C:/Program Files/Weka-3-6/data/weather.nominal.arff";
		BufferedReader reader = new BufferedReader(new FileReader(path));
		Instances data;
		try{
			data = new Instances(reader);
		}finally{
			// Close the reader even if parsing the ARFF file fails.
			reader.close();
		}
		// The class attribute is assumed to be the last attribute throughout.
		data.setClassIndex(data.numAttributes() - 1);
		// The root node's "parent" is an empty copy of the data set (header only).
		Instances parentData = new Instances(data);
		parentData.delete();
		System.out.println(decisionTreeLearning(data,parentData));
	}

}
