import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;


public class DecisionTree {
	final ArrayList<Data> trainingExamples;
	ArrayList<Integer> setOfAttribute;
	InformationGain information;
	Node root = null;

	/**
	 * Loads the training examples and the initial attribute set from the given file.
	 *
	 * @param filename path of the training-data file handed to {@code SelectSplitPoint}
	 */
	public DecisionTree(String filename) {
		information = new InformationGain();
		trainingExamples = new SelectSplitPoint(filename).getPerfectExamples();
		setOfAttribute = new InitialAttribute().getAttributes();
	}

	/**
	 * Builds the decision tree with the ID3 algorithm (information-gain splits,
	 * each attribute used at most once per path).
	 *
	 * @return the root node of the constructed tree
	 */
	public Node buildDecisionTree() {
		// Hand the recursion a copy so building the tree does not destroy the field.
		return buildTreeRec(trainingExamples, new ArrayList<Integer>(setOfAttribute), root);
	}

	/**
	 * Recursive ID3 construction step: expands {@code parentNode} with one child
	 * (or leaf) per attribute value.
	 *
	 * @param setOfExamples  training examples reaching this node
	 * @param setOfAttribute attributes still available on this path (mutated locally;
	 *                       each child branch receives its own copy)
	 * @param parentNode     node to expand, or {@code null} on the first call
	 * @return the root of the tree
	 */
	private Node buildTreeRec(ArrayList<Data> setOfExamples, ArrayList<Integer> setOfAttribute, Node parentNode) {

		if (parentNode == null) {
			// First call: the attribute with maximum information gain becomes the root.
			root = selectNode(setOfExamples, setOfAttribute);
			parentNode = root;
		}

		// ID3 never reuses an attribute along a path, so drop the one just selected.
		// Integer.valueOf forces the remove(Object) overload — a plain int argument
		// would silently resolve to remove(int index) and delete the wrong element.
		setOfAttribute.remove(Integer.valueOf(parentNode.getAttribute()));

		// Create one branch per value this attribute can take.
		for (String value : parentNode.getValues()) {
			// Examples consistent with attribute == value; gain of candidate children
			// is computed on this subset.
			ArrayList<Data> subSetOfExamples =
					information.getSubSetOfExamples(setOfExamples, parentNode.getAttribute(), value);

			// No examples left for this value: leaf with the parent set's majority class.
			// (Null/empty must be checked BEFORE any dereference.)
			if (subSetOfExamples == null || subSetOfExamples.isEmpty()) {
				String popularClass = information.getPopularClass(setOfExamples, parentNode.getAttribute(), value);
				parentNode.setLeafNode(popularClass, value);
				continue;
			}

			// Zero entropy: every remaining example has the same class — make a leaf.
			if (information.entropy(subSetOfExamples) == 0) {
				String popularClass = information.getPopularClass(subSetOfExamples, parentNode.getAttribute(), value);
				parentNode.setLeafNode(popularClass, value);
				continue;
			}

			// Attributes exhausted on this path: leaf with the subset's majority class.
			if (setOfAttribute.isEmpty()) {
				String popularClass = information.getPopularClass(subSetOfExamples, parentNode.getAttribute(), value);
				parentNode.setLeafNode(popularClass, value);
				continue;
			}

			Node selectedNode = selectNode(subSetOfExamples, setOfAttribute);
			Node childNode = parentNode.setChildNode(value, selectedNode.getAttribute());

			// Recurse on a COPY of the attribute list: removals made while building
			// this branch must not leak into sibling branches.
			buildTreeRec(subSetOfExamples, new ArrayList<Integer>(setOfAttribute), childNode);
		}
		return root;
	}

	/**
	 * Selects the attribute with the maximum information gain over the given
	 * examples and wraps it in a new {@link Node}.
	 *
	 * @param setOfExamples  examples to evaluate the candidate splits on
	 * @param setOfAttribute candidate attributes
	 * @return a node for the best attribute, or {@code null} when either input
	 *         is null/empty
	 */
	public Node selectNode(ArrayList<Data> setOfExamples, ArrayList<Integer> setOfAttribute) {
		if (setOfExamples == null || setOfExamples.isEmpty()
				|| setOfAttribute == null || setOfAttribute.isEmpty()) {
			return null;
		}

		// Plain max scan. (The previous HashMap<Double,Integer> keyed on the gain
		// lost attributes whenever two gains were equal, and the sort was needless.)
		int bestAttribute = setOfAttribute.get(0);
		double bestGain = Double.NEGATIVE_INFINITY;
		for (int attribute : setOfAttribute) {
			double gain = information.getGain(setOfExamples, attribute);
			if (gain > bestGain) {
				bestGain = gain;
				bestAttribute = attribute;
			}
		}
		return new Node(bestAttribute);
	}

	/**
	 * Classifies one example by walking the tree from {@code node} until a leaf
	 * (a node whose attribute is {@code InitialAttribute.Class}) is reached.
	 *
	 * @param data the example to classify
	 * @param node subtree root to start from (normally the tree root)
	 * @return the classification stored at the leaf
	 */
	public String search(Data data, Node node) {
		if (node.getAttribute() == InitialAttribute.Class) {
			return node.getLeafNode();
		}
		int nodeAttribute = node.getAttribute();
		String dataValue = data.getAttribute(nodeAttribute);
		// NOTE(review): getChildNode presumably returns the branch for this value;
		// an attribute value never seen during training would make it return null
		// and NPE here — confirm against Node's implementation.
		return search(data, node.getChildNode(dataValue));
	}
}
