package UFFT_package.Node1;

import java.util.ArrayList;
import UFFT_package.Decision1.Decision;
import java.util.Iterator;
import java.lang.Math;

import javax.swing.tree.DefaultMutableTreeNode;

import UFFT_package.Example1.Example;



/**
 * A node of the incremental binary decision tree built by UFFT
 * (Ultra Fast Forest of Trees). Each node buffers a batch of examples,
 * chooses a split attribute by information gain subject to a Hoeffding
 * bound, and recursively partitions itself into two children.
 *
 * NOTE(review): this class depends heavily on static, order-sensitive
 * state in {@code Decision} (n_min, dimensions, store_pos_value_left/right,
 * all_d, count). Methods here both read and mutate that global state, so
 * call order matters; confirm any refactor against the Decision class.
 */
public class Node 
{
	int num_attrs;                    // number of usable attribute indices stored in attrs[]
	int num_examples;                 // number of examples buffered at this node
	int previousIndex,currentIndex;   // cursors into the global example stream (currentIndex seeded from Decision.n_min)
	Integer[] attrs;                  // attribute indices still available for splitting (fixed capacity 70)
	public int attribute;             // index of the attribute chosen by splitter(); set in build_tree()
	int major_class;                  // majority class label among this node's examples
	int num_children;                 // 0 for a leaf; 2 after partition() runs
	ArrayList example_list;           // raw-typed list of Example objects buffered at this node
	Node[] children;                  // the two child nodes created by partition()
	public double test_val;           // numeric split threshold d(i) for the chosen attribute
	int flag=0;                       // 0 = first splitter pass (prints contingency table); 1 = second pass (quiet)

	/**
	 * Creates an empty leaf node with no examples, no attributes and no
	 * children. The attrs capacity of 70 is a fixed upper bound on the
	 * number of dimensions — assumes the data set never exceeds it
	 * (TODO confirm against Decision.num_dimensions).
	 */
	public Node()
	{
		num_children=0;
		num_examples=0;
		num_attrs=0;
	    example_list=new ArrayList();	
	    attrs=new Integer[70];
	    currentIndex=Decision.n_min;
	    test_val=0.0;
	    
	}
	
	/**
	 * Recursively grows the subtree rooted at this node from its buffered
	 * examples.
	 *
	 * Base cases: no examples (leaf labeled defaultClass), all examples
	 * sharing one label (pure leaf), or no attributes left. Otherwise
	 * splitter() picks the best attribute and partition() distributes the
	 * examples between two children, which are grown recursively with this
	 * node's majority class as their default.
	 *
	 * Side effect: each leaf/early return calls
	 * Decision.store_pos_value_right() — presumably recording the end of a
	 * split path; confirm semantics against the Decision class.
	 *
	 * @param defaultClass label to assign when this node holds no examples
	 * @param numClasses   number of distinct class labels (2 for UFFT: up/down)
	 */
	public void build_tree(int defaultClass, int numClasses) 
	{	
	   int max_class_count,first_class,different_classes=0;	
	   Integer class_count[]=new Integer[numClasses];
	   
	   //System.out.println(num_examples);
	   if(num_examples==0)
	   {
		   // Empty node: inherit the parent's majority class and stop.
		   major_class=defaultClass;
		  // System.out.println("*****");
		   Decision.store_pos_value_right(); 
		   
		   return;
		   
	   }
	   
	   for(int x=0;x<numClasses;x++)
		   class_count[x]=0;
		
	   first_class=((Example)example_list.get(0)).my_class;
	   different_classes=0;
	   
	   Iterator it=example_list.iterator(); // tally class labels; at the root this list holds n_min examples
	   while(it.hasNext())
	   {
		   Example temp=(Example)it.next();
           class_count[temp.my_class]++;
           if(temp.my_class!=first_class)
        	   different_classes=1;   
	   }
	   
	   if(different_classes==0)// every example has the same label: pure leaf
	   {
		    major_class=first_class;
		   // System.out.println("*****");
		    Decision.store_pos_value_right();  
		    
		    return;
	   }
	   
	   major_class=0; // otherwise take the label with the highest count
	   max_class_count=class_count[0];
	   
	   for(int x=0;x<numClasses;x++)
   		{
   		     if(class_count[x]>max_class_count)
   		    {
   		      	major_class=x;
   			    max_class_count=class_count[x];
   		    }
   		}
	   
	   if(num_attrs==0)// no attributes left to split on: stop at this majority-class leaf
	   {
		   Decision.store_pos_value_right();     
		   //System.out.println("*****");
		   //while(Decision.current_index!=0)
		  
		   return;
	   }
	   
	   attribute=splitter(numClasses);// choose the split attribute by maximum information gain
	   // attribute == -1 means the Hoeffding-bound test did not permit a confident split
	   
	   
	   if(attribute!=-1)
	   {
		   System.out.println("Dimension Chosen ::"+Decision.dimensions[attribute].getAttribute()+"\n\n");
		   partition();// distribute the buffered examples between the two new children
		   for(int x=0;x<num_children;x++)
		   {
			   //children[x].previousIndex=currentIndex;
			   children[x].build_tree(major_class,numClasses);
			  
			   
		   }
	   }
	}

	/**
	 * Creates exactly two children and routes every buffered example to
	 * one of them via Example.lookup_value(attribute, test_val), which
	 * must return 0 or 1 (TODO confirm its contract). All remaining
	 * attributes except the split attribute are handed down to both
	 * children so they cannot re-split on it.
	 */
	private void partition() 
	{
		int x,y;
		//System.out.println("inside partition");
		
		//children=new Node[Decision.dimensions[attribute].num_values];//for UFFT its size wll b 2
		
		children=new Node[2];
		
		for(int val=0;val<2;val++)
		{
			
			children[val]=new Node();
			num_children++;// num_children ends at 2 — UFFT trees are binary
		}
		Example temp;
		Iterator it1=example_list.iterator();
		
		while(it1.hasNext())
		{
			
			temp=(Example)it1.next();
			//System.out.println(temp.lookup_value(attribute,test_val));
			children[temp.lookup_value(attribute,test_val)].add_example(temp);	// 0 => left child, 1 => right child
		}
		//System.out.println("*****"+children[0].num_examples);

		//adding more examples so that N_min examples are there in each node
		//for(int i=0;i<num_children;i++)
			//children[i].add_MoreExamples(i);
		//System.out.println("****"+children[0].num_examples);
		
		// Pass down every attribute except the one just used for the split.
		for(x=0;x<num_attrs;x++)
		{
			if(attrs[x]!=attribute)
			{
				for(y=0;y<num_children;y++)
				{
					children[y].add_attribute(attrs[x]);
				}
			}
		}
		
	    //System.out.println("end of partition");
	}

	/**
	 * Debug helper: prints this node's attribute counts and dumps its
	 * examples. Currently unused by the visible code paths.
	 * NOTE(review): "Attrubite" typo lives in the output string; left
	 * untouched here since it is runtime text.
	 */
	private void printAll() {
		System.out.println("Attrubite"+this.attribute+"Num of ATTRIBUTES: " + this.num_attrs+"   Num of Ex:"+this.num_examples);
		print_ex();
		
	}

	/**
	 * Debug helper: prints the first 4 attribute values plus the class
	 * label of each buffered example.
	 * NOTE(review): the hard-coded 4 assumes every Example has at least
	 * four values — confirm against the Example class.
	 */
	private void print_ex() 
	{
		Iterator ei=example_list.iterator();
		while(ei.hasNext())
		{	
			Example e=(Example)ei.next();
			for(int x=0;x<4;x++)
			{
	             System.out.print(e.values[x]+" ");    
			}
			System.out.print(e.my_class+"\n");
		}	
		
	}

	/**
	 * Evaluates the information gain of every remaining attribute and
	 * returns the index of the best one, or -1 if neither the Hoeffding
	 * bound nor the tie-break threshold tau allows a confident choice.
	 *
	 * Side effects: sets this.test_val to the winning attribute's split
	 * threshold and records it via Decision.store_pos_value_left().
	 *
	 * NOTE(review): epsilon is computed as sqrt(log10(C)^2 * ln(1/delta)
	 * / (2n)); the textbook Hoeffding bound uses R^2 with R = log2(C) for
	 * information gain — confirm which formulation is intended.
	 *
	 * @param numClasses number of class labels
	 * @return attribute index with the highest gain, or -1 on no decision
	 */
	private int splitter(int numClasses) 
	{
		
	//	System.out.println("\n\nNode....Splitter.....");
		flag=0;
		double max_value=evaluate(attrs[0],numClasses);
		
		double a,b,c,max_value2=0;
		int max_dimension2=attrs[0];
		int max_dimension=attrs[0];
		double[] d=new double[Decision.num_dimensions];
		double value;
		d[attrs[0]]=test_val;  // evaluate() leaves the per-dimension threshold in test_val; cache it here
		System.out.println("Dimension :" + Decision.dimensions[attrs[0]].getAttribute() + "\t d("+Decision.dimensions[attrs[0]].getAttribute()+"):" + d[attrs[0]]+"\t infogain : "+ max_value+"\n");
		double epsilon=0,delta=0.001;
		double tau=0.5;
	// First pass: find the attribute with the highest information gain.
		
		for(int x=1;x<num_attrs;x++)
		{
			value=evaluate(attrs[x],numClasses);
			d[attrs[x]]=test_val;
			System.out.println("Dimension :" + Decision.dimensions[attrs[x]].getAttribute() + "\t d("+Decision.dimensions[attrs[x]].getAttribute()+"):" + d[attrs[x]]+"\t infogain : "+ value+"\n");
			
			if(value>max_value)
			{
				
				max_value=value;
				max_dimension=attrs[x];
			}
		}
		//h_attr=max_value;
// Second pass: find the attribute with the second-highest gain.
// NOTE(review): re-runs evaluate() for every attribute, and the strict
// "< max_value" test excludes any attribute tied with the maximum.
		flag=1;
		for(int x=0;x<num_attrs;x++)
		{
			value=evaluate(attrs[x],numClasses);
			if(value>max_value2 && value<max_value)
			{
				max_value2=value;
				max_dimension2=attrs[x];
			}
			
		}
		// Hoeffding bound epsilon = sqrt( log10(C)^2 * ln(1/delta) / (2n) )
		a=Math.log10(numClasses);
        a=a*a;
		b=Math.log(1/delta);
        c=(a*b)/(2*num_examples);
        
		epsilon=Math.sqrt(c);

		System.out.println("Epsilon :: "+ epsilon);
		System.out.println("Max Dimension :"+Decision.dimensions[max_dimension].getAttribute()+"\t\tMax Value of info.gain :"+ max_value);
		System.out.println("Second Max Dimension :" + Decision.dimensions[max_dimension2].getAttribute()+"\t   Second Max Value of info gain :" + max_value2);
		double diff=max_value-max_value2;
		System.out.println("Difference of Info gain of Max two Dimensions :"+diff);
     // Accept the best attribute when it clearly beats the runner-up
     // (diff > epsilon) or when epsilon itself is small enough (< tau)
     // that the tie is deemed unimportant.
     if( (diff>epsilon) ||( (diff<=epsilon)&&(epsilon<tau)) ) 
     {
    	// Decision ds=new Decision();
  	   //ds.store_pos_value(attribute,test_val);
    	 test_val=d[max_dimension];
    	 Decision.store_pos_value_left(max_dimension,test_val); // record the chosen threshold in the global Decision state
    	 return max_dimension;
    	 
     }
     else
			return -1;
		
	}
	
	/**
	 * Computes the information gain of splitting dimension {@code dim} at
	 * a threshold equal to the mean of the per-class attribute means.
	 *
	 * Steps: (1) sum the attribute values per class; (2) average them and
	 * set this.test_val to the mean of those class means (side effect the
	 * caller relies on); (3) build a 2-row contingency table of counts at
	 * or below / above the threshold; (4) return
	 * H(class) - H(class | split) computed in bits.
	 *
	 * NOTE(review): if a class has no examples at this node,
	 * class_totals[k] == 0 and mean_class[k] becomes NaN, which then
	 * propagates into test_val — confirm this cannot occur upstream.
	 *
	 * @param dim        index of the dimension to evaluate
	 * @param numClasses number of class labels
	 * @return the information gain of this split, in bits
	 */
	private double evaluate(int dim, int numClasses) 
	{
        double class_totals[]=new double[numClasses]; 
        //Double dim_totals[]=new Double[Decision.dimensions[dim].num_values];
		double at=0.0,entropy=0.0;
        int x,k;
        String str;
        double[] class_add=new double[numClasses]; 
        double[] mean_class=new double[numClasses]; 
        double[] cont_table_less=new double[numClasses];
        double[] cont_table_great=new double[numClasses];
        double[] prob=new double[numClasses];
        double info=0.0,less=0.0,great=0.0,h;
        //double d=0.0;
        for(k=0;k<numClasses;k++)
        {
        	 class_add[k]=0.0;
        	 class_totals[k]=0.0;
        	
        }
         // Per-class sum of this dimension's values, plus per-class counts.
         Iterator it=example_list.iterator();
         
         while(it.hasNext())
         {
        	 Example temp=(Example)it.next();
        	 //for(x=0;x<Decision.num_dimensions;x++)
        	 {
        		 at=Double.parseDouble(temp.values[dim]);
        		//System.out.println("\n***************"+at+"***************");
        		class_add[temp.my_class] += at;
        	 }
         
         class_totals[temp.my_class]++;// count of examples per class label
         
         
         }
         
            
        // Mean value of this dimension for each class label.
         
          for(k=0;k<numClasses;k++)
        	 {
        		 mean_class[k]=class_add[k]/class_totals[k];
        	 }
        	
         
         
         double sum=0.0;
        
         
         // Split threshold d(i) = average of the per-class means.
             
       	 for(k=0;k<numClasses;k++)
        	 {
        		 sum +=mean_class[k];
        	 }
        		 
        	test_val=sum/numClasses;
         
         // Build the contingency table: counts <= threshold vs. > threshold, per class.
         
     	
    	for(k=0;k<numClasses;k++)
    		{
    			cont_table_less[k]=0;
    			cont_table_great[k]=0;
    		}
    	
     	it=example_list.iterator();
     	while(it.hasNext())
     	{
     	Example temp=(Example)it.next();
       	       
        
       	 		at=Double.parseDouble(temp.values[dim]);
       			if(at<=test_val)
       			{
       				cont_table_less[temp.my_class]++;
       			}
       			else
       			{
       				cont_table_great[temp.my_class]++;
       			}
       		
        
     	}
     	// Debug dump of the table on the first splitter pass only.
     	// NOTE(review): hard-codes two classes (rows 0 = "UP", 1 = "down").
     	if(flag==0)
     	{
     	System.out.println("\t<="+test_val+"\t>"+test_val);	
       	System.out.println("UP\t"+cont_table_less[0]+"\t\t"+cont_table_great[0]);
       	System.out.println("down\t"+cont_table_less[1]+"\t\t"+cont_table_great[1]);
     	}
     	// Information gain in bits, from the contingency table:
     	// info  = H(class); less/great subtract the conditional entropy of
     	// each side (zero-count cells contribute 0, handled explicitly).
     	
     	
     		for(k=0;k<numClasses;k++)
     		{
     			prob[k]=cont_table_less[k]+cont_table_great[k];
     		}
     	
        	for(k=0;k<numClasses;k++)
     		{
     			info+=(prob[k]/num_examples)*((Math.log(num_examples)-Math.log(prob[k]))/Math.log(2));
     			if(cont_table_less[k]==0)
     				less+=0.0;
     			else
     				less+=(cont_table_less[k]/num_examples)*(Math.log(num_examples)/Math.log(2)-Math.log(cont_table_less[k])/Math.log(2));
     			if(cont_table_great[k]==0)
     				great+=0.0;
     			else
     				great+=(cont_table_great[k]/num_examples)*(Math.log(num_examples)/Math.log(2)-Math.log(cont_table_great[k])/Math.log(2));
     		} 	
     		
     	//System.out.println("*****"+great);
     	
     	
     		h=info+less+great;
     	//h=info;
     	//System.out.println("*****"+h);
     	
     	return h;
	}
     	
     	
    /**
     * Registers attribute index {@code x} as available for splitting at
     * this node. No bounds check — assumes fewer than 70 attributes total.
     *
     * @param x attribute (dimension) index
     */
    public void add_attribute(int x) 
	{	
		attrs[num_attrs++]=(Integer)x;
	}

	/**
	 * Buffers one example at this node and bumps the example counter.
	 *
	 * @param example the training example to store
	 */
	public void add_example(Example example)
	{
		
		example_list.add(example);
		num_examples++;
	}

	/**
	 * Renders this subtree into a Swing tree for display. Leaves become
	 * class-name nodes; internal nodes become attribute-name nodes whose
	 * children are labeled from the global Decision.all_d array.
	 *
	 * NOTE(review): advances the shared cursor Decision.count as it walks,
	 * so this method must be called exactly once per tree and in the same
	 * traversal order the thresholds were stored in.
	 *
	 * @param current_parent Swing node to attach this subtree under
	 * @param i              0 for the root call (skip creating an extra
	 *                       attribute node), nonzero for recursive calls
	 */
	public void print(DefaultMutableTreeNode current_parent, int i)
	{
		int x,y;
		
		if(num_children==0)
		{
			DefaultMutableTreeNode classnode=new DefaultMutableTreeNode(Decision.classes[major_class]);
			current_parent.add(classnode);
		}
		else
		{
			DefaultMutableTreeNode child1 = current_parent;
			if(i!=0)
			{
				child1=new DefaultMutableTreeNode(Decision.dimensions[attribute].getAttribute());
				current_parent.add(child1);
			}	
			for(x=0;x<num_children;x++)
			{
				//DefaultMutableTreeNode child=new DefaultMutableTreeNode(Decision.dimensions[attribute].pos_values[x]); //******** ** ****
				DefaultMutableTreeNode child=new DefaultMutableTreeNode(Decision.all_d[Decision.count]);
				/*
				System.out.println("DTNodeAddition: Adding :" 
				+ Decision.all_d[Decision.count++] +"(" +  Decision.dimensions[attribute].attribute + ")" 
				+ " as child of " + current_parent.getUserObject() );
				 */
				Decision.count++;
				child1.add(child);		
				
				children[x].print(child,1);
			}
	    }
	}
	
}
