package design;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.Job;

/**
 * Driver for a MapReduce-based C4.5 decision-tree builder.
 *
 * <p>The tree is grown breadth-first: {@link #splitted} holds every node
 * generated so far, and {@link #current_index} walks through it. For each node,
 * job1 counts records per (attribute, value, class); if the node is impure and
 * attributes remain, job2 computes the information-gain ratio per attribute and
 * the best one spawns one child node per attribute value. Pure or exhausted
 * nodes are written out as classification rules.
 */
public class C45 {

    /** Node (split) being expanded in the current iteration. */
    public static Split currentsplit = new Split();

    /** Every node created so far; children are appended as the tree grows. */
    public static List<Split> splitted = new ArrayList<Split>();

    /** Position in {@link #splitted} of the node currently being expanded. */
    public static int current_index = 0;

    /** Entropy of the current node; 0.0 means the node is pure (a leaf). */
    public static double entropy = 0;

    public static void main(String[] args) throws Exception {
        splitted.add(currentsplit); // root node: no attributes fixed yet
        int status1 = 0;
        int status2 = 0;
        int split_index = 0;
        String classLabel = null;
        int total_attributes = TokenizerMapper1.no_Attr; // attribute count from the mapper
        total_attributes = 4; // hard-coded for the play-tennis dataset (overrides the above)
        int split_size = splitted.size();
        InfoGainRatio gainObj = null;
        Split newnode = null;
        int temp_size;

        // Breadth-first expansion: new children are appended to `splitted`, so the
        // loop runs until every generated node has been processed.
        while (split_size > current_index) {
            currentsplit = splitted.get(current_index);
            gainObj = new InfoGainRatio();

            status1 = runjob1(); // job1: record counts per attribute/value/class
            System.out.println("Current node  index :" + current_index);
            gainObj.getcount();                   // load the record-count matrix
            entropy = gainObj.currNodeEntropy();  // entropy of the current node
            classLabel = gainObj.majorityLabel(); // majority class label of the node
            currentsplit.classLabel = classLabel;

            if (entropy != 0.0 && currentsplit.attr_index.size() != total_attributes) {
                // Impure node with unused attributes remaining: split it further.
                System.out.println("");
                System.out.println("Entropy is NOT zero : " + entropy);

                status2 = runjob2(); // job2: info-gain ratio for each candidate attribute

                split_index = getMaxInfoRatio(); // attribute with the highest gain ratio
                String attr_values_split = gainObj.getvalues(split_index);
                StringTokenizer attrs = new StringTokenizer(attr_values_split);
                int number_splits = attrs.countTokens(); // one child per attribute value
                String red = "";

                System.out.println(" Spliting attribute index:  " + split_index);
                System.out.println(" Spliting attribute values  " + attr_values_split);

                for (int splitnumber = 1; splitnumber <= number_splits; splitnumber++) {
                    temp_size = currentsplit.attr_index.size();
                    newnode = new Split();
                    // Copy the parent's fixed attribute/value path into the child.
                    for (int y = 0; y < temp_size; y++) {
                        newnode.attr_index.add(currentsplit.attr_index.get(y));
                        newnode.attr_value.add(currentsplit.attr_value.get(y));
                    }
                    red = attrs.nextToken();
                    newnode.attr_index.add(split_index);
                    newnode.attr_value.add(red);
                    splitted.add(newnode); // enqueue the new child node
                }
            } else {
                // Leaf node (pure, or all attributes used): emit a rule.
                System.out.println("");
                String rule = "";
                temp_size = currentsplit.attr_index.size();
                for (int val = 0; val < temp_size; val++) {
                    rule = rule + " " + currentsplit.attr_index.get(val) + " " + currentsplit.attr_value.get(val);
                }
                rule = rule + " " + currentsplit.classLabel;
                writeRuleToFile(rule);
                if (entropy != 0.0)
                    System.out.println("Enter rule in file:: " + rule);
                else
                    System.out.println("Enter rule in file (Entropy zero )::   " + rule);
            }

            split_size = splitted.size();
            System.out.println("TOTAL NODES: " + split_size);

            current_index++;
        }

        System.out.println("COMPLEEEEEEEETEEEEEEEEEE");
        System.exit(status1 + status2);
    }

    /**
     * Appends one rule line to the local rule file.
     *
     * <p>FIX: the original opened the writer without try-with-resources (leaking
     * the handle when {@code write} failed) and swallowed every exception in an
     * empty catch block; failures are now reported on stderr, consistent with
     * {@link #getMaxInfoRatio()}.
     *
     * @param text the rule text to append (one line)
     */
    public static void writeRuleToFile(String text) {
        try (BufferedWriter bw = new BufferedWriter(
                new FileWriter(new File("/home/ubuntu/C45_P/rule.txt/"), true))) {
            bw.write(text);
            bw.newLine();
        } catch (Exception e) {
            e.printStackTrace(); // was an empty catch; don't hide I/O failures
        }
    }

    /**
     * Scans job2's output file ("attrIndex gainRatio" per line) and returns the
     * index of the attribute with the largest information-gain ratio.
     *
     * <p>FIX: uses try-with-resources so the reader is closed even when reading
     * or parsing throws (the original only closed it on the success path), and
     * drops the redundant {@code DataInputStream} wrapper.
     *
     * @return the best attribute's index, or 0 if the file is empty or unreadable
     */
    public static int getMaxInfoRatio() {
        int max_attr = 0;
        double maxInfoRatio = 0.0;
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new FileInputStream("/home/ubuntu/C45_P/JOB2/intermediate" + C45.current_index + ".txt")))) {
            String line;
            StringTokenizer itr;
            while ((line = br.readLine()) != null) {
                itr = new StringTokenizer(line);
                int no_attr = Integer.parseInt(itr.nextToken());
                double infoRatio = Double.parseDouble(itr.nextToken());
                System.out.println(infoRatio + " ");
                if (infoRatio > maxInfoRatio) {
                    max_attr = no_attr;
                    maxInfoRatio = infoRatio;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return max_attr;
    }

    /**
     * Configures and runs job1: counts records per (attribute, value, class)
     * under the current node.
     *
     * <p>FIX: the original spun on {@code while(!job1.waitForCompletion(false));}
     * — a completed job is never resubmitted, so a failed job made this loop
     * busy-wait forever. The completion status is now returned instead.
     *
     * @return 0 if the job succeeded, 1 if it failed
     */
    public static int runjob1() throws Exception {
        Configuration conf = new Configuration();
        Job job1 = new Job(conf, "C45_job1" + C45.current_index);
        job1.setJarByClass(C45.class);
        job1.setMapperClass(TokenizerMapper1.class);
        job1.setReducerClass(SumReducer1.class);
        job1.setMapOutputKeyClass(Text.class);
        job1.setMapOutputValueClass(IntWritable.class);
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPaths(job1, "/home/ubuntu/C45_P/playtennis.txt");
        FileOutputFormat.setOutputPath(job1,
                new Path("/home/ubuntu/C45_P/JOB1/intermediate" + current_index));
        return job1.waitForCompletion(false) ? 0 : 1;
    }

    /**
     * Configures and runs job2: computes the information-gain ratio of every
     * remaining attribute from job1's intermediate output.
     *
     * <p>FIX: same busy-wait-on-failure defect as {@link #runjob1()}; the
     * completion status is now returned instead.
     *
     * @return 0 if the job succeeded, 1 if it failed
     */
    public static int runjob2() throws Exception {
        Configuration conf = new Configuration();
        Job job2 = new Job(conf, "C45_job2" + C45.current_index);
        job2.setJarByClass(C45.class);
        job2.setMapperClass(Mapper2.class);
        job2.setReducerClass(Reducer2.class);
        job2.setMapOutputKeyClass(Text.class);
        job2.setMapOutputValueClass(Text.class);
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(DoubleWritable.class);
        FileInputFormat.addInputPaths(job2, "/home/ubuntu/C45_P/JOB1/intermediate" + C45.current_index + ".txt");
        FileOutputFormat.setOutputPath(job2,
                new Path("/home/ubuntu/C45_P/JOB2/intermediate" + current_index));
        return job2.waitForCompletion(false) ? 0 : 1;
    }

}