package design;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.StringTokenizer;
//import java.util.List;
//import java.util.StringTokenizer;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Second-stage reducer: for each attribute (keyed by attribute index), collects the
 * (attribute value, class label, record count) triples emitted by the mapper into the
 * shared {@code count} table, then computes the attribute's information gain ratio and
 * emits (attribute index, gain ratio).
 */
public class Reducer2 extends Reducer
        <Text, Text, Text, DoubleWritable> {
    // Shared tabulation buffer: each row is {attribute index, attribute value, class label, record count}.
    // NOTE(review): static state shared across reducer instances in the same JVM, and capacity is a
    // hard-coded 1000 rows with no bounds check — confirm input size stays under this limit.
    static String[][] count = new String[1000][4];
    // Running row count into `count`. NOTE(review): never reset between reduce() calls, so rows
    // accumulate across keys — presumably intentional for InfoGainRatio, but verify.
    int linenumber = 0;

    /**
     * Computes the information gain ratio for the attribute identified by {@code key}.
     *
     * @param key     attribute index as a string
     * @param values  lines of the form "attrValue classLabel recordCount"
     * @param context used to emit (attribute index, gain ratio)
     */
    public void reduce(Text key, Iterable<Text> values,
                       Context context) throws IOException, InterruptedException {
        String str = key.toString();
        int index = Integer.parseInt(str);
        for (Text value : values) {
            String line = value.toString();
            StringTokenizer itr = new StringTokenizer(line);
            count[linenumber][0] = str;            // attribute index
            count[linenumber][1] = itr.nextToken(); // attribute value
            count[linenumber][2] = itr.nextToken(); // class label
            count[linenumber][3] = itr.nextToken(); // record count
            linenumber++;
        }
        // Null-terminate the table so consumers can detect the end of valid rows.
        count[linenumber][0] = null;
        count[linenumber][1] = null;
        count[linenumber][2] = null;
        count[linenumber][3] = null;

        InfoGainRatio gainObj = new InfoGainRatio(linenumber, count);
        double gainratio = gainObj.gainRatio(C45.entropy);
        // Emit attribute index as key, information gain ratio as value.
        context.write(key, new DoubleWritable(gainratio));
        writeToFile(key + " " + gainratio);    // write intermediate result to a file for debugging
    }

    /**
     * Appends one line of debug output to the per-iteration intermediate file.
     * Failures are reported to stderr but do not abort the job (best-effort debug aid).
     *
     * @param text the line to append
     */
    public static void writeToFile(String text) {
        // try-with-resources guarantees the writer is closed even if write()/newLine() throws.
        try (BufferedWriter bw = new BufferedWriter(new FileWriter(
                new File("/Users/kami/C4.5Parallelization/result/JOB2/intermediate" + C45.current_index + ".txt"), true))) {
            bw.write(text);
            bw.newLine();
        } catch (IOException e) {
            // Debug output is best-effort: report the failure instead of silently swallowing it.
            System.err.println("Reducer2.writeToFile failed: " + e);
            e.printStackTrace();
        }
    }
}


