package com.shujia.wyh.moniqiefen2;

import java.io.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class ReduceThread {

    public static void main(String[] args) throws Exception {
        // Directory holding the per-map-task output files ("part_ms").
        File msDir = new File("hadoop/src/main/java/com/shujia/wyh/moniqiefen2/part_ms");

        // Sum the per-class counts across all map outputs.
        Map<String, Integer> counts = aggregate(msDir);

        // Write the final result into a single reduce output file.
        writeResults(counts,
                new File("hadoop/src/main/java/com/shujia/wyh/moniqiefen2/part_r/part-r-00000"));

        System.out.println("reduce任务执行完毕！！");
    }

    /**
     * Reads every map-output file in {@code msDir} and sums the counts per class.
     * Each line is expected to look like {@code "<clazz> <count>"}, separated by a
     * single space (the format the map phase of this demo writes).
     *
     * @param msDir directory containing the map-output part files
     * @return mutable map from class name to total count; empty if the directory
     *         is missing or unreadable ({@code listFiles()} returned null)
     * @throws IOException if a part file cannot be read
     * @throws NumberFormatException if a count field is not a valid integer
     */
    static Map<String, Integer> aggregate(File msDir) throws IOException {
        Map<String, Integer> counts = new HashMap<>();

        File[] parts = msDir.listFiles();
        if (parts == null) {
            // Directory absent or not listable — nothing to aggregate.
            return counts;
        }

        for (File part : parts) {
            // try-with-resources: the original reassigned one shared reader per
            // file and never closed any of them (file-handle leak).
            // NOTE(review): FileReader uses the platform default charset, matching
            // the original code; the map phase presumably wrote with the same default.
            try (BufferedReader br = new BufferedReader(new FileReader(part))) {
                String line;
                while ((line = br.readLine()) != null) {
                    if (line.isEmpty()) {
                        continue; // tolerate blank lines (e.g. trailing newline)
                    }
                    String[] fields = line.split(" ");
                    // merge() replaces the containsKey/get/put sequence.
                    counts.merge(fields[0], Integer.parseInt(fields[1]), Integer::sum);
                }
            }
        }
        return counts;
    }

    /**
     * Writes the aggregated counts to {@code outFile}, one {@code "<clazz> <count>"}
     * line per entry, in the map's iteration order.
     *
     * @param counts  class name → total count
     * @param outFile destination file (parent directory must already exist)
     * @throws IOException if the file cannot be written
     */
    static void writeResults(Map<String, Integer> counts, File outFile) throws IOException {
        // try-with-resources guarantees the writer is flushed and closed;
        // the original flushed per line but never closed the writer.
        try (BufferedWriter bw = new BufferedWriter(new FileWriter(outFile))) {
            for (Map.Entry<String, Integer> entry : counts.entrySet()) {
                bw.write(entry.getKey() + " " + entry.getValue());
                bw.newLine();
            }
        }
    }
}
