package com.gedi.data.OutPutFormate;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.Iterator;

/**
 * @Author:RenPu
 * @Date: 2020/1/2 18:29
 * @Version: 1.0
 * @description:
 */


/**
 * Reduce phase: sums the partial counts for each word.
 *
 * keyIn:   Text        — same type as the mapper's keyOut (the word)
 * valueIn: IntWritable — same type as the mapper's valueOut (a partial count)
 * keyOut:  WordWritable — the word together with its total count
 * valueOut: NullWritable — no value is emitted alongside the key
 */
public class MyPreduce extends Reducer<Text, IntWritable, WordWritable, NullWritable> {


    /**
     * Sums all partial counts for a single word and emits the total.
     *
     * @param key     the word
     * @param values  the partial counts collected for this word (typically all 1s from the mapper)
     * @param context job context used to emit the (word, total) result
     * @throws IOException          if writing the output fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {

        int count = 0;
        // Idiomatic enhanced-for over the value iterator.
        for (IntWritable value : values) {
            count += value.get();
        }
        // BUG FIX: emit NullWritable.get() instead of a raw Java null —
        // the declared output value type is NullWritable, and several
        // OutputFormats throw NullPointerException when asked to
        // serialize a literal null value.
        context.write(new WordWritable(key.toString(), count), NullWritable.get());

    }
}
