package com.mapreduce.wordcount;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
//Reduce阶段

//泛型 k3  v3   k4  v4
/**
 * Reduce phase of the word-count job.
 *
 * <p>Generics: input key/value (K3/V3) = {@code Text}/{@code IntWritable} from the map
 * phase; output key/value (K4/V4) = {@code Text}/{@code IntWritable} — the word and its
 * total occurrence count.
 */
public class WordCountReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

    // Reused across reduce() calls: reduce() runs once per distinct key, so allocating
    // a fresh IntWritable each time creates avoidable garbage on large jobs.
    private final IntWritable result = new IntWritable();

    /**
     * Sums the counts for a single word. Called once per distinct key.
     *
     * @param key     the word (K3; emitted unchanged as K4)
     * @param values  all counts produced by the mappers for this word, e.g. <1,1,...>
     * @param context sink for the (word, total) output pair
     * @throws IOException          if the framework fails to write the output
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        // Accumulate the total number of occurrences of this word.
        int total = 0;
        for (IntWritable value : values) {
            total += value.get();
        }
        // Emit (word, total) as (K4, V4).
        result.set(total);
        context.write(key, result);
    }
}
