package com.mapreduce.wordcount;


import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

// Reduce phase.
// Generics: <k3, v3> = reducer input (word, counts), <k4, v4> = reducer output (word, total).
public class WordCountReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

    // Reused output writable: allocating a new IntWritable per reduce() call is
    // the classic Hadoop anti-pattern; a single reusable instance is safe here
    // because context.write() serializes the value before the next call.
    private final IntWritable total = new IntWritable();

    /**
     * Sums the partial counts emitted by the mappers for one word.
     *
     * <p>reduce() is invoked once per distinct key; {@code values} holds every
     * count produced for that key (e.g. key "Hello", values &lt;1, 1&gt;).
     *
     * @param key     the word (k3); passed through unchanged as the output key (k4)
     * @param values  all counts emitted for this word by the map phase (v3)
     * @param context Hadoop context used to emit the (word, totalCount) pair (k4, v4)
     * @throws IOException          on write failure
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values,
                          Reducer<Text, IntWritable, Text, IntWritable>.Context context)
            throws IOException, InterruptedException {
        // Accumulate the sum of all partial counts for this word.
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        total.set(sum);
        context.write(key, total);
    }
}
