package cn.xiao.mr;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reducer for the WordCount job: [WCReducer].
 *
 * Generic parameters of {@code Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>}:
 *   KEYIN    - input key type (the word emitted by the mapper)
 *   VALUEIN  - input value type (the per-occurrence counts)
 *   KEYOUT   - output key type (the word)
 *   VALUEOUT - output value type (the total count for the word)
 */
public class WCReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

    /**
     * Sums all counts emitted by the mappers for a single word and writes the
     * word together with its total count to the job output.
     *
     * @param key     the word being reduced
     * @param values  all per-occurrence counts collected for this word
     * @param context the job context used to emit the (word, total) pair
     * @throws IOException          if writing to the context fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context)
            throws IOException, InterruptedException {
        // Accumulate the total number of occurrences of this word.
        long counter = 0;
        for (LongWritable value : values) {
            counter += value.get();
        }
        // Emit the word with its aggregated count.
        context.write(key, new LongWritable(counter));
    }
}


