package com.mr.wordcount;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Word-count reducer: sums the per-word counts emitted by the mapper.
 *
 * <p>Input:  (word, [count, count, ...]) — all values for the same key are
 * grouped into one reduce call. Output: (word, totalCount).
 */
public class WCReducer extends Reducer<Text, LongWritable, Text, LongWritable>{

	/**
	 * Processes one group of values sharing the same key and writes the
	 * aggregated total.
	 *
	 * @param key     the word
	 * @param values  all counts emitted for this word
	 * @param context used to emit the (word, total) pair
	 * @throws IOException          if writing the output fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<LongWritable> values,
						  Reducer<Text, LongWritable, Text, LongWritable>.Context context) throws IOException, InterruptedException {

		// Use long, not int: LongWritable.get() returns long, and an int
		// accumulator would silently truncate/overflow for very large counts.
		long sum = 0;

		for(LongWritable value : values) {
			sum += value.get();
		}

		// Emit the aggregated count for this word.
		context.write(key, new LongWritable(sum));

	}

}
