package mrdemo002;

import java.io.IOException;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Deduplication reducer: the shuffle phase groups identical lines under the
 * same key, so writing each key exactly once removes all duplicates.
 */
public class QuChongReduce extends Reducer<Text, Text, Text, NullWritable> {

	/**
	 * Emits each distinct key a single time; the grouped values are ignored.
	 *
	 * @param key     one distinct input line
	 * @param values  all occurrences of that line (unused)
	 * @param context output collector; receives (key, null)
	 * @throws IOException          if the write to the output fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<Text> values, Context context)
			throws IOException, InterruptedException {
		// The key alone is the deduplicated record; NullWritable stands in
		// for the absent output value.
		context.write(key, NullWritable.get());
	}
}
