package cn.pengpeng.day05.mywc;

import java.io.IOException;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Reducer;

public class WcCombiner extends Reducer<Bean, NullWritable, Bean, NullWritable> {

	/**
	 * Map-side combiner for the word-count job: pre-aggregates records that
	 * share the same grouping key before they are shuffled to the reducers.
	 */
	public WcCombiner() {
		System.out.println("WcCombiner类被实例化了...................");
	}

	/**
	 * Sums the per-record counts of one key group and emits a single
	 * aggregated {@code Bean} with a {@code NullWritable} value.
	 *
	 * NOTE(review): Hadoop reuses the key object while iterating the values,
	 * so reading {@code bean.getCount()} inside the loop observes each
	 * individual record's count in turn — this presumably relies on the
	 * Beans being grouped by word; confirm against Bean's compareTo /
	 * grouping comparator.
	 *
	 * @param bean    current (framework-reused) key of the group
	 * @param values  placeholder values; iterating them advances the key
	 * @param context output sink for the combined record
	 * @throws IOException          if the write to the context fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Bean bean, Iterable<NullWritable> values,
			Reducer<Bean, NullWritable, Bean, NullWritable>.Context context) throws IOException, InterruptedException {
		System.out.println("WcCombiner类的reduce()方法执行了.................");
		int total = 0;
		// The loop variable itself is unused; iterating drives key reuse.
		for (NullWritable ignored : values) {
			total += bean.getCount();
		}
		Bean combined = new Bean();
		combined.set(bean.getWord(), total);
		context.write(combined, NullWritable.get());
	}

}
