package mrdemo004;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Reducer that emits the integer average score per key.
 *
 * <p>Input:  (name, score) pairs grouped by name.
 * Output: (name, average score), where the average is truncated to an
 * integer because the job's output value type is {@link IntWritable}.
 */
public class AvgReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

	/**
	 * Averages all scores received for a single key.
	 *
	 * @param k2      the key (e.g. a student name)
	 * @param v2s     all scores associated with this key
	 * @param context output collector; receives (name, average)
	 * @throws IOException          if writing the output fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text k2, Iterable<IntWritable> v2s,
			Reducer<Text, IntWritable, Text, IntWritable>.Context context)
			throws IOException, InterruptedException {
		// Number of scores seen for this key.
		int count = 0;
		// Running total of the scores.
		int sum = 0;
		for (IntWritable v : v2s) {
			count++;
			sum += v.get();
		}
		// Hadoop never calls reduce() with an empty value iterable, so
		// count >= 1 here and division by zero cannot occur.
		// Integer division truncates; keeping IntWritable preserves the
		// job's declared output contract.
		context.write(k2, new IntWritable(sum / count));
	}

}
