package mrdemo005;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Reducer that tracks the overall maximum and minimum of all incoming
 * {@code IntWritable} keys and emits both once, at the end of the task.
 *
 * <p>The values ({@code v2s}) are never read — only the key carries the
 * number being compared. State lives in instance fields, which is safe
 * because Hadoop creates one reducer instance per task and calls
 * {@link #cleanup} exactly once after all {@link #reduce} calls.
 */
public class MaxMinReduce extends Reducer<IntWritable, Text, Text, IntWritable>{

	// Running maximum of all keys seen so far by this task.
	private int maxNum = Integer.MIN_VALUE;

	// Running minimum of all keys seen so far by this task.
	private int minNum = Integer.MAX_VALUE;

	/**
	 * Folds the incoming key into the running max/min.
	 *
	 * @param k2      the number being aggregated (carried in the key)
	 * @param v2s     ignored
	 * @param context Hadoop task context (unused here; output happens in cleanup)
	 */
	@Override
	protected void reduce(IntWritable k2, Iterable<Text> v2s,
			Reducer<IntWritable, Text, Text, IntWritable>.Context context)
			throws IOException, InterruptedException {
		// Unbox once instead of calling k2.get() up to four times.
		final int value = k2.get();
		maxNum = Math.max(maxNum, value);
		minNum = Math.min(minNum, value);
	}

	/**
	 * Called once at the end of the task; writes the final max and min.
	 *
	 * <p>NOTE(review): if the task receives no input at all, this still emits
	 * the {@code Integer.MIN_VALUE}/{@code Integer.MAX_VALUE} sentinels —
	 * confirm whether an empty-input guard is desired.
	 */
	@Override
	protected void cleanup(Reducer<IntWritable, Text, Text, IntWritable>.Context context)
			throws IOException, InterruptedException {
		context.write(new Text("最大的数是："), new IntWritable(maxNum));
		context.write(new Text("最小的数是："), new IntWritable(minNum));
	}

}
