package bigdata.mapreduce.statistics;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Reducer that emits, for each integer key, a tab-separated summary of its
 * values: {@code max <TAB> min <TAB> count}.
 */
public class Reduce extends Reducer<IntWritable, IntWritable, IntWritable, Text> {

	// Reusable output Writables (Hadoop idiom: allocate once, not per reduce() call).
	private final IntWritable outKey = new IntWritable();
	private final Text outValue = new Text();

	/**
	 * Aggregates all values for one key into a max/min/count summary.
	 *
	 * <p>NOTE(review): the third output column is a record COUNT (incremented
	 * once per value), not an arithmetic sum of the values, even though the
	 * original local was named {@code sum} — confirm this is the intended
	 * statistic before relying on it.
	 *
	 * @param key     the grouping key (Hadoop reuses this object across calls)
	 * @param values  all values observed for this key; Hadoop guarantees at
	 *                least one, so the MIN_VALUE/MAX_VALUE sentinels never leak
	 * @param context output collector
	 * @throws IOException          if the underlying write fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	public void reduce(IntWritable key, Iterable<IntWritable> values, Context context)
			throws InterruptedException, IOException {
		int max = Integer.MIN_VALUE;
		int min = Integer.MAX_VALUE;
		int count = 0;
		for (IntWritable value : values) {
			int price = value.get();
			if (price > max) {
				max = price;
			}
			if (price < min) {
				min = price;
			}
			count++;
		}
		// Copy the key's int value rather than aliasing the framework's key
		// object: Hadoop recycles the IntWritable it passes in, so storing
		// the reference in a field is unsafe.
		outKey.set(key.get());
		outValue.set(String.format("%d\t%d\t%d", max, min, count));
		context.write(outKey, outValue);
	}
}
