package com.wsx.demo;

import java.io.IOException;

import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class TestReducer extends Reducer<Text, Text, Text, FloatWritable> {

	// Running maximum over all values of the "max" key.
	// NOTE: must start at NEGATIVE_INFINITY, not Float.MIN_VALUE —
	// Float.MIN_VALUE is the smallest POSITIVE float, so an init of
	// MIN_VALUE would report a wrong maximum for all-negative inputs.
	private float max = Float.NEGATIVE_INFINITY;
	// Running minimum over all values of the "min" key; POSITIVE_INFINITY
	// guarantees the first real value replaces it.
	private float min = Float.POSITIVE_INFINITY;

	// Accumulators for the "aver" key. Each value is expected to be a
	// "partialSum:count" pair emitted by the mapper/combiner —
	// TODO confirm against the mapper's output format.
	private double sum = 0;
	private long num = 0;

	/**
	 * Accumulates per-key statistics into the instance fields; nothing is
	 * written here — final results are emitted once in {@link #cleanup}.
	 *
	 * @param key    one of "max", "min" or "aver" (other keys are ignored)
	 * @param values numeric strings, or "sum:count" pairs for "aver"
	 */
	@Override
	protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, Text, FloatWritable>.Context context)
			throws IOException, InterruptedException {
		switch (key.toString()) {
		case "max":
			for (Text text : values) {
				// parse once per value instead of twice (condition + assignment)
				float v = Float.parseFloat(text.toString());
				max = Math.max(max, v);
			}
			break;
		case "min":
			for (Text text : values) {
				float v = Float.parseFloat(text.toString());
				min = Math.min(min, v);
			}
			break;
		case "aver":
			for (Text text : values) {
				String[] strs = text.toString().trim().split(":");
				sum += Float.parseFloat(strs[0]);
				// the count is an integer; Long.parseLong avoids the lossy
				// float parse + hidden narrowing of "num += parseFloat(...)"
				num += Long.parseLong(strs[1]);
			}
			break;
		default:
			// unknown key: ignore, matching the original if/else chain
			break;
		}
	}

	/**
	 * Emits the three aggregated results exactly once, after all reduce
	 * calls have completed.
	 */
	@Override
	protected void cleanup(Reducer<Text, Text, Text, FloatWritable>.Context context)
			throws IOException, InterruptedException {
		context.write(new Text("max"), new FloatWritable(max));
		context.write(new Text("min"), new FloatWritable(min));
		// guard against division by zero when no "aver" records were seen
		// (the unguarded original produced NaN in that case)
		float average = num > 0 ? (float) (sum / num) : 0f;
		context.write(new Text("aver"), new FloatWritable(average));
	}

}
