
/*
 * HadoopReducer.java
 *
 * Created on May 13, 2012, 4:12:49 PM
 */

package AverageSize;


import java.io.IOException;
import java.util.Iterator;
// import org.apache.commons.logging.Log;
// import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

/**
 *
 * @author crazymooner
 */
/**
 * Reducer that averages the integer values collected for each key.
 *
 * <p>Keys act as category markers from the mapper: key {@code 1} carries
 * size values and is emitted under the "Weighted AverageSize: " label;
 * any other key carries burn counts and is emitted under the
 * "AverageBurnCount: " label. For each key the reducer sums the values,
 * divides by their count, and emits the labelled arithmetic mean.
 */
public class HadoopReducer extends MapReduceBase implements Reducer<IntWritable, IntWritable, Text, DoubleWritable> {

    // Output labels; trailing spaces are part of the original output format
    // and are preserved deliberately.
    private static final String SIZE_LABEL = "Weighted AverageSize: ";
    private static final String BURN_LABEL = "AverageBurnCount: ";

    /**
     * Computes the mean of {@code values} and writes it out under a label
     * selected by {@code key}.
     *
     * @param key      category marker: {@code 1} selects the size label,
     *                 anything else the burn-count label
     * @param values   integer values to average; the Hadoop framework
     *                 supplies at least one value per key
     * @param output   collector receiving the (label, mean) pair
     * @param reporter progress reporter (unused)
     * @throws IOException if the output collector fails
     */
    @Override
    public void reduce(IntWritable key, Iterator<IntWritable> values,
                       OutputCollector<Text, DoubleWritable> output, Reporter reporter)
            throws IOException {
        double sum = 0.0;
        int count = 0;
        while (values.hasNext()) {
            sum += values.next().get();
            count++;
        }
        if (count == 0) {
            // Defensive guard: avoid emitting NaN (0/0). The framework never
            // invokes reduce with an empty iterator, but protect regardless.
            return;
        }
        String label = (key.get() == 1) ? SIZE_LABEL : BURN_LABEL;
        output.collect(new Text(label), new DoubleWritable(sum / count));
    }
}
