/**
 * 
 */
package eval;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;

import dataStructures.ErrorElementWritable;

/**
 * @author zvikadori
 *
 */
/**
 * Reducer that computes the root-mean-square error (RMSE) for each key.
 *
 * <p>Each incoming {@link ErrorElementWritable} carries a partial sample count
 * and a partial sum of squared errors; this reducer aggregates them and emits
 * {@code sqrt(totalSquareError / totalSampleCount)} for the key.
 */
public class EvalReducer extends Reducer<IntWritable, ErrorElementWritable, IntWritable, DoubleWritable> {

	// Reused across reduce() calls to avoid one allocation per key
	// (standard Hadoop writable-reuse idiom; Context.write serializes
	// the value immediately, so reuse is safe).
	private final DoubleWritable outVal = new DoubleWritable();

	/**
	 * Aggregates partial error statistics for {@code key} and writes the RMSE.
	 *
	 * @param key     group key (opaque identifier; passed through unchanged)
	 * @param values  partial (sampleSize, squareError) contributions for this key
	 * @param context Hadoop context used to emit the (key, rmse) pair
	 * @throws IOException          if the underlying write fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	public void reduce(IntWritable key, Iterable<ErrorElementWritable> values,
			Context context) throws IOException, InterruptedException {
		double sampleCount = 0;
		double squareError = 0;
		for (ErrorElementWritable value : values) {
			sampleCount += value.getProfileSampleSize();
			squareError += value.getTotalSquareError();
		}

		// NOTE(review): if every element reports a sample size of 0 this
		// yields NaN (0/0), matching the original behavior. Hadoop always
		// invokes reduce() with at least one value, so the iterable itself
		// is never empty.
		outVal.set(Math.sqrt(squareError / sampleCount));
		context.write(key, outVal);
	}

}
