/**
 * 
 */
package eval;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Reducer;

import dataStructures.ErrorElementWritable;

/**
 * @author zvikadori
 *
 */
/**
 * Combiner that locally pre-aggregates {@link ErrorElementWritable} partials per key
 * before they are shipped to the reducer: it sums the profile sample counts and the
 * total square errors of all elements sharing the same key and emits a single
 * combined element. Input and output types are identical, as required for a combiner.
 */
public class EvalCombiner extends Reducer<IntWritable, ErrorElementWritable, IntWritable, ErrorElementWritable> {

	/**
	 * Folds all partial error elements for {@code key} into one element whose
	 * sample size and square error are the sums of the inputs, and writes it out.
	 *
	 * @param key     group key (semantics defined by the mapper; presumably a profile id — verify against caller)
	 * @param values  partial error elements produced upstream for this key
	 * @param context Hadoop task context used to emit the combined element
	 * @throws IOException          if the write to the context fails
	 * @throws InterruptedException if the task is interrupted while writing
	 */
	@Override
	public void reduce(IntWritable key, Iterable<ErrorElementWritable> values,
			Context context) throws IOException, InterruptedException {

		int totalSamples = 0;
		// long accumulator: square errors may exceed the int range when summed
		long totalSquareError = 0L;

		for (ErrorElementWritable element : values) {
			totalSamples += element.getProfileSampleSize();
			totalSquareError += element.getTotalSquareError();
		}

		// Emit a single element carrying the locally combined totals.
		ErrorElementWritable combined = new ErrorElementWritable();
		combined.setProfileSampleSize(totalSamples);
		combined.setTotalSquareError(totalSquareError);
		context.write(key, combined);
	}

}
