package dm.ass2.eval;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Reducer;
import dm.ass2.eval.io.*;
import dm.ass2.io.CodebookItemsWritable;

/**
 * Combiner that locally merges the per-record {@link ErrorWritable} items
 * emitted by the mapper into one {@link ErrorWritable} carrying parallel
 * arrays of counts and errors, reducing the volume shuffled to the reducer.
 */
public class EvalCombiner extends Reducer<IntWritable, ErrorWritable, IntWritable, ErrorWritable>{
	/**
	 * Collects the first count/error entry of every incoming item and
	 * re-emits them bundled into a single {@link ErrorWritable}.
	 *
	 * @param key       grouping key from the mapper (passed through unchanged)
	 * @param errorItem the partial error items to merge
	 * @param context   Hadoop task context used to emit the merged item
	 * @throws IOException          if emitting the output fails
	 * @throws InterruptedException if the task is interrupted while writing
	 */
	@Override
	protected void reduce(IntWritable key, Iterable<ErrorWritable> errorItem, Context context) throws IOException, InterruptedException {
		List<Integer> numList = new ArrayList<Integer>();
		List<Double> errorList = new ArrayList<Double>();
		// Hadoop reuses the Writable instance across iterations, so the
		// values must be copied out immediately rather than referenced.
		for (ErrorWritable error : errorItem){
			// NOTE(review): only element [0] of each incoming item is merged —
			// presumably the mapper emits single-entry arrays; confirm upstream.
			numList.add(error.getNum()[0]);
			errorList.add(error.getError()[0]);
		}
		int count = numList.size();
		int[] nums = new int[count];
		double[] errors = new double[count];
		for (int i = 0; i < count; i++){
			nums[i] = numList.get(i);
			errors[i] = errorList.get(i);
		}
		// Bug fix: a combiner must emit the same key it received. The original
		// hard-coded key 1, which corrupts grouping for any other key value.
		context.write(key, new ErrorWritable(count, nums, errors));
	}
}
	
	
