package db.prob.operators.project;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import db.prob.io.LineParser;
import db.prob.io.PTuple;

public class ReducerProject extends Reducer<Text, Text, Text, Text> {

	/** Regex used to split a serialized tuple line into its fields. */
	private static final String FIELD_SEPARATOR = "\\t";

	/**
	 * Merges all probabilistic tuples that collapse onto the same projection key
	 * into one output tuple. Assuming the duplicates are independent events, the
	 * merged probability is their disjunction: {@code 1 - prod(1 - p_i)}.
	 *
	 * <p>Records keyed with {@link MapperProject#PASS} are emitted unchanged
	 * (pass-through path used by the mapper for rows that need no merging).
	 *
	 * @param key     projection key, or the PASS marker for pass-through rows
	 * @param values  serialized tuple lines sharing this key (at least one,
	 *                by the reducer contract)
	 * @param context used to read configuration and emit output; records are
	 *                written with a null key so only the value line is emitted
	 * @throws IOException          on write failure
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<Text> values, Context context)
			throws IOException, InterruptedException {
		// Pass-through rows: forward each value untouched and stop.
		if (key.toString().equals(MapperProject.PASS)) {
			for (Text value : values) {
				context.write(null, value);
			}
			return;
		}

		Configuration conf = context.getConfiguration();
		// Table name the merged tuple is written under.
		String resultTableName = conf.get(MapperProject.RESULT_TABLE);

		// Accumulate P(no duplicate exists) = prod(1 - p_i). The first value is
		// also parsed to borrow its projected attributes for the output tuple.
		Iterator<Text> iter = values.iterator();
		PTuple firstTuple = LineParser.createPTuple(iter.next().toString(), FIELD_SEPARATOR);
		float noneProbability = 1 - firstTuple.getProbability();

		while (iter.hasNext()) {
			PTuple tuple = LineParser.createPTuple(iter.next().toString(), FIELD_SEPARATOR);
			noneProbability *= (1 - tuple.getProbability());
		}

		// All duplicates share the same projected attributes, so taking them
		// from the first tuple is safe. Probability is the complement.
		PTuple newTuple = new PTuple(resultTableName, 1 - noneProbability, firstTuple.getAttr());
		context.write(null, new Text(newTuple.toString()));
	}

}
