package com.bytes32.preprocess;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.bytes32.classification.tree.dataset.DefaultFeatureProcessor;
import com.bytes32.classification.tree.dataset.Feature;
import com.bytes32.classification.tree.dataset.FeatureType;

public class DatasetBuilder implements Reducer<IntWritable, Feature, IntWritable, Feature> {

	/*
	 * Default logger
	 */
	private static final Logger LOG = LoggerFactory.getLogger(DatasetBuilder.class);

	/** Number of features accumulated before merging them into the result. */
	private final int bufferSize = 20;

	@Override
	public void configure(JobConf arg0) {
		// No job-level configuration needed for this reducer.
	}

	@Override
	public void close() throws IOException {
		// No resources to release.
	}

	/**
	 * Merges all valid {@link Feature} values for a key into a single feature
	 * and emits it. The first valid feature is used as the merge target;
	 * subsequent features are buffered in batches of {@code bufferSize} and
	 * folded into it via {@link Feature#merge(Iterator)}. Features whose
	 * processor is a {@link DefaultFeatureProcessor} or whose type is
	 * {@link FeatureType#UNKNOWN} are considered invalid: the first such
	 * feature aborts with an {@link IOException}, later ones are logged and
	 * skipped.
	 *
	 * @param key    feature identifier
	 * @param values features to merge; at least the first must be valid
	 * @param out    collector receiving the single merged feature per key
	 * @param rep    progress reporter (unused)
	 * @throws IOException if the first feature for the key is invalid
	 */
	@Override
	public void reduce(IntWritable key, Iterator<Feature> values, OutputCollector<IntWritable, Feature> out, Reporter rep)
			throws IOException {
		if (values.hasNext()) {
			/* take the first feature counter it doesn't really matter which one it is */
			Feature feature = values.next();

			if (feature.getProcessor() instanceof DefaultFeatureProcessor || feature.getFeatureType() == FeatureType.UNKNOWN)
				throw new IOException(" Invalid feature ");

			Collection<Feature> buffer = new ArrayList<Feature>(bufferSize);
			while (values.hasNext()) {
				Feature next = values.next();
				if (next.getProcessor() instanceof DefaultFeatureProcessor || next.getFeatureType() == FeatureType.UNKNOWN) {
					LOG.error(" Unknown features in Reducer ");
					continue;
				}
				/* BUGFIX: previously the feature was consumed but never buffered,
				 * so nothing was ever merged and all but the first feature were lost */
				buffer.add(next);
				if (buffer.size() >= bufferSize) {
					/* buffer is full */
					feature.merge(buffer.iterator());
					buffer.clear();
				}
			}

			if (buffer.size() > 0) {
				/* we're done but there's still stuff in the buffer */
				feature.merge(buffer.iterator());
				buffer.clear();
			}
			out.collect(key, feature);
		}
	}

}
