package skewreduce.framework;

import java.io.IOException;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.ReflectionUtils;

import skewreduce.lib.Partition;


/**
 * Lazily creates and caches one {@link RecordWriter} per {@link Partition},
 * backed by the job's configured {@link MultiOutputFormat}. Writers are named
 * {@code part-<level>-<id>} and are all closed via {@link #close}.
 *
 * <p>Not thread-safe: callers must confine an instance to a single task thread.
 */
public class PartitionOutputs<K,V> {
    // Output format instantiated from the job configuration.
    // NOTE(review): assumes the job is configured with a MultiOutputFormat;
    // a mismatched class surfaces as a ClassCastException in the constructor.
    MultiOutputFormat<K,V> outFmt;

    // One cached writer per partition, created on first request.
    private final Map<Partition,RecordWriter<K,V>> writers =
            new HashMap<Partition,RecordWriter<K,V>>();

    /**
     * Resolves and instantiates the job's output format class.
     *
     * @param context task attempt supplying the job {@link Configuration}
     * @throws IOException if the output format class cannot be resolved
     * @throws InterruptedException declared for interface symmetry with callers
     */
    @SuppressWarnings("unchecked") // cast to MultiOutputFormat<K,V> cannot be checked at runtime
    public PartitionOutputs(TaskAttemptContext context)
            throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();

        try {
            Class<? extends OutputFormat<?, ?>> cls = context.getOutputFormatClass(); // should be multioutput format?
            outFmt = (MultiOutputFormat<K,V>) ReflectionUtils.newInstance(cls, conf);
        } catch ( ClassNotFoundException ex ) {
            throw new IOException("Failed to initialize output format", ex);
        }
    }

    /**
     * Returns the writer for {@code part}, creating and caching a new one
     * named {@code part-<level>-<id>} on first use.
     *
     * @param context task attempt used to open a new writer if needed
     * @param part partition key identifying the output
     * @return the cached or newly created writer for this partition
     */
    public RecordWriter<K,V> createRecordWriter(TaskAttemptContext context, Partition part)
            throws IOException, InterruptedException {
        RecordWriter<K,V> writer = writers.get(part);
        if ( writer == null ) {
            writer = outFmt.getRecordWriter(context,
                    String.format("part-%d-%d", part.getLevel(), part.getID()), false);
            writers.put(part, writer);
        }
        return writer;
    }

    /**
     * Closes every cached writer. A failure while closing one writer no longer
     * aborts the loop and leaks the rest: every writer gets a close attempt,
     * and the first exception encountered is rethrown afterward. The cache is
     * cleared so stale writers cannot be handed out after close.
     *
     * @param context task attempt passed through to each writer's close
     * @throws IOException first I/O failure observed while closing
     * @throws InterruptedException first interruption observed while closing
     */
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        IOException firstIo = null;
        InterruptedException firstInterrupt = null;
        RuntimeException firstRuntime = null;
        for ( RecordWriter<K,V> w : writers.values() ) {
            try {
                w.close(context);
            } catch ( IOException e ) {
                if ( firstIo == null ) firstIo = e;
            } catch ( InterruptedException e ) {
                if ( firstInterrupt == null ) firstInterrupt = e;
            } catch ( RuntimeException e ) {
                if ( firstRuntime == null ) firstRuntime = e;
            }
        }
        writers.clear();
        if ( firstIo != null ) throw firstIo;
        if ( firstInterrupt != null ) throw firstInterrupt;
        if ( firstRuntime != null ) throw firstRuntime;
    }
}
