package skewreduce.framework;

import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FSDataOutputStream;

import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.util.*;

/**
 * Base class for {@link MultiOutputFormat} implementations that serialize
 * records to a (possibly compressed) byte stream. This class handles output
 * file naming and optional output compression; concrete subclasses supply the
 * record serialization via {@link #createRecordWriter(DataOutputStream)}.
 *
 * @param <K> key type emitted by the task
 * @param <V> value type emitted by the task
 */
public abstract class MultiOutputFormatBase<K,V> 
extends MultiOutputFormat<K,V> {

    /**
     * Creates a record writer whose target file is derived from {@code name}
     * (or the default work file when {@code name} is {@code null}), wrapping
     * the stream in the configured compression codec when output compression
     * is enabled.
     *
     * @param job    the task attempt context
     * @param name   logical output name, or {@code null} for the default work file
     * @param unique if {@code true}, the file name is made unique per task;
     *               otherwise the caller MUST guarantee {@code name} is unique
     * @throws IOException if the output file cannot be created
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    public RecordWriter<K,V> getRecordWriter(TaskAttemptContext job, String name, boolean unique)
    throws IOException, InterruptedException {
        Configuration conf = job.getConfiguration();

        // Resolve the compression codec (default gzip) and its file extension.
        boolean isCompressed = getCompressOutput(job);
        CompressionCodec codec = null;
        String extension = "";
        if (isCompressed) {
            Class<? extends CompressionCodec> codecClass =
                    getOutputCompressorClass(job, GzipCodec.class);
            // ReflectionUtils.newInstance is generic; no cast needed.
            codec = ReflectionUtils.newInstance(codecClass, conf);
            extension = codec.getDefaultExtension();
        }

        Path file = resolveOutputFile(job, name, unique, extension);

        FileSystem fs = file.getFileSystem(conf);
        // overwrite=false: fail fast rather than clobber an existing output file.
        FSDataOutputStream fileOut = fs.create(file, false);
        DataOutputStream out = isCompressed
                ? new DataOutputStream(codec.createOutputStream(fileOut))
                : fileOut;
        return createRecordWriter(out);
    }

    /**
     * Chooses the output path for this writer. With a {@link TaskInputOutputContext}
     * the framework's work-file helpers are used; otherwise the file is placed
     * next to the default work file.
     */
    private Path resolveOutputFile(TaskAttemptContext job, String name, boolean unique,
                                   String extension)
    throws IOException, InterruptedException {
        Path file = getDefaultWorkFile(job, extension);
        if (name == null) {
            return file;
        }
        if (job instanceof TaskInputOutputContext<?,?,?,?>) {
            TaskInputOutputContext<?,?,?,?> ctx = (TaskInputOutputContext<?,?,?,?>) job;
            if (unique) {
                return getPathForWorkFile(ctx, name, extension);
            }
            return new Path(getWorkOutputPath(ctx), name.concat(extension)); // MUST BE GUARANTEED TO BE UNIQUE
        }
        // No TaskInputOutputContext available: fall back to the directory of
        // the default work file ("hacky way" in the original).
        Path dir = file.getParent();
        if (unique) {
            return new Path(dir, getUniqueFile(job, name, extension));
        }
        return new Path(dir, name.concat(extension)); // MUST BE GUARANTEED TO BE UNIQUE
    }

    /**
     * Wraps the given stream in a concrete {@link RecordWriter}. The
     * implementation owns the stream and should close it when the writer is
     * closed.
     *
     * @param out destination stream (already compression-wrapped if enabled)
     */
    protected abstract RecordWriter<K,V> createRecordWriter(DataOutputStream out)
    throws IOException, InterruptedException;
}
