package skewreduce.framework;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.util.*;

public class MuxOutputFormat
extends MultiOutputFormat<ByteWritable,MuxData> {

    /**
     * Creates a {@link RecordWriter} that appends (key, value) pairs to a
     * {@link SequenceFile}, honoring the job's output-compression settings.
     *
     * @param job    task context supplying the configuration and work directory
     * @param name   optional explicit file name; when {@code null} the default
     *               work file for this task attempt is used
     * @param unique when {@code true} a task-unique file name is derived from
     *               {@code name}; when {@code false} the name is used as-is and
     *               the caller must guarantee it does not collide
     * @throws IOException          if the writer cannot be created
     * @throws InterruptedException if path resolution is interrupted
     */
    public RecordWriter<ByteWritable,MuxData> getRecordWriter(TaskAttemptContext job,String name,boolean unique)
    throws IOException, InterruptedException {
        Configuration conf = job.getConfiguration();

        // Resolve compression settings (type, codec, file-name suffix)
        // from the job configuration.
        CompressionType cType = CompressionType.NONE;
        CompressionCodec cCodec = null;
        String suffix = "";
        if ( getCompressOutput(job) ) {
            cType = CompressionType.valueOf(
                    conf.get("mapred.output.compression.type", CompressionType.RECORD.toString()) );
            Class<? extends CompressionCodec> codecClass =
                    getOutputCompressorClass(job, GzipCodec.class);
            cCodec = ReflectionUtils.newInstance(codecClass, conf);
            suffix = cCodec.getDefaultExtension();
        }

        Path target = resolveOutputFile(job, name, unique, suffix);

        FileSystem fs = target.getFileSystem(conf);
        final SequenceFile.Writer writer =
            SequenceFile.createWriter(fs, conf, target,
                    ByteWritable.class,
                    MuxData.class,
                    cType,
                    cCodec,
                    job);

        return new RecordWriter<ByteWritable,MuxData>() {
            public void write(ByteWritable key, MuxData value) throws IOException {
                writer.append(key, value);
            }

            public void close(TaskAttemptContext context) throws IOException {
                writer.close();
            }
        };
    }

    /**
     * Picks the concrete output path for a writer.  With no explicit name the
     * task attempt's default work file is used; otherwise the named file is
     * placed in the task's work directory (when the context supports it) or,
     * as a fallback, next to the default work file.
     */
    private Path resolveOutputFile(TaskAttemptContext job, String name,
                                   boolean unique, String suffix)
    throws IOException, InterruptedException {
        Path target = getDefaultWorkFile(job, suffix);
        if ( name == null ) {
            return target;
        }
        if ( job instanceof TaskInputOutputContext<?,?,?,?> ) {
            TaskInputOutputContext<?,?,?,?> ctx = (TaskInputOutputContext<?,?,?,?>) job;
            return unique
                ? getPathForWorkFile(ctx, name, suffix)
                : new Path(getWorkOutputPath(ctx), name.concat(suffix)); // caller guarantees uniqueness
        }
        // Fallback: derive the directory from the default work file's parent.
        Path dir = target.getParent();
        return unique
            ? new Path(dir, getUniqueFile(job, name, suffix))
            : new Path(dir, name.concat(suffix)); // caller guarantees uniqueness
    }
}
