package skewreduce.framework;

import java.io.IOException;

import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;

/**
 * Multiplexes records belonging to several logical output streams into a single
 * physical output. Each record is written as a {@code (streamIndex, MuxData)}
 * pair through one shared {@link RecordWriter}, and a Hadoop counter
 * ("skewreduce" / "MUX_WRITE_&lt;i&gt;") tracks the number of records written
 * to each stream.
 *
 * <p>NOT thread-safe: the key/value holders ({@code streamIndex},
 * {@code streamValue}) are reused across calls to {@code write}.
 */
public class MuxOutput {
    /** Shared writer for all streams; null after {@link #close}. */
    private RecordWriter<ByteWritable,MuxData> writer;
    /** Reusable key holding the stream index of the record being written. */
    private final ByteWritable streamIndex = new ByteWritable();
    /** Reusable value wrapping the caller's (key,value) pair. */
    private final MuxData      streamValue = new MuxData();
    /** streamCounters[i] counts records written to stream i. */
    private final Counter[]    streamCounters;

    /**
     * Opens a multiplexed output named {@code name} with {@code nstream}
     * logical streams.
     *
     * @param job     task context used to open the writer and obtain counters
     * @param name    base name for the underlying output
     * @param nstream number of logical streams; must fit in the non-negative
     *                range of the {@code byte} stream key, i.e. 0..128
     * @throws IllegalArgumentException if {@code nstream} is out of range
     * @throws IOException if the underlying record writer cannot be created
     * @throws InterruptedException if writer creation is interrupted
     */
    public MuxOutput(TaskInputOutputContext<?,?,?,?> job,String name,int nstream) throws IOException,InterruptedException {
        // Stream indices are stored in a ByteWritable, so only indices
        // 0..127 are representable; reject sizes the key cannot encode
        // instead of silently truncating later.
        if ( nstream < 0 || nstream > Byte.MAX_VALUE + 1 ) {
            throw new IllegalArgumentException(
                "nstream must be in [0,"+(Byte.MAX_VALUE+1)+"]: "+nstream);
        }
        writer = new MuxOutputFormat().getRecordWriter(job,name);
        streamCounters = new Counter[nstream];
        for ( int i = 0; i < nstream; ++i ) {
            streamCounters[i] = job.getCounter("skewreduce","MUX_WRITE_"+i);
        }
    }

    /**
     * Writes {@code (key,value)} to the stream identified by the enum's
     * ordinal. Convenience overload of {@link #write(int, Writable, Writable)}.
     *
     * @param stream enum whose ordinal selects the target stream
     * @param key    record key
     * @param value  record value
     */
    public final <K extends Writable,V extends Writable>
    void write(Enum<?> stream,K key,V value)
    throws IOException, InterruptedException {
        write(stream.ordinal(),key,value);
    }


    /**
     * Writes {@code (key,value)} to stream {@code stream} and increments that
     * stream's counter.
     *
     * @param stream target stream index, 0 &le; stream &lt; nstream
     * @param key    record key
     * @param value  record value
     * @throws IllegalArgumentException if {@code stream} is out of range
     * @throws IllegalStateException if called after {@link #close}
     */
    public final <K extends Writable,V extends Writable>
    void write(int stream,K key,V value)
    throws IOException, InterruptedException {
        // Validate BEFORE writing: the original code cast to byte unchecked
        // and only failed on the counter lookup, after the record had already
        // been emitted under a possibly truncated stream index.
        if ( stream < 0 || stream >= streamCounters.length ) {
            throw new IllegalArgumentException(
                "stream index out of range [0,"+streamCounters.length+"): "+stream);
        }
        if ( writer == null ) {
            throw new IllegalStateException("write after close");
        }
        streamIndex.set((byte)stream);
        streamValue.set(key,value);
        writer.write(streamIndex,streamValue);
        streamCounters[stream].increment(1L);
    }

    /**
     * Closes the underlying writer. Idempotent: subsequent calls are no-ops.
     *
     * @param context task attempt context passed through to the writer
     */
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        if ( writer != null ) {
            try {
                writer.close(context);
            } finally {
                // Drop the reference even if close() throws, so a retry
                // cannot double-close the underlying writer.
                writer = null;
            }
        }
    }
}
