package wordsort.test;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * A {@link RecordWriter} that discards each record's original key and emits
 * lines of the form {@code "<seq>\t<value>\n"}, where {@code seq} is a
 * 1-based counter incremented once per record written by this writer
 * instance.
 *
 * <p>Not thread-safe: the sequence counter and stream are unsynchronized,
 * which matches the single-task usage of a RecordWriter.
 */
public class CustomWriter extends RecordWriter<IntWritable, IntWritable> {
    private final FSDataOutputStream out;
    // 1-based sequence number assigned to records in write order.
    // A plain int replaces the original IntWritable holder: the value is
    // private state, never serialized, so the Writable wrapper added only
    // boxing overhead.
    private int seq = 1;

    CustomWriter(FSDataOutputStream outStream) {
        this.out = outStream;
    }

    /**
     * Writes one record as {@code "<seq>\t<value>\n"} and advances the
     * sequence counter.
     *
     * @param int1 original key; ignored — replaced by the sequence number
     * @param int2 value emitted after the tab separator
     * @throws IOException if writing to the underlying stream fails
     */
    @Override
    public void write(IntWritable int1, IntWritable int2) throws IOException, InterruptedException {
        final String line = seq + "\t" + int2.get() + "\n";
        // Encode explicitly as UTF-8: the no-arg getBytes() uses the JVM's
        // platform-default charset, which can differ between cluster hosts
        // and silently corrupt output.
        out.write(line.getBytes(StandardCharsets.UTF_8));
        seq++;
    }

    /**
     * Closes the underlying output stream.
     *
     * @param taskAttemptContext framework context; unused here
     */
    @Override
    public void close(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        // IOUtils.closeStream is null-safe and swallows close-time
        // IOExceptions by design (best-effort cleanup).
        IOUtils.closeStream(out);
    }
}
