package com.supertool.tong.merger.util;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.supertool.tong.merger.TongMerger;

public class MergeOutputFormat<K, V> extends TextOutputFormat<K, V> {
    private static Logger LOG = LoggerFactory.getLogger(MergeOutputFormat.class);

    public MergeOutputFormat(){
        LOG.info("==============MergeOutputFormat===========================");
    }

    public Path getDefaultWorkFile(TaskAttemptContext context, String extension)
            throws IOException {
        org.apache.hadoop.mapreduce.Mapper<K,V,K,V>.Context mapcontext=(org.apache.hadoop.mapreduce.Mapper<K,V,K,V>.Context)context;
        LOG.info("extension="+mapcontext.getInputSplit());
        Path path=((CombineFileSplit)mapcontext.getInputSplit()).getPath(0);

        FileOutputCommitter committer = (FileOutputCommitter) getOutputCommitter(context);
        return new Path(committer.getWorkPath(),getFileName(path));
    }

    public String getFileName(Path path){
        return path.getName();
    }
}