package com.hjs.mr.merge_and_sort;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Custom OutputFormat; the generic key/value types match the pair emitted by the Mapper.
 */
public class SortOutputFormat extends FileOutputFormat<SortBean, NullWritable> {

    /**
     * Creates the {@link RecordWriter} that serializes every
     * {@code (SortBean, NullWritable)} pair into a single file under the job's
     * configured output directory: {@code <outputDir>/sort/sort.log}.
     *
     * @param taskAttemptContext task context supplying the job configuration
     * @return a {@code SortRecordWriter} bound to the created output stream
     * @throws IOException if the output file cannot be created
     * @throws InterruptedException declared by the {@code RecordWriter} contract
     */
    @Override
    public RecordWriter<SortBean, NullWritable> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        Configuration configuration = taskAttemptContext.getConfiguration();
        // Resolve the job's output directory through the framework helper instead of
        // hard-coding the "mapreduce.output.fileoutputformat.outputdir" config key.
        Path outputDir = FileOutputFormat.getOutputPath(taskAttemptContext);
        // Obtain the FileSystem that owns the output path (HDFS, local, ...) rather
        // than the cluster-default FS returned by FileSystem.get() — they can differ.
        FileSystem fileSystem = outputDir.getFileSystem(configuration);
        // Build the target path with Path's parent/child constructor. The original
        // concatenated "\\sort\\sort.log": backslashes are not path separators on
        // HDFS or any non-Windows filesystem, yielding a broken file name.
        Path outputFile = new Path(new Path(outputDir, "sort"), "sort.log");
        FSDataOutputStream outputStream = fileSystem.create(outputFile);
        // Hand the stream to the custom RecordWriter, which performs the actual writes.
        return new SortRecordWriter(outputStream);
    }
}
