package com.lagou.mr.output;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class CustomOutputFormat extends FileOutputFormat {
    /**
     * Creates a {@link RecordWriter} that splits records across two files,
     * {@code lg.log} and {@code other.log}, inside the job's configured output
     * directory. The actual routing of records to one file or the other is
     * delegated to the project-local {@code CustomWriter}.
     *
     * @param context task attempt context supplying the job configuration
     * @return a writer backed by two freshly created output streams
     * @throws IOException if the output streams cannot be created
     * @throws InterruptedException declared by the RecordWriter contract
     */
    @Override
    public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();
        // Resolve the job output directory through the FileOutputFormat API
        // rather than reading the raw "mapreduce.output.fileoutputformat.outputdir" key.
        Path outputDir = FileOutputFormat.getOutputPath(context);
        // Ask the output path for its owning FileSystem so this works whether the
        // output is on HDFS, the local FS, or any other configured scheme.
        FileSystem fs = outputDir.getFileSystem(conf);
        // BUG FIX: the original built paths via string concatenation with "\\"
        // (a Windows separator). Hadoop Path always uses "/", so the files ended
        // up beside the output directory with literal "\lg.log" in their names.
        // Path(parent, child) is separator-safe on every platform.
        Path lgPath = new Path(outputDir, "lg.log");
        Path otherPath = new Path(outputDir, "other.log");
        FSDataOutputStream lgOut = fs.create(lgPath);
        FSDataOutputStream otherOut = fs.create(otherPath);
        // CustomWriter takes ownership of both streams and must close them
        // in its close() method.
        return new CustomWriter(lgOut, otherOut);
    }
}
