package com.lagou.mr.comment.myself.step3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class CommentOutputFormat extends FileOutputFormat<CommentBean, NullWritable> {

    /**
     * Creates a {@link RecordWriter} whose output file is chosen by the current
     * reduce task's partition id: 0 = good reviews, 1 = common (neutral)
     * reviews, 2 = bad reviews. Only the stream for this task's partition is
     * opened; the other two are passed as {@code null} to the writer.
     *
     * @param job the task attempt context of the current reduce task
     * @return a {@code CommentRecorderWrtier} holding the single open stream
     * @throws IOException if the output file cannot be created
     * @throws InterruptedException declared by the overridden method contract
     */
    @Override
    public RecordWriter<CommentBean, NullWritable> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException {

        Configuration conf = job.getConfiguration();

        FileSystem fs = FileSystem.get(conf);

        // Output root configured in the Driver (standard MR output-dir key).
        String outputDir = conf.get("mapreduce.output.fileoutputformat.outputdir");

        FSDataOutputStream goodOut = null;
        FSDataOutputStream commonOut = null;
        FSDataOutputStream badOut = null;

        // Partition id handled by this reduce task; the custom partitioner is
        // assumed to send good/common/bad records to partitions 0/1/2 — confirm
        // against the Driver's partitioner setup.
        int id = job.getTaskAttemptID().getTaskID().getId();
        // Use Path(parent, child) with '/' separators: backslashes are not path
        // separators on HDFS/Linux and would end up inside the file name.
        // Each category gets its own subdirectory (the original wrote all three
        // files under "good", which contradicted the 0/1/2 routing above).
        if (id == 0) {
            goodOut = fs.create(new Path(outputDir, "good/good.log"));
        } else if (id == 1) {
            commonOut = fs.create(new Path(outputDir, "common/common.log"));
        } else {
            badOut = fs.create(new Path(outputDir, "bad/bad.log"));
        }

        return new CommentRecorderWrtier(goodOut, commonOut, badOut);
    }
}
