package cn.hesion.mergefile.step2;
/**
 * ClassName: CommentOutputFormat <br/>
 * Description: <br/>
 * date: 2021/2/4 10:58<br/>
 *
 * @author Hesion<br />
 * @version
 * @since JDK 1.8
 */

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * @program: ClientDemo
 * @description:
 * @author: hesion
 * @create: 2021-02-04 10:58
 **/
/**
 * Custom output format that routes each sentiment category of comment data
 * to its own file under the job output directory.
 *
 * <p>Assumes the job is configured with three reduce tasks, one per
 * sentiment partition: task 0 = good reviews, task 1 = average reviews,
 * task 2 = bad reviews (NOTE(review): relies on the Driver's partitioner —
 * confirm it maps categories to these task ids).
 */
public class CommentOutputFormat extends FileOutputFormat<CommentBean, NullWritable> {

    @Override
    public RecordWriter<CommentBean, NullWritable> getRecordWriter(TaskAttemptContext job)
            throws IOException, InterruptedException {
        Configuration conf = job.getConfiguration();
        FileSystem fs = FileSystem.get(conf);

        // Output root configured in the Driver
        // (mapreduce.output.fileoutputformat.outputdir) — resolved via the
        // framework helper instead of a hard-coded config key.
        Path outputDir = getOutputPath(job);

        FSDataOutputStream goodOut = null;
        FSDataOutputStream commonOut = null;
        FSDataOutputStream badOut = null;

        // Partition number handled by the current reduce task:
        // 0 = good, 1 = common/average, 2 = bad.
        int id = job.getTaskAttemptID().getTaskID().getId();
        if (id == 0) {
            // Good-review data.
            // BUG FIX: Hadoop Path is URI-style and always uses '/' as the
            // separator; the previous '\\' backslashes were literal filename
            // characters on HDFS instead of directory separators.
            goodOut = fs.create(new Path(outputDir, "good/good.log"));
        } else if (id == 1) {
            commonOut = fs.create(new Path(outputDir, "common/common.log"));
        } else {
            badOut = fs.create(new Path(outputDir, "bad/bad.log"));
        }
        // The writer picks whichever stream is non-null for this partition.
        return new CommenRecordWriter(goodOut, commonOut, badOut);
    }
}
