package MapReduce.outPutFormat;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * A {@link RecordWriter} that splits log output into two files: lines
 * containing the substring {@code "atguigu"} go to one file, all other
 * lines go to a second file. Keys are the log lines themselves; the
 * {@link NullWritable} value is ignored.
 *
 * <p>Not thread-safe: a single instance is expected to be used by one
 * reduce task, per the Hadoop {@code RecordWriter} contract.
 */
public class LogRecordWriter extends RecordWriter<Text, NullWritable> {

    private FSDataOutputStream othersLog;
    private FSDataOutputStream atguiguLog;

    /**
     * Opens the two output streams.
     *
     * @param job task context supplying the Hadoop {@link Configuration}
     *            used to resolve the target {@link FileSystem}
     * @throws RuntimeException wrapping any {@link IOException} raised
     *         while creating the output files
     */
    public LogRecordWriter(TaskAttemptContext job) {

        // Create the two output streams.
        try {
            FileSystem fs = FileSystem.get(job.getConfiguration());

            // NOTE(review): absolute local Windows paths are hardcoded;
            // consider reading them from the job Configuration or the
            // committed output directory instead.
            atguiguLog = fs.create(new Path("D:\\中国科学院大学硕士\\学习类文件夹\\BigDataDev\\Output\\atguiguOutput.log"));

            othersLog = fs.create(new Path("D:\\中国科学院大学硕士\\学习类文件夹\\BigDataDev\\Output\\otherOutput.log"));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Routes one log line to the matching output file.
     *
     * @param text         the log line to write
     * @param nullWritable unused placeholder value
     * @throws IOException if the underlying stream write fails
     */
    @Override
    public void write(Text text, NullWritable nullWritable) throws IOException, InterruptedException {

        // Decide which file this line belongs to.
        String log = text.toString();
        // BUGFIX: writeBytes(String) truncates each char to its low byte,
        // corrupting any non-ASCII characters. Encode explicitly as UTF-8.
        if (log.contains("atguigu")) {
            atguiguLog.write((log + "\n").getBytes(StandardCharsets.UTF_8));
        } else {
            othersLog.write((log + "\n").getBytes(StandardCharsets.UTF_8));
        }
    }

    /**
     * Closes both output streams, logging (not propagating) any close
     * failure via Hadoop's {@link IOUtils#closeStream}.
     */
    @Override
    public void close(TaskAttemptContext taskAttemptContext) {

        // Close the two output streams; closeStream is null-safe and
        // swallows IOExceptions, so a failure on the first close cannot
        // prevent the second from being attempted.
        IOUtils.closeStream(atguiguLog);
        IOUtils.closeStream(othersLog);
    }
}
