package com.bdqn.mr.output;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

/**
 * A {@link RecordWriter} that routes each output line to one of two files on the
 * job's {@link FileSystem}: lines containing the substring {@code "bdqn"} go to
 * {@code bdqn.log}, all other lines to {@code other.log}. The value is ignored
 * ({@link NullWritable}); the key {@link Text} is the full line.
 *
 * <p>NOTE(review): the destination paths are hard-coded Windows-local paths. For a
 * portable job they should be derived from the job's configured output directory
 * (e.g. {@code FileOutputFormat.getOutputPath(job)}) — confirm with the job driver.
 */
public class LogRecordWriter extends RecordWriter<Text, NullWritable> {

    // Hard-coded destinations; see class-level review note about portability.
    private static final String BDQN_PATH = "D:\\mr\\output\\file\\bdqn.log";
    private static final String OTHER_PATH = "D:\\mr\\output\\file\\other.log";

    private FSDataOutputStream outputStreamBDQN;
    private FSDataOutputStream outputStreamOTHER;

    /**
     * Opens both destination files via the HDFS FileSystem API, using the
     * file system resolved from the task's configuration.
     *
     * @param job the task attempt context supplying the Hadoop configuration
     * @throws IOException if the file system cannot be obtained or a file
     *                     cannot be created
     */
    public LogRecordWriter(TaskAttemptContext job) throws IOException {
        FileSystem fileSystem = FileSystem.get(job.getConfiguration());
        outputStreamBDQN = fileSystem.create(new Path(BDQN_PATH));
        try {
            outputStreamOTHER = fileSystem.create(new Path(OTHER_PATH));
        } catch (IOException e) {
            // Don't leak the first stream when opening the second one fails.
            outputStreamBDQN.close();
            throw e;
        }
    }

    /**
     * Writes the key as one newline-terminated UTF-8 line: to bdqn.log when it
     * contains {@code "bdqn"}, otherwise to other.log.
     *
     * <p>Encodes explicitly as UTF-8 instead of calling {@code writeBytes},
     * which discards the high-order byte of every char and therefore corrupts
     * any non-ASCII content.
     *
     * @param key   the line to write
     * @param value ignored
     * @throws IOException if the underlying stream fails
     */
    @Override
    public void write(Text key, NullWritable value) throws IOException, InterruptedException {
        String line = key.toString();
        byte[] bytes = (line + "\n").getBytes(StandardCharsets.UTF_8);
        if (line.contains("bdqn")) {
            outputStreamBDQN.write(bytes);
        } else {
            outputStreamOTHER.write(bytes);
        }
    }

    /**
     * Closes both output streams. The second stream is closed even when
     * closing the first one throws, so neither handle is leaked.
     *
     * @param context unused
     * @throws IOException if closing a stream fails
     */
    @Override
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        try {
            outputStreamBDQN.close();
        } finally {
            outputStreamOTHER.close();
        }
    }
}
