package example.outputformat;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * OutputFormat that splits incoming log lines into two files: lines containing
 * {@code "GET"} go to the "error" file, all other lines go to the "info" file.
 *
 * <p>Destinations are configurable via {@link #INFO_PATH_KEY} / {@link #ERROR_PATH_KEY};
 * the defaults preserve the original hard-coded paths for backward compatibility.
 *
 * <p>NOTE(review): every task attempt creates the same two fixed files, so concurrent
 * tasks will overwrite each other — confirm this job runs with a single reducer, or
 * switch to per-task work files via {@code getDefaultWorkFile}.
 */
public class LogOutputFormat extends FileOutputFormat<Text, NullWritable> {

    /** Configuration key overriding the destination for non-"GET" lines. */
    public static final String INFO_PATH_KEY = "log.output.info.path";
    /** Configuration key overriding the destination for "GET" lines. */
    public static final String ERROR_PATH_KEY = "log.output.error.path";

    // Defaults keep the original behavior when no override is configured.
    private static final String DEFAULT_INFO_PATH = "D:\\output\\log\\info";
    private static final String DEFAULT_ERROR_PATH = "D:\\output\\log\\error";

    @Override
    public RecordWriter<Text, NullWritable> getRecordWriter(TaskAttemptContext taskAttemptContext)
            throws IOException, InterruptedException {

        // Create both streams up front; original code did this in an instance
        // initializer, which propagated exceptions identically but less visibly.
        FileSystem fs = FileSystem.get(taskAttemptContext.getConfiguration());
        Path infoPath = new Path(
                taskAttemptContext.getConfiguration().get(INFO_PATH_KEY, DEFAULT_INFO_PATH));
        Path errorPath = new Path(
                taskAttemptContext.getConfiguration().get(ERROR_PATH_KEY, DEFAULT_ERROR_PATH));

        final FSDataOutputStream infoStream = fs.create(infoPath);
        final FSDataOutputStream errorStream;
        try {
            errorStream = fs.create(errorPath);
        } catch (IOException | RuntimeException e) {
            // Don't leak the first stream if the second one fails to open.
            IOUtils.closeStream(infoStream);
            throw e;
        }

        return new RecordWriter<Text, NullWritable>() {

            /**
             * Writes one log line (plus newline) to the file chosen by content.
             *
             * <p>NOTE(review): lines containing "GET" are routed to the *error*
             * file — this looks inverted, but the original routing is preserved;
             * confirm intent against the job's requirements.
             */
            @Override
            public void write(Text text, NullWritable nullWritable)
                    throws IOException, InterruptedException {
                FSDataOutputStream target =
                        text.toString().contains("GET") ? errorStream : infoStream;
                // Write Text's raw UTF-8 bytes. The original writeBytes(String)
                // dropped the high byte of each char, corrupting non-ASCII data.
                target.write(text.getBytes(), 0, text.getLength());
                target.writeBytes("\n");
            }

            /** Closes both destination streams; closeStream tolerates null/failed streams. */
            @Override
            public void close(TaskAttemptContext taskAttemptContext)
                    throws IOException, InterruptedException {
                IOUtils.closeStream(infoStream);
                IOUtils.closeStream(errorStream);
            }
        };
    }
}
