package com.shujia.MR.output;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

import java.io.*;
import java.nio.charset.StandardCharsets;

/**
 * Custom {@link RecordWriter} that partitions student records into two plain-text
 * output files by score: records with score &gt;= 450 go to {@code more450.txt},
 * all others to {@code less450.txt}.
 */
public class MyRecordWriter extends RecordWriter<Student, NullWritable> {
    /** Score threshold separating the two output files. */
    private static final int SCORE_THRESHOLD = 450;

    FileSystem fileSystem;
    DataOutputStream dataOutputStreamMore;
    DataOutputStream dataOutputStreamLess;

    /**
     * Opens the two destination files on the configured file system.
     *
     * @param configuration Hadoop configuration used to resolve the file system
     * @throws IOException if the file system or either output file cannot be opened
     */
    public MyRecordWriter(Configuration configuration) throws IOException {
        // NOTE(review): output paths are hard-coded to a local Windows workspace;
        // consider reading them from the Configuration instead.
        fileSystem = FileSystem.get(configuration);
        dataOutputStreamMore = new DataOutputStream(fileSystem.create(new Path("D:\\workspace19\\HadoopCode\\output\\more450.txt")));
        dataOutputStreamLess = new DataOutputStream(fileSystem.create(new Path("D:\\workspace19\\HadoopCode\\output\\less450.txt")));
    }

    /**
     * Writes one student record to the file matching its score bucket.
     *
     * @param student record to write; its {@code toString()} form is the output line
     * @param value   unused (NullWritable)
     * @throws IOException if the underlying stream write fails
     */
    @Override
    public void write(Student student, NullWritable value) throws IOException, InterruptedException {
        // Bug fix: writeUTF() prepends a 2-byte length and uses modified UTF-8,
        // which embeds binary junk in a text file. Write raw UTF-8 bytes instead.
        // (Integer.valueOf handles score whether it is declared int or String —
        // its exact type is not visible here.)
        String line = student.toString() + "\n";
        byte[] bytes = line.getBytes(StandardCharsets.UTF_8);
        if (Integer.valueOf(student.score) >= SCORE_THRESHOLD) {
            dataOutputStreamMore.write(bytes);
        } else {
            dataOutputStreamLess.write(bytes);
        }
    }

    /**
     * Closes both output streams and the file system.
     * Exception-safe: each resource is closed even if an earlier close throws.
     *
     * @param context task attempt context (unused)
     * @throws IOException if closing any resource fails
     */
    @Override
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        try {
            dataOutputStreamMore.close();
        } finally {
            try {
                dataOutputStreamLess.close();
            } finally {
                // NOTE(review): FileSystem.get() returns a JVM-wide cached instance;
                // closing it here closes it for all other users in this process.
                // Kept for behavior compatibility — verify this is intended.
                fileSystem.close();
            }
        }
    }
}
