package yz.mr.OutPut;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

/**
 * Custom RecordWriter that splits student records into two text files by
 * score: records with a score above {@link #SCORE_THRESHOLD} go to
 * {@code more450.txt}, all others to {@code less450.txt}.
 */
public class MyRecordWriter extends RecordWriter<NullWritable,OutputStudent> {
    /** Score cutoff that decides which output file a record is written to. */
    private static final int SCORE_THRESHOLD = 450;

    final FileSystem fileSystem;
    final FSDataOutputStream more;
    final FSDataOutputStream less;

    /**
     * Opens the two output streams on the configured file system.
     *
     * @param configuration Hadoop configuration used to resolve the FileSystem
     * @throws IOException if the file system or either output file cannot be opened
     */
    public MyRecordWriter(Configuration configuration) throws IOException {
        fileSystem = FileSystem.get(configuration);
        more = fileSystem.create(new Path("output/output/more450.txt"));
        less = fileSystem.create(new Path("output/output/less450.txt"));
    }

    /**
     * Writes one record, as a UTF-8 text line, to the stream chosen by its score.
     * <p>
     * Uses raw UTF-8 bytes plus a newline instead of {@code writeUTF}, which
     * would prepend a binary 2-byte length and emit no line terminator,
     * producing unreadable "text" output. No per-record flush: close() flushes.
     */
    @Override
    public void write(NullWritable key, OutputStudent value) throws IOException, InterruptedException {
        FSDataOutputStream target = value.getScore() > SCORE_THRESHOLD ? more : less;
        target.write((value.toString() + "\n").getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Closes both output streams (the second even if the first close throws).
     * The FileSystem from {@code FileSystem.get()} is a process-wide cached
     * instance, so it is deliberately NOT closed here — closing it would break
     * any other task or component sharing the same cached FileSystem.
     */
    @Override
    public void close(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        try {
            more.close();
        } finally {
            less.close();
        }
    }
}
