package newPutFormat;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

import java.io.*;
import java.nio.charset.StandardCharsets;

/**
 * Custom {@link RecordWriter} that splits (city, value) output records across
 * three HDFS files: Beijing records to bj.txt, Nanjing records to nj.txt, and
 * everything else to sh.txt. Files are created eagerly in the constructor and
 * must not already exist (overwrite = false).
 */
public class NewRecordWriter extends RecordWriter<Text, DoubleWritable> {
    FileSystem fileSystem = null;
    FSDataOutputStream f1 = null; // records keyed "北京"
    FSDataOutputStream f2 = null; // records keyed "南京"
    FSDataOutputStream f3 = null; // all other keys

    /**
     * Opens the three per-city output streams on the job's file system.
     *
     * @param taskAttemptContext context supplying the Hadoop configuration
     * @throws IOException if the file system cannot be obtained or any output
     *                     file cannot be created (e.g. it already exists)
     */
    public NewRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException {
        fileSystem = FileSystem.get(taskAttemptContext.getConfiguration());
        f1 = fileSystem.create(new Path("/MR/city/bj.txt"), false);
        f2 = fileSystem.create(new Path("/MR/city/nj.txt"), false);
        f3 = fileSystem.create(new Path("/MR/city/sh.txt"), false);
    }

    /**
     * Writes one record as {@code key\tvalue\n} to the stream selected by the
     * city key.
     *
     * @param key   city name used to route the record
     * @param value metric written alongside the key
     * @throws IOException if the underlying stream write fails
     */
    @Override
    public void write(Text key, DoubleWritable value) throws IOException, InterruptedException {
        String city = key.toString();
        FSDataOutputStream out;
        if ("北京".equals(city)) {
            out = f1;
        } else if ("南京".equals(city)) {
            out = f2;
        } else {
            out = f3;
        }
        // Bug fixes vs. the original: append "\n" so records are not
        // concatenated onto a single line, and encode explicitly as UTF-8 —
        // the platform-default getBytes() can corrupt the Chinese keys.
        out.write((city + "\t" + value + "\n").getBytes(StandardCharsets.UTF_8));
        out.flush();
    }

    /**
     * Closes all three output streams. Chained try/finally guarantees every
     * stream gets a close attempt even if an earlier close() throws (the
     * original leaked f2/f3 when f1.close() failed); the first exception
     * propagates.
     *
     * @param context task attempt context (unused)
     * @throws IOException if closing any stream fails
     */
    @Override
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        try {
            f1.close();
        } finally {
            try {
                f2.close();
            } finally {
                f3.close();
            }
        }
    }
}
