package com.mango.ch12;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.Logger;

import java.io.FileWriter;
import java.io.IOException;

/**
 * Custom {@link OutputFormat} for the post-reduce output: every record is
 * written both to an HDFS file ({@code <outputdir>/output/result}, created on
 * the first run and appended to afterwards) and mirrored to a local text file
 * ({@code <outputdir>/newCluster.txt}), formatted as {@code key-->value\n}.
 */
public class MyOutputFormat extends OutputFormat<Text, Text> {
    private final Logger logger = Logger.getLogger(this.getClass());
    // HDFS directory holding the "result" file; lazily resolved from the job config.
    private Path outPath = null;
    // Stream used to write records into HDFS.
    private FSDataOutputStream hdfsOut = null;

    /**
     * Resolves the raw output directory string from the job configuration.
     * <p>
     * NOTE(review): {@code substring(6)} assumes the configured directory
     * starts with a 6-character URI scheme prefix (e.g. {@code "file:/"}) —
     * TODO confirm against the actual job configuration; this breaks for
     * other schemes such as {@code "hdfs://..."}.
     */
    private static String resolveOutputDir(Configuration conf) {
        String output = conf.get(FileOutputFormat.OUTDIR);
        return output.substring(6);
    }

    @Override
    public RecordWriter<Text, Text> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        Configuration conf = taskAttemptContext.getConfiguration();
        String output = resolveOutputDir(conf);
        FileSystem fileSystem = FileSystem.get(conf);
        // HDFS directory that receives the result file.
        outPath = new Path(output + "/output");
        Path resultFile = outPath.suffix("/result");
        // Create the result file on the first run, append on subsequent runs.
        if (!fileSystem.exists(outPath)) {
            hdfsOut = fileSystem.create(resultFile);
        } else {
            hdfsOut = fileSystem.appendFile(resultFile).build();
        }
        // Local mirror of the output (appending); uses the platform default charset.
        FileWriter fw;
        try {
            fw = new FileWriter(output + "/newCluster.txt", true);
        } catch (IOException e) {
            // Don't leak the already-opened HDFS stream if the local file fails.
            hdfsOut.close();
            throw e;
        }
        return new MySelfRecordWriter(hdfsOut, fw);
    }

    /**
     * Intentionally a no-op: this job reuses (appends to) an existing output
     * directory, so the standard "output directory already exists" validation
     * must not run.
     */
    @Override
    public void checkOutputSpecs(JobContext jobContext) throws IOException, InterruptedException {
    }

    @Override
    public OutputCommitter getOutputCommitter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        // Bug fix: the framework may ask for the committer before
        // getRecordWriter() has run, in which case the field is still null and
        // FileOutputCommitter would commit nothing. Resolve the path from the
        // configuration instead of relying on field initialization order.
        if (outPath == null) {
            outPath = new Path(resolveOutputDir(taskAttemptContext.getConfiguration()) + "/output");
        }
        return new FileOutputCommitter(outPath, taskAttemptContext);
    }

    /**
     * RecordWriter that mirrors every record to HDFS and to a local file,
     * one "key-->value" pair per line.
     */
    private static class MySelfRecordWriter extends RecordWriter<Text, Text> {

        private final FSDataOutputStream outputStream;
        private final FileWriter fw;

        public MySelfRecordWriter(FSDataOutputStream outputStream, FileWriter fw) {
            this.outputStream = outputStream;
            this.fw = fw;
        }

        @Override
        public void write(Text key, Text value) throws IOException, InterruptedException {
            String line = key.toString() + "-->" + value.toString() + "\n";
            // First write the record to HDFS (writeBytes emits the low byte of
            // each char, matching the original behavior)...
            outputStream.writeBytes(line);
            outputStream.flush();
            // ...then mirror it to the local file.
            fw.write(line);
            fw.flush();
        }

        @Override
        public void close(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
            // Close both sinks; the local writer is closed even if the HDFS
            // close throws.
            try {
                if (outputStream != null) {
                    outputStream.close();
                }
            } finally {
                if (fw != null) {
                    fw.close();
                }
            }
        }
    }
}
