package com.learn.lb.mapred;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.InputStream;
import java.net.URI;

/**
 * @author laibo
 * @since 2019/7/3 15:11
 * ===================================
 * <p>
 * ===================================
 */
public final class GenerateMapReducerUtils {

    // BUGFIX: was LoggerFactory.getLogger(WordCount.class) — copy-paste from another
    // class; log events were attributed to the wrong logger name. Also made final.
    private static final Logger logger = LoggerFactory.getLogger(GenerateMapReducerUtils.class);

    /** HDFS NameNode URI used for all file-system operations in this utility. */
    private static final String HDFS_PATH = "hdfs://master:9000";

    /** Copy buffer size for the local-resource → HDFS upload. */
    private static final int BUFFER_SIZE = 8192;

    private GenerateMapReducerUtils() {
        // utility class — no instances
    }

    /**
     * Data-preparation phase before submitting a MapReduce job:
     * uploads a classpath resource to HDFS as the job input file and
     * removes any pre-existing job output directory.
     *
     * @param configuration Hadoop configuration used to open the file system
     * @param localFilePath classpath location of the local source file to upload
     * @param dfsInputPath  HDFS path the job will read as its input
     * @param dfsOutputPath HDFS path the job will write its output to
     *                      (deleted here if it already exists, since MR refuses
     *                      to run against an existing output directory)
     * @throws Exception if the file system cannot be opened, the resource is
     *                   missing, or the upload fails
     */
    public static void buildJobBefore(Configuration configuration, String localFilePath, Path dfsInputPath, Path dfsOutputPath) throws Exception {
        FileSystem hdfsFileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "root");
        logger.info("开始运行mapreducer任务.....");
        logger.info("检查源文件是否存在....");
        if (hdfsFileSystem.exists(dfsInputPath)) {
            logger.warn("源文件存在，删除该文件，重新上传数据...");
            // recursive delete: the path could be a directory, which create() would not overwrite
            hdfsFileSystem.delete(dfsInputPath, true);
        }
        logger.info("开始上传输入文件到hdfs...");
        try (FSDataOutputStream fsDataOutputStream = hdfsFileSystem.create(dfsInputPath, true);
             InputStream is = ClassLoader.getSystemResourceAsStream(localFilePath)) {
            // BUGFIX: getSystemResourceAsStream returns null (no exception) when the
            // resource is absent — the old code would NPE on the next line.
            if (is == null) {
                throw new IllegalArgumentException("classpath resource not found: " + localFilePath);
            }
            // BUGFIX: replaced available()+single read()+writeUTF.
            //  - available() is NOT the stream length, and a single read() may
            //    return fewer bytes than requested → silently truncated upload.
            //  - writeUTF prepends a 2-byte length header in modified UTF-8 and is
            //    limited to 65535 bytes → corrupted job input file.
            // A plain buffered copy writes the bytes verbatim, whatever the size.
            byte[] buffer = new byte[BUFFER_SIZE];
            long total = 0;
            int n;
            while ((n = is.read(buffer)) != -1) {
                fsDataOutputStream.write(buffer, 0, n);
                total += n;
            }
            logger.info("写入数据到HDFS成功.....，文件大小：{}", total);
        } catch (Exception ex) {
            // BUGFIX: previously only getMessage() was logged and the exception was
            // swallowed, so the method continued as if the upload had succeeded.
            // Log with the full stack trace and propagate to the caller.
            logger.error("上传输入文件到hdfs失败", ex);
            throw ex;
        }
        logger.info("检查MapReducer任务的输出目录是否存在...");
        if (hdfsFileSystem.exists(dfsOutputPath)) {
            // MapReduce fails fast if the output directory already exists,
            // so remove it (recursively) before job submission.
            logger.warn("存在输出目录，删除该目录...");
            hdfsFileSystem.delete(dfsOutputPath, true);
        }
    }
}
