package com.ky.pro.big.data.spark.distcp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.shaded.org.apache.commons.cli.*;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Naive Spark-based DistCp: recursively lists every file under a source HDFS
 * directory and copies each one to the mirrored path under a destination root
 * via {@code textFile(...).saveAsTextFile(...)}.
 *
 * Usage: DistCp &lt;source path&gt; &lt;dist path&gt; [-i] [-m &lt;max concurrency&gt;]
 *   -i  ignore per-file copy failures and continue
 *   -m  minimum partition count passed to textFile (defaults to 1)
 */
public class Main {

    public static void main(String[] args) throws Exception {

        // Parse arguments first so a usage error fails fast, before we pay
        // the cost of spinning up a Spark context.
        DistCpConfig distCpConfig = parseArgs(args);
        distCpConfig.dump();

        // App name fixed: this is a distributed-copy job, not "Word Count".
        SparkConf conf = new SparkConf().setAppName("DistCp").setMaster("local[*]");

        // JavaSparkContext is Closeable; try-with-resources guarantees the
        // context is stopped on both success and failure (the original leaked
        // it, and System.exit(-1) inside the lambda skipped shutdown entirely).
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {

            Configuration hdfsConf = new Configuration();
            // NOTE(review): hard-coded namenode address — consider making this
            // configurable instead of assuming a local pseudo-cluster.
            hdfsConf.set("fs.defaultFS", "hdfs://localhost:9000");

            // Collect every regular file under the source tree. The FileSystem
            // handle is obtained once and reused across the whole recursion.
            List<String> filePaths = new ArrayList<>();
            getFilesRecursive(FileSystem.get(hdfsConf), distCpConfig.getSourcePath(), filePaths);

            // Copy each file to the mirrored path under the destination root.
            for (String filePath : filePaths) {
                try {
                    sc.textFile(filePath, distCpConfig.getMaxConc())
                            .saveAsTextFile(filePath.replace(distCpConfig.getSourcePath(),
                                    distCpConfig.getDistPath()));
                } catch (Exception e) {
                    System.err.println("Failed to copy file: " + filePath);
                    e.printStackTrace();
                    if (!distCpConfig.isIgnoreFailure()) {
                        // Rethrow instead of System.exit(-1) so the Spark
                        // context is closed cleanly before the JVM exits.
                        throw e;
                    }
                }
            }
        }
    }

    /**
     * Recursively collects the full paths of all regular files under
     * {@code path} into {@code filePaths}.
     *
     * @param fs        reusable FileSystem handle (created once by the caller,
     *                  instead of calling FileSystem.get() at every level as
     *                  the original did)
     * @param path      directory to scan
     * @param filePaths accumulator, mutated in place
     * @return the same {@code filePaths} list, for caller convenience
     * @throws IOException if listing a directory fails
     */
    private static List<String> getFilesRecursive(FileSystem fs, String path, List<String> filePaths) throws IOException {
        for (FileStatus fileStatus : fs.listStatus(new Path(path))) {
            if (fileStatus.isDirectory()) {
                getFilesRecursive(fs, fileStatus.getPath().toString(), filePaths);
            } else {
                filePaths.add(fileStatus.getPath().toString());
            }
        }
        return filePaths;
    }

    /**
     * Parses the command line into a {@link DistCpConfig}.
     *
     * Expects two positional arguments (source path, dist path) plus optional
     * {@code -i} (ignore failures) and {@code -m <n>} (max concurrency).
     * Prints usage and exits with status 1 when positional args are missing.
     *
     * @throws ParseException if commons-cli cannot parse the options
     */
    private static DistCpConfig parseArgs(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("i", false, "ignore failure");
        options.addOption("m", true, "max concurrency");
        // DefaultParser replaces the deprecated BasicParser.
        CommandLine commandLine = new DefaultParser().parse(options, args);
        List<String> cmds = commandLine.getArgList();
        if (cmds.size() < 2) {
            System.err.println("Usage: DistCp <source path> <dist path> [-i] [-m <max concurrency>]");
            System.exit(1);
        }

        DistCpConfig distCpConfig = new DistCpConfig();
        distCpConfig.setSourcePath(cmds.get(0));
        distCpConfig.setDistPath(cmds.get(1));
        distCpConfig.setIgnoreFailure(commandLine.hasOption("i"));

        int maxConc = 1; // default when -m is absent or malformed
        String maxConcStr = commandLine.getOptionValue("m");
        // BUG FIX: getOptionValue returns null when -m is not supplied, and the
        // original passed that null straight to Integer.valueOf, which threw
        // NumberFormatException and printed a misleading error on every run
        // that simply omitted -m. Only parse when the option was given.
        if (maxConcStr != null) {
            try {
                maxConc = Integer.parseInt(maxConcStr);
            } catch (NumberFormatException nfe) {
                System.err.println("wrong input format for max concurrency! integer value expected instead of " + maxConcStr);
            }
        }
        distCpConfig.setMaxConc(maxConc);

        return distCpConfig;
    }

}
