package com.mango.HDFSTools;

import com.mango.Tools.FileTools;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;

public class HDFS_HA_API {
    /** Default URI of the HA nameservice "ns1". */
    private static final String HDFS_URL = "hdfs://ns1/";
    private Configuration conf;
    // Shared FileSystem handle, populated by init(). NOTE(review): this is a
    // static field written from instance code, so the most recently constructed
    // instance (or latest init() call) wins; all static helpers depend on it.
    private static FileSystem fs;

    public HDFS_HA_API() {
        init("root");
    }

    /**
     * Uploads a local file to the HDFS cluster.
     *
     * @param local path of the local source file (e.g. file:// URI)
     * @param dest  destination path on the cluster
     */
    public static void upFile(Path local, Path dest) {
        try {
            fs.copyFromLocalFile(local, dest);
            System.out.println("文件上传成功 -----");

            System.out.println(local.toUri().getScheme());
            System.out.println(dest.toUri().getScheme());

        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Deletes a path from HDFS immediately; directories are removed recursively.
     * <p>
     * Fixes two defects in the previous version: the file branch used
     * {@code deleteOnExit}, which only removes the path when the JVM shuts down,
     * and a missing path was reported via the catch block even though
     * {@code delete} does not throw for nonexistent paths.
     *
     * @param file path of the file or directory to delete
     */
    public static void delFile(Path file) {
        try {
            if (fs.exists(file)) {
                // recursive=true is required for directories and ignored for files
                fs.delete(file, true);
                System.out.println("文件删除成功");
            } else {
                System.out.println("文件不存在，删除失败");
            }
        } catch (IOException e) {
            System.out.println("文件不存在，删除失败");
            e.printStackTrace();
        }
    }

    /**
     * Validates an input path. Local ({@code file:}) paths are delegated to
     * {@link FileTools#CheckInput}; cluster paths must exist and contain at
     * least one file, otherwise the directory is created (when absent) or a
     * warning is printed (when empty).
     *
     * @param input the input path to validate
     */
    public void checkInputPath(Path input) {
        URI uri = input.toUri();
        String scheme = uri.getScheme();
        if (scheme != null && scheme.equals("file")) {
            // For Windows-style URIs like file://d:/dir the drive letter parses
            // as the host, so host + ":" + path reconstructs "d:/dir".
            String inputDir = uri.getHost() + ":" + uri.getRawPath();
            FileTools.CheckInput(new File(inputDir));
        } else {
            try {
                if (fs.exists(input)) {
                    // Path exists: check whether it contains at least one file.
                    // BUG FIX: the original loop never called next(), so it spun
                    // forever whenever the directory was non-empty.
                    RemoteIterator<LocatedFileStatus> files = fs.listFiles(input, true);
                    boolean hasFiles = false;
                    while (files.hasNext()) {
                        files.next();
                        hasFiles = true;
                        break; // one entry is enough to know the dir is non-empty
                    }
                    if (!hasFiles) {
                        System.out.println("hdfs 集群上的该input路径下没有文件，请先上传文件该路径下\n" + input.toString());
                    } else {
                        System.out.println("input 路径各项条件满足");
                    }
                } else {
                    fs.mkdirs(input);
                    System.out.println("该输入路径不存在，已为您自动创建，请上传文件到集群中的该路径下\n" + input.toString());

                }
            } catch (IOException e) {
                System.out.println("路径不存在，抛出异常");
                e.printStackTrace();
            }
        }
    }

    /**
     * Removes an output path so a job can write to it. Local ({@code file:})
     * paths are deleted via {@link FileTools#delFileOrDir}; cluster paths are
     * deleted recursively when they exist.
     *
     * @param output the output path to clear
     */
    public void checkOutputPath(Path output) {
        URI uri = output.toUri();
        String scheme = uri.getScheme();
        if (scheme != null && scheme.equals("file")) {
            // Same Windows drive-letter reconstruction as checkInputPath.
            String localFile = uri.getHost() + ":" + uri.getPath();
            System.out.println("输出路径已经删除");
            FileTools.delFileOrDir(new File(localFile));
        } else {
            try {
                if (fs.exists(output)) {
                    // delete(Path) is deprecated; use the explicit recursive form.
                    fs.delete(output, true);
                    System.out.println("输出路径已经删除");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    @Test
    public void testFunc() {
        init("root");
        Path local = new Path("file://d:/test.txt");
        Path dest = new Path(HDFS_URL);
//        upFile(local, dest);
//        delFile(new Path("/Spark_SecondarySortJOb"));
//        checkInputPath(new Path("/HelooW/DSa"));
        checkOutputPath(new Path("file://d:/aaa"));
    }

    /**
     * Initializes the shared {@link FileSystem} handle against the HA
     * nameservice as the given user.
     *
     * @param user the Hadoop user name to connect as
     */
    private void init(String user) {
        this.conf = getConf();
        try {
            // fs is static; assign it without a misleading "this." qualifier.
            fs = FileSystem.get(new URI(HDFS_URL), conf, user);
        } catch (IOException | URISyntaxException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Builds the base HA client configuration for nameservice "ns1"
     * (two NameNodes with automatic client-side failover).
     *
     * @return a new {@link Configuration} with HA settings applied
     */
    public static Configuration getConf() {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://ns1");
        conf.set("dfs.nameservices", "ns1");
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.ns1.nn1", "hdf1:9000");
        conf.set("dfs.namenode.rpc-address.ns1.nn2", "hdf4:9000");
        conf.set("dfs.client.failover.proxy.provider.ns1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        return conf;
    }

    /**
     * Builds a configuration for submitting a MapReduce job from a local
     * machine to a remote YARN cluster. Extends {@link #getConf()} with the
     * YARN framework, cross-platform submission, and the local job jar.
     *
     * @param localJarPath absolute path of the local job jar,
     *                     e.g. {@code D:\IdeaProjects\hadoop-demo\job\target\demo-job-1.0-SNAPSHOT.jar}
     * @return a new {@link Configuration} ready for remote job submission
     */
    public static Configuration getRunOnRemoteConf(String localJarPath) {
        Configuration conf = getConf();
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("mapreduce.app-submission.cross-platform", "true");
        conf.set("mapreduce.job.jar", localJarPath);
        return conf;
    }

    /**
     * Writes a line of text to a file on HDFS: creates the file when absent,
     * appends when it already exists. A trailing newline is always added.
     *
     * @param filename target file path on HDFS
     * @param text     the text to write (UTF-8 encoded)
     */
    public static void writeToHdfs(String filename, String text) {
        String charset = "UTF-8";
        Path path = new Path(filename);
        // try-with-resources replaces the manual finally/close of the original
        // and removes the duplicated write logic between the two branches.
        try (FSDataOutputStream out = fs.exists(path) ? fs.append(path) : fs.create(path)) {
            byte[] payload = (text + "\n").getBytes(charset);
            out.write(payload, 0, payload.length);
            out.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Reads a UTF-8 text file from HDFS and prints each line to stdout.
     * Does nothing when the file does not exist.
     *
     * @param fileName the HDFS file path to read
     */
    public static void readFromHdfs(String fileName) {
        Path filePath = new Path(fileName);
        try {
            if (fs.exists(filePath)) {
                String charset = "UTF-8";
                // try-with-resources fixes the reader/stream leak of the original.
                try (FSDataInputStream in = fs.open(filePath);
                     BufferedReader reader = new BufferedReader(new InputStreamReader(in, charset))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        System.out.println("line=" + line);
                    }
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}
