package com.yunfei.commonbase.hdutil;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

public class HDFSUtil {

    // getName() yields "com.yunfei.commonbase.hdutil.HDFSUtil"; the previous
    // Class#toString() produced the noisy logger name "class com...HDFSUtil".
    private static final Logger logger = Logger.getLogger(HDFSUtil.class.getName());


    /**
     <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.6.5</version>
     </dependency>
     <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.6.5</version>
     </dependency>
     */


    /**
     * Builds a Kerberos-authenticated {@link FileSystem}. This is the common
     * setup previously duplicated in every public method of this class.
     *
     * <p>NOTE(review): {@code FileSystem.get} may return a JVM-wide cached
     * instance, so closing the returned object can affect other holders of the
     * same instance — this matches the original code's behavior, but verify if
     * this utility is ever used concurrently.
     *
     * @param hdfsConfPath                 path to the krb5.conf file
     * @param fsDefaultFS                  value for {@code fs.defaultFS}
     * @param dfsNamenodeKerberosPrincipal value for {@code dfs.namenode.kerberos.principal}
     * @param dfsDatanodeKerberosPrincipal value for {@code dfs.datanode.kerberos.principal}
     * @param hdfsKey                      Kerberos principal to log in as
     * @param hdfsKeytabPath               path to the keytab file for {@code hdfsKey}
     * @return an authenticated FileSystem; the caller is responsible for closing it
     * @throws IOException if the Kerberos login or FileSystem creation fails
     */
    private static FileSystem kerberosFileSystem(String hdfsConfPath, String fsDefaultFS,
                                                 String dfsNamenodeKerberosPrincipal,
                                                 String dfsDatanodeKerberosPrincipal,
                                                 String hdfsKey, String hdfsKeytabPath) throws IOException {
        // krb5.conf location must be set before any Kerberos machinery runs.
        System.setProperty("java.security.krb5.conf", hdfsConfPath);
        Configuration configuration = new Configuration();
        configuration.set("hadoop.security.authentication", "kerberos");
        configuration.set("fs.defaultFS", fsDefaultFS);
        configuration.set("dfs.namenode.kerberos.principal", dfsNamenodeKerberosPrincipal);
        configuration.set("dfs.datanode.kerberos.principal", dfsDatanodeKerberosPrincipal);
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab(hdfsKey, hdfsKeytabPath);
        return FileSystem.get(configuration);
    }

    /**
     * Uploads the contents of a local input stream to a file on HDFS.
     *
     * <p>The input stream is always closed, even on failure (the original
     * version leaked {@code fis} and the HDFS output stream on the error path,
     * and NPE'd on {@code fileSystem.close()} when the Kerberos login itself
     * failed). The original behavior of wrapping failures in an unchecked
     * exception is preserved, but the cause is now attached.
     *
     * @param fis                          local file input stream to upload; closed by this method
     * @param targetHdfsPath               destination path on HDFS (overwritten if present)
     * @param hdfsConfPath                 path to the krb5.conf file
     * @param fsDefaultFS                  value for {@code fs.defaultFS}
     * @param dfsNamenodeKerberosPrincipal value for {@code dfs.namenode.kerberos.principal}
     * @param dfsDatanodeKerberosPrincipal value for {@code dfs.datanode.kerberos.principal}
     * @param hdfsKey                      Kerberos principal to log in as
     * @param hdfsKeytabPath               path to the keytab file
     * @throws RuntimeException if the upload fails (cause attached)
     * @throws IOException      declared for interface compatibility
     */
    public static void fileToHdfs(FileInputStream fis, String targetHdfsPath, String hdfsConfPath, String fsDefaultFS, String dfsNamenodeKerberosPrincipal,
                                  String dfsDatanodeKerberosPrincipal, String hdfsKey, String hdfsKeytabPath) throws IOException {
        logger.info("开始加载hdfs");
        try (FileSystem fileSystem = kerberosFileSystem(hdfsConfPath, fsDefaultFS,
                dfsNamenodeKerberosPrincipal, dfsDatanodeKerberosPrincipal, hdfsKey, hdfsKeytabPath);
             FSDataOutputStream out = fileSystem.create(new Path(targetHdfsPath))) {
            byte[] buffer = new byte[1024];
            int len;
            while ((len = fis.read(buffer)) != -1) {
                out.write(buffer, 0, len);
            }
        } catch (IOException e) {
            logger.log(Level.SEVERE, "上传失败", e);
            // Preserve the original unchecked-exception contract, but keep the cause.
            throw new RuntimeException("上传失败", e);
        } finally {
            fis.close();
        }
    }

    /**
     * Creates a directory on HDFS if it does not already exist.
     *
     * <p>Failures are logged (with the cause) rather than propagated, matching
     * the original best-effort behavior.
     *
     * @param fileSystem an already-authenticated FileSystem; not closed by this method
     * @param path       directory path to create
     */
    public static void makeDir(FileSystem fileSystem, String path) {
        try {
            Path hdfsPath1 = new Path(path);
            if (!fileSystem.exists(hdfsPath1)) {
                logger.info("目录[" + hdfsPath1 + "]不存在！");
                boolean result = fileSystem.mkdirs(hdfsPath1);
                if (result) {
                    logger.info("创建目录[" + path + "]成功！");
                } else {
                    logger.info("创建目录[" + path + "]失败！");
                }
            } else {
                logger.info("目录[" + path + "]已经存在！");
            }
        } catch (IOException e) {
            // Best-effort: log with cause instead of printStackTrace().
            logger.log(Level.SEVERE, "创建目录[" + path + "]失败！", e);
        }
    }

    /**
     * Downloads an HDFS file into the given output stream.
     *
     * <p>All resources (HDFS input stream, FileSystem, and the caller's output
     * stream — which the original also closed) are released even on failure.
     * Errors now propagate as the declared {@link IOException} instead of
     * being swallowed after {@code printStackTrace()} (which also NPE'd on
     * {@code fileSystem.close()} when the Kerberos login failed).
     *
     * @param out                          destination stream; closed by this method
     * @param downloadPath                 HDFS path of the file to download
     * @param hdfsConfPath                 path to the krb5.conf file
     * @param fsDefaultFS                  value for {@code fs.defaultFS}
     * @param dfsNamenodeKerberosPrincipal value for {@code dfs.namenode.kerberos.principal}
     * @param dfsDatanodeKerberosPrincipal value for {@code dfs.datanode.kerberos.principal}
     * @param hdfsKey                      Kerberos principal to log in as
     * @param hdfsKeytabPath               path to the keytab file
     * @throws IOException if authentication or the transfer fails
     */
    public static void download(OutputStream out, String downloadPath, String hdfsConfPath, String fsDefaultFS, String dfsNamenodeKerberosPrincipal,
                                String dfsDatanodeKerberosPrincipal, String hdfsKey, String hdfsKeytabPath) throws IOException {
        try (FileSystem fileSystem = kerberosFileSystem(hdfsConfPath, fsDefaultFS,
                dfsNamenodeKerberosPrincipal, dfsDatanodeKerberosPrincipal, hdfsKey, hdfsKeytabPath);
             FSDataInputStream in = fileSystem.open(new Path(downloadPath))) {
            // 4096-byte buffer; this overload does not close the streams itself.
            IOUtils.copyBytes(in, out, 4096);
        } finally {
            out.close();
        }
    }

    /**
     * Recursively deletes the given HDFS file or directory.
     *
     * <p>Errors now propagate as the declared {@link IOException} instead of
     * being swallowed (the original also NPE'd on {@code fileSystem.close()}
     * when the Kerberos login failed before the FileSystem was created).
     *
     * @param delPath                      HDFS path to delete (recursive)
     * @param hdfsConfPath                 path to the krb5.conf file
     * @param fsDefaultFS                  value for {@code fs.defaultFS}
     * @param dfsNamenodeKerberosPrincipal value for {@code dfs.namenode.kerberos.principal}
     * @param dfsDatanodeKerberosPrincipal value for {@code dfs.datanode.kerberos.principal}
     * @param hdfsKey                      Kerberos principal to log in as
     * @param hdfsKeytabPath               path to the keytab file
     * @throws IOException if authentication or the delete fails
     */
    public static void delete(String delPath, String hdfsConfPath, String fsDefaultFS, String dfsNamenodeKerberosPrincipal,
                              String dfsDatanodeKerberosPrincipal, String hdfsKey, String hdfsKeytabPath) throws IOException {
        try (FileSystem fileSystem = kerberosFileSystem(hdfsConfPath, fsDefaultFS,
                dfsNamenodeKerberosPrincipal, dfsDatanodeKerberosPrincipal, hdfsKey, hdfsKeytabPath)) {
            fileSystem.delete(new Path(delPath), true);
        }
    }

    /**
     * Recursively lists the full paths of all files under an HDFS directory.
     *
     * <p>Uses {@code FileSystem.listFiles}, which recurses on the server side
     * and returns a remote iterator of file statuses. Errors now propagate as
     * the declared {@link IOException} instead of returning a silently
     * incomplete list (the original also NPE'd on {@code fileSystem.close()}
     * when the Kerberos login failed).
     *
     * @param path                         HDFS directory to list
     * @param hdfsConfPath                 path to the krb5.conf file
     * @param fsDefaultFS                  value for {@code fs.defaultFS}
     * @param dfsNamenodeKerberosPrincipal value for {@code dfs.namenode.kerberos.principal}
     * @param dfsDatanodeKerberosPrincipal value for {@code dfs.datanode.kerberos.principal}
     * @param hdfsKey                      Kerberos principal to log in as
     * @param hdfsKeytabPath               path to the keytab file
     * @return full paths of every file found (never null)
     * @throws IOException if authentication or the listing fails
     */
    public static List<String> autoList(String path, String hdfsConfPath, String fsDefaultFS, String dfsNamenodeKerberosPrincipal,
                                        String dfsDatanodeKerberosPrincipal, String hdfsKey, String hdfsKeytabPath) throws IOException {
        List<String> paths = new ArrayList<>();
        try (FileSystem fileSystem = kerberosFileSystem(hdfsConfPath, fsDefaultFS,
                dfsNamenodeKerberosPrincipal, dfsDatanodeKerberosPrincipal, hdfsKey, hdfsKeytabPath)) {
            // listFiles(path, true) recurses into subdirectories automatically.
            RemoteIterator<LocatedFileStatus> iterator = fileSystem.listFiles(new Path(path), true);
            while (iterator.hasNext()) {
                paths.add(iterator.next().getPath().toString());
            }
        }
        return paths;
    }


    /**
     * Ad-hoc manual test: authenticates against a hard-coded cluster and
     * recursively lists {@code /user/oss/test}. Requires the keytab and
     * krb5.conf files referenced below to exist on the local machine.
     *
     * @param args unused
     * @throws IOException if authentication or the listing fails
     */
    public static void main(String[] args) throws IOException {
        String hdfsKey = "oss";// 认证用户
        String hdfsKeytabPath = "C:/test/oss.keytab";// 认证文件路径
        String hdfsConfPath = "C:/test/krb5.conf";// krb5.conf路径

        System.setProperty("java.security.krb5.conf", hdfsConfPath);
        Configuration configuration = new Configuration();
        configuration.set("hadoop.security.authentication", "kerberos");
        configuration.set("dfs.namenode.kerberos.principal", "hdfs/_HOST@OSSOAS.COM");
        configuration.set("fs.defaultFS", "hdfs://pk-dsp-150:8020");
        UserGroupInformation.setConfiguration(configuration);
        // Let failures propagate: the original swallowed the login exception and
        // then NPE'd on the null FileSystem a few lines later.
        UserGroupInformation.loginUserFromKeytab(hdfsKey, hdfsKeytabPath);
        try (FileSystem fileSystem = FileSystem.get(configuration)) {
            RemoteIterator<LocatedFileStatus> iterator = fileSystem.listFiles(new Path("/user/oss/test"), true);
            while (iterator.hasNext()) {
                System.out.println(iterator.next().getPath());
            }
        }
    }

}
