package com.sdehualu.utils;

/**
 * @Author: 吴敬超
 * @Date: 2021/7/21 9:22
 */

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class HDFSUtils {

    /**
     * Checks whether a file or directory exists on HDFS.
     *
     * @param hdfsurl  HDFS namenode URL, e.g. {@code hdfs://host:8020}
     * @param fileName absolute HDFS path of the file or directory to check
     * @return {@code true} if the path exists; {@code false} if it does not
     *         exist or if the check failed (e.g. the cluster is unreachable)
     */
    public boolean FileExist(String hdfsurl, String fileName) {
        // Load configuration and point it at the target cluster.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsurl);

        // try-with-resources guarantees the FileSystem handle is closed
        // even when exists() or get() throws (the original leaked it).
        try (FileSystem fs = FileSystem.get(conf)) {
            return fs.exists(new Path(fileName));
        } catch (IOException e) {
            // The existence check could not be performed. The original code
            // returned true here, falsely telling callers the file exists;
            // reporting false is the safe answer when we cannot verify.
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param hdfsurl   HDFS namenode URL, e.g. {@code hdfs://host:8020}
     * @param localfile path of the local source file
     * @param hdfsfile  absolute HDFS destination path
     * @return {@code true} on success, {@code false} if the copy failed
     */
    public boolean upfile(String hdfsurl, String localfile,
                          String hdfsfile) {
        // Act as the "hdfs" superuser so the write is not rejected by
        // permission checks. NOTE(review): process-wide side effect.
        System.setProperty("HADOOP_USER_NAME", "hdfs");

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsurl);

        // try-with-resources: the FileSystem is closed on both the success
        // and failure paths (the original leaked it on exception).
        try (FileSystem fs = FileSystem.get(conf)) {
            fs.copyFromLocalFile(new Path(localfile), new Path(hdfsfile));
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Streams data from an {@link InputStream} into a new HDFS file.
     * The input stream is closed by this method on the success path
     * (via {@code IOUtils.copyBytes} with {@code close=true}).
     *
     * @param hdfsurl  HDFS namenode URL, e.g. {@code hdfs://host:8020}
     * @param in       source input stream
     * @param hdfsfile absolute HDFS destination path (overwritten if present)
     * @return {@code true} on success, {@code false} if the write failed
     */
    public boolean ioupfile(String hdfsurl, InputStream in,
                            String hdfsfile) {
        // Act as the "hdfs" superuser so the write is not rejected by
        // permission checks. NOTE(review): process-wide side effect.
        System.setProperty("HADOOP_USER_NAME", "hdfs");

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsurl);

        try (FileSystem fs = FileSystem.get(conf)) {
            OutputStream out = fs.create(new Path(hdfsfile));
            // close=true makes copyBytes close both 'in' and 'out' when the
            // copy finishes or fails, fixing the original's stream leak.
            IOUtils.copyBytes(in, out, 4096, true);
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }
}
