package com.academic.common.utils.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.InputStream;
import java.net.URI;
@Component
public class HdfsUtlis {

    /** Base upload directory on HDFS, injected from the {@code hdfs.uploadPath} property. */
    @Value( "${hdfs.uploadPath}" )
    private String userPath;

    /** HDFS NameNode URI (e.g. {@code hdfs://host:9000}), injected from the {@code hdfs.host} property. */
    @Value( "${hdfs.host}" )
    private String hdfsPath;

    /**
     * Smoke test for property injection: prints both injected configuration
     * values to stdout so a developer can verify the Spring context wired them.
     */
    public void test1() {
        System.out.println( userPath);
        System.out.println( hdfsPath );
    }

    /**
     * Builds the Hadoop configuration used for every file-system operation.
     *
     * @return a fresh default {@link Configuration}; site defaults are picked up
     *         from the classpath (core-site.xml etc.) if present
     */
    private Configuration getHdfsConfig() {
        return new Configuration();
    }

    /**
     * Opens a new {@link FileSystem} client for the configured NameNode.
     * <p>
     * The HDFS client API needs a user identity; by default it reads the JVM
     * parameter {@code -DHADOOP_USER_NAME}. Here the user {@code "hadoop"} is
     * passed explicitly to the factory instead, so the deployment does not
     * depend on JVM flags.
     * <p>
     * Callers are responsible for closing the returned instance (preferably
     * with try-with-resources — {@link FileSystem} is {@code Closeable}).
     *
     * @return a freshly opened file-system client bound to {@link #hdfsPath}
     * @throws Exception if the URI is malformed or the connection fails
     */
    private FileSystem getFileSystem() throws Exception {
        return FileSystem.get(new URI(hdfsPath), getHdfsConfig(), "hadoop");
    }

    /**
     * Recursively creates a directory on HDFS (like {@code mkdir -p}).
     * Prints the outcome to stdout.
     *
     * @param path absolute HDFS path of the directory to create
     * @throws Exception if the file system cannot be reached; the exception is
     *         logged and rethrown
     */
    public void mkdir(String path) throws Exception {
        // try-with-resources guarantees the FileSystem is closed even when
        // mkdirs() throws (the original only closed it in the happy path)
        try (FileSystem fs = getFileSystem()) {
            boolean isOk = fs.mkdirs(new Path(path));
            if (isOk) {
                System.out.println("create dir success...");
            } else {
                System.out.println("create dir failure...");
            }
        } catch (Exception e) {
            System.out.println("Error occurred while creating directory: " + e.getMessage());
            throw e;
        }
    }

    /**
     * Creates (or overwrites) a file on HDFS and fills it with the given bytes.
     * <p>
     * Failures are reported to stdout and swallowed — kept that way for
     * backward compatibility, since the signature declares no checked
     * exception and existing callers expect no throw.
     *
     * @param filePath absolute HDFS path of the file to create
     * @param files    the bytes to write as the file content
     */
    public void createFile(String filePath, byte[] files) {
        // try-with-resources closes both the output stream and the FileSystem
        // even on error (the original leaked both whenever write() threw)
        try (FileSystem fs = getFileSystem();
             FSDataOutputStream outputStream = fs.create(new Path(filePath))) {
            outputStream.write(files);
            System.out.println( "创建文件成功！" );
        } catch (Exception e) {
            System.out.println( "创建文件失败！" );
            // surface the cause instead of silently discarding it
            System.out.println("cause: " + e);
        }
    }

    /**
     * Reads an HDFS file and copies its content to standard output.
     * Read errors are reported to stdout and swallowed (original behavior).
     *
     * @param filePath absolute HDFS path of the file to read
     * @throws Exception if the file-system client cannot be created
     */
    public void readFile(String filePath) throws Exception {
        // the original closed only the input stream and leaked the FileSystem;
        // try-with-resources closes both in reverse order
        try (FileSystem fs = getFileSystem();
             InputStream in = fs.open(new Path(filePath))) {
            // copy to stdout; 'false' keeps System.out open afterwards
            IOUtils.copyBytes(in, System.out, 4096, false);
            System.out.println( "\n读取文件成功！" );
        } catch (Exception e) {
            System.out.println( "\n读取文件失败！" );
        }
    }

    /**
     * Prints detailed status information for every direct child of an HDFS path.
     *
     * @param filePath absolute HDFS path of the directory to inspect
     * @throws Exception if the listing fails
     */
    public void pathInfo(String filePath) throws Exception {
        // close the FileSystem when done (the original never closed it)
        try (FileSystem fs = getFileSystem()) {
            for (FileStatus fileStatus : fs.listStatus(new Path(filePath))) {
                System.out.println(fileStatus.getPath() + ">>>>>" + fileStatus.toString());
            }
        }
    }

    /**
     * Recursively lists all files (not directories) under an HDFS path and
     * prints each file's name and full path.
     *
     * @param filePath absolute HDFS path to list recursively
     * @throws Exception if the listing fails
     */
    public void listFile(String filePath) throws Exception {
        // close the FileSystem when done (the original never closed it)
        try (FileSystem fs = getFileSystem()) {
            // 'true' makes listFiles recurse into subdirectories
            RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path(filePath), true);
            while (listFiles.hasNext()) {
                LocatedFileStatus next = listFiles.next();
                System.out.println(next.getPath().getName() + "---" + next.getPath().toString());
            }
        }
    }

    /**
     * Renames (moves) a file or directory on HDFS and prints the outcome.
     *
     * @param oldName absolute HDFS path of the existing file
     * @param newName absolute HDFS path to rename it to
     * @throws Exception if the rename request cannot be issued
     */
    public void renameFile(String oldName, String newName) throws Exception {
        // try-with-resources closes the FileSystem even when rename() throws
        try (FileSystem fs = getFileSystem()) {
            boolean isOk = fs.rename(new Path(oldName), new Path(newName));
            if (isOk) {
                System.out.println("rename success...");
            } else {
                System.out.println("rename failure...");
            }
        }
    }

    /**
     * Deletes a file or directory (recursively) from HDFS and prints the outcome.
     *
     * @param filePath absolute HDFS path to delete
     * @throws Exception if the delete request cannot be issued
     */
    public void deleteFile(String filePath) throws Exception {
        try (FileSystem fs = getFileSystem()) {
            // Bug fix: the original used deleteOnExit(), which only REGISTERS
            // the path for deletion when the FileSystem closes and whose return
            // value does not report deletion success. delete(path, true)
            // deletes immediately and returns the real result.
            boolean isOk = fs.delete(new Path(filePath), true);
            if (isOk) {
                System.out.println("delete success...");
            } else {
                System.out.println("delete failure...");
            }
        }
    }

    /**
     * Uploads the content of an input stream to a file on HDFS, overwriting
     * any existing file at the target path. The supplied stream is always
     * closed, even on failure.
     *
     * @param inputStream source of the file content; closed by this method
     * @param uploadPath  absolute HDFS path of the target file
     * @throws Exception if the transfer fails
     */
    public void uploadFile(InputStream inputStream, String uploadPath) throws Exception {
        // try-with-resources closes input, output and FileSystem in reverse
        // declaration order, replacing the manual finally block
        try (InputStream in = inputStream;
             FileSystem fs = getFileSystem();
             // 'true' overwrites an existing file at the target path
             FSDataOutputStream outputStream = fs.create(new Path(uploadPath), true)) {
            byte[] buffer = new byte[4096]; // 4KB copy buffer
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                outputStream.write(buffer, 0, bytesRead);
            }
        }
        System.out.println("上传文件成功！");
    }

    /**
     * Downloads a file from HDFS to the local file system.
     *
     * @param fileName absolute HDFS path of the source file
     * @param downPath local destination path
     * @throws Exception if the copy fails
     */
    public void downloadFile(String fileName, String downPath) throws Exception {
        // try-with-resources closes the FileSystem even when the copy throws
        try (FileSystem fs = getFileSystem()) {
            fs.copyToLocalFile(new Path(fileName), new Path(downPath));
        }
        System.out.println( "下载文件成功！" );
    }

    /**
     * Checks whether a path exists on HDFS.
     *
     * @param FileName absolute HDFS path to test (parameter name kept for
     *                 backward compatibility)
     * @return {@code true} if the path exists
     * @throws Exception if the check cannot be performed
     */
    public boolean existFile(String FileName) throws Exception {
        // close the FileSystem after the check (the original leaked it)
        try (FileSystem hdfs = getFileSystem()) {
            return hdfs.exists(new Path(FileName));
        }
    }

}
