package com.hub.realtime.common.utils.hadoop;

import com.hub.realtime.common.core.domain.model.ClusterInfo;
import com.hub.realtime.common.exception.UtilException;
import com.hub.realtime.common.utils.RequireUtil;

import lombok.extern.slf4j.Slf4j;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.RPC;

import java.io.ByteArrayOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

@Slf4j
public class HdfsUtil {




    /**
     * 获取hdfs文件系统
     *
     * @param clusterInfo
     * @return
     */
    public static String getDefaultFS(ClusterInfo clusterInfo) {
        return HadoopUtil.hadoopConf(clusterInfo).get(FileSystem.FS_DEFAULT_NAME_KEY);
    }

    /**
     * 获取文件列表
     *
     * @param clusterInfo
     * @param src
     * @return
     */
    public static List<FileStatus> list(ClusterInfo clusterInfo, String src) {
        try {
            return Arrays.stream(HadoopUtil.hdfs(clusterInfo).listStatus(getPath(src))).collect(Collectors.toList());
        } catch (IOException e) {
            log.error("Hdfs 执行list失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * 移动
     *
     * @param clusterInfo
     * @param src
     * @param dst
     */
    public static void move(ClusterInfo clusterInfo, String src, String dst) {
        try {
            HadoopUtil.hdfs(clusterInfo).rename(getPath(src), getPath(dst));
        } catch (IOException e) {
            log.error("Hdfs 执行移动操作失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * 创建目录
     *
     * @param clusterInfo
     * @param path
     */
    public static void mkdirs(ClusterInfo clusterInfo, String path) {
        try {
            HadoopUtil.hdfs(clusterInfo).mkdirs(getPath(path));
        } catch (IOException e) {
            log.error("Hdfs 创建文件夹失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * 复制文件
     *
     * @param clusterInfo
     * @param src         源路径
     * @param dst         目标路径
     * @param delSrc      是否删除源文件 默认false
     * @param overwrite   是否覆盖目标路径 默认true
     */
    public static void copyHdfs(ClusterInfo clusterInfo, String src, String dst,
                                boolean delSrc, boolean overwrite) {
        try {
            FileUtil.copy(HadoopUtil.hdfs(clusterInfo), getPath(src), HadoopUtil.hdfs(clusterInfo), getPath(dst), delSrc, overwrite, HadoopUtil.hadoopConf(clusterInfo));
        } catch (IOException e) {
            log.error("Hdfs 复制失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * 复制文件夹
     *
     * @param clusterInfo
     * @param src
     * @param dst
     * @param delSrc      是否删除源文件 默认false
     * @param overwrite   是否覆盖目标路径 默认true
     */
    public static void copyHdfsDir(ClusterInfo clusterInfo, String src, String dst,
                                   boolean delSrc, boolean overwrite) {
        list(clusterInfo, src).forEach(c -> {
            try {
                FileUtil.copy(HadoopUtil.hdfs(clusterInfo), c, HadoopUtil.hdfs(clusterInfo), getPath(dst), delSrc, overwrite, HadoopUtil.hadoopConf(clusterInfo));
            } catch (IOException e) {
                log.error("Hdfs 复制失败：" + e.getMessage());
                e.printStackTrace();
            }
        });
    }

    /**
     * 上传单个文件
     *
     * @param clusterInfo
     * @param src
     * @param dst
     * @param delSrc      是否删除源文件 默认false
     * @param overwrite   是否覆盖目标路径 默认true
     */
    public static void upload(ClusterInfo clusterInfo, String src, String dst, boolean delSrc, boolean overwrite) {
        try {
            HadoopUtil.hdfs(clusterInfo).copyFromLocalFile(delSrc, overwrite, getPath(src), getPath(dst));
        } catch (IOException e) {
            log.error("Hdfs 上传失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }
    }


    /**
     * 多文件上传
     *
     * @param clusterInfo
     * @param src
     * @param dst
     * @param delSrc      是否删除源文件 默认false
     * @param overwrite   是否覆盖目标路径 默认true
     */
    public static void uploadMulti(ClusterInfo clusterInfo, List<String> src, String dst, boolean delSrc, boolean overwrite) {
        src.forEach(s -> {
            try {
                HadoopUtil.hdfs(clusterInfo).copyFromLocalFile(delSrc, overwrite, getPath(s), getPath(dst));
            } catch (IOException e) {
                log.error("Hdfs 上传失败：" + e.getMessage());
                e.printStackTrace();
            }
        });

    }


    /**
     * 下载文件
     *
     * @param clusterInfo
     * @param src
     * @param dst
     * @param delSrc                默认false
     * @param useRawLocalFileSystem 默认false
     */
    public static void download(ClusterInfo clusterInfo, String src, String dst, boolean delSrc, boolean useRawLocalFileSystem) {
        try {
            HadoopUtil.hdfs(clusterInfo).copyToLocalFile(delSrc, getPath(src), getPath(dst), useRawLocalFileSystem);
        } catch (IOException e) {
            log.error("Hdfs 文件下载失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * 获取NameNode
     *
     * @param clusterInfo
     * @return
     */
    public static String getNameNode(ClusterInfo clusterInfo) {
        try {
            return getAddressOfActive(HadoopUtil.hdfs(clusterInfo)).getHostString();
        } catch (Exception ex) {
            log.error("获取Hadoop的NameNode失败：" + ex.getMessage());
            ex.printStackTrace();
            throw new IllegalArgumentException(ex);
        }

    }


    /**
     * 创建文件，并写入内容
     *
     * @param clusterInfo
     * @param fileName
     * @param content
     */
    public static void create(ClusterInfo clusterInfo, String fileName, String content) {
        Path path = getPath(fileName);
        try {
            RequireUtil.require(HadoopUtil.hdfs(clusterInfo).exists(path), "hdfs创建文件失败：".concat(fileName).concat("存在"));
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            FSDataOutputStream outputStream = HadoopUtil.hdfs(clusterInfo).create(path);
            outputStream.writeUTF(content);
            outputStream.flush();
            outputStream.close();
        } catch (IOException e) {
            log.error("hdfs 创建文件失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }


    }


    /**
     * 查看文件是否存在
     *
     * @param clusterInfo
     * @param path
     * @return
     */
    public static boolean exists(ClusterInfo clusterInfo, String path) {
        try {
            return HadoopUtil.hdfs(clusterInfo).exists(getPath(path));
        } catch (IOException e) {
            e.printStackTrace();
            log.error("访问hdfs 文件出错："+e.getMessage());
            throw new UtilException("访问hdfs 文件出错："+e.getMessage());
        }
    }


    /**
     * 读取hdfs内容
     *
     * @param clusterInfo
     * @param fileName
     * @return
     */
    public static String read(ClusterInfo clusterInfo, String fileName) {
        Path path = getPath(fileName);
        try {
            RequireUtil.require(HadoopUtil.hdfs(clusterInfo).exists(path) && !HadoopUtil.hdfs(clusterInfo).isDirectory(path)
                    , "读取hdfs文件失败，".concat(fileName).concat("不存在或者是一个文件夹"));
            FSDataInputStream in = HadoopUtil.hdfs(clusterInfo).open(path);
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            IOUtils.copyBytes(in, out, 4096, false);
            out.flush();
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
            return out.toString();
        } catch (IOException e) {
            log.error("hdfs 读取失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }


    }

    /**
     * 删除文件
     *
     * @param clusterInfo
     * @param src
     */
    public static void delete(ClusterInfo clusterInfo, String src) {
        Path path = getPath(src);

        try {
            if (HadoopUtil.hdfs(clusterInfo).exists(path)) {
                HadoopUtil.hdfs(clusterInfo).delete(path, true);
            } else {
                log.warn("hdfs 删除文件：".concat(src)
                        .concat(", ".concat(src).concat("不存在！")));
            }
        } catch (Exception e) {
            log.error("hdfs 删除失败：" + e.getMessage());
            e.printStackTrace();
            throw new IllegalArgumentException(e);
        }


    }

    /**
     * 文件MD5
     *
     * @param clusterInfo
     * @param fileName
     * @return
     */
    public static String fileMd5(ClusterInfo clusterInfo, String fileName) {
        Path path = getPath(fileName);
        FSDataInputStream in = null;
        try {
            in = HadoopUtil.hdfs(clusterInfo).open(path);
            String mdfStr = DigestUtils.md5Hex(in);
            in.close();
            return mdfStr;
        } catch (Exception ex) {
            log.error("hdfs md5获取失败：" + ex.getMessage());
            ex.printStackTrace();
            throw new IllegalArgumentException(ex);
        }
    }

    /**
     * 下载文件
     *
     * @param clusterInfo
     * @param hdfsPath
     * @param localPath
     */
    public static void downToLocal(ClusterInfo clusterInfo, String hdfsPath, String localPath) {
        Path path = getPath(hdfsPath);
        try {
            FSDataInputStream input = HadoopUtil.hdfs(clusterInfo).open(path);
            String content = input.readUTF();
            FileWriter fw = new FileWriter(localPath);
            fw.write(content);
            fw.close();
            input.close();
        } catch (Exception ex) {
            log.error("hdfs 文件下载失败：" + ex.getMessage());
            ex.printStackTrace();
            throw new IllegalArgumentException(ex);
        }
    }


    private static InetSocketAddress getAddressOfActive(FileSystem fs) throws IOException {
        if (!(fs instanceof DistributedFileSystem)) {
            throw new IllegalArgumentException("FileSystem $fs is not a DFS.");
        }
        fs.exists(new Path("/"));
        DistributedFileSystem dfs = (DistributedFileSystem) fs;
        DFSClient dfsClient = dfs.getClient();
        return RPC.getServerAddress(dfsClient.getNamenode());
    }

    private static Path getPath(String hdfsPath) {
        return new Path(hdfsPath);
    }

}
