package git.soulbgm.hdfs.common;

import git.soulbgm.hdfs.annotation.HdfsOperate;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.xml.bind.DatatypeConverter;
import java.io.*;
import java.net.URI;
import java.nio.charset.Charset;
import java.security.MessageDigest;

/**
 * HDFS utility facade.
 * <p>Typical callers simply inject this class and use the wrapped operations.
 * For operations not covered here, obtain a raw client via
 * {@link #getHdfsClient()} and remember to give it back with
 * {@link #returnHdfsClient(HdfsClient)}.</p>
 *
 * <p>Methods annotated with {@link HdfsOperate} expect an aspect to have
 * placed an {@link HdfsClient} into {@link #THREAD_LOCAL} (via
 * {@link #setHdfsClient(HdfsClient)}) before invocation and to clean it up
 * afterwards (via {@link #removeThreadLocal()}).</p>
 *
 * @author SoulBGM
 * @date 2022/06/29 10:54
 */
public class HdfsTool {

    private static final Logger log = LoggerFactory.getLogger(HdfsTool.class);

    /**
     * Default buffer size (4 MiB) for stream copies.
     */
    private static final int DEFAULT_BUFFER_SIZE = 1024 * 1024 * 4;

    /**
     * Per-thread client used by the {@link HdfsOperate} aspect: the aspect
     * borrows a client, stores it here, runs the annotated method, then
     * returns the client and clears this slot.
     */
    private static final ThreadLocal<HdfsClient> THREAD_LOCAL = new ThreadLocal<>();

    /**
     * Object pool backing {@link #getHdfsClient()} for raw, uncovered
     * operations.
     */
    private final HdfsPool hdfsPool;

    public HdfsTool(HdfsPool hdfsPool) {
        this.hdfsPool = hdfsPool;
    }

    /**
     * Binds a client to the current thread; intended for the aspect.
     *
     * @param hdfsClient client to bind
     */
    public static void setHdfsClient(HdfsClient hdfsClient) {
        THREAD_LOCAL.set(hdfsClient);
    }

    /**
     * Clears the current thread's client binding; intended for the aspect.
     * Must be called after the annotated method completes to avoid leaking
     * the pooled client across thread reuse.
     */
    public static void removeThreadLocal() {
        THREAD_LOCAL.remove();
    }

    /**
     * Borrows an {@link HdfsClient} from the pool for native operations this
     * class does not wrap. Callers MUST return it with
     * {@link #returnHdfsClient(HdfsClient)}.
     *
     * @return a pooled {@link HdfsClient}
     * @throws Exception if the pool cannot supply a client
     */
    public HdfsClient getHdfsClient() throws Exception {
        return this.hdfsPool.borrowObject();
    }

    /**
     * Returns a borrowed {@link HdfsClient} to the pool.
     *
     * @param hdfsClient the client previously obtained via {@link #getHdfsClient()}
     */
    public void returnHdfsClient(HdfsClient hdfsClient) {
        this.hdfsPool.returnObject(hdfsClient);
    }

    /**
     * Gets the filesystem URI; see {@link HdfsClient#getUri FileSystem.getUri()}.
     *
     * @return the {@link URI} of the underlying filesystem
     */
    @HdfsOperate
    public URI getUri() {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.getUri();
    }

    /**
     * Opens a file for reading; see
     * {@link HdfsClient#open FileSystem.open(Path f, int bufferSize)}.
     *
     * @param f          path to open
     * @param bufferSize read buffer size in bytes
     * @return an {@link FSDataInputStream} positioned at the start of the file
     * @throws Exception if the file cannot be opened
     */
    @HdfsOperate
    public FSDataInputStream open(Path f, int bufferSize) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.open(f, bufferSize);
    }

    /**
     * Creates a file with full control over permission, replication and block
     * size; see the corresponding
     * {@code FileSystem.create} overload.
     *
     * @param f           file to create
     * @param permission  permission to apply
     * @param overwrite   whether an existing file may be replaced
     * @param bufferSize  write buffer size in bytes
     * @param replication replication factor
     * @param blockSize   HDFS block size in bytes
     * @param progress    progress callback, may be {@code null}
     * @return an open {@link FSDataOutputStream} for the new file
     * @throws Exception if creation fails
     */
    @HdfsOperate
    public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.create(f, permission, overwrite, bufferSize, replication, blockSize, progress);
    }

    /**
     * Creates a file at the given path and writes {@code bytes} to it.
     *
     * @param f         name of the file to create
     * @param overwrite if {@code true} an existing file is replaced; if
     *                  {@code false} an existing file causes an error
     * @param bytes     content to write
     * @throws Exception if creation or the write fails
     * @see HdfsClient#create(Path, boolean, int)
     */
    @HdfsOperate
    public void create(Path f, boolean overwrite, byte[] bytes) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        FSDataOutputStream out = hdfsClient.create(f, overwrite);
        // close=true: IOUtils.copyBytes closes both streams when done
        IOUtils.copyBytes(new ByteArrayInputStream(bytes), out, DEFAULT_BUFFER_SIZE, true);
    }

    /**
     * Appends {@code bytes} to the file at {@code f}, creating the file first
     * if it does not yet exist.
     *
     * @param f     target file
     * @param bytes content to append
     * @throws Exception if the append or create fails
     */
    @HdfsOperate
    public void append(Path f, byte[] bytes) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        FSDataOutputStream out;
        if (exists(f)) {
            out = hdfsClient.append(f);
        } else {
            // file absent, so overwrite=false cannot fail here
            out = hdfsClient.create(f, false);
        }
        // close=true: IOUtils.copyBytes closes both streams when done
        IOUtils.copyBytes(new ByteArrayInputStream(bytes), out, DEFAULT_BUFFER_SIZE, true);
    }

    /**
     * Renames a path.
     *
     * @param src path to rename
     * @param dst new path
     * @return {@code true} if the rename succeeded
     * @throws Exception on I/O failure
     * @see HdfsClient#rename(Path, Path)
     */
    @HdfsOperate
    public boolean rename(Path src, Path dst) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.rename(src, dst);
    }

    /**
     * Deletes a file or directory.
     *
     * @param f         path of the file/directory
     * @param recursive whether to delete directory contents recursively
     * @return {@code true} if the delete succeeded
     * @throws Exception on I/O failure
     */
    @HdfsOperate
    public boolean delete(Path f, boolean recursive) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.delete(f, recursive);
    }

    /**
     * Lists the statuses of the files/directories at the given path.
     *
     * @param f path to list
     * @return the child {@link FileStatus} entries
     * @throws Exception on I/O failure
     */
    @HdfsOperate
    public FileStatus[] listStatus(Path f) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.listStatus(f);
    }

    /**
     * Sets the current working directory of the thread-bound client.
     *
     * @param newDir new working directory
     */
    @HdfsOperate
    public void setWorkingDirectory(Path newDir) {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        hdfsClient.setWorkingDirectory(newDir);
    }

    /**
     * Gets the current working directory of the thread-bound client.
     *
     * @return the working directory {@link Path}
     */
    @HdfsOperate
    public Path getWorkingDirectory() {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.getWorkingDirectory();
    }

    /**
     * Creates a directory (and any missing parents) with the given permission.
     *
     * @param f          directory to create
     * @param permission permission to apply
     * @return {@code true} if the directory was created
     * @throws Exception on I/O failure
     */
    @HdfsOperate
    public boolean mkdirs(Path f, FsPermission permission) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.mkdirs(f, permission);
    }

    /**
     * Creates a directory (and any missing parents) with default permission.
     *
     * @param f directory to create
     * @return {@code true} if the directory was created
     * @throws Exception on I/O failure
     */
    @HdfsOperate
    public boolean mkdirs(Path f) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.mkdirs(f);
    }

    /**
     * Gets the {@link FileStatus} of a path.
     *
     * @param f path to stat
     * @return the {@link FileStatus}
     * @throws Exception on I/O failure (e.g. the path does not exist)
     */
    @HdfsOperate
    public FileStatus getFileStatus(Path f) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.getFileStatus(f);
    }

    /**
     * Tests whether a file or directory exists.
     *
     * @param f path of the file/directory
     * @return {@code true} if it exists, {@code false} otherwise
     * @throws Exception on I/O failure
     */
    @HdfsOperate
    public boolean exists(Path f) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.exists(f);
    }

    /**
     * Gets the length of a file in bytes.
     *
     * @param f path of the file
     * @return file length in bytes
     * @throws Exception on I/O failure
     * @see HdfsClient#getFileStatus(Path)
     * @see FileStatus#getLen()
     */
    @HdfsOperate
    public long getLen(Path f) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        return hdfsClient.getFileStatus(f).getLen();
    }

    /**
     * Computes the MD5 checksum of an HDFS file.
     * <p>Note: MD5 is used here as a content checksum, not for security.</p>
     *
     * @param f path of the file
     * @return upper-case hex string of the MD5 digest
     * @throws Exception if the file cannot be read
     */
    @HdfsOperate
    public String getMd5(Path f) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
        // try-with-resources guarantees the stream closes even if read fails
        try (FSDataInputStream in = hdfsClient.open(f)) {
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                md5.update(buffer, 0, bytesRead);
            }
        }
        return toHex(md5.digest());
    }

    /**
     * Hex-encodes a byte array in upper case. Replaces
     * {@code javax.xml.bind.DatatypeConverter.printHexBinary}, which was
     * removed from the JDK in Java 11 (JEP 320).
     *
     * @param bytes bytes to encode
     * @return upper-case hex representation
     */
    private static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            sb.append(String.format("%02X", b));
        }
        return sb.toString();
    }

    /**
     * Downloads an HDFS file to the local filesystem using the default buffer
     * size.
     *
     * @param src       HDFS path to download
     * @param dst       local destination path
     * @param delSrc    whether to delete the HDFS file after a successful copy
     * @param overwrite whether an existing local file may be replaced
     * @return {@code true} if the download (and optional delete) succeeded
     * @throws Exception on HDFS access failure
     */
    @HdfsOperate
    public boolean downFile(String src, String dst, boolean delSrc, boolean overwrite) throws Exception {
        return downFile(src, dst, delSrc, overwrite, DEFAULT_BUFFER_SIZE);
    }

    /**
     * Downloads an HDFS file to the local filesystem.
     *
     * @param src        HDFS path to download
     * @param dst        local destination path
     * @param delSrc     whether to delete the HDFS file after a successful copy
     * @param overwrite  whether an existing local file may be replaced
     * @param bufferSize copy buffer size in bytes
     * @return {@code true} if the download (and optional delete) succeeded
     * @throws Exception on HDFS access failure
     */
    @HdfsOperate
    public boolean downFile(String src, String dst, boolean delSrc, boolean overwrite, int bufferSize) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        Path srcPath = new Path(src);
        if (!hdfsClient.exists(srcPath)) {
            log.warn("下载{}到{}时，文件不存在，下载失败", src, dst);
            return false;
        }
        if (hdfsClient.getFileStatus(srcPath).isDirectory()) {
            log.warn("下载{}到{}时，该地址是目录，下载失败", src, dst);
            return false;
        }
        File dstFile = new File(dst);
        if (dstFile.exists()) {
            // The overwrite decision only applies when the destination already
            // exists; previously a fresh download with overwrite=false was
            // wrongly rejected here.
            if (!overwrite) {
                log.warn("下载{}到{}时，本地有该地址，且不许覆盖，下载失败", src, dst);
                return false;
            }
            FileUtils.deleteQuietly(dstFile);
        } else {
            File parentFile = dstFile.getParentFile();
            // getParentFile() is null for a bare file name; nothing to create then
            if (parentFile != null && !parentFile.exists() && !parentFile.mkdirs()) {
                log.warn("下载{}到{}时，本地文件夹创建失败", src, dst);
                return false;
            }
        }

        try (
                FSDataInputStream input = hdfsClient.open(srcPath, bufferSize);
                OutputStream output = new FileOutputStream(dst)
        ) {
            byte[] buffer = new byte[bufferSize];
            int length;
            while ((length = input.read(buffer)) > 0) {
                output.write(buffer, 0, length);
            }
            output.flush();
        } catch (Exception e) {
            log.error("下载{}到{}时出错", src, dst, e);
            return false;
        }

        if (delSrc && !hdfsClient.delete(srcPath, false)) {
            log.warn("下载{}到{}时，下载完成，但删除HDFS文件是失败", src, dst);
            return false;
        }
        return true;
    }

    /**
     * Reads a text file into a string. Suitable for small files only — the
     * whole content is buffered in memory.
     *
     * @param srcPath file path
     * @param charset character encoding of the file
     * @return file content with lines joined by {@code '\n'} (no trailing newline)
     * @throws FileNotFoundException    if the file does not exist
     * @throws IllegalArgumentException if the path refers to a directory
     * @throws Exception                on other read failures
     */
    @HdfsOperate
    public String readTextFile(Path srcPath, Charset charset) throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        if (!hdfsClient.exists(srcPath)) {
            log.warn("文件{}不存在", srcPath.toString());
            throw new FileNotFoundException("文件不存在");
        }
        if (hdfsClient.getFileStatus(srcPath).isDirectory()) {
            log.warn("地址{}不是文件", srcPath.toString());
            // previously threw a raw Exception with an unfilled "{}" placeholder
            throw new IllegalArgumentException("地址" + srcPath + "不是文件");
        }
        StringBuilder content = new StringBuilder();
        try (
                FSDataInputStream dis = hdfsClient.open(srcPath, DEFAULT_BUFFER_SIZE);
                InputStreamReader inputStreamReader = new InputStreamReader(dis, charset);
                BufferedReader bf = new BufferedReader(inputStreamReader, DEFAULT_BUFFER_SIZE)
        ) {
            String line;
            while ((line = bf.readLine()) != null) {
                content.append(line).append("\n");
            }
            // drop the trailing newline added by the loop
            if (content.length() > 0) {
                content.deleteCharAt(content.length() - 1);
            }
        } catch (Exception e) {
            log.error("读{}时出错", srcPath.toString(), e);
            throw e;
        }
        return content.toString();
    }

    /**
     * Connectivity smoke test: queries "/test/" through the aspect-provided
     * client. The result is intentionally ignored.
     *
     * @throws Exception on HDFS access failure
     */
    @HdfsOperate
    public void test() throws Exception {
        HdfsClient hdfsClient = THREAD_LOCAL.get();
        // call kept for its side effect; the previously-assigned local was unused
        hdfsClient.exists(new Path("/test/"));
    }

}
