package edu.zju.gis.dbfg.model.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class HDFSHelper implements Serializable, Closeable {

    /** Formatter for file modification times; includes time-of-day (java.sql.Date dropped it). */
    private static final DateTimeFormatter MODIFICATION_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());

    private FileSystem dfs;

    /**
     * @param conf Hadoop configuration; typically place $HADOOP_HOME/etc/hadoop/core-site.xml
     *             on the classpath and build it via {@link Configuration#Configuration()}
     */
    private HDFSHelper(Configuration conf) {
        try {
            conf.set("dfs.support.append", "true");
            conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
            conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
            this.dfs = FileSystem.get(conf);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the lazily-initialized singleton instance (initialization-on-demand holder idiom).
     * NOTE(review): {@link #close()} closes the shared FileSystem, after which this singleton is
     * unusable process-wide — only call close() at application shutdown.
     */
    public static HDFSHelper getInstance() {
        return HDFSHelper.InstanceHolder.instance;
    }

    /**
     * Checks whether the given path exists.
     *
     * @return true if the path already exists
     */
    public boolean exists(Path path) throws IOException {
        return dfs.exists(path);
    }

    /**
     * Checks whether the given path (as a string) exists.
     *
     * @return true if the path already exists
     */
    public boolean exists(String path) throws IOException {
        return this.exists(new Path(path));
    }

    /**
     * Checks whether the given path is a regular file (false if it is a directory
     * or does not exist).
     */
    public boolean isFile(Path path) throws IOException {
        return dfs.isFile(path);
    }

    /**
     * Lists all immediate child paths; when the input path is a file, returns that path itself.
     */
    public Path[] listFiles(Path path) throws IOException {
        return listFiles(path, null);
    }

    /**
     * Lists all immediate child paths; when the input path is a file, returns that path itself.
     *
     * @param filter path filter; only paths accepted by the filter are returned (null = no filter)
     */
    public Path[] listFiles(Path path, PathFilter filter) throws IOException {
        FileStatus[] fileStatuses = filter == null ? dfs.listStatus(path) : dfs.listStatus(path, filter);
        Path[] paths = new Path[fileStatuses.length];
        for (int i = 0; i < fileStatuses.length; i++) {
            paths[i] = fileStatuses[i].getPath();
        }
        return paths;
    }

    /**
     * Creates a directory (including missing parents).
     *
     * @return true on success
     */
    public boolean mkdirs(Path dir) throws IOException {
        return dfs.mkdirs(dir);
    }

    /**
     * Creates a new empty file.
     *
     * @return true on success; false if the file already exists or creation failed
     */
    public boolean createFile(Path file) throws IOException {
        return dfs.createNewFile(file);
    }

    /**
     * Deletes a path non-recursively. For a non-empty directory, call
     * {@link #deletePath(Path, boolean)} with {@code recursive = true}.
     *
     * @param path path to delete
     * @return true on success
     */
    public boolean deletePath(Path path) throws IOException {
        return dfs.delete(path, false);
    }

    /**
     * Deletes a path.
     *
     * @param path      path to delete
     * @param recursive when the path is a directory, true deletes it and its contents,
     *                  otherwise deletion of a non-empty directory fails; ignored for files
     * @return true on success
     */
    public boolean deletePath(Path path, boolean recursive) throws IOException {
        return dfs.delete(path, recursive);
    }

    /**
     * Opens a file for reading. Caller is responsible for closing the returned stream.
     */
    public FSDataInputStream read(Path path) throws IOException {
        return dfs.open(path);
    }

    /**
     * Opens a file on HDFS for writing. Caller is responsible for closing the returned stream.
     *
     * @param path      target file
     * @param overwrite whether to overwrite an existing file
     */
    public FSDataOutputStream write(Path path, boolean overwrite) throws IOException {
        return dfs.create(path, overwrite);
    }

    /**
     * Opens an existing HDFS file for appending. Caller is responsible for closing the
     * returned stream. Requires {@code dfs.support.append} (set in the constructor).
     *
     * @param path target file
     */
    public FSDataOutputStream append(Path path) throws IOException {
        return dfs.append(path);
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param src local file to upload
     * @param tar HDFS target path
     * @return true on success, false on I/O failure
     */
    public boolean copyFromLocalFile(Path src, Path tar) {
        try {
            dfs.copyFromLocalFile(src, tar);
            return true;
        } catch (IOException e) {
            // No logging framework is available in this class; keep the trace for diagnostics.
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Downloads an HDFS file to the local filesystem.
     *
     * @param src HDFS source path
     * @param tar local target path
     * @return true on success, false on I/O failure
     */
    public boolean copyToLocalFile(Path src, Path tar) {
        try {
            dfs.copyToLocalFile(src, tar);
            return true;
        } catch (IOException e) {
            // No logging framework is available in this class; keep the trace for diagnostics.
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Returns the current user's HDFS home directory as a string.
     */
    public String getHomeDirectory() {
        return dfs.getHomeDirectory().toString();
    }

    /**
     * Closes the underlying shared FileSystem. After this call the singleton returned by
     * {@link #getInstance()} is no longer usable — call only at application shutdown.
     */
    @Override
    public void close() throws IOException {
        dfs.close();
    }

    /**
     * Returns basic file info as {@code [type, modificationTime, length]}.
     * For directories, length is reported as {@code "..."}.
     *
     * @param src path to inspect
     * @throws IOException if the path does not exist or the status cannot be read
     */
    public String[] fileInfo(Path src) throws IOException {
        FileStatus info = dfs.getFileStatus(src);
        String[] result = new String[3];
        result[0] = info.isDirectory() ? "Directory" : "File";
        // java.sql.Date.toString() truncated this to yyyy-MM-dd; keep the time of day.
        result[1] = MODIFICATION_TIME_FORMAT.format(Instant.ofEpochMilli(info.getModificationTime()));
        result[2] = info.isDirectory() ? "..." : String.valueOf(info.getLen());
        return result;
    }

    /**
     * Renames/moves a path.
     *
     * @throws IOException if the rename fails; FileSystem.rename signals failure via its
     *                     boolean return value, which the previous implementation ignored
     */
    public void renameMv(Path src, Path tar) throws IOException {
        if (!dfs.rename(src, tar)) {
            throw new IOException("Failed to rename " + src + " to " + tar);
        }
    }

    /**
     * Copies the input stream to the HDFS output stream and closes both.
     *
     * @return true on success, false on I/O failure (streams are closed either way)
     */
    public boolean upload(FSDataOutputStream outputStream, InputStream inputStream) throws IOException {
        // try-with-resources guarantees both streams are closed even when a read/write fails.
        try (InputStream in = inputStream; OutputStream out = outputStream) {
            byte[] buffer = new byte[8192];
            int bytesRead;
            while ((bytesRead = in.read(buffer)) > 0) {
                out.write(buffer, 0, bytesRead);
            }
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Streams a file to an HTTP response as an attachment.
     *
     * @param filename name presented to the browser (non-ASCII names are re-encoded below)
     * @param fis      source stream; closed by this method
     * @param response target servlet response; status 500 is set on failure
     * @return true on success, false on I/O failure
     */
    public boolean download(String filename, InputStream fis, HttpServletResponse response) {
        try (InputStream in = fis) {
            OutputStream fos = response.getOutputStream();
            response.setContentType("application/octet-stream");
            // Re-encode the filename so non-ASCII characters survive the Latin-1 header channel.
            // Previously filename.getBytes() used the platform-default charset, which made the
            // result depend on the JVM's locale; UTF-8 is now explicit.
            response.addHeader("Content-Disposition", "attachment;filename="
                    + new String(filename.getBytes(StandardCharsets.UTF_8), StandardCharsets.ISO_8859_1));
            // NOTE(review): available() is only an estimate of the remaining bytes and may
            // understate the true length for remote streams — confirm callers tolerate this.
            response.addHeader("Content-Length", String.valueOf(in.available()));
            byte[] buffer = new byte[1024 * 1024 * 4];
            int length;
            while ((length = in.read(buffer)) != -1) {
                fos.write(buffer, 0, length);
            }
            fos.close();
            return true;
        } catch (IOException e) {
            response.setStatus(500);
            return false;
        }
    }

    /**
     * Reads up to {@code size} megabytes of an HDFS file and returns it as a string
     * (decoded as UTF-8). Appends {@code "\r\n..."} when truncated, {@code "\r\nEND"} otherwise.
     *
     * @param hdfsPath file to read
     * @param size     maximum number of 1 MB chunks to read
     */
    public String readFromhdfsAsString(Path hdfsPath, int size) {
        StringBuilder output = new StringBuilder();
        // Reader-based decoding avoids corrupting multi-byte characters split across
        // buffer boundaries; try-with-resources fixes the previous stream leak.
        try (Reader reader = new InputStreamReader(this.read(hdfsPath), StandardCharsets.UTF_8)) {
            char[] buffer = new char[1024 * 1024];
            int len;
            int round = 0;
            // Previous code used non-short-circuit '&', reading one extra chunk past the
            // limit, and compared against a hard-coded 10 instead of the size parameter.
            while (round < size && (len = reader.read(buffer)) != -1) {
                output.append(buffer, 0, len);
                round++;
            }
            boolean truncated = round == size && reader.read() != -1;
            output.append(truncated ? "\r\n..." : "\r\nEND");
        } catch (IOException e) {
            output.append("读取失败。");
        }
        return output.toString();
    }

    /** Initialization-on-demand holder: instance is created on first getInstance() call. */
    private static class InstanceHolder {
        static HDFSHelper instance = new HDFSHelper(new Configuration());
    }

}