package pres.niufen.hdfsbase;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.util.Progressable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * @Description Operate on HDFS files through the {@link org.apache.hadoop.fs.FileSystem} API
 * @Author haijun.zhang@luckincoffee.com
 * @Date 2018-09-28 18:37
 **/
public class HdfsAPI {
    /**
     * Logger for this class.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(HdfsAPI.class);

    /**
     * Reads the entire contents of an HDFS file as a UTF-8 string via the FileSystem API.
     *
     * @param url path of a file in HDFS
     * @return the file contents decoded as UTF-8
     * @throws IOException if the file cannot be opened or read
     */
    public static String getStrForURL(String url) throws IOException {
        Configuration conf = new Configuration();
        // NOTE(review): FileSystem.get returns a cached, shared instance by default,
        // so it is deliberately not closed here.
        FileSystem fs = FileSystem.get(URI.create(url), conf);
        // try-with-resources: the original leaked the input stream on every call.
        try (InputStream in = fs.open(new Path(url))) {
            return IOUtils.toString(in, StandardCharsets.UTF_8);
        }
    }

    /**
     * Reads an HDFS file as a UTF-8 string, starting from the given byte offset
     * (positioned via {@link FSDataInputStream#seek(long)}).
     *
     * @param url  path of a file in HDFS
     * @param seek byte offset in the file at which to start reading
     * @return the file contents from {@code seek} to EOF, decoded as UTF-8
     * @throws IOException if the file cannot be opened, the seek is invalid, or reading fails
     */
    public static String seekStrForURL(String url, long seek) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), conf);
        // try-with-resources: the original leaked the input stream on every call.
        try (FSDataInputStream in = fs.open(new Path(url))) {
            in.seek(seek);
            return IOUtils.toString(in, StandardCharsets.UTF_8);
        }
    }

    /**
     * Reads up to {@code length} bytes from an HDFS file starting at {@code position}
     * (positioned read via {@link FSDataInputStream#read(long, byte[], int, int)})
     * and decodes them as UTF-8.
     *
     * @param url      path of a file in HDFS
     * @param position byte offset in the file at which to start reading
     * @param offset   offset into the destination buffer (kept for interface compatibility)
     * @param length   maximum number of bytes to read
     * @return the bytes actually read, decoded as UTF-8 (empty string at EOF)
     * @throws IOException if the file cannot be opened or the read fails
     */
    public static String readStrForURL(String url, long position, int offset, int length) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), conf);
        // try-with-resources: the original leaked the input stream on every call.
        try (FSDataInputStream in = fs.open(new Path(url))) {
            // A positioned read needs only offset + length bytes; the original sized
            // the buffer with `position` as well, then decoded the entire (mostly
            // zero-filled) buffer instead of just the bytes read.
            byte[] buffer = new byte[offset + length];
            int bytesRead = in.read(position, buffer, offset, length);
            // read() may return fewer than `length` bytes, or -1 at EOF — decode
            // only what was actually read.
            return new String(buffer, offset, Math.max(bytesRead, 0), StandardCharsets.UTF_8);
        }
    }

    /**
     * Copies a local file into the Hadoop file system, logging a progress tick
     * for each chunk acknowledged by the cluster.
     *
     * @param hdfsFilePath  destination path in HDFS
     * @param localFilePath source file on the local file system
     * @throws IOException if the local file cannot be read or the HDFS write fails
     */
    public static void fileCopyWithProgress(String hdfsFilePath, String localFilePath) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(hdfsFilePath), configuration);
        // try-with-resources on BOTH streams: the original leaked the local input
        // stream if fileSystem.create(...) threw before copyBytes could close it.
        try (InputStream inputStream = new BufferedInputStream(new FileInputStream(localFilePath));
             OutputStream outputStream = fileSystem.create(new Path(hdfsFilePath), new Progressable() {
                 @Override
                 public void progress() {
                     LOGGER.debug(".");
                 }
             })) {
            // close=false: the enclosing try-with-resources owns stream lifetime.
            org.apache.hadoop.io.IOUtils.copyBytes(inputStream, outputStream, 4096, false);
        }
        LOGGER.info("upload success:{}", hdfsFilePath);
    }

    /**
     * Creates a directory (including missing parents) in HDFS via
     * {@link FileSystem#mkdirs(Path)}; logs an error if creation fails.
     *
     * @param hdfsDirPath directory path to create in HDFS
     * @throws IOException if the file system cannot be reached
     */
    public static void mkdirs(String hdfsDirPath) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(hdfsDirPath), configuration);
        // mkdirs returns false on failure; preserved original log-only handling.
        if (!fileSystem.mkdirs(new Path(hdfsDirPath))) {
            LOGGER.error("目录创建失败:{}", hdfsDirPath);
        }
    }

    /**
     * Recursively deletes a file or directory in HDFS via
     * {@link FileSystem#delete(Path, boolean)}.
     *
     * @param hdfsDirPath file or directory path to delete in HDFS
     * @throws IOException if the file system cannot be reached or the delete fails
     */
    public static void delete(String hdfsDirPath) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(hdfsDirPath), configuration);
        // recursive=true: directories are removed along with their contents.
        fileSystem.delete(new Path(hdfsDirPath), true);
    }

    /**
     * Returns the {@link FileStatus} for a file or directory in HDFS via
     * {@link FileSystem#getFileStatus(Path)}.
     *
     * @param hdfsDirPath file or directory path in HDFS
     * @return the status object for the given path
     * @throws IOException if the path does not exist or the file system cannot be reached
     */
    public static FileStatus getFileStatus(String hdfsDirPath) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(hdfsDirPath), configuration);
        FileStatus fileStatus = fileSystem.getFileStatus(new Path(hdfsDirPath));
        LOGGER.debug("{}", fileStatus);
        return fileStatus;
    }

    /**
     * Logs the resolved file paths for a group of HDFS paths via
     * {@link FileSystem#listStatus(Path[])}.
     *
     * @param hdfsDirPaths one or more HDFS paths; a null or empty array is a no-op
     * @throws IOException if the file system cannot be reached or a path does not exist
     */
    public static void listStatus(String[] hdfsDirPaths) throws IOException {
        // Guard: the original indexed hdfsDirPaths[0] unconditionally and threw
        // ArrayIndexOutOfBoundsException on an empty array.
        if (hdfsDirPaths == null || hdfsDirPaths.length == 0) {
            return;
        }
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(hdfsDirPaths[0]), configuration);

        Path[] paths = new Path[hdfsDirPaths.length];
        for (int i = 0; i < paths.length; i++) {
            paths[i] = new Path(hdfsDirPaths[i]);
        }

        FileStatus[] fileStatuses = fileSystem.listStatus(paths);
        Path[] listedPaths = FileUtil.stat2Paths(fileStatuses);
        for (Path listedPath : listedPaths) {
            // Parameterized logging instead of `listedPath+""` concatenation.
            LOGGER.info("{}", listedPath);
        }
    }

}
