package com.button.springboothdfs.service.impl;

import bio.nvwa.boot.hdfs.HdfsClient;
import bio.nvwa.boot.hdfs.HdfsService;
import com.button.springboothdfs.service.HdfsFileService;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

/**
 * @Description
 * @Author Administrator
 * @Date 2023/1/30 13:53
 **/
@Service
public class HdfsFileServiceImpl implements HdfsFileService {

	@Autowired
	private HdfsService hdfsService;

	/**
	 * Uploads a local file to HDFS.
	 *
	 * @param sourcePath local source path
	 * @param destPath   HDFS destination path
	 * @throws Exception if the copy fails
	 */
	@Override
	public void uploadFile(Path sourcePath, Path destPath) throws Exception {
		// hdfsService.open() could be used instead of borrowing a client directly
		HdfsClient hdfsClient = hdfsService.getHdfsClient();
		try {
			hdfsClient.copyFromLocalFile(sourcePath, destPath);
		} finally {
			// Always return the client to the pool, even when the copy throws,
			// otherwise the pooled client leaks.
			returnClientQuietly(hdfsClient);
		}
	}

	/**
	 * Downloads an HDFS file to the local filesystem.
	 *
	 * <p>The content is streamed in fixed-size chunks rather than buffered
	 * fully in memory, so arbitrarily large files can be downloaded.
	 *
	 * @param filePath     HDFS path of the file to download
	 * @param downloadPath local destination path
	 * @throws IOException if the file cannot be read from HDFS or written locally
	 */
	@Override
	public void download(Path filePath, String downloadPath) throws IOException {
		HdfsClient hdfsClient = null;
		try {
			// hdfsService.open() could be used instead of borrowing a client directly
			hdfsClient = hdfsService.getHdfsClient();
			try (FSDataInputStream in = hdfsClient.open(filePath);
				 FileOutputStream fos = new FileOutputStream(new File(downloadPath))) {
				// 4096-byte chunks; 'false' because try-with-resources closes both streams.
				IOUtils.copyBytes(in, fos, 4096, false);
			}
		} catch (IOException e) {
			throw e;
		} catch (Exception e) {
			// Do not swallow failures: surface them to the caller with context
			// instead of silently reporting a successful no-op download.
			throw new IOException("Failed to download " + filePath + " to " + downloadPath, e);
		} finally {
			if (null != hdfsClient) {
				returnClientQuietly(hdfsClient);
			}
		}
	}

	/**
	 * Checks whether a file exists in HDFS.
	 *
	 * @param filePath HDFS path to check
	 * @return {@code true} if the path exists
	 * @throws Exception if the existence check fails
	 */
	@Override
	public Boolean isExist(Path filePath) throws Exception {
		return hdfsService.exists(filePath);
	}

	/**
	 * Renames (moves) a file within HDFS.
	 *
	 * @param sourcePath current HDFS path
	 * @param destPath   new HDFS path
	 * @return {@code true} if the rename succeeded
	 * @throws Exception if the rename fails
	 */
	@Override
	public Boolean rename(Path sourcePath, Path destPath) throws Exception {
		return hdfsService.rename(sourcePath, destPath);
	}

	/**
	 * Deletes a file or directory from HDFS.
	 *
	 * @param path HDFS path to delete
	 * @return {@code true} if the delete succeeded
	 * @throws Exception if the delete fails
	 */
	@Override
	public Boolean delete(Path path) throws Exception {
		// If the path is an empty directory or a file, the 'recursive' flag is
		// ignored. A non-empty directory and its contents are only deleted
		// when recursive == true.
		return hdfsService.delete(path, true);
	}

	/**
	 * Returns the length in bytes of an HDFS file.
	 *
	 * @param path HDFS path as a string
	 * @return the file length in bytes
	 * @throws Exception if the length cannot be determined
	 */
	@Override
	public Long getFileLength(String path) throws Exception {
		Path hdfsPath = new Path(path);
		return hdfsService.getLen(hdfsPath);
	}

	/**
	 * Returns a borrowed client to the pool, suppressing any exception so that
	 * cleanup in a {@code finally} block cannot mask the original failure.
	 *
	 * @param hdfsClient the client to return, never {@code null}
	 */
	private void returnClientQuietly(HdfsClient hdfsClient) {
		try {
			hdfsService.returnHdfsClient(hdfsClient);
		} catch (Exception ignored) {
			// Best effort: the client could not be returned to the pool;
			// there is nothing more this layer can do about it.
		}
	}
}
