package com.edu.springboot_disk_erban.handler;


import com.edu.springboot_disk_erban.dao.FileDao;
import com.edu.springboot_disk_erban.entity.FileBean;
import com.edu.springboot_disk_erban.utils.FileUtils;
import com.edu.springboot_disk_erban.utils.HDFSConnectUtils;
import com.edu.springboot_disk_erban.utils.TimeUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import java.io.IOException;
import java.io.InputStream;

@Component
public class HDFSHandler {

    @Resource
    HDFSConnectUtils hdfs;

    @Resource
    FileDao fileDao;

    /**
     * Creates a directory in HDFS.
     *
     * @param hadoopPath absolute HDFS path of the directory to create
     * @return the result of {@link FileSystem#mkdirs}
     * @throws IOException if the HDFS operation fails
     */
    public boolean addDir(String hadoopPath) throws IOException {
        FileSystem fs = hdfs.getFileSystem();
        return fs.mkdirs(new Path(hadoopPath));
    }

    /**
     * Uploads a multipart file into HDFS under the given directory and records it in the database.
     *
     * @param file     the uploaded file; its original filename becomes the HDFS file name
     * @param filepath target HDFS directory (supplied by the front end)
     * @return 1 if the file was uploaded and recorded; 0 if a record for the same
     *         HDFS path already exists (nothing is written to HDFS in that case)
     * @throws IOException if reading the upload or writing to HDFS fails
     */
    public int upload(MultipartFile file, String filepath) throws IOException {
        FileSystem fs = hdfs.getFileSystem();
        String name = file.getOriginalFilename();
        String path = filepath + "/" + name;

        // Check for a duplicate FIRST: the previous version wrote to HDFS before
        // checking, which silently overwrote the existing file's content even
        // when it then returned 0 ("already exists").
        if (fileDao.findByFilePath(path) != null) {
            return 0;
        }

        long size = file.getSize();

        // try-with-resources guarantees both streams are closed even if
        // fs.create(...) or the copy throws (the old code leaked the input
        // stream on a create failure).
        try (InputStream is = file.getInputStream();
             FSDataOutputStream os = fs.create(new Path(path))) {
            IOUtils.copyBytes(is, os, hdfs.getConf(), false);
        }

        // Record the new file in the database.
        FileBean fileBean = new FileBean(name, path, FileUtils.FileSizeUnitConvert(size), TimeUtils.getNowTime(), 0);
        fileDao.addFileBean(fileBean);
        return 1;
    }

    /**
     * Renames/moves a file in HDFS and keeps the database record in sync.
     *
     * @param oldFilePath current HDFS path of the file
     * @param newFilePath new HDFS path; the new file name is derived from its last '/' segment
     * @throws RuntimeException      wrapping any IOException raised by the HDFS rename
     * @throws IllegalStateException if no database record exists for {@code oldFilePath}
     */
    public void alterFile(String oldFilePath, String newFilePath) {
        FileSystem fs = hdfs.getFileSystem();
        // Rename inside HDFS first.
        try {
            fs.rename(new Path(oldFilePath), new Path(newFilePath));
        } catch (IOException e) {
            throw new RuntimeException("Failed to rename " + oldFilePath + " in HDFS", e);
        }
        // Then update the database; guard against a missing record instead of
        // letting the setter call throw a bare NullPointerException.
        FileBean byFilePath = fileDao.findByFilePath(oldFilePath);
        if (byFilePath == null) {
            throw new IllegalStateException("No database record for path: " + oldFilePath);
        }
        byFilePath.setFileName(newFilePath.substring(newFilePath.lastIndexOf('/') + 1));
        byFilePath.setFilePath(newFilePath);
        byFilePath.setFileTime(TimeUtils.getNowTime());
        fileDao.update(byFilePath);
    }

    /**
     * Recursively deletes a file or directory tree from HDFS (used to purge trash).
     *
     * @param hadoopPath HDFS path to delete; directories are removed recursively
     * @throws RuntimeException wrapping any IOException raised by the HDFS delete
     */
    public void deleteRubbishFiles(String hadoopPath) {
        FileSystem fs = hdfs.getFileSystem();
        try {
            fs.delete(new Path(hadoopPath), true);
        } catch (IOException e) {
            throw new RuntimeException("Failed to delete " + hadoopPath + " from HDFS", e);
        }
    }

}
