package com.wentao.thisService.common.upload;

import com.wentao.thisService.common.CommonFunction;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Paths;
import java.util.Objects;

@Service("FileExecutor")
public class FileExecutor {

    /** Root directory for local-disk uploads (configuration: upload.dir.path). */
    @Value("${upload.dir.path}")
    private String dir_path;

    /** Root directory inside HDFS for uploads (configuration: upload.hadoop.path). */
    @Value("${upload.hadoop.path}")
    private String hadoop_path;

    /** HDFS NameNode URI, e.g. hdfs://host:port (configuration: upload.hadoop.uri). */
    @Value("${upload.hadoop.uri}")
    private String hadoop_uri;

    @Autowired
    private AliyunOSSUtil aliyunOSSUtil;

    /** Lazily created, shared HDFS client; access only through createFs(). */
    private FileSystem fs = null;

    /**
     * Returns the shared HDFS {@link FileSystem}, creating it on first use.
     *
     * <p>Synchronized because this bean is a Spring singleton: without the lock,
     * concurrent first calls could race the lazy initialization and create
     * several FileSystem instances, leaking all but one.
     *
     * @throws Exception if the URI is malformed or the client cannot connect
     */
    private synchronized FileSystem createFs() throws Exception {
        if (null == fs) {
            fs = FileSystem.get(new URI(hadoop_uri), new Configuration());
        }
        return fs;
    }

    // Random file name with the generic ".fs" extension (used for HDFS uploads).
    private String getFileName() {
        return String.format("%s.fs", CommonFunction.CreateUUIDString());
    }

    // Random UUID used as the extension-less base of a file name.
    private String getUUIDFileName() {
        return CommonFunction.CreateUUIDString();
    }

    /**
     * Returns the upload's extension including the leading dot (e.g. ".png").
     * Falls back to "fs" when the file is empty or its original name contains
     * no dot.
     *
     * @param file the uploaded file; its original name must not be null
     * @return the extension with leading dot, or "fs" as a fallback
     */
    public String getFileSuffixName(MultipartFile file) {
        String suffixName = "fs";
        if (!file.isEmpty()) {
            String originalName = Objects.requireNonNull(file.getOriginalFilename());
            int begin = originalName.lastIndexOf('.');
            // Guard against names without an extension: lastIndexOf returns -1
            // there, and the previous unguarded substring(begin, ...) threw
            // StringIndexOutOfBoundsException for such names.
            if (begin >= 0) {
                suffixName = originalName.substring(begin);
            }
        }
        return suffixName;
    }

    /**
     * Writes the upload to HDFS under {hadoop_path}/{type}/{random}.fs.
     *
     * @param file the uploaded multipart file
     * @param type sub-directory (category) the file is stored under
     * @return info holding the full HDFS path and the {type}/{name} relative path
     * @throws Exception if the HDFS client cannot be created or the copy fails
     */
    public UploadFileInfo uploadHdfs(MultipartFile file, String type) throws Exception {
        String file_name = getFileName();
        FileSystem fs = createFs();
        // Build the destination with Hadoop's own Path (always '/'-separated)
        // rather than java.nio Paths, whose separator is platform-dependent and
        // would corrupt the HDFS path on Windows.
        Path dst = new Path(new Path(hadoop_path, type), file_name);
        // try-with-resources: the original leaked both streams on every call.
        try (InputStream in = file.getInputStream();
             FSDataOutputStream out = fs.create(dst, true)) {
            IOUtils.copy(in, out);
        }
        UploadFileInfo fileInfo = new UploadFileInfo();
        fileInfo.setFilePath(dst.toString());
        fileInfo.setRelativePath(new Path(type, file_name).toString());
        return fileInfo;
    }

    /**
     * Saves the upload to local disk under {dir_path}/{type}/{uuid}{suffix}.
     *
     * <p>NOTE: "upladFile" is a historical typo in the method name, kept
     * because external callers depend on it.
     *
     * @param file the uploaded multipart file
     * @param type sub-directory (category) the file is stored under
     * @return info holding the absolute local path and the {type}/{name} relative path
     * @throws Exception if the file cannot be written
     */
    public UploadFileInfo upladFile(MultipartFile file, String type) throws Exception {
        UploadFileInfo file_info = new UploadFileInfo();
        String file_name = getUUIDFileName() + getFileSuffixName(file);
        java.nio.file.Path file_path = Paths.get(dir_path, type, file_name);
        // copyInputStreamToFile creates missing parent directories and closes
        // the source stream itself.
        FileUtils.copyInputStreamToFile(file.getInputStream(), file_path.toFile());
        file_info.setFilePath(file_path.toString());
        file_info.setRelativePath(Paths.get(type, file_name).toString());
        return file_info;
    }

    /**
     * Opens the HDFS file at the given path for reading.
     *
     * @param file_name full HDFS path of the file
     * @return an open stream (caller must close it), or null if the file does not exist
     * @throws Exception if the HDFS client cannot be created or the open fails
     */
    public InputStream readHdfs(String file_name) throws Exception {
        FileSystem fs = createFs();
        Path path = new Path(file_name);
        if (fs.exists(path)) {
            return fs.open(path);
        }
        return null;
    }

    /**
     * Opens the local file at the given path for reading.
     *
     * @param file_name full local path of the file
     * @return an open stream (caller must close it), or null if the file does not exist
     * @throws Exception if the file exists but cannot be opened
     */
    public InputStream readFile(String file_name) throws Exception {
        // Renamed the local from "fs": it shadowed the FileSystem field with a
        // different type, which was misleading.
        File local = new File(file_name);
        if (local.exists()) {
            return new FileInputStream(local);
        }
        return null;
    }

    /**
     * Uploads the file to Aliyun OSS via the injected helper.
     *
     * @param file the uploaded multipart file
     * @return info whose absolute and relative paths both hold the OSS path
     * @throws Exception if the OSS upload fails
     */
    public UploadFileInfo uploadOSS(MultipartFile file) throws Exception {
        String path = aliyunOSSUtil.upload(file);
        UploadFileInfo file_info = new UploadFileInfo();
        file_info.setFilePath(path);
        file_info.setRelativePath(path);
        return file_info;
    }
}
