package com.woldier.filesystem.controller;

import com.woldier.base.exception.CommonError;
import com.woldier.base.exception.DoopfsException;
import com.woldier.base.model.R;
import com.woldier.filesystem.service.HdfsService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

/**
 * REST endpoints for uploading files to and downloading files from HDFS.
 * The HDFS target path is taken from the wildcard tail of the request URI.
 *
 * @author woldier
 * @version 1.0
 * @date 2023/3/24 15:28
 **/
@RestController
@RequestMapping("/hdfs")
@Slf4j
@RequiredArgsConstructor
public class HdfsController {
    private final HdfsService hdfsService;


    @PutMapping("/upload/**")
    public R upload(
            @RequestPart("filedata") MultipartFile upload,
            HttpServletRequest httpServletRequest
    ) throws DoopfsException {
        String prefix = "/upload";
        String hadoopPath = getHadoopPath(httpServletRequest, prefix);
        File tempFile = null;
        try {
            tempFile = File.createTempFile("hadoop", ".temp");
        } catch (IOException e) {
            log.error("创建临时文件出错");
            DoopfsException.cast(CommonError.UNKOWN_ERROR);
        }
        try (FileOutputStream fileOutputStream = new FileOutputStream(tempFile)) {
            IOUtils.copy(upload.getInputStream(), fileOutputStream);
        } catch (Exception e) {
            log.error("文件拷贝出错");
            DoopfsException.cast(CommonError.UNKOWN_ERROR);
        }
        hdfsService.upload(tempFile.getPath(), hadoopPath);
        tempFile.delete();
        return R.success("上传成功");
    }

    private static String getHadoopPath(HttpServletRequest httpServletRequest, String prefix) {
        int i = httpServletRequest.getRequestURI().indexOf(prefix) + prefix.length();
        String hadoopPath = httpServletRequest.getRequestURI().substring(i);
        return hadoopPath;
    }

    @GetMapping("/download/**")
    public void download(HttpServletRequest request, HttpServletResponse response) throws DoopfsException {
        String hadoopPath = getHadoopPath(request, "download");
        try (
                FSDataInputStream hadoopFileStream = hdfsService.getHadoopFileStream(hadoopPath)
        ) {
            IOUtils.copy(hadoopFileStream,response.getOutputStream());
        } catch (Exception e) {
            log.error("文件下载出错");
            DoopfsException.cast(CommonError.UNKOWN_ERROR);
        }
    }

}
