package com.dong.minio.controller;

import com.alibaba.fastjson.JSONObject;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.*;
import com.dong.minio.config.FileChunkInfo;
import com.dong.minio.config.OssProperties;
import com.dong.minio.result.BigFileResp;
import com.dong.minio.result.ResultEntity;
import com.dong.minio.service.UploadService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.time.LocalDate;
import java.util.*;

/**
 * File upload controller: a simple single-request upload plus the three-step
 * S3/MinIO multipart flow (init → upload part → merge), with per-part ETags
 * tracked in a Redis hash keyed by the uploadId.
 */
@RestController
@RequestMapping("file")
@RequiredArgsConstructor
@Slf4j
public class UploadController {

    private final AmazonS3 amazonS3;

    private final OssProperties ossProperties;

    private final RedisTemplate<String, String> redisTemplate;

    // Injected via the Lombok-generated constructor (@RequiredArgsConstructor),
    // consistent with the other dependencies above (was field @Autowired injection).
    private final UploadService uploadService;

    @RequestMapping("/upload")
    public String upload1() {
        return uploadService.upload1();
    }

    /**
     * Initializes a multipart upload.
     *
     * @param fileInfo carries the original file name used to build the object key
     * @return the S3 uploadId and the generated object path; the client must echo
     *         both back on every subsequent part upload and on the final merge
     */
    @PostMapping("/init")
    public ResultEntity<BigFileResp> uploadInit(FileChunkInfo fileInfo) {
        // Build the object key ourselves: date + uuid + original file name.
        String path = getPath(fileInfo);

        InitiateMultipartUploadRequest initRequest =
                new InitiateMultipartUploadRequest(ossProperties.getBucket(), path);
        InitiateMultipartUploadResult multipartUploadResult = amazonS3.initiateMultipartUpload(initRequest);

        BigFileResp response = new BigFileResp();
        response.setFileUploadId(multipartUploadResult.getUploadId());
        response.setFileUploadPath(path);
        return ResultEntity.success(response);
    }

    /**
     * Uploads one part (chunk) of a multipart upload and caches its ETag in
     * Redis so {@link #merge(FileChunkInfo)} can later complete the upload.
     *
     * @param fileInfo uploadId, object key, part number, chunk size and the chunk data
     * @return the uploadId and the ETag of the uploaded part
     * @throws Exception if reading the multipart file's input stream fails
     */
    @PostMapping("/part")
    public ResultEntity<BigFileResp> uploadPart(FileChunkInfo fileInfo) throws Exception {
        UploadPartRequest request = new UploadPartRequest()
                .withBucketName(ossProperties.getBucket())
                .withKey(fileInfo.getFileUploadPath())
                .withUploadId(fileInfo.getFileUploadId())
                .withPartNumber(fileInfo.getCurrentChunkNumber())
                .withInputStream(fileInfo.getFile().getInputStream())
                .withPartSize(fileInfo.getChunkSize());

        UploadPartResult uploadPartResult = amazonS3.uploadPart(request);
        PartETag partETag = uploadPartResult.getPartETag();

        // Record the part's ETag in Redis: hash key = uploadId, field = part number.
        redisTemplate.opsForHash().put(
                fileInfo.getFileUploadId(),
                String.valueOf(fileInfo.getCurrentChunkNumber()),
                JSONObject.toJSONString(partETag));

        BigFileResp response = new BigFileResp();
        response.setFileUploadId(fileInfo.getFileUploadId());
        response.setPartETag(partETag);
        return ResultEntity.success(response);
    }

    /**
     * Completes the multipart upload by merging all previously uploaded parts,
     * then deletes the cached ETags and returns the public URL of the object.
     *
     * @param fileInfo uploadId and object key of the upload to complete
     * @return the final file URL
     */
    @PostMapping("/merge")
    public ResultEntity<BigFileResp> merge(FileChunkInfo fileInfo) {
        // Collect the cached part ETags for this uploadId.
        Map<Object, Object> map = redisTemplate.opsForHash().entries(fileInfo.getFileUploadId());
        List<PartETag> etagList = new ArrayList<>(map.size());
        for (Map.Entry<Object, Object> entry : map.entrySet()) {
            etagList.add(JSONObject.parseObject((String) entry.getValue(), PartETag.class));
        }
        // BUGFIX: Redis hash entries come back in arbitrary order, but S3 requires
        // the completion part list to be sorted in ascending part-number order;
        // without this sort, multi-part merges can fail with InvalidPartOrder.
        etagList.sort(Comparator.comparingInt(PartETag::getPartNumber));

        // Merge the parts into the final object.
        CompleteMultipartUploadRequest request = new CompleteMultipartUploadRequest(
                ossProperties.getBucket(),
                fileInfo.getFileUploadPath(),
                fileInfo.getFileUploadId(),
                etagList);
        CompleteMultipartUploadResult completeMultipartUploadResult = amazonS3.completeMultipartUpload(request);
        log.info("merge result:{}", completeMultipartUploadResult);

        // Drop the cached ETags now that the upload is complete.
        redisTemplate.delete(fileInfo.getFileUploadId());

        // NOTE(review): host and path prefix are hard-coded — presumably these
        // should come from OssProperties; confirm before reuse in other environments.
        String url = "https://" + "static-test.ebonex.io/pdfList" + "/" + fileInfo.getFileUploadPath();
        BigFileResp response = new BigFileResp();
        response.setUrl(url);
        return ResultEntity.success(response);
    }

    /**
     * Builds the object key as {@code yyyy-MM-dd-<uuid>-<originalFileName>}.
     */
    private String getPath(FileChunkInfo fileInfo) {
        String uuid = UUID.randomUUID().toString().replace("-", "");
        // java.time replaces the legacy Date + DateFormatUtils pair;
        // LocalDate.toString() emits exactly the ISO yyyy-MM-dd format used before.
        String datePart = LocalDate.now().toString();
        return datePart + "-" + uuid + "-" + fileInfo.getOriginalFileName();
    }
}