package com.briup.pai.service.impl;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.CharsetUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.ZipUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.briup.pai.common.constant.DatasetConstant;
import com.briup.pai.common.enums.DatasetStatusEnum;
import com.briup.pai.common.enums.ResultCodeEnum;
import com.briup.pai.common.enums.UploadStatusEnum;
import com.briup.pai.common.exception.BriupAssert;
import com.briup.pai.common.exception.CustomException;
import com.briup.pai.convert.FileChunkConvert;
import com.briup.pai.convert.FileInfoConvert;
import com.briup.pai.entity.dto.UploadChunkDTO;
import com.briup.pai.entity.dto.UploadVerifyFileDTO;
import com.briup.pai.entity.po.*;
import com.briup.pai.entity.vo.UploadVerifyFileVO;
import com.briup.pai.service.*;
import org.springframework.aop.framework.AopContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.util.*;

/**
 * Service handling the chunked-upload workflow for datasets: verifying files
 * (instant/resumed upload), storing chunks, merging them, and unzipping the
 * resulting archives into classifications and entities.
 */
@Service
@CacheConfig(cacheNames = DatasetConstant.DATASET_CACHE_PREFIX)
public class UploadServiceImpl implements IUploadService {
    // Dataset service is injected lazily — presumably to break a circular
    // dependency between dataset and upload services; TODO confirm.
    @Autowired
    @Lazy
    private IDatasetService datasetService;
    // Per-file upload records (one row per distinct file hash).
    @Autowired
    private IFileInfoService fileInfoService;
    // Per-chunk records for files whose upload is still in progress.
    @Autowired
    private IFileChunkService fileChunkService;
    // Classification rows created when a dataset archive is unzipped.
    @Autowired
    private IClassifyService classifyService;
    // Entity (image/file) rows belonging to a classification.
    @Autowired
    private IEntityService entityService;

    // MapStruct-style converters between DTOs and persistence objects.
    @Autowired
    private FileInfoConvert fileInfoConvert;
    @Autowired
    private FileChunkConvert fileChunkConvert;

    // Root directory served by nginx; all uploads live under it.
    @Value("${upload.nginx-file-path}")
    private String nginxFilePath;
    // Sub-directory (under the nginx root) holding merged files, keyed by hash.
    @Value("${upload.file-directory-name}")
    private String fileDirectoryName;
    // Sub-directory (under the nginx root) holding in-flight chunk files.
    @Value("${upload.chunk-directory-name}")
    private String chunkDirectoryName;
    /**
     * Changes a dataset's status and evicts its detail cache entry.
     *
     * @param datasetId id of the dataset to update; must exist
     * @param status    new status; must be a known status and differ from the current one
     */
    @Override
    @Transactional
    @CacheEvict(key = "T(com.briup.pai.common.constant.CommonConstant).DETAIL_CACHE_PREFIX+':'+#datasetId")
    public void modifyDatasetStatus(Integer datasetId, Integer status) {
        // Load the dataset; fail fast when the id is unknown.
        Dataset target = BriupAssert.requireNotNull(
                datasetService,
                Dataset::getId,
                datasetId,
                ResultCodeEnum.DATA_NOT_EXIST
        );
        // The requested status must be one of the known dataset states.
        BriupAssert.requireIn(
                status,
                DatasetStatusEnum.statusList(),
                ResultCodeEnum.PARAM_IS_ERROR);
        // A no-op transition (same status as before) is rejected.
        BriupAssert.requireNotEqual(
                target.getDatasetStatus(),
                status,
                ResultCodeEnum.DATASET_STATUS_ERROR
        );
        // Persist the new status.
        target.setDatasetStatus(status);
        datasetService.updateById(target);
    }

    /**
     * Pre-upload handshake. Decides between three cases by file hash:
     * never uploaded (creates the target directory and an INIT record),
     * partially uploaded (returns the chunk indexes already stored so the
     * client can resume), or fully uploaded (instant upload, uploaded=true).
     *
     * @param dto hash, name, size and chunk size of the file to upload
     * @return uploaded flag plus, for a resumed upload, the stored chunk indexes
     */
    @Override
    @Transactional
    public UploadVerifyFileVO verifyFile(UploadVerifyFileDTO dto) {
        UploadVerifyFileVO result = new UploadVerifyFileVO();
        result.setUploaded(true);
        // Look up an existing record for this hash.
        FileInfo existing = fileInfoService.getOne(
                new LambdaQueryWrapper<FileInfo>().eq(FileInfo::getFileHash, dto.getFileHash()));
        if (ObjectUtil.isNull(existing)) {
            // Never seen before: prepare the per-hash directory and register the file.
            result.setUploaded(false);
            String fileName = nginxFilePath + "/" + fileDirectoryName + "/" + dto.getFileHash();
            FileUtil.mkdir(fileName);
            Long chunkSize = dto.getChunkSize();
            Long fileSize = dto.getFileSize();
            // Number of chunks, rounded up.
            Long chunkNum = fileSize % chunkSize == 0 ? fileSize / chunkSize : fileSize / chunkSize + 1;
            FileInfo record = fileInfoConvert.uploadVerifyFileDTO2Po(dto);
            record.setFilePath(fileName + "/" + dto.getFileName());
            record.setChunkNum(chunkNum);
            record.setUploadStatus(UploadStatusEnum.INIT.getStatus());
            record.setChunkSize(chunkSize);
            fileInfoService.save(record);
        } else if (ObjectUtil.notEqual(
                existing.getUploadStatus(),
                UploadStatusEnum.UPLOADED.getStatus())) {
            // Seen before but incomplete: report which chunk indexes are
            // already stored so the client skips them on resume.
            result.setUploaded(false);
            LambdaQueryWrapper<FileChunk> chunkQuery = new LambdaQueryWrapper<>();
            chunkQuery.eq(FileChunk::getFileHash, existing.getFileHash());
            result.setUploadedChunks(
                    fileChunkService.list(chunkQuery).stream().map(FileChunk::getChunkIndex).toList());
        }
        // Otherwise the file is complete: uploaded=true means instant upload.
        return result;
    }

    /**
     * Stores one uploaded chunk on disk and records it in the chunk table.
     *
     * @param dto carries the chunk file, its index and the owning file's hash
     * @throws CustomException with FILE_UPLOAD_ERROR when writing the chunk fails
     */
    @Override
    @Transactional
    public void uploadChunk(UploadChunkDTO dto) {
        MultipartFile chunkFile = dto.getFile();
        // Target path: <chunk directory for this hash>/<chunk index>.
        String targetPath = createFileChunkDirectory(dto.getFileHash()) + "/" + dto.getChunkIndex();
        try {
            chunkFile.transferTo(new File(targetPath));
        } catch (IOException e) {
            throw new CustomException(ResultCodeEnum.FILE_UPLOAD_ERROR);
        }

        // Record the stored chunk so a resumed upload can skip it.
        FileChunk chunk = fileChunkConvert.uploadChunkDTO2Po(dto);
        chunk.setChunkPath(targetPath);
        chunk.setChunkSize(chunkFile.getSize());
        fileChunkService.save(chunk);
    }

    /**
     * Changes the upload status of a file record.
     *
     * @param fileHash     hash identifying the file; record must exist
     * @param uploadStatus new status; must be valid and differ from the current one
     */
    @Override
    @Transactional
    public void modifyUploadStatus(String fileHash, Integer uploadStatus) {
        // The file record must exist.
        FileInfo record = BriupAssert.requireNotNull(
                fileInfoService,
                FileInfo::getFileHash,
                fileHash,
                ResultCodeEnum.DATA_NOT_EXIST);
        // The target status must be a known upload state.
        BriupAssert.requireIn(
                uploadStatus,
                UploadStatusEnum.statusList(),
                ResultCodeEnum.PARAM_IS_ERROR);
        // Reject a transition to the state the record is already in.
        BriupAssert.requireNotEqual(
                uploadStatus,
                record.getUploadStatus(),
                ResultCodeEnum.PARAM_IS_ERROR);
        // Persist the change.
        record.setUploadStatus(uploadStatus);
        fileInfoService.updateById(record);
    }

    /**
     * Merges all stored chunks of a file (ordered by chunk index) into the
     * final file, marks the record UPLOADED, then deletes the chunk rows and
     * the on-disk chunk directory.
     *
     * @param fileHash hash identifying the file whose chunks are merged
     * @throws CustomException with FILE_MERGE_ERROR when concatenation fails
     */
    @Override
    @Transactional
    public void mergeChunks(String fileHash) {
        // The file record must exist.
        FileInfo fileInfo = BriupAssert.requireNotNull(
                fileInfoService,
                FileInfo::getFileHash,
                fileHash,
                ResultCodeEnum.DATA_NOT_EXIST);
        // Fetch every chunk in ascending index order so concatenation
        // reproduces the original byte sequence.
        LambdaQueryWrapper<FileChunk> lqw = new LambdaQueryWrapper<>();
        lqw.eq(FileChunk::getFileHash, fileHash)
                .orderByAsc(FileChunk::getChunkIndex);
        List<FileChunk> list = fileChunkService.list(lqw);
        // Concatenate the chunks into the final file.
        String filePath = fileInfo.getFilePath();
        byte[] buffer = new byte[2 * 1024 * 1024]; // reused across all chunks
        try (BufferedOutputStream bos = new BufferedOutputStream(
                new FileOutputStream(filePath))) {
            for (FileChunk fileChunk : list) {
                try (BufferedInputStream bis = new BufferedInputStream(
                        new FileInputStream(fileChunk.getChunkPath()))) {
                    int len;
                    while ((len = bis.read(buffer)) != -1) {
                        bos.write(buffer, 0, len);
                    }
                }
            }
            bos.flush();
        } catch (IOException e) {
            // BUGFIX: the failure used to be swallowed (printStackTrace with the
            // throw commented out), so a failed merge still marked the file
            // UPLOADED and deleted every chunk — losing the upload. Rethrow so
            // the transaction rolls back and the client can retry.
            throw new CustomException(ResultCodeEnum.FILE_MERGE_ERROR);
        }

        // Mark the file as fully uploaded.
        fileInfo.setUploadStatus(UploadStatusEnum.UPLOADED.getStatus());
        fileInfoService.updateById(fileInfo);
        // Remove the chunk rows and the on-disk chunk directory.
        fileChunkService.remove(lqw);
        FileUtil.del(createFileChunkDirectory(fileHash));
    }

    /**
     * Unzips an uploaded archive into the dataset's web directory and registers
     * one Classify row per top-level folder plus Entity rows for the files
     * inside each folder. Finally marks the dataset DONE.
     *
     * @param datasetId id of the dataset being populated; must exist and not be DONE
     * @param fileHash  hash of the fully uploaded zip archive
     */
    @Override
    @Transactional
    @CacheEvict(key = "T(com.briup.pai.common.constant.CommonConstant).DETAIL_CACHE_PREFIX+':'+#datasetId")
    public void unzipDataset(Integer datasetId, String fileHash) {
        // The dataset must exist.
        Dataset dataset = BriupAssert.requireNotNull(
                datasetService,
                Dataset::getId,
                datasetId,
                ResultCodeEnum.DATA_NOT_EXIST
        );
        // The dataset must not already be DONE.
        BriupAssert.requireNotEqual(
                dataset.getDatasetStatus(),
                DatasetStatusEnum.DONE.getStatus(),
                ResultCodeEnum.DATASET_STATUS_ERROR);
        // The archive's file record must exist...
        FileInfo fileInfo = BriupAssert.requireNotNull(
                fileInfoService,
                FileInfo::getFileHash,
                fileHash,
                ResultCodeEnum.DATA_NOT_EXIST
        );
        // ...and its upload must be complete.
        BriupAssert.requireEqual(
                fileInfo.getUploadStatus(),
                UploadStatusEnum.UPLOADED.getStatus(),
                ResultCodeEnum.FILE_IS_NOT_UPLOADED
        );
        // Extract the archive into the per-dataset web directory.
        String sourceFilePath = fileInfo.getFilePath();
        String targetFilePath = nginxFilePath + "/" + datasetId;
        FileUtil.mkdir(targetFilePath);
        // BUGFIX: decode entry names as GBK, matching unzipClassify — the
        // original comment demanded GBK ("otherwise Windows breaks") but the
        // charset argument was missing.
        ZipUtil.unzip(sourceFilePath, targetFilePath, CharsetUtil.CHARSET_GBK);
        // Each top-level directory of the archive is one classification.
        File[] classifyDirs = new File(targetFilePath).listFiles(File::isDirectory);
        if (ObjectUtil.isNotNull(classifyDirs)) {
            for (File classifyDir : classifyDirs) {
                // Register the classification.
                Classify classify = new Classify();
                classify.setDatasetId(datasetId);
                classify.setClassifyName(classifyDir.getName());
                classifyService.save(classify);
                // Every regular file inside the folder becomes one entity.
                File[] entityFiles = classifyDir.listFiles(File::isFile);
                if (ObjectUtil.isNotNull(entityFiles)) {
                    // Call through the AOP proxy so saveEntityList's
                    // @Transactional takes effect (self-invocation would skip it).
                    UploadServiceImpl uploadService = (UploadServiceImpl) AopContext.currentProxy();
                    uploadService.saveEntityList(entityFiles, classify);
                }
            }
        }
        // The dataset now holds its data: mark it DONE.
        dataset.setDatasetStatus(DatasetStatusEnum.DONE.getStatus());
        datasetService.updateById(dataset);
    }

    /**
     * Adds the contents of an uploaded zip archive to an existing
     * classification: extracts the archive into the classification's directory
     * and registers an Entity row for every file that was not already present.
     *
     * @param datasetId  dataset the classification must belong to
     * @param classifyId id of the target classification; must exist
     * @param fileHash   hash of the fully uploaded zip archive
     */
    @Override
    @Transactional
    @CacheEvict(key = "T(com.briup.pai.common.constant.CommonConstant).DETAIL_CACHE_PREFIX+':'+#datasetId")
    public void unzipClassify(Integer datasetId, Integer classifyId, String fileHash) {
        // The classification must exist.
        Classify classify = BriupAssert.requireNotNull(
                classifyService,
                Classify::getId,
                classifyId,
                ResultCodeEnum.DATA_NOT_EXIST);
        // The dataset owning the classification must exist...
        Dataset dataset = BriupAssert.requireNotNull(
                datasetService,
                Dataset::getId,
                classify.getDatasetId(),
                ResultCodeEnum.DATA_NOT_EXIST);
        // ...and must match the dataset id supplied by the caller.
        BriupAssert.requireEqual(
                dataset.getId(),
                datasetId,
                ResultCodeEnum.PARAM_IS_ERROR);
        // The archive's file record must exist...
        FileInfo fileInfo = BriupAssert.requireNotNull(
                fileInfoService,
                FileInfo::getFileHash,
                fileHash,
                ResultCodeEnum.DATA_NOT_EXIST
        );
        // ...and its upload must be complete.
        BriupAssert.requireEqual(
                fileInfo.getUploadStatus(),
                UploadStatusEnum.UPLOADED.getStatus(),
                ResultCodeEnum.FILE_IS_NOT_UPLOADED);

        // Snapshot the file names already present so only newly extracted
        // files are registered below (saves re-saving existing entities).
        String sourceFilePath = fileInfo.getFilePath();
        String targetFilePath = nginxFilePath + "/" + datasetId + "/" + classify.getClassifyName();
        File targetDir = new File(targetFilePath);
        File[] existingFiles = targetDir.listFiles(File::isFile);
        List<String> oldEntityNames = ObjectUtil.isNotNull(existingFiles)
                ? Arrays.stream(existingFiles).map(File::getName).toList()
                : Collections.emptyList();
        // GBK entry names: archives built on Chinese Windows fail otherwise.
        ZipUtil.unzip(sourceFilePath, targetFilePath, CharsetUtil.CHARSET_GBK);
        // BUGFIX: the old code wrapped listFiles() in Objects.requireNonNull,
        // which threw NPE before the isNotNull guard could ever run, making the
        // guard dead code. Handle a null result (directory missing or
        // unreadable) gracefully instead.
        File[] newEntityFiles = targetDir.listFiles(
                f -> f.isFile() && !oldEntityNames.contains(f.getName()));
        if (ObjectUtil.isNotNull(newEntityFiles)) {
            // Call through the AOP proxy so saveEntityList's @Transactional applies.
            ((UploadServiceImpl) AopContext.currentProxy()).saveEntityList(newEntityFiles, classify);
        }
    }

    // ==== Internal helper methods (public so the AOP transaction proxy can apply) ====

    /**
     * Ensures the chunk directory for the given file hash exists and returns
     * its path: {@code <nginxFilePath>/<chunkDirectoryName>/<fileHash>}.
     *
     * <p>Note: {@code @Transactional} was removed — the method performs no
     * database work (filesystem mkdir only, which a transaction cannot roll
     * back), and it is only ever self-invoked, so the proxy-based annotation
     * never took effect anyway.
     *
     * @param fileHash hash of the file whose chunks are stored here
     * @return absolute path of the (now existing) chunk directory
     */
    public String createFileChunkDirectory(String fileHash) {
        String chunkPath = nginxFilePath + "/" + chunkDirectoryName + "/" + fileHash;
        FileUtil.mkdir(chunkPath);
        return chunkPath;
    }

    /**
     * Batch-persists one Entity row per file, renaming each file on disk to a
     * UUID-based name (to avoid collisions) unless the name contains "error",
     * in which case the name is kept so the file stays identifiable.
     * Must be invoked through the AOP proxy for {@code @Transactional} to apply.
     *
     * @param entityFiles regular files extracted from an archive
     * @param classify    classification the entities belong to
     */
    @Transactional
    public void saveEntityList(File[] entityFiles, Classify classify){
        // Map each file to an Entity po, renaming it on disk along the way.
        List<Entity> entityList = Arrays.stream(entityFiles).map(
                entityFile -> {
                    String fileName = entityFile.getName();
                    String newFileName;
                    if (fileName.contains("error")) {
                        // Error files keep their original, recognizable name.
                        newFileName = fileName;
                    } else {
                        // BUGFIX: tolerate names with no extension —
                        // substring(lastIndexOf(".")) used to throw
                        // StringIndexOutOfBoundsException for them.
                        int dot = fileName.lastIndexOf('.');
                        String extension = dot >= 0 ? fileName.substring(dot) : "";
                        newFileName = UUID.randomUUID() + extension;
                    }
                    // Rename on disk, overwriting any existing target.
                    // (Removed leftover System.out.println debug output.)
                    FileUtil.rename(entityFile, newFileName, true);
                    Entity entity = new Entity();
                    entity.setEntityUrl(newFileName);
                    entity.setClassifyId(classify.getId());
                    return entity;
                }
        ).toList();
        entityService.saveBatch(entityList);
    }
}