/*
 * Copyright (c) 2025 Industrial Software Feature Database
 */
package com.comac.ins.isfd.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.comac.ins.common.core.exception.base.BaseException;
import com.comac.ins.common.core.utils.MapstructUtils;
import com.comac.ins.common.core.utils.StringUtils;
import com.comac.ins.common.json.utils.JsonUtils;
import com.comac.ins.common.redis.utils.RedisUtils;
import com.comac.ins.isfd.constant.IsfdMeshModelConstants;
import com.comac.ins.isfd.constant.IsfdMeshOp2Constants;
import com.comac.ins.isfd.constant.enums.IsfdImportMethodEnum;
import com.comac.ins.isfd.constant.enums.IsfdLargeFileStatusEnum;
import com.comac.ins.isfd.constant.enums.IsfdLargeFileTypeEnum;
import com.comac.ins.isfd.constant.enums.IsfdMeshOp2BoStatusEnum;
import com.comac.ins.isfd.domain.IsfdLargeFile;
import com.comac.ins.isfd.domain.IsfdMeshOp2Collect;
import com.comac.ins.isfd.domain.bo.IsfdLargeFileBo;
import com.comac.ins.isfd.domain.bo.IsfdMeshOp2Bo;
import com.comac.ins.isfd.domain.chunks.ChunksCheckResultVo;
import com.comac.ins.isfd.domain.chunks.ChunksRedisInfoBo;
import com.comac.ins.isfd.domain.chunks.ChunksUploadResultVo;
import com.comac.ins.isfd.domain.vo.IsfdLargeFileVo;
import com.comac.ins.isfd.mapper.IsfdLargeFileMapper;
import com.comac.ins.isfd.mapper.IsfdMeshOp2CollectMapper;
import com.comac.ins.isfd.service.IIsfdDatabaseDataManagementService;
import com.comac.ins.isfd.service.IIsfdLargeFileService;
import com.comac.ins.isfd.service.IIsfdMeshOP2ParseService;
import com.comac.ins.system.service.ISysDictDataService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;

/**
 * 要素数据大文件Service业务层处理
 *
 * @author hxloongs
 * @date 2025-03-25
 */
@RequiredArgsConstructor
@Service
@Slf4j
public class IsfdLargeFileServiceImpl implements IIsfdLargeFileService {

    @Autowired
    private IsfdLargeFileMapper baseMapper;
    @Autowired
    private IIsfdDatabaseDataManagementService isfdDatabaseDataManagementService;
    @Autowired
    private IsfdMeshOp2CollectMapper isfdMeshOp2CollectMapper;

    @Autowired
    private ISysDictDataService iSysDictDataService;

    @Autowired
    private IIsfdMeshOP2ParseService isfdMeshOP2ParseService;

    /** Dedicated executor for asynchronous OP2 structuring tasks. */
    @Autowired
    @Qualifier("op2ParseExecutor")
    private ExecutorService op2ParseExecutor;

    /**
     * Looks up a large-file record by its upload id (primary key).
     *
     * @param uploadId primary key of the large-file record
     * @return the matching VO, or {@code null} if none exists
     */
    @Override
    public IsfdLargeFileVo queryById(String uploadId) {
        return baseMapper.selectVoById(uploadId);
    }

    /**
     * Persists a large-file record, deduplicating by file hash: if a record with
     * the same hash already exists, this is a no-op.
     *
     * @param isfdLargeFileBo the record to save; its upload time is set here
     */
    @Override
    public void save(IsfdLargeFileBo isfdLargeFileBo) {
        IsfdLargeFile isfdLargeFile = this.queryByByFileHash(isfdLargeFileBo.getFileHash());
        if (isfdLargeFile != null) {
            return;
        }
        // Record the upload timestamp (epoch millis).
        long uploadTime = System.currentTimeMillis();
        isfdLargeFileBo.setUploadTime(uploadTime);
        baseMapper.insert(MapstructUtils.convert(isfdLargeFileBo, IsfdLargeFile.class));
    }


    /**
     * Queries large-file records matching the given filter conditions.
     *
     * @param bo filter conditions (any non-null/non-blank field is applied)
     * @return matching records, possibly empty
     */
    @Override
    public List<IsfdLargeFileVo> queryList(IsfdLargeFileBo bo) {
        LambdaQueryWrapper<IsfdLargeFile> lqw = buildQueryWrapper(bo);
        return baseMapper.selectVoList(lqw);
    }

    /**
     * Looks up a large-file record by its content hash.
     *
     * @param fileHash content hash of the file
     * @return the matching entity, or {@code null} if none exists
     */
    @Override
    public IsfdLargeFile queryByByFileHash(String fileHash) {
        return baseMapper.selectOne(new LambdaQueryWrapper<IsfdLargeFile>().eq(IsfdLargeFile::getFileHash, fileHash));
    }


    /**
     * Initializes (or resumes) a chunked upload for a file identified by its hash.
     * <p>
     * If the file is unknown, a new record is created in INCOMPLETE state with a
     * fresh upload id. If it is known but the client now uses a different chunk
     * size, all previously uploaded chunks are invalid: the Redis bookkeeping and
     * the on-disk chunk directory are cleared and the new chunk size is persisted.
     *
     * @param isfdLargeFileBo file metadata (hash, size, chunk size, ...)
     * @return upload id, total chunk count, and the set of already-uploaded chunk indexes
     * @throws BaseException if clearing stale chunk data fails
     */
    @Override
    @Transactional
    public ChunksUploadResultVo chunksUploadInit(IsfdLargeFileBo isfdLargeFileBo) {
        String fileHash = isfdLargeFileBo.getFileHash();
        String redisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_REDIS_PREFIX + fileHash;
        String trunksRedisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_SET_REDIS_PREFIX + fileHash;
        IsfdLargeFile isfdLargeFile = this.queryByByFileHash(fileHash);
        String uploadId;
        if (isfdLargeFile == null) {
            // First time this file is seen: create a new record in INCOMPLETE state.
            uploadId = UUID.randomUUID().toString();
            isfdLargeFileBo.setStatus(IsfdLargeFileStatusEnum.INCOMPLETE.getValue());
            isfdLargeFileBo.setUploadId(uploadId);
            this.save(isfdLargeFileBo);
        } else {
            uploadId = isfdLargeFile.getUploadId();
            // If the chunk size CHANGED, clear the Redis bookkeeping and the chunk
            // directory. (The original condition was inverted — it cleared when the
            // sizes were equal, wiping progress on every normal resume.)
            if (!isfdLargeFile.getChunkSize().equals(isfdLargeFileBo.getChunkSize())) {
                try {
                    RedisUtils.deleteObject(redisKey);
                    // Also drop the uploaded-chunk-index set; otherwise stale indexes
                    // would survive the reset and corrupt the new upload.
                    RedisUtils.deleteObject(trunksRedisKey);
                    // Build the directory path with Paths.get (the original concatenated
                    // System.lineSeparator(), which is not a path separator, so the
                    // directory was never actually deleted).
                    FileUtils.deleteDirectory(Paths.get(IsfdMeshOp2Constants.UPLOAD_CHUNKS_PREFIX, fileHash, IsfdMeshOp2Constants.UPLOAD_CHUNKS_SUFFIX).toFile());
                    isfdLargeFile.setChunkSize(isfdLargeFileBo.getChunkSize());
                    baseMapper.updateById(isfdLargeFile);
                } catch (Exception e) {
                    log.error("分片大小发生变化，清空分片记录失败！", e);
                    throw new BaseException("分片大小发生变化，清空分片记录失败！");
                }
            }
        }

        // Total number of chunks = ceil(fileSize / chunkSize).
        int totalChunks = (int) Math.ceil((double) isfdLargeFileBo.getFileSize() / isfdLargeFileBo.getChunkSize());

        // Initialize the per-file upload-session info in Redis if it does not exist yet.
        ChunksRedisInfoBo chunksRedisInfoBo = RedisUtils.getCacheObject(redisKey);
        if (chunksRedisInfoBo == null) {
            chunksRedisInfoBo = new ChunksRedisInfoBo();
            chunksRedisInfoBo.setChunkSize(isfdLargeFileBo.getChunkSize());
            chunksRedisInfoBo.setTotalChunks(totalChunks);
            RedisUtils.setCacheObject(redisKey, chunksRedisInfoBo);
        }

        ChunksUploadResultVo resultVo = new ChunksUploadResultVo();
        Set<Integer> trunks = RedisUtils.getCacheSet(trunksRedisKey);
        resultVo.setUploadId(uploadId);
        resultVo.setExistChunks(new ArrayList<>(trunks));
        resultVo.setTotalChunks(totalChunks);
        resultVo.setFileHash(fileHash);
        return resultVo;
    }

    /**
     * Receives one chunk of a chunked upload, and when the final chunk arrives,
     * merges all chunks into the target file, updates the record to SUCCESS,
     * inserts the mesh-OP2 result rows, and optionally kicks off asynchronous
     * OP2 structuring.
     *
     * @param file       the chunk payload
     * @param uploadId   upload session id returned by {@link #chunksUploadInit}
     * @param fileHash   content hash identifying the file
     * @param chunkIndex zero-based index of this chunk
     * @param chunkSize  chunk size negotiated at init time
     * @return current upload state (uploaded chunk indexes, totals, merged file URL when done)
     * @throws BaseException if the session was not initialized or the file record is missing
     * @throws IOException   declared by the interface; chunk-storage IO failures are rethrown as RuntimeException
     */
    @Override
    @Transactional
    public ChunksUploadResultVo chunksUpload(MultipartFile file, String uploadId, String fileHash, Integer chunkIndex, Integer chunkSize) throws IOException {
        ChunksUploadResultVo resultVo = new ChunksUploadResultVo();
        String redisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_REDIS_PREFIX + fileHash;
        ChunksRedisInfoBo chunksRedisInfoBo = RedisUtils.getCacheObject(redisKey);
        if (chunksRedisInfoBo == null) {
            throw new BaseException("分片上传接口没有初始化，请初始化后进行上传！");
        }

        // 0. If this chunk index is already recorded, return the current state without re-storing.
        String trunksRedisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_SET_REDIS_PREFIX + fileHash;
        Set<Integer> existChunks = RedisUtils.getCacheSet(trunksRedisKey);
        if (existChunks.contains(chunkIndex)) {
            resultVo.setUploadId(uploadId);
            resultVo.setFileHash(fileHash);
            resultVo.setExistChunks(new ArrayList<>(existChunks));
            resultVo.setChunkIndex(chunkIndex);
            resultVo.setTotalChunks(chunksRedisInfoBo.getTotalChunks());
            return resultVo;
        }

        IsfdLargeFileVo isfdLargeFileVo = this.queryById(uploadId);
        if (Objects.isNull(isfdLargeFileVo)) {
            throw new BaseException("未找到文件，请初始化后进行上传！");
        }
        String fileName = isfdLargeFileVo.getFileName();
        // 1. Store the chunk on disk as <prefix>/<hash>/<suffix>/<index>.<suffix>.
        try {
            Path chunkPath = Paths.get(IsfdMeshOp2Constants.UPLOAD_CHUNKS_PREFIX, fileHash, IsfdMeshOp2Constants.UPLOAD_CHUNKS_SUFFIX, chunkIndex + IsfdMeshOp2Constants.UPLOAD_CHUNKS_SUFFIX_WITH_DOT);
            Files.createDirectories(chunkPath.getParent());
            file.transferTo(chunkPath);
        } catch (IOException e) {
            log.error("文件上传失败！", e);
            throw new RuntimeException("文件上传失败", e);
        }
        // 2. Record the chunk index in Redis.
        try {
            RedisUtils.addCacheSet(trunksRedisKey, chunkIndex);
        } catch (Exception e) {
            log.error("Redis 记录分片索引失败，分片索引: {}", chunkIndex, e);
            throw new RuntimeException("Redis 记录分片索引失败", e);
        }


        // 3. If all chunks have arrived, merge them and persist the result.
        int totalChunks = isfdLargeFileVo.getTotalChunks();
        existChunks = RedisUtils.getCacheSet(trunksRedisKey);
        log.info("trunks {} 第 {} 分片， 总计 {} 分片.", fileHash, existChunks, totalChunks);
        if (totalChunks == existChunks.size()) {
            log.info("trunks合并文件");
            Path target = Paths.get(IsfdMeshOp2Constants.UPLOAD_CHUNKS_PREFIX, fileHash, fileName);
            // Mark the record SUCCESS and store the merged-file location.
            isfdLargeFileVo.setStatus(IsfdLargeFileStatusEnum.SUCCESS.getValue());
            isfdLargeFileVo.setFileUrl(target.toString());
            IsfdLargeFile isfdLargeFile = MapstructUtils.convert(isfdLargeFileVo, IsfdLargeFile.class);
            baseMapper.updateById(isfdLargeFile);

            // Insert the mesh-OP2 result rows for this upload.
            meshOp2InsertByUpload(isfdLargeFile, uploadId);

            try {
                // Merge the chunk files into the final target file.
                Path mergedFile = mergeChunks(target, fileHash, totalChunks);
                // Delete the temporary chunk directory. Built with Paths.get — the
                // original concatenated System.lineSeparator() into the path, which
                // produced a non-existent path and left the chunks on disk.
                FileUtils.deleteDirectory(Paths.get(IsfdMeshOp2Constants.UPLOAD_CHUNKS_PREFIX, fileHash, IsfdMeshOp2Constants.UPLOAD_CHUNKS_SUFFIX).toFile());
                resultVo.setFileUrl(mergedFile.toString());
                RedisUtils.deleteObject(redisKey);
            } catch (IOException e) {
                log.error("文件合并失败！");
                throw new RuntimeException("文件合并失败", e);
            }

            // If the scheduled-job switch is off in the dictionary, run the OP2
            // structuring asynchronously for this upload id instead.
            boolean executeJob = Boolean.parseBoolean(iSysDictDataService.selectDictValueByTypeAndLabel(IsfdMeshModelConstants.MODEL_MESH, IsfdMeshModelConstants.MESH_OP2_STRUCTURE_JOB_SWITCH));
            if (executeJob) {
                try {
                    CompletableFuture.runAsync(() -> {
                        log.info("执行异步OP2文件结构化任务");
                        isfdMeshOP2ParseService.executeParse(Collections.singletonList(uploadId));
                    }, op2ParseExecutor);
                } catch (Exception e) {
                    log.error("异步调度任务失败：", e);
                }
            }
        }
        resultVo.setUploadId(uploadId);
        resultVo.setExistChunks(new ArrayList<>(existChunks));
        resultVo.setFileHash(fileHash);
        resultVo.setTotalChunks(totalChunks);
        resultVo.setChunkIndex(chunkIndex);

        return resultVo;

    }


    /**
     * Inserts mesh-OP2 result rows for every collect record associated with the
     * given upload id, but only when the large file is of type MESH_OP2.
     * Delegates the per-record work to {@link #meshOp2Insert} (the two methods
     * previously duplicated the same body line-for-line).
     *
     * @param isfdLargeFile the completed large-file record
     * @param uploadId      upload session id used to find the collect records
     */
    @Override
    public void meshOp2InsertByUpload(IsfdLargeFile isfdLargeFile, String uploadId) {
        if (IsfdLargeFileTypeEnum.MESH_OP2.getValue().equals(isfdLargeFile.getType())) {
            List<IsfdMeshOp2Collect> isfdMeshOp2CollectList = isfdMeshOp2CollectMapper.selectList(new LambdaQueryWrapper<IsfdMeshOp2Collect>()
                .eq(IsfdMeshOp2Collect::getUploadId, uploadId));
            if (CollectionUtils.isNotEmpty(isfdMeshOp2CollectList)) {
                for (IsfdMeshOp2Collect isfdMeshOp2Collect : isfdMeshOp2CollectList) {
                    meshOp2Insert(isfdMeshOp2Collect, isfdLargeFile);
                }
            }
        }
    }

    /**
     * Builds one mesh-OP2 row from a collect record plus the large-file metadata
     * and inserts it (in INITIAL state) into the collect record's target table.
     *
     * @param isfdMeshOp2Collect the collect record supplying the base properties and target table
     * @param isfdLargeFile      the large-file record supplying file metadata (hash, URL, name, size, ...)
     */
    @Override
    public void meshOp2Insert(IsfdMeshOp2Collect isfdMeshOp2Collect, IsfdLargeFile isfdLargeFile) {
        List<IsfdMeshOp2Bo> isfdMeshOp2BoList = new ArrayList<>();
        IsfdMeshOp2Bo isfdMeshOp2Bo = new IsfdMeshOp2Bo();
        BeanUtils.copyProperties(isfdMeshOp2Collect, isfdMeshOp2Bo);
        String sectionNumbers = isfdMeshOp2Collect.getSectionNumbers();
        isfdMeshOp2Bo.setSectionNumbers(sectionNumbers);
        isfdMeshOp2Bo.setFileHash(isfdLargeFile.getFileHash());
        isfdMeshOp2Bo.setFileUrl(isfdLargeFile.getFileUrl());
        isfdMeshOp2Bo.setFileName(isfdLargeFile.getFileName());
        isfdMeshOp2Bo.setFileSuffix(isfdLargeFile.getFileSuffix());
        isfdMeshOp2Bo.setFullFileName(isfdLargeFile.getFullFileName());
        isfdMeshOp2Bo.setStatus(IsfdMeshOp2BoStatusEnum.INITIAL.getValue());
        isfdMeshOp2Bo.setFileSize(isfdLargeFile.getFileSize() + "");
        isfdMeshOp2BoList.add(isfdMeshOp2Bo);
        List<Map<String, Object>> entityListJson = JsonUtils.parseJsonStrToListMap(JsonUtils.toJsonString(isfdMeshOp2BoList));
        isfdDatabaseDataManagementService.insertOrUpdateDataList(isfdMeshOp2Collect.getTableName(), entityListJson, IsfdImportMethodEnum.DIRECTLY, null, false);
    }

    /**
     * Concatenates chunk files 0..totalChunks-1 into the target file, in index order.
     *
     * @param target      path of the merged output file
     * @param fileHash    hash identifying the chunk directory
     * @param totalChunks number of chunks to merge
     * @return the target path
     * @throws BaseException if any chunk is missing or an IO error occurs
     */
    private Path mergeChunks(Path target, String fileHash, Integer totalChunks) {
        try {
            try (OutputStream os = Files.newOutputStream(target)) {
                for (int i = 0; i < totalChunks; i++) {
                    Path chunk = Paths.get(IsfdMeshOp2Constants.UPLOAD_CHUNKS_PREFIX, fileHash, IsfdMeshOp2Constants.UPLOAD_CHUNKS_SUFFIX, i + IsfdMeshOp2Constants.UPLOAD_CHUNKS_SUFFIX_WITH_DOT);
                    Files.copy(chunk, os);
                }
                os.flush();
            }
        } catch (Exception e) {
            log.error("分片和成失败", e);
            throw new BaseException("合并分片失败！");
        }
        return target;
    }

    /**
     * Reports the upload progress (percentage of chunks received) for a file.
     *
     * @param uploadId upload session id (echoed back in the result)
     * @param fileHash content hash identifying the upload session
     * @return progress info: total chunks, uploaded chunk indexes, percentage
     * @throws BaseException if no upload session exists for this hash
     */
    @Override
    public ChunksCheckResultVo chunksUploadCheck(String uploadId, String fileHash) {
        ChunksCheckResultVo resultVo = new ChunksCheckResultVo();
        String redisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_REDIS_PREFIX + fileHash;
        ChunksRedisInfoBo chunksRedisInfoBo = RedisUtils.getCacheObject(redisKey);
        // The original tested the KEY for blankness (always false, since it is a
        // constant prefix plus the hash) and then dereferenced the cached object
        // unconditionally, throwing NPE when no session exists. Check the object.
        if (chunksRedisInfoBo == null) {
            throw new BaseException("该状态不能查询进度！");
        }

        String trunksRedisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_SET_REDIS_PREFIX + fileHash;
        Set<Integer> existChunks = RedisUtils.getCacheSet(trunksRedisKey);
        int totalChunks = chunksRedisInfoBo.getTotalChunks();
        double progress = (double) existChunks.size() / totalChunks * 100;
        resultVo.setUploadId(uploadId);
        resultVo.setTotalChunks(totalChunks);
        resultVo.setExistChunks(new ArrayList<>(existChunks));
        resultVo.setFileHash(fileHash);
        resultVo.setProgress(progress);
        return resultVo;
    }

    /**
     * Re-upload entry point — not yet implemented.
     * <p>
     * TODO: if the file hash matches an existing record, return its chunk state;
     * if the hash differs, check for associations and delete the original file.
     *
     * @param isfdLargeFileBo metadata of the file being re-uploaded
     * @return currently always {@code null}
     */
    @Override
    public ChunksUploadResultVo reUpload(IsfdLargeFileBo isfdLargeFileBo) {
        IsfdLargeFile isfdLargeFile = queryByByFileHash(isfdLargeFileBo.getFileHash());
        if (Objects.nonNull(isfdLargeFile)) {
            // TODO: file exists — decide whether it is the same file by hash,
            // and if not, inspect its status and update the record.
        } else {
            // TODO: file does not exist.
        }

        return null;
    }

    /**
     * Deletes a large file by hash: removes the database record, the Redis
     * session/chunk-index keys, and the on-disk chunk directory. Directory
     * deletion failures are logged but deliberately not rethrown (best effort).
     *
     * @param fileHash content hash of the file to delete
     * @return always {@code null} (interface-mandated {@link Void} return)
     */
    @Override
    public Void deleteLargeFile(String fileHash) {
        IsfdLargeFile isfdLargeFile = queryByByFileHash(fileHash);
        if (Objects.nonNull(isfdLargeFile)) {
            // Remove the database record.
            baseMapper.delete(new LambdaQueryWrapper<IsfdLargeFile>().eq(IsfdLargeFile::getFileHash, fileHash));
            // Remove the Redis session info and the uploaded-chunk-index set.
            String redisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_REDIS_PREFIX + fileHash;
            String trunksRedisKey = IsfdMeshOp2Constants.UPLOAD_CHUNKS_SET_REDIS_PREFIX + fileHash;
            RedisUtils.deleteObject(redisKey);
            RedisUtils.deleteObject(trunksRedisKey);
            // Remove the chunk directory tree; failures are logged but non-fatal.
            try {
                Path path = Paths.get(IsfdMeshOp2Constants.UPLOAD_CHUNKS_PREFIX, fileHash);
                deleteDirectory(path);
            } catch (IOException e) {
                log.error("文件夹删除失败！", e);
            }
        }
        return null;
    }

    /**
     * Recursively deletes a directory tree (files first, then the directories).
     *
     * @param path root of the tree to delete
     * @throws IOException if any file or directory cannot be deleted
     */
    public static void deleteDirectory(Path path) throws IOException {
        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                Files.delete(dir);
                return FileVisitResult.CONTINUE;
            }
        });
    }

    /**
     * Builds a query wrapper from the BO: equality on scalar fields,
     * LIKE on the file-name fields; null/blank fields are skipped.
     */
    private LambdaQueryWrapper<IsfdLargeFile> buildQueryWrapper(IsfdLargeFileBo bo) {
        Map<String, Object> params = bo.getParams();
        LambdaQueryWrapper<IsfdLargeFile> lqw = Wrappers.lambdaQuery();
        lqw.eq(bo.getUploadTime() != null, IsfdLargeFile::getUploadTime, bo.getUploadTime());
        lqw.eq(StringUtils.isNotBlank(bo.getType()), IsfdLargeFile::getType, bo.getType());
        lqw.like(StringUtils.isNotBlank(bo.getFullFileName()), IsfdLargeFile::getFullFileName, bo.getFullFileName());
        lqw.like(StringUtils.isNotBlank(bo.getFileName()), IsfdLargeFile::getFileName, bo.getFileName());
        lqw.eq(bo.getFileSize() != null, IsfdLargeFile::getFileSize, bo.getFileSize());
        lqw.eq(StringUtils.isNotBlank(bo.getFileSuffix()), IsfdLargeFile::getFileSuffix, bo.getFileSuffix());
        lqw.eq(StringUtils.isNotBlank(bo.getFileUrl()), IsfdLargeFile::getFileUrl, bo.getFileUrl());
        lqw.eq(StringUtils.isNotBlank(bo.getFileHash()), IsfdLargeFile::getFileHash, bo.getFileHash());
        lqw.eq(bo.getChunkSize() != null, IsfdLargeFile::getChunkSize, bo.getChunkSize());
        return lqw;
    }
}
