package cn.iocoder.yudao.module.infra.service.deduplication.impl;

import cn.hutool.core.util.IdUtil;
import cn.iocoder.yudao.framework.common.exception.ServiceException;
import cn.iocoder.yudao.module.infra.api.deduplication.dto.*;
import cn.iocoder.yudao.module.infra.controller.admin.file.vo.file.FileCreateReqVO;
import cn.iocoder.yudao.module.infra.dal.dataobject.deduplication.FileDerivativeDO;
import cn.iocoder.yudao.module.infra.dal.dataobject.deduplication.FileIndexDO;
import cn.iocoder.yudao.module.infra.dal.mysql.deduplication.FileDerivativeMapper;
import cn.iocoder.yudao.module.infra.dal.mysql.deduplication.FileIndexMapper;
import cn.iocoder.yudao.module.infra.framework.file.core.client.FileClient;
import cn.iocoder.yudao.module.infra.service.deduplication.DeduplicationFileService;
import cn.iocoder.yudao.module.infra.service.file.FileConfigService;
import cn.iocoder.yudao.module.infra.service.file.FileService;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import jakarta.annotation.Resource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Deduplication file service implementation (simplified version).
 *
 * Derivative-version placeholder records are created immediately; the actual
 * derivative files are produced later by a scheduled background job.
 *
 * @author 芋道源码
 */
@Service
@Slf4j
public class DeduplicationFileServiceImpl implements DeduplicationFileService {

    @Resource
    private FileIndexMapper fileIndexMapper;

    @Resource
    private FileDerivativeMapper fileDerivativeMapper;

    @Resource
    private FileService fileService;

    @Resource
    private FileConfigService fileConfigService;

    @Override
    public FileHashCheckRespDTO checkFileHash(FileHashCheckReqDTO reqDTO) {
        log.info("[checkFileHash][前端校验文件哈希] sha256Hash: {}, fileSize: {}", reqDTO.getSha256Hash(), reqDTO.getFileSize());

        // Look up an existing file with the same hash AND size (size narrows hash collisions)
        FileIndexDO existingFile = fileIndexMapper.selectByHashAndSize(reqDTO.getSha256Hash(), reqDTO.getFileSize());

        FileHashCheckRespDTO result = new FileHashCheckRespDTO();
        result.setIsDuplicate(existingFile != null);
        result.setCanSkipUpload(existingFile != null);

        if (existingFile != null) {
            // Build the duplicate-file payload so the frontend can reuse the stored copy
            FileHashCheckRespDTO.DuplicateFileInfo duplicateFileInfo = new FileHashCheckRespDTO.DuplicateFileInfo();
            duplicateFileInfo.setFileId(existingFile.getId());
            duplicateFileInfo.setFileName(existingFile.getName());
            duplicateFileInfo.setFileSize(existingFile.getSize());
            duplicateFileInfo.setContentType(existingFile.getType());
            duplicateFileInfo.setFileUrl(existingFile.getUrl());
            duplicateFileInfo.setCreateTime(existingFile.getCreateTime());
            duplicateFileInfo.setReferenceCount(existingFile.getReferenceCount());

            result.setDuplicateFileId(existingFile.getId());
            result.setDuplicateFileInfo(duplicateFileInfo);
            result.setSuggestedAction(FileHashCheckRespDTO.SuggestedAction.USE_EXISTING);

            // Attach derivative versions that finished processing successfully
            List<FileDerivativeDO> derivatives = fileDerivativeMapper.selectByOriginalFileId(existingFile.getId());
            List<FileHashCheckRespDTO.DerivativeVersionInfo> availableVersions = derivatives.stream()
                    .filter(d -> FileDerivativeDO.Status.SUCCESS.equals(d.getStatus()))
                    .map(this::convertToDerivativeVersionInfo)
                    .collect(Collectors.toList());
            result.setAvailableVersions(availableVersions);
        } else {
            result.setSuggestedAction(FileHashCheckRespDTO.SuggestedAction.UPLOAD_NEW);
        }

        return result;
    }

    @Override
    @Transactional
    @SneakyThrows // fileClient.upload may throw a checked exception
    public DeduplicationFileUploadRespDTO uploadFile(DeduplicationFileUploadReqDTO reqDTO) {
        log.info("[uploadFile][开始去重上传] name: {}, size: {}", reqDTO.getName(), reqDTO.getEffectiveContentLength());

        // 1. Compute the SHA-256 hash. NOTE(review): this consumes the effective input
        // stream; the physical upload below reads from the temp file instead.
        String sha256Hash = calculateSHA256Hash(reqDTO.getEffectiveInputStream());

        // 2. Check whether an identical file already exists
        FileIndexDO existingFile = fileIndexMapper.selectByHashAndSize(sha256Hash, reqDTO.getEffectiveContentLength());

        if (existingFile != null) {
            // Duplicate: skip the physical upload and just bump the reference count
            fileIndexMapper.incrementReferenceCount(existingFile.getId());
            log.info("[uploadFile][文件去重成功] fileId: {}, referenceCount: {}", existingFile.getId(), existingFile.getReferenceCount() + 1);

            // Create derivative-version placeholder records immediately
            if (reqDTO.getProcessingOptions() != null && needsProcessing(reqDTO.getProcessingOptions())) {
                createDerivativeFiles(existingFile.getId(), reqDTO.getProcessingOptions());
            }

            return buildUploadResponse(existingFile, true, null);
        } else {
            // New file: perform the actual upload through the configured storage client
            byte[] content = cn.hutool.core.io.FileUtil.readBytes(reqDTO.getTempFile());
            Long configId = reqDTO.getConfigId();
            FileClient fileClient = fileConfigService.getFileClient(configId);
            String url = fileClient.upload(content, reqDTO.getPath(), reqDTO.getType());

            // Persist the file-index row with an initial reference count of 1
            FileIndexDO fileIndex = FileIndexDO.builder()
                    .configId(configId)
                    .name(reqDTO.getName())
                    .path(reqDTO.getPath())
                    .url(url)
                    .type(reqDTO.getType())
                    .size(reqDTO.getEffectiveContentLength())
                    .sha256Hash(sha256Hash)
                    .referenceCount(1)
                    .build();

            fileIndexMapper.insert(fileIndex);
            log.info("[uploadFile][新文件上传成功] fileId: {}, url: {}", fileIndex.getId(), url);

            // Create derivative-version placeholder records immediately
            if (reqDTO.getProcessingOptions() != null && needsProcessing(reqDTO.getProcessingOptions())) {
                createDerivativeFiles(fileIndex.getId(), reqDTO.getProcessingOptions());
            }

            return buildUploadResponse(fileIndex, false, null);
        }
    }

    @Override
    public DeduplicationFileRespDTO getFileInfo(Long fileId) {
        FileIndexDO fileIndex = fileIndexMapper.selectById(fileId);
        if (fileIndex == null) {
            throw new ServiceException(404, "文件不存在");
        }

        return convertToFileRespDTO(fileIndex);
    }

    @Override
    public Map<Long, DeduplicationFileRespDTO> getFileInfoBatch(List<Long> fileIds) {
        // Guard: MyBatis-Plus selectBatchIds rejects an empty/null collection
        if (fileIds == null || fileIds.isEmpty()) {
            return new HashMap<>();
        }
        List<FileIndexDO> fileIndexes = fileIndexMapper.selectBatchIds(fileIds);
        return fileIndexes.stream()
                .collect(Collectors.toMap(
                        FileIndexDO::getId,
                        this::convertToFileRespDTO,
                        // ids are primary keys so duplicates should not occur;
                        // the merge function is defensive against a repeated input id
                        (first, second) -> first
                ));
    }

    @Override
    public String batchProcessFiles(List<Long> fileIds, List<String> processingTypes, Map<String, Object> processingParams) {
        String taskId = IdUtil.simpleUUID();

        // Create a derivative placeholder per (file, processing type) combination.
        // NOTE(review): processingParams is currently ignored — confirm whether the
        // background processor reads it from somewhere else or it should be persisted here.
        for (Long fileId : fileIds) {
            for (String processingType : processingTypes) {
                createDerivativeFileByType(fileId, processingType);
            }
        }

        log.info("[batchProcessFiles][批量处理任务创建成功] taskId: {}, fileCount: {}", taskId, fileIds.size());

        return taskId;
    }

    @Override
    public Map<String, Object> getProcessingTaskStatus(String taskId) {
        // Simplified stub: returns a fixed in-progress status. A real implementation
        // would aggregate the derivative rows' statuses for this task.
        Map<String, Object> result = new HashMap<>();
        result.put("taskId", taskId);
        result.put("status", "PROCESSING");
        result.put("progress", 50);
        result.put("message", "正在处理中...");

        return result;
    }

    @Override
    public List<Map<String, Object>> getFileDerivatives(Long fileId) {
        List<FileDerivativeDO> derivatives = fileDerivativeMapper.selectByOriginalFileId(fileId);
        return derivatives.stream()
                .map(this::convertToDerivativeMap)
                .collect(Collectors.toList());
    }

    @Override
    public DeduplicationStatsRespDTO getDeduplicationStats() {
        Long totalFiles = fileIndexMapper.selectTotalFileCount();
        Long totalSize = fileIndexMapper.selectTotalStorageSize();
        Long deduplicatedFiles = fileIndexMapper.selectDeduplicatedFileCount();
        Long savedSize = fileIndexMapper.selectSavedStorageSize();

        DeduplicationStatsRespDTO result = new DeduplicationStatsRespDTO();
        result.setTotalFileCount(totalFiles);
        result.setTotalStorageSize(totalSize);
        result.setDeduplicatedFileCount(deduplicatedFiles);
        result.setSavedStorageSize(savedSize);
        return result;
    }

    @Override
    @Transactional
    public Boolean deleteFile(Long fileId) {
        FileIndexDO fileIndex = fileIndexMapper.selectById(fileId);
        if (fileIndex == null) {
            return false;
        }

        // Decrement the reference count; the physical file is only cleaned up once
        // the count reaches zero (handled asynchronously, see cleanupZeroRefFiles).
        int affected = fileIndexMapper.decrementReferenceCount(fileId);
        if (affected > 0) {
            // NOTE(review): re-reading the row here is a check-then-act window — a
            // concurrent upload could bump the count between decrement and select.
            // Harmless today because cleanup is deferred, but verify before making
            // this branch delete anything eagerly.
            FileIndexDO updated = fileIndexMapper.selectById(fileId);
            if (updated.getReferenceCount() <= 0) {
                log.info("[deleteFile][文件引用计数为0] fileId: {}, 可以清理", fileId);
                // TODO: asynchronously clean up the physical file
            }
        }

        return true;
    }

    @Override
    public Integer cleanupZeroRefFiles() {
        // Process at most 100 zero-reference files per invocation to bound run time
        List<FileIndexDO> zeroRefFiles = fileIndexMapper.selectZeroRefCountFiles(100);
        int cleanedCount = 0;

        for (FileIndexDO fileIndex : zeroRefFiles) {
            try {
                // Delete the physical file
                fileService.deleteFile(fileIndex.getId());

                // Delete derivative-version relationship rows
                fileDerivativeMapper.deleteByOriginalFileId(fileIndex.getId());

                // Delete the file-index row
                fileIndexMapper.deleteById(fileIndex.getId());

                cleanedCount++;
                log.info("[cleanupZeroRefFiles][清理文件成功] fileId: {}", fileIndex.getId());
            } catch (Exception e) {
                // Best-effort: one failure must not abort the rest of the batch
                log.error("[cleanupZeroRefFiles][清理文件失败] fileId: {}", fileIndex.getId(), e);
            }
        }

        return cleanedCount;
    }

    // ========== private helpers ==========

    /**
     * Immediately creates derivative-version placeholder records for every
     * processing option that is enabled.
     */
    private void createDerivativeFiles(Long originalFileId, DeduplicationFileUploadReqDTO.FileProcessingOptions options) {
        if (Boolean.TRUE.equals(options.getEnableCompress())) {
            createDerivativeFileByType(originalFileId, "COMPRESSED");
        }

        if (Boolean.TRUE.equals(options.getEnableWatermark())) {
            createDerivativeFileByType(originalFileId, "WATERMARKED");
        }

        if (Boolean.TRUE.equals(options.getEnableHls())) {
            createDerivativeFileByType(originalFileId, "HLS");
        }
    }

    /**
     * Creates a placeholder derivative record of the given type for a file.
     * No-op when the original file does not exist.
     */
    private void createDerivativeFileByType(Long originalFileId, String derivativeType) {
        FileIndexDO originalFile = fileIndexMapper.selectById(originalFileId);
        if (originalFile == null) {
            return;
        }

        // Provisional URL for the derivative (the actual file does not exist yet).
        // NOTE(review): String.replace rewrites EVERY occurrence of the name inside
        // the URL and is a no-op when the name is absent — confirm URLs always end
        // with exactly one occurrence of the file name.
        String derivativeUrl = originalFile.getUrl().replace(originalFile.getName(),
                derivativeType.toLowerCase() + "_" + originalFile.getName());

        // Placeholder index row for the derivative file
        FileIndexDO derivativeFile = FileIndexDO.builder()
                .configId(originalFile.getConfigId())
                .name(derivativeType.toLowerCase() + "_" + originalFile.getName())
                .path(originalFile.getPath())
                .url(derivativeUrl)
                .type(originalFile.getType())
                .size(0L) // placeholder — updated once processing finishes
                .sha256Hash("") // placeholder — updated once processing finishes
                .referenceCount(1)
                .build();

        fileIndexMapper.insert(derivativeFile);

        // Relationship row linking original -> derivative, in PENDING state
        FileDerivativeDO derivative = FileDerivativeDO.builder()
                .originalFileId(originalFileId)
                .derivativeFileId(derivativeFile.getId())
                .derivativeType(derivativeType)
                .status(FileDerivativeDO.Status.PENDING)
                .build();

        fileDerivativeMapper.insert(derivative);

        log.info("[createDerivativeFileByType][创建衍生版本占位记录] originalFileId: {}, derivativeType: {}, derivativeFileId: {}",
                originalFileId, derivativeType, derivativeFile.getId());
    }

    /**
     * Returns whether any processing option (compress / watermark / HLS) is enabled.
     * Null option values are treated as disabled.
     */
    private boolean needsProcessing(DeduplicationFileUploadReqDTO.FileProcessingOptions options) {
        return Boolean.TRUE.equals(options.getEnableCompress())
                || Boolean.TRUE.equals(options.getEnableWatermark())
                || Boolean.TRUE.equals(options.getEnableHls());
    }

    /** Builds the upload response DTO from a file-index row. */
    private DeduplicationFileUploadRespDTO buildUploadResponse(FileIndexDO fileIndex, boolean isDuplicate, String processingTaskId) {
        DeduplicationFileUploadRespDTO result = new DeduplicationFileUploadRespDTO();
        result.setFileId(fileIndex.getId());
        result.setUrl(fileIndex.getUrl());
        result.setName(fileIndex.getName());
        result.setSize(fileIndex.getSize());
        result.setType(fileIndex.getType());
        result.setIsDuplicate(isDuplicate);
        result.setOriginalFileId(fileIndex.getId());
        result.setProcessingTaskId(processingTaskId);
        result.setCreateTime(fileIndex.getCreateTime());
        return result;
    }

    /** Converts a derivative row (plus its index row, if present) into the check-response DTO. */
    private FileHashCheckRespDTO.DerivativeVersionInfo convertToDerivativeVersionInfo(FileDerivativeDO derivative) {
        FileIndexDO derivativeFile = fileIndexMapper.selectById(derivative.getDerivativeFileId());
        return FileHashCheckRespDTO.DerivativeVersionInfo.builder()
                .derivativeId(derivative.getId())
                .derivativeType(derivative.getDerivativeType())
                .fileId(derivative.getDerivativeFileId())
                .quality(derivative.getQuality())
                .resolution(derivative.getResolution())
                .fileSize(derivativeFile != null ? derivativeFile.getSize() : null)
                .fileUrl(derivativeFile != null ? derivativeFile.getUrl() : null)
                .status(derivative.getStatus())
                .build();
    }

    /** Converts a file-index row into the file response DTO. */
    private DeduplicationFileRespDTO convertToFileRespDTO(FileIndexDO fileIndex) {
        DeduplicationFileRespDTO result = new DeduplicationFileRespDTO();
        result.setId(fileIndex.getId());
        result.setName(fileIndex.getName());
        result.setPath(fileIndex.getPath());
        result.setUrl(fileIndex.getUrl());
        result.setType(fileIndex.getType());
        result.setSize(fileIndex.getSize());
        result.setRefCount(fileIndex.getReferenceCount());
        result.setCreateTime(fileIndex.getCreateTime());
        return result;
    }

    /** Converts a derivative row into a generic map for the admin listing endpoint. */
    private Map<String, Object> convertToDerivativeMap(FileDerivativeDO derivative) {
        Map<String, Object> map = new HashMap<>();
        map.put("id", derivative.getId());
        map.put("derivativeType", derivative.getDerivativeType());
        map.put("quality", derivative.getQuality());
        map.put("resolution", derivative.getResolution());
        map.put("format", derivative.getFormat());
        map.put("status", derivative.getStatus());
        map.put("createTime", derivative.getCreateTime());
        return map;
    }

    /**
     * Computes the SHA-256 hash of the stream contents (lower-case hex).
     * The caller owns the stream; it is fully consumed but not closed here.
     *
     * @throws ServiceException with code 500 when hashing fails
     */
    private String calculateSHA256Hash(java.io.InputStream inputStream) {
        try {
            return cn.hutool.crypto.digest.DigestUtil.sha256Hex(inputStream);
        } catch (Exception e) {
            log.error("[calculateSHA256Hash][计算文件哈希失败]", e);
            throw new ServiceException(500, "计算文件哈希失败");
        }
    }

}
