package com.cg.service.impl;

import com.cg.constant.RedisConstant;
import com.cg.dto.*;
import com.cg.entity.*;
import com.cg.mapper.*;
import com.cg.service.ResumeAnalysisService;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import dev.langchain4j.community.model.dashscope.QwenChatModel;
import dev.langchain4j.community.model.dashscope.QwenEmbeddingModel;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.service.AiServices;

import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import com.cg.service.AiResumeAnalysisService;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import jakarta.annotation.PostConstruct;

@Slf4j
@Service
public class ResumeAnalysisServiceImpl implements ResumeAnalysisService {

    // DashScope (Qwen) API key for the chat model, injected from application config.
    @Value("${langchain4j.community.dashscope.chat-model.api-key}")
    private String dashscopeApiKey;

    // DashScope chat model name, injected from application config.
    @Value("${langchain4j.community.dashscope.chat-model.model-name}")
    private String dashscopeModelName;

    // Root directory for uploaded resume files; defaults to classpath:pdf.
    @Value("${app.file.upload.path:classpath:pdf}")
    private String uploadPath;

    // MyBatis mappers backing resume-analysis persistence.
    @Autowired private ResumeMapper resumeMapper;
    @Autowired private ResumeParseResultMapper resumeParseResultMapper;
    @Autowired private EvaluationReportMapper evaluationReportMapper;
    @Autowired private ResumeOptimizationMapper resumeOptimizationMapper;
    @Autowired private AiAnalysisTaskMapper aiAnalysisTaskMapper;
    @Autowired private DocumentVectorMapper documentVectorMapper;
    

    // Redis handle used for DTO-level caching of analysis results.
    @Autowired private RedisTemplate<String, Object> redisTemplate;

    // In-memory per-resume vector indexes (read by queryResumeWithRAG).
    private final Map<Long, EmbeddingStore<TextSegment>> vectorStores = new ConcurrentHashMap<>();
    // In-memory task cache checked by getTaskStatus before hitting the DB.
    private final Map<String, AiAnalysisTaskDTO> taskStore = new ConcurrentHashMap<>();
    // NOTE(review): parseStore is never read or written in this chunk — confirm it is still needed.
    private final Map<Long, ResumeParseResultDTO> parseStore = new ConcurrentHashMap<>();

    // Shared Jackson mapper for all JSON (de)serialization in this service.
    @Autowired
    private ObjectMapper objectMapper;

    // AI service proxy, created in initAiService()
    private AiResumeAnalysisService aiService;

    /**
     * Builds the Qwen chat model from the configured credentials and wires it
     * into the {@link AiResumeAnalysisService} proxy. Runs once after
     * dependency injection completes.
     */
    @PostConstruct
    public void initAiService() {
        QwenChatModel model = QwenChatModel.builder()
                .apiKey(dashscopeApiKey)
                .modelName(dashscopeModelName)
                .temperature(0.1F)
                .maxTokens(2000)
                .build();

        aiService = AiServices.create(AiResumeAnalysisService.class, model);
        log.info("AI服务初始化成功");
    }

    /**
     * Saves an uploaded resume file, records it in the database, creates a
     * tracking task and starts asynchronous parsing.
     *
     * @param userId owner of the resume
     * @param file   uploaded file (pdf/doc/docx expected)
     * @return future completing with the id of the created parse task; the
     *         future completes exceptionally (RuntimeException) on failure,
     *         after cleaning up any partially saved file
     */
    @Override
    public CompletableFuture<String> uploadAndParseResume(Long userId, MultipartFile file) {
        return CompletableFuture.supplyAsync(() -> {
            Path saved = null;
            try {
                // 1) Clear cached analysis so the UI does not show stale data.
                clearUserResumeAnalysisCache(userId);
                log.info("已清空用户简历分析缓存: userId={}", userId);

                // 2) Save the file under <resources>/pdf with a timestamped name.
                String originalName = Optional.ofNullable(file.getOriginalFilename()).orElse("resume.pdf");
                String fileName = System.currentTimeMillis() + "_" + originalName;
                String resourcesPath = getResourcesPath();
                saved = Paths.get(resourcesPath, "pdf", fileName);
                Files.createDirectories(saved.getParent());
                Files.copy(file.getInputStream(), saved);

                log.info("文件上传成功: userId={}, fileName={}, filePath={}, fileSize={} bytes", 
                    userId, fileName, saved, Files.size(saved));

                // 3) Persist the resume row first so the task can reference its id.
                Resume resume = new Resume();
                resume.setUserId(userId);
                resume.setTitle(saved.getFileName().toString());
                resume.setOriginalFileName(originalName);
                String lower = originalName.toLowerCase();
                String ext = lower.endsWith(".pdf") ? "pdf" : (lower.endsWith(".docx") ? "docx" : (lower.endsWith(".doc") ? "doc" : ""));
                resume.setFileType(ext);
                resume.setFilePath(saved.toString());
                resume.setFileSize(Files.size(saved));
                // FIX: store a real content hash; File.hashCode() only hashed
                // the path string, not the file contents.
                resume.setFileHash(computeFileSha256(saved));
                resume.setStatus(1);
                resume.setCreateTime(LocalDateTime.now());
                resume.setUpdateTime(LocalDateTime.now());
                resumeMapper.insert(resume);

                // 4) Create the tracking task. FIX: the task id was previously
                // generated twice; the first value was a dead store.
                String taskId = generateShortTaskId();
                AiAnalysisTask task = new AiAnalysisTask();
                task.setId(taskId);
                task.setUserId(userId);
                task.setTaskType("resume_parse");
                task.setResumeId(resume.getId());
                task.setStatus(1);
                task.setProgress(0);
                task.setStartTime(LocalDateTime.now());
                task.setCreateTime(LocalDateTime.now());
                task.setUpdateTime(LocalDateTime.now());
                aiAnalysisTaskMapper.insert(task);

                // 5) Parse asynchronously; the lambda needs an effectively-final copy.
                Path finalSaved = saved;
                CompletableFuture.runAsync(() -> doParse(finalSaved, resume.getId(), taskId, userId));
                return taskId;
            } catch (Exception e) {
                log.error("简历上传失败: userId={}, error={}", userId, e.getMessage(), e);
                // Best-effort cleanup of a partially uploaded file.
                if (saved != null && Files.exists(saved)) {
                    try {
                        Files.delete(saved);
                        log.info("清理失败上传的文件: {}", saved);
                    } catch (Exception cleanupEx) {
                        log.warn("清理失败上传的文件失败: {}, error: {}", saved, cleanupEx.getMessage());
                    }
                }
                throw new RuntimeException("上传失败", e);
            }
        });
    }

    /**
     * Computes the lowercase hex SHA-256 digest of the file contents at
     * {@code path}, streaming to keep memory bounded for large files.
     */
    private static String computeFileSha256(Path path) throws java.io.IOException {
        try {
            java.security.MessageDigest md = java.security.MessageDigest.getInstance("SHA-256");
            try (java.io.InputStream in = Files.newInputStream(path)) {
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) != -1) {
                    md.update(buf, 0, n);
                }
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest()) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        } catch (java.security.NoSuchAlgorithmException e) {
            // Every conforming JDK ships SHA-256; absence is a fatal setup error.
            throw new IllegalStateException("SHA-256 unavailable", e);
        }
    }

    /**
     * Parses a saved resume file: extracts text, calls the AI parser, persists
     * the structured result, vectorizes text segments, refreshes caches and
     * finalizes the tracking task. Deletes the source file when done.
     *
     * @param filePath saved resume file (removed in the finally block)
     * @param resumeId id of the already-persisted resume row
     * @param taskId   id of the tracking task to update
     * @param userId   owner of the resume (used for cache refresh)
     */
    private void doParse(Path filePath, Long resumeId, String taskId, Long userId) {
        try {
            // Load the PDF and join all page texts into one string.
            List<Document> docs = loadPdfDocument(filePath);
            String resumeText = docs.stream()
                .map(Document::text)
                .collect(Collectors.joining("\n"));
            
            log.info("开始解析简历文件: {}, 文件大小: {} bytes, 页数: {}", 
                filePath, Files.size(filePath), docs.size());

            // Ask the AI service for a structured JSON view of the resume.
            log.info("开始调用AI服务解析简历: resumeId={}", resumeId);
            
            String json;
            try {
                json = aiService.parseResume(resumeText);
                log.info("AI服务调用成功: resumeId={}", resumeId);
                log.debug("AI原始响应: {}", json);
            } catch (Exception e) {
                log.error("AI服务调用失败: resumeId={}, error={}", resumeId, e.getMessage(), e);
                throw new RuntimeException("AI解析服务暂时不可用，请稍后重试", e);
            }

            // Strip possible Markdown fences around the JSON payload.
            String extractedJson = extractJsonFromResponse(json);
            log.debug("提取的JSON内容: {}", extractedJson);

            Map<String, Object> parsed = objectMapper.readValue(extractedJson, new TypeReference<Map<String, Object>>(){});

            // Persist each section back as a JSON string column.
            ResumeParseResult entity = new ResumeParseResult();
            entity.setResumeId(resumeId);
            entity.setEducation(objectMapper.writeValueAsString(parsed.getOrDefault("education", List.of())));
            entity.setSkills(objectMapper.writeValueAsString(parsed.getOrDefault("skills", List.of())));
            entity.setAwards(objectMapper.writeValueAsString(parsed.getOrDefault("awards", List.of())));
            entity.setInternships(objectMapper.writeValueAsString(parsed.getOrDefault("internships", List.of())));
            entity.setProjects(objectMapper.writeValueAsString(parsed.getOrDefault("projects", List.of())));
            entity.setWorkExperience(objectMapper.writeValueAsString(parsed.getOrDefault("work_experience", List.of())));
            entity.setParseStatus(1);
            entity.setCreateTime(LocalDateTime.now());
            entity.setUpdateTime(LocalDateTime.now());
            resumeParseResultMapper.insert(entity);

            // Split -> embed -> persist segment vectors.
            var splitter = DocumentSplitters.recursive(300, 30);
            List<TextSegment> segments = new ArrayList<>();
            for (Document d : docs) {
                segments.addAll(splitter.split(d));
            }

            var embeddingModel = QwenEmbeddingModel.builder()
                .apiKey(dashscopeApiKey)
                .modelName("text-embedding-v4")
                .build();

            List<com.cg.entity.DocumentVector> toInsert = new ArrayList<>();
            for (TextSegment seg : segments) {
                try {
                    var emb = embeddingModel.embed(seg.text()).content();
                    List<Float> fl = emb.vectorAsList();
                    List<Double> vec = new ArrayList<>(fl.size());
                    for (Float f : fl) vec.add(f == null ? 0.0 : f.doubleValue());
                    com.cg.entity.DocumentVector dv = new com.cg.entity.DocumentVector();
                    dv.setResumeId(resumeId);
                    // Stable segmentId derived from a hash of the segment content.
                    String norm = com.cg.util.NftHashUtils.canonicalizeJsonText("{\"text\":" + objectMapper.writeValueAsString(seg.text()) + "}", objectMapper);
                    String hash = com.cg.util.NftHashUtils.sha256Hex(norm);
                    String segId = com.cg.util.NftHashUtils.toNftId(hash);
                    dv.setSegmentId(segId);
                    String content = seg.text();
                    if (content != null && content.length() > 500) content = content.substring(0, 500);
                    dv.setContent(content);
                    dv.setEmbeddingVector(objectMapper.writeValueAsString(vec));
                    // Page numbers etc. could be recorded here later.
                    dv.setMetadata(null);
                    dv.setCreateTime(LocalDateTime.now());
                    toInsert.add(dv);
                } catch (Exception segEx) {
                    // FIX: previously swallowed silently. A failed segment is
                    // still skipped (best effort), but now leaves a trace.
                    log.warn("分段向量化失败，已跳过: resumeId={}, error={}", resumeId, segEx.getMessage());
                }
            }
            if (!toInsert.isEmpty()) {
                documentVectorMapper.batchInsert(toInsert);
                log.info("已向量化并落库分段：{} 条", toInsert.size());
            }

            // Refresh Redis so the frontend sees the new result immediately.
            updateResumeAnalysisCache(resumeId, userId);
            log.info("已更新简历分析缓存: resumeId={}, userId={}", resumeId, userId);

            // Mark the task as completed.
            AiAnalysisTask t = aiAnalysisTaskMapper.selectByTaskId(taskId);
            if (t != null) {  // FIX: guard against a missing task row (was NPE)
                t.setStatus(2);
                t.setProgress(100);
                t.setResultData(objectMapper.writeValueAsString(Map.of("resumeId", resumeId)));
                t.setEndTime(LocalDateTime.now());
                aiAnalysisTaskMapper.update(t);
            } else {
                log.warn("任务不存在，无法更新状态: taskId={}", taskId);
            }
            
            log.info("简历解析完成: resumeId={}, taskId={}", resumeId, taskId);
        } catch (Exception e) {
            AiAnalysisTask t = aiAnalysisTaskMapper.selectByTaskId(taskId);
            if (t != null) {  // FIX: guard against a missing task row (was NPE)
                t.setStatus(3);
                t.setErrorMessage(e.getMessage());
                t.setEndTime(LocalDateTime.now());
                aiAnalysisTaskMapper.update(t);
            } else {
                log.warn("任务不存在，无法记录失败: taskId={}", taskId);
            }
            log.error("简历解析失败: resumeId={}, taskId={}, error={}", resumeId, taskId, e.getMessage(), e);
        } finally {
            // The saved file is only needed for this parse; always clean up.
            try {
                if (Files.exists(filePath)) {
                    Files.delete(filePath);
                    log.info("临时文件已清理: {}", filePath);
                }
            } catch (Exception e) {
                log.warn("清理临时文件失败: {}, error: {}", filePath, e.getMessage());
            }
        }
    }

    /**
     * Loads the structured parse result for a resume, preferring the Redis
     * cache; on a miss the entity is read from the database, mapped to a DTO
     * and written back to the cache.
     */
    @Override
    public ResumeParseResultDTO getResumeParseResult(Long resumeId) {
        String key = RedisConstant.RESUME_PARSE_RESULT + resumeId;
        Object hit = redisTemplate.opsForValue().get(key);
        if (hit instanceof ResumeParseResultDTO cachedDto) {
            return cachedDto;
        }

        com.cg.entity.ResumeParseResult entity = resumeParseResultMapper.selectByResumeId(resumeId);
        if (entity == null) {
            return null;
        }

        ResumeParseResultDTO dto = new ResumeParseResultDTO();
        dto.setId(entity.getId());
        dto.setResumeId(entity.getResumeId());

        try {
            dto.setEducation(parseJsonField(entity.getEducation(), "education"));
            dto.setSkills(parseJsonField(entity.getSkills(), "skills"));
            dto.setAwards(parseJsonField(entity.getAwards(), "awards"));
            dto.setInternships(parseJsonField(entity.getInternships(), "internships"));
            dto.setProjects(parseJsonField(entity.getProjects(), "projects"));
            dto.setWorkExperience(parseJsonField(entity.getWorkExperience(), "workExperience"));
        } catch (Exception ex) {
            log.error("解析简历数据字段失败: resumeId={}, error={}", resumeId, ex.getMessage(), ex);
            // Degrade to empty sections rather than failing the whole lookup.
            dto.setEducation(List.of());
            dto.setSkills(List.of());
            dto.setAwards(List.of());
            dto.setInternships(List.of());
            dto.setProjects(List.of());
            dto.setWorkExperience(List.of());
        }

        dto.setParseStatus(entity.getParseStatus());
        dto.setCreateTime(entity.getCreateTime());

        redisTemplate.opsForValue().set(key, dto, java.time.Duration.ofSeconds(RedisConstant.RESUME_CACHE_TTL_SECONDS));
        return dto;
    }

    /**
     * Parses a JSON column into a list of maps. For the "skills" field two
     * fallbacks are attempted when the value is not a JSON array of objects:
     * a JSON array of delimited strings, then a single (optionally quoted)
     * delimited string. Returns an empty list when nothing can be parsed.
     *
     * @param jsonString raw column value (may be null/blank)
     * @param fieldName  logical field name, used for logging and the skills fallback
     */
    private List<Map<String, Object>> parseJsonField(String jsonString, String fieldName) {
        if (jsonString == null || jsonString.trim().isEmpty()) {
            return List.of();
        }
        
        try {
            return objectMapper.readValue(jsonString, new TypeReference<List<Map<String, Object>>>(){});
        } catch (Exception e) {
            log.warn("解析字段 {} 失败: {}, 原始数据: {}", fieldName, e.getMessage(), jsonString);
            if ("skills".equals(fieldName)) {
                // Fallback 1: JSON array of strings, each possibly containing
                // several delimiter-separated skills.
                try {
                    List<String> skillsList = objectMapper.readValue(jsonString, new TypeReference<List<String>>(){});
                    return skillsList.stream()
                        .flatMap(s -> splitSkillText(s).stream())
                        .collect(Collectors.toList());
                } catch (Exception ex2) {
                    log.warn("尝试解析skills为字符串数组也失败: {}", ex2.getMessage());
                }

                // Fallback 2: a single delimited string, possibly quoted.
                try {
                    String skillsText;
                    if (jsonString.startsWith("\"") && jsonString.endsWith("\"")) {
                        // Double-quoted JSON string: let Jackson unescape it.
                        skillsText = objectMapper.readValue(jsonString, String.class);
                    } else if (jsonString.length() >= 2
                        && jsonString.startsWith("'") && jsonString.endsWith("'")) {
                        // FIX: Jackson cannot parse single-quoted strings by
                        // default; strip the quotes manually instead of failing.
                        skillsText = jsonString.substring(1, jsonString.length() - 1);
                    } else {
                        skillsText = jsonString;
                    }
                    List<Map<String, Object>> result = splitSkillText(skillsText);
                    if (!result.isEmpty()) return result;
                } catch (Exception ex3) {
                    log.warn("尝试解析skills为分隔字符串失败: {}", ex3.getMessage());
                }
            }
            return List.of();
        }
    }

    // Shared delimiter set for skill strings: comma / enumeration comma /
    // semicolon (ASCII and full-width) plus any whitespace run. Precompiled
    // once instead of recompiling the regex on every split.
    private static final Pattern SKILL_DELIMITERS = Pattern.compile("[,，、;；\\n\\t\\s]+");

    /**
     * Splits a free-form skill string on the shared delimiters and wraps each
     * non-empty token as a {"skill": token} map.
     */
    private static List<Map<String, Object>> splitSkillText(String text) {
        return Arrays.stream(SKILL_DELIMITERS.split(text))
            .map(String::trim)
            .filter(s -> !s.isEmpty())
            .map(s -> {
                Map<String, Object> m = new HashMap<>();
                m.put("skill", s);
                return m;
            })
            .collect(Collectors.toList());
    }

    /**
     * Evicts the cached parse result for the given resume from Redis.
     *
     * @param resumeId resume whose cached parse result should be removed
     */
    public void clearResumeParseResultCache(Long resumeId) {
        String key = RedisConstant.RESUME_PARSE_RESULT + resumeId;
        redisTemplate.delete(key);
        log.info("已清理简历解析结果缓存: resumeId={}", resumeId);
    }

    /**
     * Creates and persists an "evaluation" analysis task for the resume, then
     * launches the evaluation asynchronously.
     *
     * @return future completing with the new task id
     */
    @Override
    public CompletableFuture<String> generateEvaluationReport(Long userId, Long resumeId, Long targetPositionId) {
        return CompletableFuture.supplyAsync(() -> {
            String taskId = generateShortTaskId();

            AiAnalysisTask task = new AiAnalysisTask();
            task.setId(taskId);
            task.setTaskType("evaluation");
            task.setUserId(userId);
            task.setResumeId(resumeId);
            task.setStatus(1);
            task.setProgress(0);
            task.setStartTime(LocalDateTime.now());
            task.setCreateTime(LocalDateTime.now());
            task.setUpdateTime(LocalDateTime.now());
            aiAnalysisTaskMapper.insert(task);

            CompletableFuture.runAsync(() -> doEvaluation(resumeId, taskId));
            return taskId;
        });
    }

    /**
     * Generates an AI evaluation report for a parsed resume, persists it,
     * refreshes the cache and finalizes the tracking task.
     *
     * @param resumeId resume whose parse result is evaluated
     * @param taskId   tracking task id to update on success/failure
     */
    private void doEvaluation(Long resumeId, String taskId) {
        try {
            ResumeParseResultDTO parse = getResumeParseResult(resumeId);
            if (parse == null) throw new RuntimeException("解析结果不存在");
            
            // Resolve the owning user from the resume row.
            Resume resume = resumeMapper.selectById(resumeId);
            if (resume == null) throw new RuntimeException("简历不存在，简历ID: " + resumeId);
            
            Long userId = resume.getUserId();
            if (userId == null) throw new RuntimeException("简历缺少用户ID信息");
            
            String userSkills = objectMapper.writeValueAsString(parse.getSkills());
            // NOTE(review): requirements are a hard-coded generic prompt;
            // they could be loaded from the target position in the database.
            String jobRequirements = "请根据互联网软件工程师通用要求分析";
            log.info("开始生成测评报告: resumeId={}, userSkills={}, jobRequirements={}", resumeId, userSkills, jobRequirements);
            String json = aiService.generateEvaluation(userSkills, jobRequirements);
            log.info("AI服务返回原始结果: resumeId={}, json={}", resumeId, json);

            // Strip possible Markdown fences around the JSON payload.
            String extractedJson = extractJsonFromResponse(json);
            log.debug("测评提取的JSON内容: {}", extractedJson);

            Map<String, Object> eval;
            try {
                eval = objectMapper.readValue(extractedJson, new TypeReference<Map<String, Object>>(){});
            } catch (Exception e) {
                log.error("JSON解析失败，尝试智能分析: {}", e.getMessage());
                // Fall back to a heuristic evaluation when the AI response is
                // not valid JSON.
                eval = generateSmartEvaluation(parse.getSkills());
            }

            EvaluationReport r = new EvaluationReport();
            r.setUserId(userId);
            r.setResumeId(resumeId);
            r.setOverallScore(eval.get("score") instanceof Number ? ((Number) eval.get("score")).doubleValue() : null);
            r.setGapAnalysis((String) eval.getOrDefault("gap_analysis", ""));
            r.setKeywords(objectMapper.writeValueAsString(eval.getOrDefault("keywords", List.of())));
            // Fields the AI does not produce yet: persist as null defaults.
            r.setStrengths(null);
            r.setWeaknesses(null);
            r.setImprovementSuggestions(null);
            r.setCreateTime(LocalDateTime.now());
            r.setUpdateTime(LocalDateTime.now());
            evaluationReportMapper.insert(r);

            // Refresh the Redis cache with the new report.
            updateEvaluationCache(resumeId, userId, r.getId());
            log.info("已更新测评缓存: resumeId={}, userId={}, reportId={}", resumeId, userId, r.getId());

            AiAnalysisTask t = aiAnalysisTaskMapper.selectByTaskId(taskId);
            if (t != null) {  // FIX: guard against a missing task row (was NPE)
                t.setStatus(2);
                t.setProgress(100);
                t.setResultData(objectMapper.writeValueAsString(Map.of("reportId", r.getId())));
                t.setEndTime(LocalDateTime.now());
                aiAnalysisTaskMapper.update(t);
            } else {
                log.warn("任务不存在，无法更新状态: taskId={}", taskId);
            }
        } catch (Exception e) {
            AiAnalysisTask t = aiAnalysisTaskMapper.selectByTaskId(taskId);
            if (t != null) {  // FIX: guard against a missing task row (was NPE)
                t.setStatus(3);
                t.setErrorMessage(e.getMessage());
                t.setEndTime(LocalDateTime.now());
                aiAnalysisTaskMapper.update(t);
            } else {
                log.warn("任务不存在，无法记录失败: taskId={}", taskId);
            }
            log.error("测评失败", e);
        }
    }

    /**
     * Loads an evaluation report by id, preferring the Redis cache and
     * caching the mapped DTO on a miss.
     */
    @Override
    public EvaluationReportDTO getEvaluationReport(Long reportId) {
        String key = RedisConstant.EVALUATION_REPORT + reportId;
        Object hit = redisTemplate.opsForValue().get(key);
        if (hit instanceof EvaluationReportDTO cachedDto) {
            return cachedDto;
        }

        com.cg.entity.EvaluationReport report = evaluationReportMapper.selectById(reportId);
        if (report == null) {
            return null;
        }

        EvaluationReportDTO dto = new EvaluationReportDTO();
        dto.setId(report.getId());
        dto.setUserId(report.getUserId());
        dto.setResumeId(report.getResumeId());
        dto.setTargetPositionId(report.getTargetPositionId());
        dto.setOverallScore(report.getOverallScore());
        dto.setGapAnalysis(report.getGapAnalysis());

        List<String> keywords;
        try {
            keywords = objectMapper.readValue(report.getKeywords(), new TypeReference<List<String>>(){});
        } catch (Exception e) {
            log.warn("解析keywords字段失败: {}", e.getMessage());
            keywords = List.of();
        }
        dto.setKeywords(keywords);

        // Fields not persisted yet: expose stable defaults.
        dto.setStrengths(List.of());
        dto.setWeaknesses(List.of());
        dto.setImprovementSuggestions("");
        dto.setCreateTime(report.getCreateTime());

        redisTemplate.opsForValue().set(key, dto, Duration.ofSeconds(RedisConstant.RESUME_CACHE_TTL_SECONDS));
        return dto;
    }

    /**
     * Creates and persists an "optimization" analysis task for the resume,
     * then launches suggestion generation asynchronously.
     *
     * @return future completing with the new task id
     */
    @Override
    public CompletableFuture<String> generateOptimizationSuggestions(Long userId, Long resumeId, Long targetPositionId) {
        return CompletableFuture.supplyAsync(() -> {
            String taskId = generateShortTaskId();

            AiAnalysisTask task = new AiAnalysisTask();
            task.setId(taskId);
            task.setTaskType("optimization");
            task.setUserId(userId);
            task.setResumeId(resumeId);
            task.setStatus(1);
            task.setProgress(0);
            task.setStartTime(LocalDateTime.now());
            task.setCreateTime(LocalDateTime.now());
            task.setUpdateTime(LocalDateTime.now());
            aiAnalysisTaskMapper.insert(task);

            CompletableFuture.runAsync(() -> doOptimization(resumeId, taskId));
            return taskId;
        });
    }

    /**
     * Generates AI optimization suggestions for a parsed resume, persists
     * them, refreshes the cache and finalizes the tracking task.
     *
     * @param resumeId resume whose parse result is optimized
     * @param taskId   tracking task id to update on success/failure
     */
    private void doOptimization(Long resumeId, String taskId) {
        try {
            ResumeParseResultDTO parse = getResumeParseResult(resumeId);
            if (parse == null) throw new RuntimeException("解析结果不存在");
            
            // Resolve the owning user from the resume row.
            Resume resume = resumeMapper.selectById(resumeId);
            if (resume == null) throw new RuntimeException("简历不存在，简历ID: " + resumeId);
            
            Long userId = resume.getUserId();
            if (userId == null) throw new RuntimeException("简历缺少用户ID信息");
            
            String json = aiService.generateOptimization(objectMapper.writeValueAsString(parse));
            
            // Strip possible Markdown fences around the JSON payload.
            String extractedJson = extractJsonFromResponse(json);
            log.debug("优化建议提取的JSON内容: {}", extractedJson);
            
            Map<String, Object> opt = objectMapper.readValue(extractedJson, new TypeReference<Map<String, Object>>(){});

            ResumeOptimization o = new ResumeOptimization();
            o.setUserId(userId);
            o.setResumeId(resumeId);
            // FIX: the AI may return nested structures instead of plain
            // strings; a blind (String) cast used to throw ClassCastException
            // and fail the whole task. suggestionText() tolerates both.
            o.setEducationSuggestions(suggestionText(opt, "education"));
            o.setSkillsSuggestions(suggestionText(opt, "skills"));
            o.setAwardsSuggestions(suggestionText(opt, "awards"));
            o.setInternshipsSuggestions(suggestionText(opt, "internships"));
            o.setProjectsSuggestions(suggestionText(opt, "projects"));
            o.setOverallSuggestions(suggestionText(opt, "overall"));
            o.setPriorityLevel(2);
            o.setCreateTime(LocalDateTime.now());
            o.setUpdateTime(LocalDateTime.now());
            resumeOptimizationMapper.insert(o);

            // Refresh the Redis cache with the new suggestions.
            updateOptimizationCache(resumeId, userId, o.getId());
            log.info("已更新优化建议缓存: resumeId={}, userId={}, optimizationId={}", resumeId, userId, o.getId());

            AiAnalysisTask t = aiAnalysisTaskMapper.selectByTaskId(taskId);
            if (t != null) {  // FIX: guard against a missing task row (was NPE)
                t.setStatus(2);
                t.setProgress(100);
                t.setResultData(objectMapper.writeValueAsString(Map.of("optimizationId", o.getId())));
                t.setEndTime(LocalDateTime.now());
                aiAnalysisTaskMapper.update(t);
            } else {
                log.warn("任务不存在，无法更新状态: taskId={}", taskId);
            }
        } catch (Exception e) {
            AiAnalysisTask t = aiAnalysisTaskMapper.selectByTaskId(taskId);
            if (t != null) {  // FIX: guard against a missing task row (was NPE)
                t.setStatus(3);
                t.setErrorMessage(e.getMessage());
                t.setEndTime(LocalDateTime.now());
                aiAnalysisTaskMapper.update(t);
            } else {
                log.warn("任务不存在，无法记录失败: taskId={}", taskId);
            }
            log.error("优化建议失败", e);
        }
    }

    /**
     * Returns the value under {@code key} as display text: strings pass
     * through unchanged, other structures are serialized to JSON, and missing
     * keys map to the empty string.
     */
    private String suggestionText(Map<String, Object> opt, String key) {
        Object value = opt.get(key);
        if (value == null) return "";
        if (value instanceof String s) return s;
        try {
            return objectMapper.writeValueAsString(value);
        } catch (Exception e) {
            return String.valueOf(value);
        }
    }

    /**
     * Loads optimization suggestions by id, preferring the Redis cache and
     * caching the mapped DTO on a miss.
     */
    @Override
    public ResumeOptimizationDTO getOptimizationSuggestions(Long optimizationId) {
        String key = RedisConstant.RESUME_OPTIMIZATION + optimizationId;
        Object hit = redisTemplate.opsForValue().get(key);
        if (hit instanceof ResumeOptimizationDTO cachedDto) {
            return cachedDto;
        }

        ResumeOptimization entity = resumeOptimizationMapper.selectById(optimizationId);
        if (entity == null) {
            return null;
        }

        ResumeOptimizationDTO dto = new ResumeOptimizationDTO();
        dto.setId(entity.getId());
        dto.setUserId(entity.getUserId());
        dto.setResumeId(entity.getResumeId());
        dto.setTargetPositionId(entity.getTargetPositionId());
        dto.setEducationSuggestions(entity.getEducationSuggestions());
        dto.setSkillsSuggestions(entity.getSkillsSuggestions());
        dto.setAwardsSuggestions(entity.getAwardsSuggestions());
        dto.setInternshipsSuggestions(entity.getInternshipsSuggestions());
        dto.setProjectsSuggestions(entity.getProjectsSuggestions());
        dto.setOverallSuggestions(entity.getOverallSuggestions());
        dto.setPriorityLevel(entity.getPriorityLevel());
        dto.setCreateTime(entity.getCreateTime());

        redisTemplate.opsForValue().set(key, dto, Duration.ofSeconds(RedisConstant.RESUME_CACHE_TTL_SECONDS));
        return dto;
    }

    /**
     * Returns the status of an analysis task, checking the in-memory task
     * store first and falling back to the database.
     *
     * NOTE(review): taskStore is never populated anywhere visible in this
     * file — confirm whether the in-memory fast path is still needed.
     */
    @Override
    public AiAnalysisTaskDTO getTaskStatus(String taskId) {
        AiAnalysisTaskDTO cachedTask = taskStore.get(taskId);
        if (cachedTask != null) {
            return cachedTask;
        }

        AiAnalysisTask t = aiAnalysisTaskMapper.selectByTaskId(taskId);
        if (t == null) return null;

        AiAnalysisTaskDTO dto = new AiAnalysisTaskDTO();
        dto.setId(t.getId());
        dto.setUserId(t.getUserId());
        dto.setResumeId(t.getResumeId());
        dto.setTaskType(t.getTaskType());
        dto.setStatus(t.getStatus());
        dto.setProgress(t.getProgress());
        // FIX: readValue(null) used to throw and be silently swallowed; skip
        // parsing when there is no payload and log genuine parse failures.
        if (t.getResultData() != null && !t.getResultData().isBlank()) {
            try {
                dto.setResultData(objectMapper.readValue(t.getResultData(), new TypeReference<Map<String, Object>>(){}));
            } catch (Exception e) {
                log.warn("解析任务结果数据失败: taskId={}, error={}", taskId, e.getMessage());
            }
        }
        dto.setErrorMessage(t.getErrorMessage());
        dto.setStartTime(t.getStartTime());
        dto.setEndTime(t.getEndTime());
        dto.setCreateTime(t.getCreateTime());
        return dto;
    }

    /**
     * Returns the latest analysis snapshot for a user (resume metadata, parse
     * result, evaluation and optimization status), cached in Redis.
     */
    @Override
    public Map<String, Object> getLatestAnalysisByUser(Long userId) {
        String cacheKey = RedisConstant.RESUME_LATEST_ANALYSIS + userId;
        Object cached = redisTemplate.opsForValue().get(cacheKey);
        if (cached instanceof Map) {
            log.debug("从Redis缓存获取简历分析信息: userId={}", userId);
            // Cache entries are written by this method as Map<String, Object>,
            // so the narrowing cast is safe; suppression is scoped to one local.
            @SuppressWarnings("unchecked")
            Map<String, Object> hit = (Map<String, Object>) cached;
            return hit;
        }

        log.debug("Redis缓存未命中，从数据库查询: userId={}", userId);

        Map<String, Object> resp = buildLatestAnalysisFromDatabase(userId);

        redisTemplate.opsForValue().set(cacheKey, resp,
            Duration.ofSeconds(RedisConstant.RESUME_CACHE_TTL_SECONDS));

        log.info("已从数据库查询并更新缓存: userId={}, resumeId={}", userId, resp.get("resumeId"));
        return resp;
    }

    /**
     * Builds the latest-analysis payload for a user straight from the
     * database: newest resume, its parse result, the latest evaluation report
     * and the latest optimization suggestions. Missing pieces are reported
     * with a "pending" status.
     */
    private Map<String, Object> buildLatestAnalysisFromDatabase(Long userId) {
        Map<String, Object> resp = new HashMap<>();

        // 1. Newest resume. Assumes selectAllByUserId returns newest first —
        // TODO confirm the ORDER BY in the mapper.
        List<Resume> userResumes = resumeMapper.selectAllByUserId(userId);
        if (userResumes.isEmpty()) {
            log.warn("用户没有简历记录: userId={}", userId);
            return resp;
        }
        Resume latestResume = userResumes.get(0);
        Long resumeId = latestResume.getId();

        resp.put("resumeId", resumeId);
        resp.put("originalFileName", latestResume.getOriginalFileName());
        resp.put("fileType", latestResume.getFileType());
        resp.put("uploadTime", latestResume.getCreateTime());

        // 2. Parse result.
        ResumeParseResult parseResult = resumeParseResultMapper.selectByResumeId(resumeId);
        if (parseResult != null) {
            resp.put("parseStatus", "completed");
            resp.put("parseResult", parseResult);
            resp.put("parseTime", parseResult.getCreateTime());
        } else {
            resp.put("parseStatus", "pending");
        }

        // 3. Latest evaluation report.
        EvaluationReport er = evaluationReportMapper.selectLatestByResumeId(resumeId);
        if (er != null) {
            resp.put("evaluation", mapEvaluationToDto(er));
            resp.put("evaluationStatus", "completed");
        } else {
            resp.put("evaluationStatus", "pending");
        }

        // 4. Latest optimization suggestions.
        ResumeOptimization ro = resumeOptimizationMapper.selectLatestByResumeId(resumeId);
        if (ro != null) {
            resp.put("optimization", mapOptimizationToDto(ro));
            resp.put("optimizationStatus", "completed");
        } else {
            resp.put("optimizationStatus", "pending");
        }

        log.info("构建最新简历分析信息成功: userId={}, resumeId={}, parseStatus={}, evaluationStatus={}, optimizationStatus={}", 
            userId, resumeId, resp.get("parseStatus"), resp.get("evaluationStatus"), resp.get("optimizationStatus"));

        return resp;
    }

    /**
     * Maps an EvaluationReport entity to its DTO; keywords are stored as a
     * JSON array string and parsed here (empty list on parse failure).
     */
    private EvaluationReportDTO mapEvaluationToDto(EvaluationReport er) {
        EvaluationReportDTO dto = new EvaluationReportDTO();
        dto.setId(er.getId());
        dto.setUserId(er.getUserId());
        dto.setResumeId(er.getResumeId());
        dto.setTargetPositionId(er.getTargetPositionId());
        dto.setOverallScore(er.getOverallScore());
        dto.setGapAnalysis(er.getGapAnalysis());
        try {
            dto.setKeywords(objectMapper.readValue(er.getKeywords(), new TypeReference<List<String>>(){}));
        } catch (Exception e) {
            log.warn("解析keywords字段失败: {}", e.getMessage());
            dto.setKeywords(List.of());
        }
        // Fields not persisted yet: expose stable defaults.
        dto.setStrengths(List.of());
        dto.setWeaknesses(List.of());
        dto.setImprovementSuggestions("");
        dto.setCreateTime(er.getCreateTime());
        return dto;
    }

    /**
     * Maps a ResumeOptimization entity to its DTO field-by-field.
     */
    private ResumeOptimizationDTO mapOptimizationToDto(ResumeOptimization ro) {
        ResumeOptimizationDTO dto = new ResumeOptimizationDTO();
        dto.setId(ro.getId());
        dto.setUserId(ro.getUserId());
        dto.setResumeId(ro.getResumeId());
        dto.setTargetPositionId(ro.getTargetPositionId());
        dto.setEducationSuggestions(ro.getEducationSuggestions());
        dto.setSkillsSuggestions(ro.getSkillsSuggestions());
        dto.setAwardsSuggestions(ro.getAwardsSuggestions());
        dto.setInternshipsSuggestions(ro.getInternshipsSuggestions());
        dto.setProjectsSuggestions(ro.getProjectsSuggestions());
        dto.setOverallSuggestions(ro.getOverallSuggestions());
        dto.setPriorityLevel(ro.getPriorityLevel());
        dto.setCreateTime(ro.getCreateTime());
        return dto;
    }



    /**
     * Answers a free-form question about a resume using retrieval-augmented
     * generation: the question is embedded, the most relevant resume segments
     * are fetched from the in-memory vector store, and the chat model answers
     * grounded in those segments.
     *
     * @param resumeId id of the resume whose vector index should be queried
     * @param query    the user's natural-language question
     * @return the model's answer, or a Chinese error message on failure
     */
    @Override
    public String queryResumeWithRAG(Long resumeId, String query) {
        try {
            EmbeddingStore<TextSegment> store = vectorStores.get(resumeId);
            if (store == null) return "未找到该简历的向量索引";

            // Embed the query and retrieve the most relevant resume segments.
            // The previous version fetched the store but never searched it, so
            // answers were produced without any resume context at all.
            QwenEmbeddingModel embeddingModel = QwenEmbeddingModel.builder()
                .apiKey(dashscopeApiKey)
                .build();
            dev.langchain4j.data.embedding.Embedding queryEmbedding =
                embeddingModel.embed(query).content();
            dev.langchain4j.store.embedding.EmbeddingSearchResult<TextSegment> searchResult =
                store.search(dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder()
                    .queryEmbedding(queryEmbedding)
                    .maxResults(5)
                    .build());
            String context = searchResult.matches().stream()
                .map(match -> match.embedded().text())
                .collect(Collectors.joining("\n---\n"));

            QwenChatModel chat = QwenChatModel.builder()
                .apiKey(dashscopeApiKey)
                .modelName(dashscopeModelName)
                .build();
            // Ground the answer in the retrieved segments.
            String prompt = "请基于以下简历内容回答问题。\n简历内容：\n" + context + "\n\n问题：" + query;
            return chat.chat(prompt);
        } catch (Exception e) {
            log.error("RAG 查询失败", e);
            return "查询失败：" + e.getMessage();
        }
    }

    /**
     * Builds a similarity/plagiarism report for a resume by comparing each of
     * its vectorized segments against a corpus of other segments.
     *
     * <p>For every resume segment only the single best cosine similarity is
     * used: coverage = fraction of segments whose best match reaches the
     * threshold; avgTopSim = mean of the best similarities. The previous
     * implementation maintained a top-K heap plus a sort per segment and then
     * consumed only the maximum, so that machinery was dead O(C log k) work.
     *
     * @param resumeId  resume whose segments are checked
     * @param topK      retained for interface compatibility; the report only
     *                  ever uses the single best match, so it has no effect
     * @param threshold high-similarity cutoff, clamped to [0.5, 0.99] (default 0.90)
     * @return the populated report DTO
     * @throws RuntimeException if the resume is not vectorized or the corpus is empty
     */
    @Override
    public com.cg.dto.SimilarityReportDTO computeSimilarityReport(Long resumeId, Integer topK, Double threshold) {
        double tau = threshold == null ? 0.90 : Math.max(0.5, Math.min(0.99, threshold));

        List<com.cg.entity.DocumentVector> segments = documentVectorMapper.selectByResumeId(resumeId);
        if (segments == null || segments.isEmpty()) {
            throw new RuntimeException("该简历尚未向量化");
        }

        List<com.cg.entity.DocumentVector> corpus = documentVectorMapper.selectCorpusForSimilarity(resumeId);
        if (corpus == null || corpus.isEmpty()) {
            throw new RuntimeException("对比语料为空");
        }

        // Decode each embedding's JSON once, outside the O(S*C) comparison loop.
        Map<String, double[]> segVec = new HashMap<>();
        for (var s : segments) {
            segVec.put(s.getSegmentId(), parseEmbeddingToArray(s.getEmbeddingVector()));
        }
        Map<String, double[]> corVec = new HashMap<>();
        for (var c : corpus) {
            corVec.put(c.getSegmentId(), parseEmbeddingToArray(c.getEmbeddingVector()));
        }

        int highSimCount = 0;
        double sumTopSim = 0.0;

        for (var s : segments) {
            double[] v = segVec.get(s.getSegmentId());
            if (v == null || v.length == 0) continue; // unparseable embedding: skip

            // Track only the best similarity against the corpus.
            double bestSim = Double.NEGATIVE_INFINITY;
            boolean comparable = false;
            for (var c : corpus) {
                double[] cv = corVec.get(c.getSegmentId());
                if (cv == null || cv.length != v.length) continue; // dimension mismatch: skip
                double sim = com.cg.util.CosineSimilarityUtils.cosine(v, cv);
                if (sim > bestSim) bestSim = sim;
                comparable = true;
            }
            if (!comparable) continue; // no corpus vector of matching dimension

            sumTopSim += bestSim;
            if (bestSim >= tau) highSimCount++;
        }

        // segments is guaranteed non-empty here (checked above).
        double coverage = (double) highSimCount / segments.size();
        double avgTopSim = sumTopSim / segments.size();
        double overall = 100.0 * (0.6 * coverage + 0.4 * avgTopSim);
        String risk = overall >= 70 ? "HIGH" : (overall >= 40 ? "MEDIUM" : "LOW");

        com.cg.dto.SimilarityReportDTO report = new com.cg.dto.SimilarityReportDTO();
        report.setResumeId(resumeId);
        report.setCoverage(coverage);
        report.setAvgTopSim(avgTopSim);
        report.setOverallScore(Math.round(overall * 10.0) / 10.0);
        report.setRiskLevel(risk);
        report.setGeneratedAt(LocalDateTime.now());
        return report;
    }

    /**
     * Parses a JSON number-array string (e.g. "[0.1, 0.2]") into a primitive
     * double array. Any parse failure yields an empty array, which callers
     * treat as "skip this vector".
     *
     * @param json serialized embedding vector
     * @return the decoded vector, or an empty array on failure
     */
    private double[] parseEmbeddingToArray(String json) {
        try {
            List<Double> values = objectMapper.readValue(json, new TypeReference<List<Double>>(){});
            return values.stream().mapToDouble(Double::doubleValue).toArray();
        } catch (Exception ignored) {
            // Malformed or blank embedding JSON: degrade to an empty vector
            // rather than failing the whole similarity report.
            return new double[0];
        }
    }

    /**
     * Generates a compact random task identifier.
     *
     * @return the first 12 hex characters of a dashless random UUID
     */
    private String generateShortTaskId() {
        String hex = UUID.randomUUID().toString().replace("-", "");
        return hex.substring(0, 12);
    }
    
    /**
     * Resolves the absolute path of the project's source resources directory.
     * Deliberately targets src/main/resources rather than the compiled
     * target/classes directory so files written at runtime land in the source tree.
     *
     * @return absolute path to src/main/resources under the working directory
     */
    private String getResourcesPath() {
        String resourcesPath = System.getProperty("user.dir") + "/src/main/resources";
        log.info("使用项目根目录下的resources目录: {}", resourcesPath);
        return resourcesPath;
    }
    
    /**
     * Loads a single PDF file from disk into a LangChain4j {@link Document}.
     *
     * @param filePath path of the PDF file to load
     * @return a singleton immutable list containing the loaded document
     * @throws RuntimeException if the file cannot be loaded or parses to nothing
     */
    private List<Document> loadPdfDocument(Path filePath) {
        try {
            log.info("开始加载PDF文件: {}", filePath);

            // FileSystemDocumentLoader reads the single PDF into one Document.
            Document doc = FileSystemDocumentLoader.loadDocument(filePath);
            if (doc == null) {
                // Defensive guard; the thrown exception is re-wrapped by the
                // catch below, matching the original control flow.
                log.warn("PDF文件加载后为空: {}", filePath);
                throw new RuntimeException("PDF文件内容为空或无法解析");
            }

            log.info("PDF文件加载成功: {}, 文档长度: {} 字符", filePath, doc.text().length());
            return List.of(doc);
        } catch (Exception e) {
            log.error("PDF文件加载失败: {}, error: {}", filePath, e.getMessage(), e);
            throw new RuntimeException("PDF文件加载失败: " + e.getMessage(), e);
        }
    }

    /**
     * Extracts a JSON object string from a raw AI response, tolerating
     * Markdown code fences and surrounding prose.
     *
     * @param response the model's raw text output
     * @return a string that Jackson accepts as a JSON object
     * @throws RuntimeException if the response is empty or no valid JSON is found
     */
    private String extractJsonFromResponse(String response) {
        if (response == null || response.trim().isEmpty()) {
            throw new RuntimeException("AI响应为空");
        }

        // Fast path: the whole response is already pure JSON.
        try {
            objectMapper.readValue(response, new TypeReference<Map<String, Object>>(){});
            return response;
        } catch (Exception e) {
            log.debug("直接解析失败，尝试提取JSON内容: {}", e.getMessage());
        }

        // Fallbacks, in order: fenced/inline code blocks, then brace matching.
        String fromMarkdown = extractJsonFromMarkdown(response);
        if (fromMarkdown != null) {
            return fromMarkdown;
        }
        String fromBraces = extractJsonByBrackets(response);
        if (fromBraces != null) {
            return fromBraces;
        }

        log.error("无法从AI响应中提取有效的JSON内容。原始响应: {}", response);
        throw new RuntimeException("AI响应格式不正确，无法解析JSON内容");
    }

    /**
     * Attempts to pull a JSON object out of a Markdown-fenced or backtick-quoted
     * span of the response. Only the first match of each pattern is considered,
     * and each candidate is validated with Jackson before being returned.
     *
     * @param response the model's raw text output
     * @return the first valid JSON string found, or null if none
     */
    private String extractJsonFromMarkdown(String response) {
        // Most specific first: ```json fences, then plain fences, then inline code.
        List<String> fencePatterns = List.of(
            "```json\\s*([\\s\\S]*?)\\s*```",
            "```\\s*([\\s\\S]*?)\\s*```",
            "`([^`]+)`"
        );

        for (String regex : fencePatterns) {
            Matcher matcher = Pattern.compile(regex).matcher(response);
            if (!matcher.find()) {
                continue;
            }
            String candidate = matcher.group(1).trim();
            try {
                objectMapper.readValue(candidate, new TypeReference<Map<String, Object>>(){});
                return candidate;
            } catch (Exception e) {
                log.debug("从代码块提取的内容不是有效JSON: {}", candidate);
            }
        }
        return null;
    }

    /**
     * Extracts the first balanced {...} span from the response and validates it
     * as JSON. Unlike naive brace counting, braces that appear inside JSON
     * string literals (e.g. {"a": "}"}) are ignored, and escaped characters
     * inside strings (\" and \\) are handled — the previous version miscounted
     * in those cases and rejected otherwise-valid JSON.
     *
     * @param response the model's raw text output
     * @return the extracted JSON string, or null if no valid object is found
     */
    private String extractJsonByBrackets(String response) {
        int startBrace = response.indexOf('{');
        if (startBrace == -1) {
            return null;
        }

        int braceCount = 0;
        int endBrace = -1;
        boolean inString = false;
        boolean escaped = false;

        for (int i = startBrace; i < response.length(); i++) {
            char c = response.charAt(i);
            if (escaped) {
                // Previous char was a backslash inside a string: this char is literal.
                escaped = false;
                continue;
            }
            if (inString) {
                if (c == '\\') {
                    escaped = true;
                } else if (c == '"') {
                    inString = false;
                }
                continue;
            }
            if (c == '"') {
                inString = true;
            } else if (c == '{') {
                braceCount++;
            } else if (c == '}') {
                braceCount--;
                if (braceCount == 0) {
                    endBrace = i;
                    break;
                }
            }
        }

        if (endBrace != -1) {
            String extracted = response.substring(startBrace, endBrace + 1);
            try {
                // Validate the candidate before handing it back.
                objectMapper.readValue(extracted, new TypeReference<Map<String, Object>>(){});
                return extracted;
            } catch (Exception e) {
                log.debug("通过大括号提取的内容不是有效JSON: {}", extracted);
            }
        }
        return null;
    }

    /**
     * Evicts the user's "latest analysis" Redis entry so the next read is
     * rebuilt from the database.
     *
     * @param userId owner of the cache entry to delete
     */
    private void clearUserResumeAnalysisCache(Long userId) {
        redisTemplate.delete(RedisConstant.RESUME_LATEST_ANALYSIS + userId);
        log.info("已清理用户简历分析缓存: userId={}", userId);
    }

    /**
     * Refreshes the per-user "latest analysis" Redis entry right after a resume
     * has been parsed, so the frontend sees the new record without waiting for
     * a cache miss. Best-effort: failures are logged, never propagated.
     *
     * @param resumeId the freshly parsed resume
     * @param userId   owner of the cache entry
     */
    private void updateResumeAnalysisCache(Long resumeId, Long userId) {
        try {
            Resume resume = resumeMapper.selectById(resumeId);
            if (resume == null) {
                log.warn("简历不存在，无法更新缓存: resumeId={}", resumeId);
                return;
            }
            ResumeParseResult parseResult = resumeParseResultMapper.selectByResumeId(resumeId);
            if (parseResult == null) {
                log.warn("简历解析结果不存在，无法更新缓存: resumeId={}", resumeId);
                return;
            }

            // Assemble the payload the frontend expects for "latest analysis".
            Map<String, Object> payload = new HashMap<>();
            payload.put("resumeId", resumeId);
            payload.put("originalFileName", resume.getOriginalFileName());
            payload.put("fileType", resume.getFileType());
            payload.put("uploadTime", resume.getCreateTime());
            payload.put("parseStatus", "completed");
            payload.put("parseResult", parseResult);

            redisTemplate.opsForValue().set(
                RedisConstant.RESUME_LATEST_ANALYSIS + userId,
                payload,
                Duration.ofSeconds(RedisConstant.RESUME_CACHE_TTL_SECONDS));

            log.info("简历分析缓存更新成功: resumeId={}, userId={}", resumeId, userId);
        } catch (Exception e) {
            log.error("更新简历分析缓存失败: resumeId={}, userId={}, error={}", resumeId, userId, e.getMessage(), e);
        }
    }

    /**
     * Merges evaluation-report results into the user's "latest analysis" Redis
     * entry after an evaluation completes. If no entry exists, a minimal one is
     * rebuilt from the resume row. Best-effort: failures are logged only.
     *
     * @param resumeId the evaluated resume
     * @param userId   owner of the cache entry
     * @param reportId id of the completed evaluation report
     */
    @SuppressWarnings("unchecked")
    private void updateEvaluationCache(Long resumeId, Long userId, Long reportId) {
        try {
            EvaluationReport report = evaluationReportMapper.selectById(reportId);
            if (report == null) {
                log.warn("测评报告不存在，无法更新缓存: reportId={}", reportId);
                return;
            }

            String cacheKey = RedisConstant.RESUME_LATEST_ANALYSIS + userId;
            Object cached = redisTemplate.opsForValue().get(cacheKey);

            Map<String, Object> payload;
            if (cached instanceof Map) {
                // Copy so we never mutate the object the Redis client handed back.
                payload = new HashMap<>((Map<String, Object>) cached);
            } else {
                // Cache miss: rebuild the base entry from the resume row.
                payload = new HashMap<>();
                Resume resume = resumeMapper.selectById(resumeId);
                if (resume != null) {
                    payload.put("resumeId", resumeId);
                    payload.put("originalFileName", resume.getOriginalFileName());
                    payload.put("fileType", resume.getFileType());
                    payload.put("uploadTime", resume.getCreateTime());
                }
            }

            payload.put("evaluationStatus", "completed");
            payload.put("evaluationReportId", reportId);
            payload.put("overallScore", report.getOverallScore());
            payload.put("gapAnalysis", report.getGapAnalysis());
            payload.put("evaluationTime", report.getCreateTime());

            redisTemplate.opsForValue().set(cacheKey, payload,
                Duration.ofSeconds(RedisConstant.RESUME_CACHE_TTL_SECONDS));

            log.info("测评缓存更新成功: resumeId={}, userId={}, reportId={}", resumeId, userId, reportId);
        } catch (Exception e) {
            log.error("更新测评缓存失败: resumeId={}, userId={}, reportId={}, error={}", resumeId, userId, reportId, e.getMessage(), e);
        }
    }

    /**
     * Merges optimization-suggestion results into the user's "latest analysis"
     * Redis entry after suggestion generation completes. If no entry exists, a
     * minimal one is rebuilt from the resume row. Best-effort: failures are
     * logged only.
     *
     * @param resumeId       the optimized resume
     * @param userId         owner of the cache entry
     * @param optimizationId id of the completed optimization record
     */
    @SuppressWarnings("unchecked")
    private void updateOptimizationCache(Long resumeId, Long userId, Long optimizationId) {
        try {
            ResumeOptimization optimization = resumeOptimizationMapper.selectById(optimizationId);
            if (optimization == null) {
                log.warn("优化建议不存在，无法更新缓存: optimizationId={}", optimizationId);
                return;
            }

            String cacheKey = RedisConstant.RESUME_LATEST_ANALYSIS + userId;
            Object cached = redisTemplate.opsForValue().get(cacheKey);

            Map<String, Object> payload;
            if (cached instanceof Map) {
                // Copy so we never mutate the object the Redis client handed back.
                payload = new HashMap<>((Map<String, Object>) cached);
            } else {
                // Cache miss: rebuild the base entry from the resume row.
                payload = new HashMap<>();
                Resume resume = resumeMapper.selectById(resumeId);
                if (resume != null) {
                    payload.put("resumeId", resumeId);
                    payload.put("originalFileName", resume.getOriginalFileName());
                    payload.put("fileType", resume.getFileType());
                    payload.put("uploadTime", resume.getCreateTime());
                }
            }

            payload.put("optimizationStatus", "completed");
            payload.put("optimizationId", optimizationId);
            payload.put("educationSuggestions", optimization.getEducationSuggestions());
            payload.put("skillsSuggestions", optimization.getSkillsSuggestions());
            payload.put("awardsSuggestions", optimization.getAwardsSuggestions());
            payload.put("internshipsSuggestions", optimization.getInternshipsSuggestions());
            payload.put("projectsSuggestions", optimization.getProjectsSuggestions());
            payload.put("overallSuggestions", optimization.getOverallSuggestions());
            payload.put("optimizationTime", optimization.getCreateTime());

            redisTemplate.opsForValue().set(cacheKey, payload,
                Duration.ofSeconds(RedisConstant.RESUME_CACHE_TTL_SECONDS));

            log.info("优化建议缓存更新成功: resumeId={}, userId={}, optimizationId={}", resumeId, userId, optimizationId);
        } catch (Exception e) {
            log.error("更新优化建议缓存失败: resumeId={}, userId={}, optimizationId={}, error={}", resumeId, userId, optimizationId, e.getMessage(), e);
        }
    }

    /**
     * Returns the complete analysis snapshot for one specific resume: basic
     * file info plus parse, evaluation and optimization status/results. Each
     * stage reports "completed" or "pending". On any failure (including a
     * missing resume) an empty map is returned rather than an exception.
     *
     * @param resumeId id of the resume to inspect
     * @return the analysis map, possibly empty (never null)
     */
    public Map<String, Object> getAnalysisByResumeId(Long resumeId) {
        try {
            // Basic resume metadata; bail out early if the resume is gone.
            Resume resume = resumeMapper.selectById(resumeId);
            if (resume == null) {
                log.warn("简历不存在: resumeId={}", resumeId);
                return new HashMap<>();
            }

            Map<String, Object> analysis = new HashMap<>();
            analysis.put("resumeId", resumeId);
            analysis.put("userId", resume.getUserId());
            analysis.put("originalFileName", resume.getOriginalFileName());
            analysis.put("fileType", resume.getFileType());
            analysis.put("uploadTime", resume.getCreateTime());

            // Stage 1: parse result.
            ResumeParseResult parseResult = resumeParseResultMapper.selectByResumeId(resumeId);
            if (parseResult == null) {
                analysis.put("parseStatus", "pending");
            } else {
                analysis.put("parseStatus", "completed");
                analysis.put("parseResult", parseResult);
                analysis.put("parseTime", parseResult.getCreateTime());
            }

            // Stage 2: latest evaluation report.
            EvaluationReport evaluation = evaluationReportMapper.selectLatestByResumeId(resumeId);
            if (evaluation == null) {
                analysis.put("evaluationStatus", "pending");
            } else {
                analysis.put("evaluationStatus", "completed");
                analysis.put("evaluationReportId", evaluation.getId());
                analysis.put("overallScore", evaluation.getOverallScore());
                analysis.put("gapAnalysis", evaluation.getGapAnalysis());
                analysis.put("evaluationTime", evaluation.getCreateTime());
            }

            // Stage 3: latest optimization suggestions.
            ResumeOptimization optimization = resumeOptimizationMapper.selectLatestByResumeId(resumeId);
            if (optimization == null) {
                analysis.put("optimizationStatus", "pending");
            } else {
                analysis.put("optimizationStatus", "completed");
                analysis.put("optimizationId", optimization.getId());
                analysis.put("educationSuggestions", optimization.getEducationSuggestions());
                analysis.put("skillsSuggestions", optimization.getSkillsSuggestions());
                analysis.put("awardsSuggestions", optimization.getAwardsSuggestions());
                analysis.put("internshipsSuggestions", optimization.getInternshipsSuggestions());
                analysis.put("projectsSuggestions", optimization.getProjectsSuggestions());
                analysis.put("overallSuggestions", optimization.getOverallSuggestions());
                analysis.put("optimizationTime", optimization.getCreateTime());
            }

            log.info("获取简历分析信息成功: resumeId={}, userId={}", resumeId, resume.getUserId());
            return analysis;
        } catch (Exception e) {
            log.error("获取简历分析信息失败: resumeId={}, error={}", resumeId, e.getMessage(), e);
            return new HashMap<>();
        }
    }

    /**
     * Force-evicts a user's "latest analysis" cache entry on demand (e.g. a
     * frontend-triggered refresh). Best-effort: failures are logged only.
     *
     * @param userId owner of the cache entry to refresh
     */
    public void refreshUserAnalysisCache(Long userId) {
        try {
            redisTemplate.delete(RedisConstant.RESUME_LATEST_ANALYSIS + userId);
            log.info("已强制刷新用户简历分析缓存: userId={}", userId);
        } catch (Exception e) {
            log.error("强制刷新缓存失败: userId={}, error={}", userId, e.getMessage(), e);
        }
    }

    /**
     * Heuristic fallback evaluation used when the AI returns non-parseable JSON.
     * Scores the resume from skill count plus the presence of in-demand skills,
     * capped at 95, and always returns a map with "score", "gap_analysis" and
     * "keywords" keys.
     *
     * @param skills parsed skill entries; each map may carry a "skill" name
     * @return evaluation result map (never null)
     */
    private Map<String, Object> generateSmartEvaluation(List<Map<String, Object>> skills) {
        Map<String, Object> result = new HashMap<>();

        try {
            // Collect non-null skill names. The previous containsKey+get chain
            // threw an NPE when "skill" mapped to null, silently dropping the
            // whole analysis into the catch-all defaults.
            List<String> skillNames = new ArrayList<>();
            for (Map<String, Object> skill : skills) {
                Object name = skill.get("skill");
                if (name != null) {
                    skillNames.add(name.toString());
                }
            }
            int skillCount = skills.size();

            // Base score, boosted by skill count tiers.
            double score = 60.0;
            if (skillCount >= 10) score += 20;
            else if (skillCount >= 5) score += 15;
            else if (skillCount >= 3) score += 10;

            // Lowercase the hot-skill list once (it was re-lowercased inside the
            // inner match loop before — loop-invariant work).
            List<String> hotSkillsLower = Arrays.asList(
                    "Java", "Python", "JavaScript", "Vue.js", "React", "Spring", "MySQL", "Redis")
                .stream()
                .map(s -> s.toLowerCase())
                .collect(Collectors.toList());
            // NOTE(review): substring matching in both directions is loose — a
            // one-letter skill like "C" matches "react". Kept for score
            // compatibility; confirm whether stricter matching is intended.
            long hotSkillCount = skillNames.stream()
                .map(String::toLowerCase)
                .filter(skill -> hotSkillsLower.stream()
                    .anyMatch(hot -> hot.contains(skill) || skill.contains(hot)))
                .count();

            if (hotSkillCount >= 3) score += 15;
            else if (hotSkillCount >= 2) score += 10;
            else if (hotSkillCount >= 1) score += 5;

            score = Math.min(score, 95.0); // hard cap at 95

            result.put("score", Math.round(score * 10.0) / 10.0);
            result.put("gap_analysis", "基于技能分析，建议继续深入学习核心技术栈，并关注行业最新发展趋势。");
            result.put("keywords", skillNames.stream().limit(8).collect(Collectors.toList()));

            log.info("智能分析生成测评结果: score={}, skills={}", score, skillNames);
        } catch (Exception e) {
            // Last-resort defaults so callers always get a usable result.
            log.error("智能分析失败，使用默认值: {}", e.getMessage());
            result.put("score", 70.0);
            result.put("gap_analysis", "请优化提示词或稍后重试");
            result.put("keywords", List.of("技能分析", "简历优化"));
        }

        return result;
    }
}
