package com.deepwiki.service;

import com.deepwiki.domain.entity.Document;
import com.deepwiki.domain.entity.DocumentCatalog;
import com.deepwiki.domain.entity.Warehouse;

import com.deepwiki.storage.impl.ElasticsearchStorageStrategy;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.Comparator;
import java.util.stream.Collectors;

/**
 * Document generation service (revised version).
 * Supports the new Elasticsearch index structure, mapped onto the
 * OpenDeepWiki database tables.
 */
@Service
public class DocumentGenerationService {

    private static final Logger logger = LoggerFactory.getLogger(DocumentGenerationService.class);

    // Collaborators, injected through the constructor below.
    private final CodeAnalysisService codeAnalysisService;
    private final OpenAIService openAIService;
    private final ElasticsearchStorageStrategy elasticsearchStorage;
    private final ObjectMapper objectMapper;
    private final PromptService promptService;
    private final ProjectClassifierService projectClassifierService;
    private final DirectoryTreeService directoryTreeService;
    
    // Thread pool used to generate document contents concurrently.
    // NOTE(review): this executor is never shut down anywhere in this class —
    // consider a @PreDestroy hook so the application can terminate cleanly; TODO confirm lifecycle.
    private final ExecutorService executorService = Executors.newFixedThreadPool(3);

    /**
     * Creates the service with all required collaborators (Spring-injected).
     */
    @Autowired
    public DocumentGenerationService(CodeAnalysisService codeAnalysisService,
                                   OpenAIService openAIService,
                                   ElasticsearchStorageStrategy elasticsearchStorage,
                                   ObjectMapper objectMapper,
                                   PromptService promptService,
                                   ProjectClassifierService projectClassifierService,
                                   DirectoryTreeService directoryTreeService) {
        this.codeAnalysisService = codeAnalysisService;
        this.openAIService = openAIService;
        this.elasticsearchStorage = elasticsearchStorage;
        this.objectMapper = objectMapper;
        this.promptService = promptService;
        this.projectClassifierService = projectClassifierService;
        this.directoryTreeService = directoryTreeService;
    }

    /**
     * Runs the full documentation pipeline for one repository:
     * README check/generation, structure analysis, AI tree optimization,
     * project classification, overview generation, catalog creation and
     * concurrent per-chapter content generation. Intermediate results are
     * persisted to Elasticsearch as each step completes.
     *
     * NOTE(review): the @Transactional boundary spans several long-running AI
     * calls — confirm that holding the transaction that long is intended.
     *
     * @param warehouse repository metadata (id, name, git address, ...)
     * @param document  document record holding the local git checkout path
     * @throws RuntimeException wrapping any failure, after logging it
     */
    @Transactional
    public void generateDocuments(Warehouse warehouse, Document document) {
        try {
            logger.info("开始生成文档: {}", warehouse.getId());
            
            // 1. Use the existing README, or generate one if missing.
            String readmeContent = checkAndGenerateReadme(warehouse, document);
            
            // 2. Analyze the repository's code structure.
            String repositoryStructure = codeAnalysisService.analyzeRepositoryStructure(document.getGitPath());
            
            // 3. Render a basic (non-AI) directory tree of the code files.
            String basicDirectoryTree = generateBasicDirectoryTree(document.getGitPath());
            
            // 4. Let the AI refine the tree used for later analysis steps.
            String optimizedDirectoryStructure = optimizeDirectoryTreeWithAI(basicDirectoryTree, repositoryStructure, warehouse);
            
            // 5. Classify the project type (drives prompt selection later).
            String projectType = projectClassifierService.classifyProject(document.getGitPath());
            logger.info("项目类型检测结果: {} -> {}", warehouse.getName(), projectType);
            
            // 6. Generate the project overview document.
            String overviewContent = generateProjectOverview(warehouse, document, repositoryStructure, readmeContent, projectType);
            
            // 7. First save of warehouse info to ES (document tree is added in step 13).
            saveWarehouseBasicInfo(warehouse, document, optimizedDirectoryStructure);
            
            // 8. Fetch code file contents.
            // NOTE(review): 'codeFiles' is never read afterwards — confirm this call is needed.
            String codeFiles = codeAnalysisService.getCodeFilesContent(document.getGitPath(), 50, 10000);
            
            // 9. Derive the documentation catalog from the optimized tree.
            List<DocumentCatalog> catalogs = parseCatalogStructure(optimizedDirectoryStructure, warehouse, document, projectType);
            
            // 10. Persist catalog entries (with full path info) to ES.
            saveCatalogsToElasticsearch(catalogs, warehouse.getId(), document);
            
            // 11. Build the JSON navigation tree consumed by the frontend.
            String documentTreeStructure = generateDocumentTreeStructure(catalogs);
            
            // 12. Generate the actual document contents concurrently.
            generateDocumentContents(catalogs, warehouse, document.getGitPath(), projectType, overviewContent);
            
            // 13. Re-save warehouse info, now including the document tree.
            updateWarehouseWithDocumentTree(warehouse, documentTreeStructure);
            
            logger.info("文档生成完成: {}", warehouse.getId());
            
        } catch (Exception e) {
            logger.error("生成文档失败: {}", warehouse.getId(), e);
            throw new RuntimeException("生成文档失败: " + e.getMessage(), e);
        }
    }

    /**
     * Ensures a README exists for the repository and records its content in the
     * overview index. If one of the common README file names exists it is read;
     * otherwise a basic README is generated via AI and written to the checkout.
     *
     * @return the README text, or "" on failure (pipeline continues either way)
     */
    private String checkAndGenerateReadme(Warehouse warehouse, Document document) {
        try {
            Path projectPath = Paths.get(document.getGitPath());
            String[] readmeFiles = {"README.md", "readme.md", "Readme.md", "README.rst", "README.txt", "README"};
            
            // Probe the common README file names in priority order.
            Path readmePath = null;
            for (String readmeFile : readmeFiles) {
                Path candidatePath = projectPath.resolve(readmeFile);
                if (Files.exists(candidatePath)) {
                    readmePath = candidatePath;
                    break;
                }
            }
            
            String readmeContent;
            if (readmePath != null) {
                // Existing README: read it (UTF-8) and index it.
                readmeContent = Files.readString(readmePath);
                logger.info("找到现有README文件: {}", readmePath.getFileName());
                saveReadmeOverview(warehouse, document, "readme-original", "项目README文档", readmeContent);
            } else {
                // No README: generate one, write it into the checkout, index it.
                logger.info("未找到README文件，开始生成基础README: {}", warehouse.getName());
                readmeContent = generateBasicReadme(warehouse, document);
                
                Path newReadmePath = projectPath.resolve("README.md");
                Files.writeString(newReadmePath, readmeContent);
                
                saveReadmeOverview(warehouse, document, "readme-generated", "生成的README文档", readmeContent);
                logger.info("基础README生成完成: {}", newReadmePath);
            }
            
            return readmeContent;
            
        } catch (Exception e) {
            logger.error("检查和生成README失败: {}", warehouse.getId(), e);
            return ""; // empty string lets the rest of the pipeline continue
        }
    }

    /**
     * Persists README text into the overview index (maps to the
     * DocumentOverviews table). Extracted to remove the duplicated
     * OverviewInfo construction in checkAndGenerateReadme.
     */
    private void saveReadmeOverview(Warehouse warehouse, Document document,
                                    String overviewId, String title, String content) {
        ElasticsearchStorageStrategy.OverviewInfo overviewInfo = new ElasticsearchStorageStrategy.OverviewInfo();
        overviewInfo.overviewId = overviewId;
        overviewInfo.documentId = document.getId();
        overviewInfo.title = title;
        overviewInfo.content = content;
        elasticsearchStorage.saveOverview(warehouse.getId(), overviewInfo);
    }

    /**
     * Builds a first-pass README via the AI service; falls back to a minimal
     * markdown skeleton when generation fails.
     */
    private String generateBasicReadme(Warehouse warehouse, Document document) {
        try {
            // Describe the repository layout so the prompt has concrete context.
            String structure = codeAnalysisService.analyzeRepositoryStructure(document.getGitPath());
            
            Map<String, Object> args = Map.of(
                "project_name", warehouse.getName(),
                "git_repository", warehouse.getAddress(),
                "branch", warehouse.getBranch(),
                "catalogue", structure
            );
            
            // Render the README-generation prompt and hand it to the model.
            return openAIService.generateWithPrompt(
                    promptService.getReadmeGenerationPrompt(args, null));
            
        } catch (Exception e) {
            logger.error("生成基础README失败: {}", warehouse.getId(), e);
            return String.format("# %s\n\n%s\n\n## 描述\n%s", 
                warehouse.getName(), warehouse.getDescription(), 
                "这是一个自动生成的README文件。");
        }
    }

    /**
     * Renders a plain-text directory tree (max depth 3) without any AI help,
     * wrapped in a fenced code block under a markdown heading.
     */
    private String generateBasicDirectoryTree(String gitPath) {
        try {
            logger.info("生成基础目录树结构: {}", gitPath);
            
            StringBuilder tree = new StringBuilder()
                    .append("# 项目目录结构\n\n")
                    .append("```\n");
            generateDirectoryTreeRecursive(Paths.get(gitPath), tree, "", 0, 3);
            return tree.append("```\n").toString();
            
        } catch (Exception e) {
            logger.error("生成基础目录树失败: {}", gitPath, e);
            return "项目目录结构"; // minimal placeholder on failure
        }
    }

    /**
     * Recursively appends an ASCII tree for {@code dir} to {@code builder},
     * directories first, at most 20 visible entries per directory, skipping
     * names matched by shouldSkipFile.
     *
     * Fix: the previous version computed "is last entry" against the unfiltered
     * file count, so skipped files and the 21-entry off-by-one produced wrong
     * "└──"/"├──" connectors. Entries are now filtered before rendering.
     */
    private void generateDirectoryTreeRecursive(Path dir, StringBuilder builder, String prefix, int depth, int maxDepth) {
        if (depth > maxDepth) return;
        
        try {
            File[] files = dir.toFile().listFiles();
            if (files == null) return;
            
            // Directories first, then case-insensitive name order.
            Arrays.sort(files, (a, b) -> {
                if (a.isDirectory() && !b.isDirectory()) return -1;
                if (!a.isDirectory() && b.isDirectory()) return 1;
                return a.getName().compareToIgnoreCase(b.getName());
            });
            
            // Filter skipped entries up front so the last-entry connector is correct.
            List<File> visible = new ArrayList<>();
            for (File file : files) {
                if (!shouldSkipFile(file.getName())) {
                    visible.add(file);
                }
            }
            
            int limit = Math.min(visible.size(), 20); // cap entries per directory
            boolean truncated = visible.size() > limit;
            
            for (int i = 0; i < limit; i++) {
                File file = visible.get(i);
                boolean isLast = (i == limit - 1) && !truncated;
                String currentPrefix = isLast ? "└── " : "├── ";
                String nextPrefix = prefix + (isLast ? "    " : "│   ");
                
                builder.append(prefix).append(currentPrefix).append(file.getName());
                if (file.isDirectory()) {
                    builder.append("/\n");
                    generateDirectoryTreeRecursive(file.toPath(), builder, nextPrefix, depth + 1, maxDepth);
                } else {
                    builder.append("\n");
                }
            }
            if (truncated) {
                builder.append(prefix).append("└── ... (更多文件)\n");
            }
            
        } catch (Exception e) {
            logger.warn("生成目录树时出错: {}", dir, e);
        }
    }

    /**
     * Decides whether a file or directory name should be excluded from the
     * rendered tree. Non-wildcard patterns match as name prefixes (so ".git"
     * also skips ".gitignore"); "*.ext" patterns match as suffixes.
     *
     * Fix: the previous code stripped '*' and used startsWith for every
     * pattern, so "*.log" became a ".log" prefix check and never matched
     * files such as "app.log".
     */
    private boolean shouldSkipFile(String fileName) {
        String[] skipPatterns = {".git", ".idea", ".vscode", "node_modules", ".DS_Store", "*.log", "target", "build", "dist"};
        for (String pattern : skipPatterns) {
            if (pattern.startsWith("*")) {
                // Wildcard patterns are suffix matches.
                if (fileName.endsWith(pattern.substring(1))) {
                    return true;
                }
            } else if (fileName.startsWith(pattern)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Asks the AI service to refine the raw directory tree. On any failure the
     * unoptimized tree is returned so downstream steps still have input.
     */
    private String optimizeDirectoryTreeWithAI(String basicDirectoryTree, String repositoryStructure, Warehouse warehouse) {
        try {
            logger.info("开始AI优化目录树结构: {}", warehouse.getName());
            
            Map<String, Object> args = Map.of(
                "basic_tree", basicDirectoryTree,
                "repository_structure", repositoryStructure,
                "git_repository", warehouse.getAddress(),
                "branch", warehouse.getBranch(),
                "project_name", warehouse.getName()
            );
            
            // Render the optimization prompt, then run it through the model.
            String prompt = promptService.getDirectoryTreeOptimizationPrompt(args, null);
            String result = openAIService.generateWithPrompt(prompt);
            
            logger.info("AI目录树优化完成: {}", warehouse.getName());
            return result;
            
        } catch (Exception e) {
            logger.error("AI优化目录树失败: {}", warehouse.getId(), e);
            return basicDirectoryTree; // fall back to the unoptimized tree
        }
    }

    /**
     * Produces the project overview via the AI service and stores it in the
     * overview index (maps to the DocumentOverviews table).
     *
     * @return the generated overview text, or a fixed failure marker string
     */
    private String generateProjectOverview(Warehouse warehouse, Document document, String repositoryStructure, 
                                         String readmeContent, String projectType) {
        try {
            logger.info("开始生成项目概要: {}", warehouse.getName());
            
            // Sample key code files as additional prompt context.
            String keyFiles = codeAnalysisService.getCodeFilesContent(document.getGitPath(), 20, 5000);
            
            Map<String, Object> args = Map.of(
                "project_name", warehouse.getName(),
                "git_repository", warehouse.getAddress(),
                "branch", warehouse.getBranch(),
                "catalogue", repositoryStructure,
                "readme", readmeContent,
                "key_files", keyFiles,
                "project_type", projectType
            );
            
            // Overview prompt selection depends on the detected project type.
            String content = openAIService.generateWithPrompt(
                    promptService.getOverviewPrompt(projectType, args, null));
            
            // Persist the overview so the frontend can serve it directly.
            ElasticsearchStorageStrategy.OverviewInfo info = new ElasticsearchStorageStrategy.OverviewInfo();
            info.overviewId = "project-overview";
            info.documentId = document.getId();
            info.title = "项目概要";
            info.content = content;
            elasticsearchStorage.saveOverview(warehouse.getId(), info);
            
            logger.info("项目概要生成完成: {}", warehouse.getName());
            return content;
            
        } catch (Exception e) {
            logger.error("生成项目概要失败: {}", warehouse.getId(), e);
            return "项目概要生成失败";
        }
    }

    /**
     * First-pass save of the warehouse record to Elasticsearch (maps to the
     * Warehouses table). The document tree is intentionally left null here and
     * filled in later by updateWarehouseWithDocumentTree once catalogs exist.
     * Failures are logged and swallowed so the overall pipeline continues.
     *
     * NOTE(review): git credentials (username/password) are copied into the
     * search index here — confirm that storing them in ES is intended.
     */
    private void saveWarehouseBasicInfo(Warehouse warehouse, Document document, String optimizedDirectoryStructure) {
        try {
            ElasticsearchStorageStrategy.WarehouseInfo warehouseInfo = new ElasticsearchStorageStrategy.WarehouseInfo();
            warehouseInfo.warehouseId = warehouse.getId();
            warehouseInfo.organizationName = warehouse.getOrganizationName();
            warehouseInfo.name = warehouse.getName();
            warehouseInfo.description = warehouse.getDescription();
            warehouseInfo.address = warehouse.getAddress();
            warehouseInfo.type = warehouse.getType();
            warehouseInfo.branch = warehouse.getBranch();
            warehouseInfo.gitUsername = warehouse.getGitUsername();
            warehouseInfo.gitPassword = warehouse.getGitPassword();
            warehouseInfo.email = warehouse.getEmail();
            warehouseInfo.status = warehouse.getStatus().toString();
            warehouseInfo.error = warehouse.getError();
            warehouseInfo.prompt = warehouse.getPrompt();
            warehouseInfo.version = warehouse.getVersion();
            warehouseInfo.model = warehouse.getModel();
            warehouseInfo.isEmbedded = warehouse.getIsEmbedded();
            warehouseInfo.isRecommended = warehouse.getIsRecommended();
            warehouseInfo.isDeleted = warehouse.getIsDeleted();
            // Store the AI-optimized code directory structure.
            warehouseInfo.optimizedDirectoryStructure = optimizedDirectoryStructure;
            // documentTreeStructure is written later, once catalogs are generated.
            warehouseInfo.documentTreeStructure = null;
            
            elasticsearchStorage.saveWarehouse(warehouseInfo);
            
            logger.info("仓库基本信息保存到ES完成: {}", warehouse.getName());
            
        } catch (Exception e) {
            logger.error("保存仓库基本信息到ES失败: {}", warehouse.getId(), e);
        }
    }

    /**
     * Turns the (AI-optimized) directory text into a flat list of
     * DocumentCatalog entries — one per non-empty content line — assigning
     * ids, order indexes and a per-chapter generation prompt.
     *
     * Fix: the structure may be rendered as an ASCII tree (this class itself
     * produces one); tree-drawing glyphs are now stripped so catalog names do
     * not contain "├──"/"└──"/"│" fragments, and lines that become empty after
     * stripping are skipped.
     *
     * @return the parsed catalogs, or an empty list on failure
     */
    private List<DocumentCatalog> parseCatalogStructure(String optimizedDirectoryStructure, Warehouse warehouse, 
                                                       Document document, String projectType) {
        try {
            List<DocumentCatalog> catalogs = new ArrayList<>();
            
            // Simplified line-based parsing; headings and code fences are ignored.
            String[] lines = optimizedDirectoryStructure.split("\n");
            int orderIndex = 0;
            
            for (String line : lines) {
                if (line.trim().isEmpty() || line.startsWith("#") || line.startsWith("```")) continue;
                
                // Strip ASCII-tree drawing glyphs before using the line as a name.
                String name = line.replace("├──", "").replace("└──", "").replace("│", "").trim();
                if (name.isEmpty()) continue;
                
                DocumentCatalog catalog = new DocumentCatalog();
                catalog.setId(UUID.randomUUID().toString());
                catalog.setWarehouseId(warehouse.getId());
                catalog.setDucumentId(document.getId()); // (sic) the entity setter is misspelled upstream
                catalog.setName(name);
                catalog.setDescription("自动生成的文档目录");
                catalog.setUrl("/" + catalog.getId());
                catalog.setOrderIndex(orderIndex++);
                catalog.setIsCompleted(false);
                catalog.setIsDeleted(false);
                catalog.setPrompt(generatePromptForCatalog(catalog.getName(), projectType));
                catalog.setDependentFile("");
                
                catalogs.add(catalog);
            }
            
            return catalogs;
            
        } catch (Exception e) {
            logger.error("解析目录结构失败: {}", warehouse.getId(), e);
            return new ArrayList<>();
        }
    }

    /**
     * Builds the per-chapter generation instruction passed to the AI model.
     */
    private String generatePromptForCatalog(String catalogName, String projectType) {
        return "为" + projectType + "项目生成" + catalogName + "章节的详细文档";
    }

    /**
     * Generates content for every catalog entry, in batches of 5 submitted to
     * the shared executor; each batch is awaited before the next starts.
     * Failures are logged and swallowed (per-document failures are handled in
     * generateSingleDocument).
     *
     * Fix: batch progress was logged via batches.indexOf(batch), which scans
     * the list each time and returns the FIRST equal sublist — two batches
     * with identical content were reported with the same number. An indexed
     * loop reports the correct batch number.
     */
    private void generateDocumentContents(List<DocumentCatalog> catalogs, Warehouse warehouse, String gitPath, 
                                        String projectType, String overviewContent) {
        try {
            logger.info("开始并发生成文档内容: {} 个文档", catalogs.size());
            
            // Shared code context for every document prompt.
            String codeContext = codeAnalysisService.getCodeFilesContent(gitPath, 100, 50000);
            
            // Process in batches to bound concurrency.
            List<List<DocumentCatalog>> batches = partition(catalogs, 5);
            
            for (int batchIndex = 0; batchIndex < batches.size(); batchIndex++) {
                List<DocumentCatalog> batch = batches.get(batchIndex);
                List<CompletableFuture<Void>> futures = batch.stream()
                        .map(catalog -> CompletableFuture.runAsync(() -> 
                            generateSingleDocument(catalog, warehouse, codeContext, projectType, overviewContent), 
                            executorService))
                        .toList();
                
                // Wait for the current batch before starting the next.
                CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
                
                logger.info("批次完成: {}/{}", batchIndex + 1, batches.size());
            }
            
            logger.info("所有文档内容生成完成");
            
        } catch (Exception e) {
            logger.error("并发生成文档内容失败: {}", warehouse.getId(), e);
        }
    }

    /**
     * Generates the content for one catalog entry and stores it in ES (maps
     * to the DocumentFileItems table). Exceptions are logged and swallowed so
     * sibling documents in the same batch still complete.
     */
    private void generateSingleDocument(DocumentCatalog catalog, Warehouse warehouse, String codeContext, 
                                      String projectType, String overviewContent) {
        try {
            logger.debug("生成文档: {}", catalog.getName());
            
            // Prompt parameters shared by both prompt templates.
            Map<String, Object> promptArgs = Map.of(
                    "prompt", catalog.getPrompt(),
                    "title", catalog.getName(),
                    "git_repository", warehouse.getAddress(),
                    "branch", warehouse.getBranch(),
                    "catalogue", codeContext,
                    "overview", overviewContent // overview text gives the model global context
            );
            
            // Pick the prompt template by document kind. Locale.ROOT keeps the
            // lower-casing stable regardless of the JVM default locale (avoids
            // the Turkish dotless-i problem with plain toLowerCase()).
            String lowerName = catalog.getName().toLowerCase(Locale.ROOT);
            String finalPrompt;
            if (lowerName.contains("overview") || lowerName.contains("readme")) {
                // Overview-style documents use the Overview prompt.
                finalPrompt = promptService.getOverviewPrompt(projectType, promptArgs, null);
            } else {
                // Everything else uses the GenerateDocs prompt.
                finalPrompt = promptService.getGenerateDocsPrompt(projectType, promptArgs, null);
            }
            
            String content = openAIService.generateWithPrompt(finalPrompt);
            
            // Persist to ES — maps to the DocumentFileItems table.
            ElasticsearchStorageStrategy.DocumentInfo documentInfo = new ElasticsearchStorageStrategy.DocumentInfo();
            documentInfo.documentId = catalog.getId();
            documentInfo.catalogId = catalog.getId();
            documentInfo.title = catalog.getName();
            documentInfo.description = catalog.getDescription();
            documentInfo.content = content;
            documentInfo.size = (long) content.length();
            documentInfo.commentCount = 0L;
            documentInfo.requestToken = 0; // TODO: read actual token usage from the OpenAI service
            documentInfo.responseToken = 0;
            documentInfo.isEmbedded = false;
            documentInfo.metadata = new HashMap<>();
            documentInfo.extra = new HashMap<>();
            documentInfo.sources = new ArrayList<>();
            
            elasticsearchStorage.saveDocument(catalog.getWarehouseId(), documentInfo);
            
            // Mark completion on the in-memory catalog object.
            catalog.setIsCompleted(true);
            
            logger.debug("文档生成完成: {}", catalog.getName());
            
        } catch (Exception e) {
            logger.error("生成单个文档失败: {}", catalog.getName(), e);
        }
    }

    /**
     * Splits {@code list} into consecutive chunks of at most {@code size}
     * elements. Each chunk is an independent copy (not a subList view), so
     * later mutation of the source list cannot corrupt the partitions.
     *
     * Fix: a non-positive {@code size} previously spun forever (i += 0); it
     * now fails fast with an IllegalArgumentException.
     *
     * @throws IllegalArgumentException if {@code size <= 0}
     */
    private <T> List<List<T>> partition(List<T> list, int size) {
        if (size <= 0) {
            throw new IllegalArgumentException("size must be positive: " + size);
        }
        List<List<T>> partitions = new ArrayList<>();
        for (int i = 0; i < list.size(); i += size) {
            partitions.add(new ArrayList<>(list.subList(i, Math.min(i + size, list.size()))));
        }
        return partitions;
    }

    /**
     * Persists catalog entries to Elasticsearch, merging fields from the
     * DocumentCatalogs and Documents tables into one index document per entry.
     * Each entry also gets a denormalized full path and level so the frontend
     * never needs recursive parent lookups.
     *
     * NOTE(review): generateFullPath/calculateLevel each scan the whole list,
     * making this O(n^2) overall — fine for small catalogs; confirm for large ones.
     * A failure on any entry aborts the remaining saves (logged, not rethrown).
     */
    private void saveCatalogsToElasticsearch(List<DocumentCatalog> catalogs, String warehouseId, Document document) {
        try {
            for (DocumentCatalog catalog : catalogs) {
                // Denormalized path from root to this entry.
                String fullPath = generateFullPath(catalog, catalogs);
                
                // Depth in the catalog hierarchy (root = 0).
                int level = calculateLevel(catalog, catalogs);
                
                // Maps to the merged DocumentCatalogs+Documents tables.
                ElasticsearchStorageStrategy.CatalogInfo catalogInfo = new ElasticsearchStorageStrategy.CatalogInfo();
                catalogInfo.catalogId = catalog.getId();
                catalogInfo.documentId = catalog.getDucumentId();
                catalogInfo.parentId = catalog.getParentId();
                catalogInfo.name = catalog.getName();
                catalogInfo.url = catalog.getUrl();
                catalogInfo.description = catalog.getDescription();
                catalogInfo.orderIndex = catalog.getOrderIndex();
                catalogInfo.isCompleted = catalog.getIsCompleted();
                catalogInfo.isDeleted = catalog.getIsDeleted();
                catalogInfo.prompt = catalog.getPrompt();
                catalogInfo.dependentFile = catalog.getDependentFile();
                // Fields taken from the Documents table.
                catalogInfo.gitPath = document.getGitPath();
                catalogInfo.status = document.getStatus().toString();
                catalogInfo.likeCount = document.getLikeCount();
                catalogInfo.commentCount = document.getCommentCount();
                catalogInfo.lastUpdate = System.currentTimeMillis();
                // Denormalized lookup-optimization fields.
                catalogInfo.fullPath = fullPath;
                catalogInfo.level = level;
                
                elasticsearchStorage.saveCatalog(warehouseId, catalogInfo);
            }
            
            logger.info("目录结构保存到ES完成: {} 个目录", catalogs.size());
            
        } catch (Exception e) {
            logger.error("保存目录结构到ES失败: {}", warehouseId, e);
        }
    }

    /**
     * Builds the slash-separated path from the root catalog down to
     * {@code catalog} by walking parent links within the in-memory list,
     * avoiding recursive storage queries.
     *
     * Fix: a visited-set guards against malformed data — a cyclic or
     * self-referencing parentId chain previously spun forever here. The
     * parent lookup is also null-safe for entries whose id is null.
     */
    private String generateFullPath(DocumentCatalog catalog, List<DocumentCatalog> allCatalogs) {
        List<String> pathParts = new ArrayList<>();
        Set<String> visited = new HashSet<>();
        DocumentCatalog current = catalog;
        
        while (current != null && visited.add(current.getId())) {
            pathParts.add(0, current.getName());
            
            if (current.getParentId() == null) {
                break;
            }
            
            // Resolve the parent within the already-loaded list.
            final String parentId = current.getParentId();
            current = allCatalogs.stream()
                    .filter(c -> parentId.equals(c.getId()))
                    .findFirst()
                    .orElse(null);
        }
        
        return String.join("/", pathParts);
    }

    /**
     * Computes the depth of {@code catalog} in the hierarchy (root = 0) by
     * walking parent links within the in-memory list.
     *
     * Fix: a visited-set guards against cyclic parentId data, which
     * previously caused an infinite loop; the parent lookup is null-safe for
     * entries whose id is null.
     */
    private int calculateLevel(DocumentCatalog catalog, List<DocumentCatalog> allCatalogs) {
        int level = 0;
        Set<String> visited = new HashSet<>();
        DocumentCatalog current = catalog;
        
        while (current != null && current.getParentId() != null && visited.add(current.getId())) {
            level++;
            final String parentId = current.getParentId();
            current = allCatalogs.stream()
                    .filter(c -> parentId.equals(c.getId()))
                    .findFirst()
                    .orElse(null);
        }
        
        return level;
    }

    /**
     * Serializes the catalog hierarchy into the JSON navigation tree consumed
     * by the frontend. Returns "{}" on failure.
     */
    private String generateDocumentTreeStructure(List<DocumentCatalog> catalogs) {
        try {
            logger.info("生成文档目录树结构");
            
            // Root node wrapping the recursively-built child nodes.
            Map<String, Object> root = new HashMap<>();
            root.put("title", "项目文档");
            root.put("type", "root");
            root.put("children", buildDocumentTreeNodes(catalogs));
            
            String json = objectMapper.writeValueAsString(root);
            logger.info("文档目录树结构生成完成");
            return json;
            
        } catch (Exception e) {
            logger.error("生成文档目录树结构失败", e);
            return "{}";
        }
    }

    /**
     * Builds the top-level tree nodes: live catalogs ordered by orderIndex,
     * rooted at entries that have no parent id.
     */
    private List<Map<String, Object>> buildDocumentTreeNodes(List<DocumentCatalog> catalogs) {
        // Keep only live entries, ordered by orderIndex (nulls last).
        List<DocumentCatalog> ordered = catalogs.stream()
                .filter(c -> !c.getIsDeleted())
                .sorted(Comparator.comparing(DocumentCatalog::getOrderIndex, Comparator.nullsLast(Integer::compareTo)))
                .collect(Collectors.toList());
        
        // Roots have no parent; each is expanded recursively.
        return ordered.stream()
                .filter(c -> c.getParentId() == null || c.getParentId().trim().isEmpty())
                .map(c -> buildDocumentTreeNode(c, ordered))
                .collect(Collectors.toList());
    }

    /**
     * Recursively converts one catalog entry into a frontend tree node; the
     * "children" key is only present when the entry has child catalogs.
     */
    private Map<String, Object> buildDocumentTreeNode(DocumentCatalog catalog, List<DocumentCatalog> allCatalogs) {
        Map<String, Object> node = new HashMap<>();
        node.put("id", catalog.getId());
        node.put("title", catalog.getName());
        node.put("name", catalog.getName());
        node.put("url", catalog.getUrl());
        node.put("description", catalog.getDescription());
        node.put("type", "document");
        node.put("isGenerated", catalog.getIsCompleted());
        node.put("orderIndex", catalog.getOrderIndex());
        
        // Children: entries whose parentId points at this catalog, in order.
        List<Map<String, Object>> childNodes = allCatalogs.stream()
                .filter(c -> catalog.getId().equals(c.getParentId()))
                .sorted(Comparator.comparing(DocumentCatalog::getOrderIndex, Comparator.nullsLast(Integer::compareTo)))
                .map(child -> buildDocumentTreeNode(child, allCatalogs))
                .collect(Collectors.toList());
        
        if (!childNodes.isEmpty()) {
            node.put("children", childNodes);
        }
        
        return node;
    }

    /**
     * Second save of the warehouse record, this time including the generated
     * document tree JSON. Field-for-field duplicate of saveWarehouseBasicInfo —
     * NOTE(review): consider extracting a shared WarehouseInfo builder so the
     * two saves cannot drift apart.
     *
     * NOTE(review): optimizedDirectoryStructure is read back from the entity
     * here; if the entity was never updated with the AI-optimized tree, this
     * save overwrites the value stored in the first save with null — TODO confirm.
     * Failures are logged and swallowed.
     */
    private void updateWarehouseWithDocumentTree(Warehouse warehouse, String documentTreeStructure) {
        try {
            // Re-save the full warehouse record, now with the document tree.
            ElasticsearchStorageStrategy.WarehouseInfo warehouseInfo = new ElasticsearchStorageStrategy.WarehouseInfo();
            warehouseInfo.warehouseId = warehouse.getId();
            warehouseInfo.organizationName = warehouse.getOrganizationName();
            warehouseInfo.name = warehouse.getName();
            warehouseInfo.description = warehouse.getDescription();
            warehouseInfo.address = warehouse.getAddress();
            warehouseInfo.type = warehouse.getType();
            warehouseInfo.branch = warehouse.getBranch();
            warehouseInfo.gitUsername = warehouse.getGitUsername();
            warehouseInfo.gitPassword = warehouse.getGitPassword();
            warehouseInfo.email = warehouse.getEmail();
            warehouseInfo.status = warehouse.getStatus().toString();
            warehouseInfo.error = warehouse.getError();
            warehouseInfo.prompt = warehouse.getPrompt();
            warehouseInfo.version = warehouse.getVersion();
            warehouseInfo.model = warehouse.getModel();
            warehouseInfo.isEmbedded = warehouse.getIsEmbedded();
            warehouseInfo.isRecommended = warehouse.getIsRecommended();
            warehouseInfo.isDeleted = warehouse.getIsDeleted();
            warehouseInfo.optimizedDirectoryStructure = warehouse.getOptimizedDirectoryStructure();
            // Attach the generated document navigation tree.
            warehouseInfo.documentTreeStructure = documentTreeStructure;
            
            elasticsearchStorage.saveWarehouse(warehouseInfo);
            
            logger.info("仓库文档目录结构更新完成: {}", warehouse.getName());
            
        } catch (Exception e) {
            logger.error("更新仓库文档目录结构失败: {}", warehouse.getId(), e);
        }
    }

    /**
     * Regenerates a single document identified by its catalog id.
     * Currently a stub: the ES lookup of the catalog entry is not implemented,
     * so no document is actually regenerated yet.
     *
     * @param catalogId catalog entry id
     * @param warehouse repository metadata
     * @param gitPath   local git checkout path
     * @throws RuntimeException wrapping any failure
     */
    public void regenerateDocument(String catalogId, Warehouse warehouse, String gitPath) {
        try {
            // TODO: fetch the catalog entry from the ES catalog index and
            // convert it into a DocumentCatalog object.
            logger.warn("regenerateDocument方法暂未实现ES查询逻辑: {}", catalogId);
            
            // Gather code context for the (future) regeneration step.
            // NOTE(review): this is an expensive call whose result is unused until
            // the TODO below is implemented — confirm it should run already.
            String codeContext = codeAnalysisService.getCodeFilesContent(gitPath, 100, 50000);
            
            // TODO: regenerate the document, e.g.:
            // generateSingleDocument(catalog, warehouse, codeContext, "unknown", "");
            
            // NOTE(review): this success log fires even though nothing was regenerated yet.
            logger.info("文档重新生成完成: {}", catalogId);
            
        } catch (Exception e) {
            logger.error("重新生成文档失败: {}", catalogId, e);
            throw new RuntimeException("重新生成文档失败: " + e.getMessage(), e);
        }
    }

    /**
     * Reports document generation progress for a warehouse. Currently a stub:
     * the ES aggregation is not implemented, so all counters are zero.
     *
     * @param warehouseId warehouse id
     * @return map with keys total/generated/progress/completed; an immutable
     *         empty map on failure
     */
    public Map<String, Object> getGenerationProgress(String warehouseId) {
        try {
            // TODO: query the ES catalog index for total vs. generated counts.
            logger.warn("getGenerationProgress方法暂未实现ES查询逻辑: {}", warehouseId);
            
            Map<String, Object> result = new HashMap<>();
            result.put("total", 0);
            result.put("generated", 0);
            result.put("progress", 0.0);
            result.put("completed", false);
            return result;
            
        } catch (Exception e) {
            logger.error("获取文档生成进度失败: {}", warehouseId, e);
            return Collections.emptyMap();
        }
    }
} 