package com.example.apitesttool.service;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.Operation;
import io.swagger.v3.oas.models.PathItem;
import io.swagger.v3.oas.models.parameters.Parameter;
import io.swagger.v3.oas.models.responses.ApiResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;

/**
 * Document vectorization service.
 *
 * <p>Converts OpenAPI documents into searchable vector representations. Each document is
 * split into three kinds of chunks — API-level info, one chunk per path+method operation,
 * and one combined chunk for all component schemas — and each chunk is stored in the
 * vector database with type-tagged metadata so searches can filter by chunk kind.
 */
@Service
public class DocumentVectorizationService {

    private static final Logger logger = LoggerFactory.getLogger(DocumentVectorizationService.class);

    // Constructor injection (rather than field injection) keeps the dependencies final
    // and makes the service trivially constructible in unit tests.
    private final VectorDatabaseService vectorDatabaseService;
    private final OpenApiDocumentManager documentManager;

    @Autowired
    public DocumentVectorizationService(VectorDatabaseService vectorDatabaseService,
                                        OpenApiDocumentManager documentManager) {
        this.vectorDatabaseService = vectorDatabaseService;
        this.documentManager = documentManager;
    }

    /**
     * Vectorizes the OpenAPI document identified by {@code documentId}.
     *
     * @param documentId id of a document previously registered with the document manager
     * @throws IllegalArgumentException if the document or its content cannot be found
     * @throws RuntimeException if any vectorization step fails (original cause preserved)
     */
    public void vectorizeDocument(String documentId) {
        // Validate up front so lookup failures surface as IllegalArgumentException
        // instead of being re-wrapped into a generic RuntimeException below.
        OpenApiDocumentManager.DocumentMetadata metadata = documentManager.getDocumentMetadata(documentId)
                .orElseThrow(() -> new IllegalArgumentException("文档不存在: " + documentId));
        OpenAPI openAPI = documentManager.getDocument(documentId)
                .orElseThrow(() -> new IllegalArgumentException("无法获取文档内容: " + documentId));

        try {
            // Extract and vectorize each facet of the document.
            vectorizeApiInfo(documentId, openAPI, metadata);
            vectorizePaths(documentId, openAPI, metadata);
            vectorizeSchemas(documentId, openAPI, metadata);

            logger.info("文档 {} 向量化完成", documentId);
        } catch (Exception e) {
            logger.error("向量化文档失败: {}", documentId, e);
            throw new RuntimeException("向量化文档失败: " + e.getMessage(), e);
        }
    }

    /**
     * Vectorizes the API-level information (title, description, version, servers)
     * as a single chunk with id {@code <documentId>_api_info}.
     */
    private void vectorizeApiInfo(String documentId, OpenAPI openAPI, OpenApiDocumentManager.DocumentMetadata metadata) {
        StringBuilder content = new StringBuilder();

        // Basic API info. Note: a null title is rendered as the literal "null",
        // matching the original behavior.
        if (openAPI.getInfo() != null) {
            content.append("API 标题: ").append(openAPI.getInfo().getTitle()).append("\n");
            if (openAPI.getInfo().getDescription() != null) {
                content.append("API 描述: ").append(openAPI.getInfo().getDescription()).append("\n");
            }
            if (openAPI.getInfo().getVersion() != null) {
                content.append("API 版本: ").append(openAPI.getInfo().getVersion()).append("\n");
            }
        }

        // Server URLs, each optionally followed by its description in parentheses.
        if (openAPI.getServers() != null && !openAPI.getServers().isEmpty()) {
            content.append("服务器地址: ");
            openAPI.getServers().forEach(server -> {
                content.append(server.getUrl()).append(" ");
                if (server.getDescription() != null) {
                    content.append("(").append(server.getDescription()).append(") ");
                }
            });
            content.append("\n");
        }

        Map<String, Object> vectorMetadata = new HashMap<>();
        vectorMetadata.put("type", "api_info");
        vectorMetadata.put("documentId", documentId);
        vectorMetadata.put("documentName", metadata.getTitle());

        vectorDatabaseService.addDocument(documentId + "_api_info", content.toString(), vectorMetadata);
    }

    /**
     * Vectorizes every path (and each of its operations) declared in the document.
     * A document without paths is a no-op.
     */
    private void vectorizePaths(String documentId, OpenAPI openAPI, OpenApiDocumentManager.DocumentMetadata metadata) {
        if (openAPI.getPaths() == null) {
            return;
        }

        openAPI.getPaths().forEach((path, pathItem) ->
                vectorizePathItem(documentId, path, pathItem, metadata));
    }

    /**
     * Vectorizes a single path item: one chunk per HTTP method/operation, containing
     * the path, method, summary/description, parameters, response codes and tags.
     */
    private void vectorizePathItem(String documentId, String path, PathItem pathItem, OpenApiDocumentManager.DocumentMetadata metadata) {
        Map<PathItem.HttpMethod, Operation> operations = pathItem.readOperationsMap();

        operations.forEach((method, operation) -> {
            StringBuilder content = new StringBuilder();

            // Basic endpoint identity.
            content.append("接口路径: ").append(path).append("\n");
            content.append("HTTP 方法: ").append(method.name()).append("\n");

            if (operation.getSummary() != null) {
                content.append("接口摘要: ").append(operation.getSummary()).append("\n");
            }

            if (operation.getDescription() != null) {
                content.append("接口描述: ").append(operation.getDescription()).append("\n");
            }

            if (operation.getOperationId() != null) {
                content.append("操作ID: ").append(operation.getOperationId()).append("\n");
            }

            // Parameter list: name, location, description, required marker.
            if (operation.getParameters() != null && !operation.getParameters().isEmpty()) {
                content.append("参数列表:\n");
                for (Parameter param : operation.getParameters()) {
                    content.append("- ").append(param.getName());
                    if (param.getIn() != null) {
                        content.append(" (").append(param.getIn()).append(")");
                    }
                    if (param.getDescription() != null) {
                        content.append(": ").append(param.getDescription());
                    }
                    if (Boolean.TRUE.equals(param.getRequired())) {
                        content.append(" [必填]");
                    }
                    content.append("\n");
                }
            }

            // Response status codes with optional descriptions.
            if (operation.getResponses() != null && !operation.getResponses().isEmpty()) {
                for (Map.Entry<String, ApiResponse> entry : operation.getResponses().entrySet()) {
                    if (content.indexOf("响应状态码:\n") < 0) {
                        content.append("响应状态码:\n");
                    }
                    content.append("- ").append(entry.getKey());
                    if (entry.getValue().getDescription() != null) {
                        content.append(": ").append(entry.getValue().getDescription());
                    }
                    content.append("\n");
                }
            }

            // Tag list.
            if (operation.getTags() != null && !operation.getTags().isEmpty()) {
                content.append("标签: ").append(String.join(", ", operation.getTags())).append("\n");
            }

            Map<String, Object> vectorMetadata = new HashMap<>();
            vectorMetadata.put("type", "api_endpoint");
            vectorMetadata.put("documentId", documentId);
            vectorMetadata.put("documentName", metadata.getTitle());
            vectorMetadata.put("path", path);
            vectorMetadata.put("method", method.name());
            // NOTE(review): operationId/summary/tags may be null; verify the vector
            // store tolerates null metadata values.
            vectorMetadata.put("operationId", operation.getOperationId());
            vectorMetadata.put("summary", operation.getSummary());
            vectorMetadata.put("tags", operation.getTags());

            // NOTE(review): sanitizing the path this way can collide for distinct
            // paths (e.g. "/a/b" vs "/a_b") — later chunks would overwrite earlier
            // ones. Consider including a hash of the raw path in the id.
            String chunkId = documentId + "_" + method.name().toLowerCase() + "_" + path.replaceAll("[^a-zA-Z0-9]", "_");
            vectorDatabaseService.addDocument(chunkId, content.toString(), vectorMetadata);
        });
    }

    /**
     * Vectorizes all component schemas into a single chunk with id
     * {@code <documentId>_schemas}. A document without schemas is a no-op.
     */
    private void vectorizeSchemas(String documentId, OpenAPI openAPI, OpenApiDocumentManager.DocumentMetadata metadata) {
        if (openAPI.getComponents() == null || openAPI.getComponents().getSchemas() == null) {
            return;
        }

        StringBuilder content = new StringBuilder();
        content.append("数据模型定义:\n");

        openAPI.getComponents().getSchemas().forEach((schemaName, schema) -> {
            content.append("模型名称: ").append(schemaName).append("\n");
            if (schema.getDescription() != null) {
                content.append("模型描述: ").append(schema.getDescription()).append("\n");
            }
            if (schema.getProperties() != null) {
                content.append("属性列表:\n");
                schema.getProperties().forEach((propName, propSchema) -> {
                    // Only property names are indexed; nested schema details
                    // (type, format, constraints) are intentionally skipped.
                    content.append("- ").append(propName).append("\n");
                });
            }
            content.append("\n");
        });

        Map<String, Object> vectorMetadata = new HashMap<>();
        vectorMetadata.put("type", "data_models");
        vectorMetadata.put("documentId", documentId);
        vectorMetadata.put("documentName", metadata.getTitle());
        vectorMetadata.put("modelCount", openAPI.getComponents().getSchemas().size());

        vectorDatabaseService.addDocument(documentId + "_schemas", content.toString(), vectorMetadata);
    }

    /**
     * Removes this document's vector data from the vector database.
     *
     * <p>NOTE(review): only the api_info and schemas chunks are removed; per-endpoint
     * chunks are left behind because no documentId → chunkId mapping is maintained.
     * A proper fix requires tracking the chunk ids created in
     * {@link #vectorizePathItem}.
     */
    public void removeDocumentVectors(String documentId) {
        vectorDatabaseService.removeDocument(documentId + "_api_info");
        vectorDatabaseService.removeDocument(documentId + "_schemas");

        logger.info("已删除文档 {} 的向量数据", documentId);
    }

    /**
     * Returns vectorization statistics from the vector database, tagged with this
     * service's name under the {@code "service"} key.
     *
     * <p>Copies the stats map defensively: the underlying service may return an
     * immutable or shared map, and mutating it directly would either throw
     * {@code UnsupportedOperationException} or corrupt shared state.
     */
    public Map<String, Object> getVectorizationStats() {
        Map<String, Object> stats = new HashMap<>(vectorDatabaseService.getStats());
        stats.put("service", "DocumentVectorizationService");
        return stats;
    }
}

