package com.jim.healio.graphrag.service;

import com.jim.healio.graphrag.model.MedicalDocument;
import com.jim.healio.graphrag.model.DocumentSearchResult;
import lombok.val;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.document.Document;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.stream.Collectors;

/**
 * Chroma document service built on the Spring AI {@link VectorStore} abstraction.
 *
 * <p>Stores {@link MedicalDocument}s as vector-store {@link Document}s with a flat
 * metadata map (tags and entity ids are persisted as comma-joined strings) and
 * offers semantic, entity-filtered and hybrid retrieval. When no {@code VectorStore}
 * bean is configured, every operation degrades to a logged no-op / empty result
 * instead of failing.
 */
@Service
public class ChromaDocumentService {

    private static final Logger logger = LoggerFactory.getLogger(ChromaDocumentService.class);

    /** Similarity reported when the store does not surface a score in metadata. */
    private static final double DEFAULT_SIMILARITY = 0.8;

    /** Maximum length of the highlighted snippet attached to each result. */
    private static final int HIGHLIGHT_LENGTH = 200;

    private final VectorStore vectorStore;

    public ChromaDocumentService(VectorStore vectorStore) {
        this.vectorStore = vectorStore;
    }

    /**
     * Stores a single medical document in the vector database.
     *
     * @param medicalDocument the document to persist; its content is embedded and
     *                        its fields are flattened into searchable metadata
     * @throws RuntimeException if the underlying vector store rejects the document
     */
    public void storeDocument(MedicalDocument medicalDocument) {
        if (vectorStore == null) {
            logger.warn("VectorStore未配置，使用模拟存储");
            return;
        }

        try {
            logger.info("存储文档到向量数据库: {}", medicalDocument.getTitle());

            // Delegate to the shared converter instead of duplicating the
            // metadata-assembly logic it already contains.
            vectorStore.add(List.of(convertToDocument(medicalDocument)));

            logger.info("文档存储成功: {}", medicalDocument.getTitle());

        } catch (Exception e) {
            logger.error("文档存储失败: {}", medicalDocument.getTitle(), e);
            throw new RuntimeException("文档存储失败", e);
        }
    }

    /**
     * Performs a semantic (embedding-similarity) search over stored documents.
     *
     * @param query free-text query to embed and match
     * @param limit maximum number of results to return; non-positive yields an empty list
     * @return matching documents ordered by the store's similarity ranking; empty on
     *         error or when no vector store is configured (never {@code null})
     */
    public List<DocumentSearchResult> semanticSearch(String query, int limit) {
        if (vectorStore == null) {
            logger.warn("VectorStore未配置，返回空结果");
            return new ArrayList<>();
        }
        if (limit <= 0) {
            return new ArrayList<>();
        }

        try {
            logger.info("执行语义搜索: {}, 限制: {}", query, limit);

            // BUG FIX: the previous implementation called similaritySearch(query)
            // and silently ignored "limit"; pass it through as topK.
            SearchRequest searchRequest = SearchRequest.builder()
                    .query(query)
                    .topK(limit)
                    .build();
            List<Document> documents = vectorStore.similaritySearch(searchRequest);

            // A plain "assert" only fires with -ea enabled; use a real check.
            if (documents == null) {
                return new ArrayList<>();
            }

            List<DocumentSearchResult> results = documents.stream()
                .map(this::convertToSearchResult)
                .collect(Collectors.toList());

            logger.info("语义搜索完成，找到 {} 个结果", results.size());
            return results;

        } catch (Exception e) {
            logger.error("语义搜索失败: {}", query, e);
            return new ArrayList<>();
        }
    }

    /**
     * Retrieves documents whose extracted-entity list contains the given entity id.
     *
     * <p>The entity id is also used as the similarity query text, then results are
     * filtered to those whose {@code entities} metadata contains the exact id.
     *
     * @param entityId the entity id to match exactly (not as a substring)
     * @param limit    maximum number of results
     * @return matching documents; empty on error or when no store is configured
     */
    public List<DocumentSearchResult> queryDocumentsByEntity(String entityId, int limit) {
        if (vectorStore == null) {
            logger.warn("VectorStore未配置，返回空结果");
            return new ArrayList<>();
        }

        try {
            logger.info("根据实体查询文档: {}, 限制: {}", entityId, limit);

            SearchRequest searchRequest = SearchRequest.builder()
                    .query(entityId)
                    .topK(limit)
                    .similarityThreshold(0.3)
                    .build();

            List<Document> documents = vectorStore.similaritySearch(searchRequest);

            // Keep only documents whose entity list contains the id exactly.
            List<DocumentSearchResult> results = documents.stream()
                .filter(doc -> containsEntity((String) doc.getMetadata().get("entities"), entityId))
                .map(this::convertToSearchResult)
                .limit(limit)
                .collect(Collectors.toList());

            logger.info("实体文档查询完成，找到 {} 个结果", results.size());
            return results;

        } catch (Exception e) {
            logger.error("实体文档查询失败: {}", entityId, e);
            return new ArrayList<>();
        }
    }

    /**
     * Hybrid search: semantic retrieval followed by entity filtering.
     *
     * @param query     free-text query for the semantic stage
     * @param entityIds entity ids to filter by; {@code null}/empty skips filtering
     * @param limit     maximum number of results
     * @return up to {@code limit} results; empty on error or when no store is configured
     */
    public List<DocumentSearchResult> hybridSearch(String query, List<String> entityIds, int limit) {
        if (vectorStore == null) {
            logger.warn("VectorStore未配置，返回空结果");
            return new ArrayList<>();
        }

        try {
            logger.info("执行混合搜索: {}, 实体: {}, 限制: {}", query, entityIds, limit);

            // Over-fetch so entity filtering still leaves enough candidates.
            List<DocumentSearchResult> semanticResults = semanticSearch(query, limit * 2);

            if (entityIds == null || entityIds.isEmpty()) {
                // Defensive copy so callers never hold a subList view.
                return new ArrayList<>(
                    semanticResults.subList(0, Math.min(semanticResults.size(), limit)));
            }

            // Keep documents containing at least one requested entity (exact match).
            List<DocumentSearchResult> filteredResults = semanticResults.stream()
                .filter(result -> {
                    if (result.getMetadata() == null) {
                        return false;
                    }
                    String entities = (String) result.getMetadata().get("entities");
                    return entityIds.stream().anyMatch(id -> containsEntity(entities, id));
                })
                .limit(limit)
                .collect(Collectors.toList());

            logger.info("混合搜索完成，找到 {} 个结果", filteredResults.size());
            return filteredResults;

        } catch (Exception e) {
            logger.error("混合搜索失败: {}", query, e);
            return new ArrayList<>();
        }
    }

    /**
     * Stores a batch of medical documents in a single vector-store call.
     *
     * @param medicalDocuments documents to persist; {@code null}/empty is a no-op
     * @throws RuntimeException if the underlying vector store rejects the batch
     */
    public void storeDocuments(List<MedicalDocument> medicalDocuments) {
        if (vectorStore == null) {
            logger.warn("VectorStore未配置，跳过批量存储");
            return;
        }
        if (medicalDocuments == null || medicalDocuments.isEmpty()) {
            return;
        }

        try {
            logger.info("批量存储 {} 个文档", medicalDocuments.size());

            List<Document> documents = medicalDocuments.stream()
                .map(this::convertToDocument)
                .collect(Collectors.toList());

            vectorStore.add(documents);

            logger.info("批量存储完成");

        } catch (Exception e) {
            logger.error("批量存储失败", e);
            throw new RuntimeException("批量存储失败", e);
        }
    }

    /**
     * Deletes a document from the vector store by its id.
     *
     * @param documentId id of the document to delete
     * @throws RuntimeException if the deletion fails
     */
    public void deleteDocument(String documentId) {
        if (vectorStore == null) {
            logger.warn("VectorStore未配置，跳过删除操作");
            return;
        }

        try {
            logger.info("删除文档: {}", documentId);
            vectorStore.delete(List.of(documentId));
            logger.info("文档删除成功: {}", documentId);

        } catch (Exception e) {
            logger.error("文档删除失败: {}", documentId, e);
            throw new RuntimeException("文档删除失败", e);
        }
    }

    /**
     * Converts a {@link MedicalDocument} into a Spring AI {@link Document},
     * flattening its fields into string metadata.
     */
    private Document convertToDocument(MedicalDocument medicalDocument) {
        Map<String, Object> metadata = new HashMap<>();
        // NOTE(review): null values are skipped — vector-store metadata is
        // typically null-hostile; confirm against the configured store.
        putIfPresent(metadata, "id", medicalDocument.getId());
        putIfPresent(metadata, "title", medicalDocument.getTitle());
        putIfPresent(metadata, "type",
            medicalDocument.getType() != null ? medicalDocument.getType().name() : null);
        putIfPresent(metadata, "source", medicalDocument.getSource());
        putIfPresent(metadata, "author", medicalDocument.getAuthor());
        putIfPresent(metadata, "tags", joinSafely(medicalDocument.getTags()));
        putIfPresent(metadata, "entities", joinSafely(medicalDocument.getExtractedEntityIds()));
        putIfPresent(metadata, "created_at",
            medicalDocument.getCreatedAt() != null ? medicalDocument.getCreatedAt().toString() : null);

        return new Document(
            medicalDocument.getId(),
            medicalDocument.getContent(),
            metadata
        );
    }

    /**
     * Converts a Spring AI {@link Document} into a {@link DocumentSearchResult},
     * copying metadata, deriving a similarity score and a truncated highlight.
     */
    private DocumentSearchResult convertToSearchResult(Document document) {
        Map<String, Object> metadata = document.getMetadata();

        DocumentSearchResult result = new DocumentSearchResult();
        result.setDocumentId(document.getId());
        result.setTitle((String) metadata.get("title"));
        result.setContent(document.getFormattedContent());
        result.setMetadata(metadata);
        result.setSource((String) metadata.get("source"));

        // Use the store-provided similarity when present, else a fixed default.
        Object similarityObj = metadata.get("similarity");
        if (similarityObj instanceof Number) {
            result.setSimilarity(((Number) similarityObj).doubleValue());
        } else {
            result.setSimilarity(DEFAULT_SIMILARITY);
        }

        // Simplified highlighting: a leading snippet of the content.
        String content = document.getFormattedContent();
        if (content != null && content.length() > HIGHLIGHT_LENGTH) {
            result.setHighlightedText(content.substring(0, HIGHLIGHT_LENGTH) + "...");
        } else {
            result.setHighlightedText(content);
        }

        return result;
    }

    /**
     * True when the comma-joined entity list contains exactly {@code entityId}.
     * A plain substring check would wrongly match "e1" against "e12".
     */
    private static boolean containsEntity(String entities, String entityId) {
        return entities != null && List.of(entities.split(",")).contains(entityId);
    }

    /** Puts a metadata entry only when the value is non-null. */
    private static void putIfPresent(Map<String, Object> metadata, String key, Object value) {
        if (value != null) {
            metadata.put(key, value);
        }
    }

    /** Comma-joins the values, treating a null collection as empty. */
    private static String joinSafely(Iterable<? extends CharSequence> values) {
        return values == null ? "" : String.join(",", values);
    }

    /**
     * Reports whether a vector store backend is configured and usable.
     *
     * @return {@code true} when a {@link VectorStore} bean was injected
     */
    public boolean isVectorStoreAvailable() {
        return vectorStore != null;
    }
}