package com.caseprocessing.ai.language;

import com.caseprocessing.ai.AIServiceManager;
import com.caseprocessing.api.*;
import com.caseprocessing.model.DocumentSegment;
import com.caseprocessing.model.DocumentType;
import com.caseprocessing.model.CaseDocument;

import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * LLM-based semantic document processor.
 *
 * <p>Uses a large language model (via {@link SiliconFlowClient}) to perform semantic
 * understanding, classification, compliance reasoning, element extraction, consistency
 * checking, summarization and keyword extraction on legal case documents. All public
 * methods are asynchronous and return {@link CompletableFuture}s produced by the client.
 *
 * <p>NOTE(review): the prompt strings below contain {@code \\n} escape sequences; as
 * written they embed literal backslash-n text into the prompts rather than real
 * newlines — confirm whether actual line breaks were intended. The sequences are
 * preserved here unchanged.
 *
 * <p>Thread-safety: {@code processingConfig} is a plain {@link HashMap}; concurrent
 * calls to {@link #setConfig} while analyses run are not synchronized.
 */
public class SemanticDocumentProcessor {

    // Common legal-document keywords. Compiled once (Pattern is immutable and
    // thread-safe) instead of on every extractKeywords() call.
    private static final Pattern LEGAL_KEYWORD_PATTERN =
        Pattern.compile("(处罚|决定|告知|调查|证据|违法|法律|条例|规定)");

    private final SiliconFlowClient siliconFlowClient;
    private final SemanticPromptLibrary promptLibrary;
    private final Map<String, Object> processingConfig;

    public SemanticDocumentProcessor(SiliconFlowClient siliconFlowClient) {
        this.siliconFlowClient = siliconFlowClient;
        this.promptLibrary = new SemanticPromptLibrary();
        this.processingConfig = new HashMap<>();

        // Default configuration.
        processingConfig.put("max_context_length", 8000);
        processingConfig.put("semantic_threshold", 0.7);
        processingConfig.put("coherence_threshold", 0.6);
    }

    /**
     * Semantic understanding of a document's content.
     *
     * <p>Content that fits within {@code max_context_length} is analyzed in a single
     * LLM call; longer content is split into segments, analyzed concurrently, and the
     * results merged.
     *
     * @param documentContent raw document text
     * @return future resolving to the (possibly merged) semantic analysis result
     */
    public CompletableFuture<SemanticAnalysisResult> understandDocumentContent(
            String documentContent) {

        String prompt = promptLibrary.getSemanticUnderstandingPrompt();

        // Split over-long content into model-sized segments.
        List<String> contentSegments = splitContentIntoSegments(documentContent);

        if (contentSegments.size() == 1) {
            // Short enough for a single pass.
            return analyzeSingleSegment(contentSegments.get(0), prompt);
        }
        // Long content: analyze per segment, then merge.
        return analyzeMultipleSegments(contentSegments, prompt);
    }

    /**
     * Intelligent (zero-shot) document classification, able to adapt to new
     * document types without retraining.
     *
     * @param documentSegment segment to classify
     * @return future resolving to the classification result
     */
    public CompletableFuture<DocumentClassificationResult> intelligentClassification(
            DocumentSegment documentSegment) {

        String prompt = promptLibrary.getIntelligentClassificationPrompt();

        List<Message> messages = buildMessages(prompt,
            "请分析以下文档段落并进行精确分类：\\n\\n" + documentSegment.getContent());

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.1)   // low temperature: deterministic classification
                .maxTokens(1000)
        ).thenApply(this::parseClassificationResult);
    }

    /**
     * Compliance reasoning: checks a document segment against the supplied legal
     * standards using the LLM's understanding of the regulations.
     *
     * @param documentSegment segment under review
     * @param documentType    type of the document (selects the prompt template)
     * @param legalStandards  applicable legal standards, one per list entry
     * @return future resolving to the compliance reasoning result
     */
    public CompletableFuture<ComplianceReasoningResult> complianceReasoning(
            DocumentSegment documentSegment,
            DocumentType documentType,
            List<String> legalStandards) {

        String prompt = promptLibrary.getComplianceReasoningPrompt(documentType);
        String standardsContext = String.join("\\n", legalStandards);

        List<Message> messages = buildMessages(prompt,
            "法律标准：\\n" + standardsContext +
                "\\n\\n文档内容：\\n" + documentSegment.getContent());

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.1)
                .maxTokens(2000)
        ).thenApply(this::parseComplianceReasoningResult);
    }

    /**
     * Extracts the key legal elements from a document.
     *
     * @param documentContent document text
     * @param documentType    type of the document (selects the prompt template)
     * @return future resolving to the extracted legal elements
     */
    public CompletableFuture<LegalElementsResult> extractLegalElements(
            String documentContent,
            DocumentType documentType) {

        String prompt = promptLibrary.getLegalElementsExtractionPrompt(documentType);

        List<Message> messages = buildMessages(prompt,
            "请从以下文档中提取关键法律要素：\\n\\n" + documentContent);

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.1)
                .maxTokens(1500)
        ).thenApply(this::parseLegalElementsResult);
    }

    /**
     * Verifies the logical consistency across a set of document segments.
     *
     * @param documentSegments segments to cross-check (order preserved in the prompt)
     * @return future resolving to the consistency check result
     */
    public CompletableFuture<ConsistencyCheckResult> verifyContentConsistency(
            List<DocumentSegment> documentSegments) {

        String prompt = promptLibrary.getConsistencyCheckPrompt();

        // Concatenate all segments with numbered headers so the model can
        // reference them individually.
        StringBuilder allContent = new StringBuilder();
        for (int i = 0; i < documentSegments.size(); i++) {
            DocumentSegment segment = documentSegments.get(i);
            allContent.append("===== 文档段落 ").append(i + 1).append(" =====\\n");
            allContent.append("类型：").append(segment.getDocumentType().getDescription()).append("\\n");
            allContent.append("内容：").append(segment.getContent()).append("\\n\\n");
        }

        List<Message> messages = buildMessages(prompt,
            "请验证以下文档段落之间的逻辑一致性：\\n\\n" + allContent);

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.1)
                .maxTokens(2000)
        ).thenApply(this::parseConsistencyCheckResult);
    }

    /**
     * Generates concrete improvement suggestions for a segment, given a list of
     * issues already identified elsewhere.
     *
     * @param documentSegment  segment to improve
     * @param identifiedIssues previously identified issues, one per list entry
     * @return future resolving to the improvement suggestions
     */
    public CompletableFuture<ImprovementSuggestionResult> generateImprovementSuggestions(
            DocumentSegment documentSegment,
            List<String> identifiedIssues) {

        String prompt = promptLibrary.getImprovementSuggestionPrompt();
        String issuesContext = String.join("\\n", identifiedIssues);

        List<Message> messages = buildMessages(prompt,
            "已识别的问题：\\n" + issuesContext +
                "\\n\\n文档内容：\\n" + documentSegment.getContent());

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.3)   // slightly higher: suggestions benefit from variety
                .maxTokens(1500)
        ).thenApply(this::parseImprovementSuggestionResult);
    }

    /**
     * Generates an intelligent summary of a whole case file.
     *
     * @param caseDocument     the case file metadata (id, page count)
     * @param documentSegments its segments; each contributes a type, page range
     *                         and a content preview (first 200 chars) to the prompt
     * @return future resolving to the document summary
     */
    public CompletableFuture<DocumentSummaryResult> generateDocumentSummary(
            CaseDocument caseDocument,
            List<DocumentSegment> documentSegments) {

        String prompt = promptLibrary.getDocumentSummaryPrompt();

        // Build a compact overview of the case file for the model.
        StringBuilder documentOverview = new StringBuilder();
        documentOverview.append("案卷信息：\\n");
        documentOverview.append("- 案卷ID：").append(caseDocument.getDocumentId()).append("\\n");
        documentOverview.append("- 总页数：").append(caseDocument.getTotalPages()).append("\\n\\n");

        documentOverview.append("文档段落：\\n");
        for (int i = 0; i < documentSegments.size(); i++) {
            DocumentSegment segment = documentSegments.get(i);
            documentOverview.append(i + 1).append(". ").append(segment.getDocumentType().getDescription());
            documentOverview.append("（第").append(segment.getStartPage()).append("-").append(segment.getEndPage()).append("页）\\n");

            // Truncate long segments to a 200-character preview.
            String contentPreview = segment.getContent().length() > 200
                ? segment.getContent().substring(0, 200) + "..."
                : segment.getContent();
            documentOverview.append("   内容摘要：").append(contentPreview).append("\\n\\n");
        }

        List<Message> messages = buildMessages(prompt,
            "请为以下案卷生成智能摘要：\\n\\n" + documentOverview);

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.2)
                .maxTokens(1000)
        ).thenApply(this::parseDocumentSummaryResult);
    }

    /**
     * Batch semantic analysis: runs {@link #understandDocumentContent} concurrently
     * for every segment and gathers the results in input order.
     *
     * @param documentSegments segments to analyze
     * @param analysisType     label recorded on the batch result
     * @return future resolving to the batch result
     */
    public CompletableFuture<BatchSemanticResult> batchSemanticAnalysis(
            List<DocumentSegment> documentSegments,
            String analysisType) {

        List<CompletableFuture<SemanticAnalysisResult>> futures = new ArrayList<>();
        for (DocumentSegment segment : documentSegments) {
            futures.add(understandDocumentContent(segment.getContent()));
        }

        return joinAll(futures)
                .thenApply(results -> new BatchSemanticResult(results, analysisType));
    }

    /**
     * Extracts keywords and topics from a document via the LLM.
     *
     * @param documentContent document text
     * @return future resolving to the keyword/topic result
     */
    public CompletableFuture<KeywordTopicResult> extractKeywordsAndTopics(
            String documentContent) {

        String prompt = promptLibrary.getKeywordTopicPrompt();

        List<Message> messages = buildMessages(prompt,
            "请从以下文档中提取关键词和主题：\\n\\n" + documentContent);

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.1)
                .maxTokens(800)
        ).thenApply(this::parseKeywordTopicResult);
    }

    // ============ Private helpers ============

    /**
     * Builds the standard two-message conversation (system prompt + user content)
     * used by every LLM call in this class.
     */
    private List<Message> buildMessages(String systemPrompt, String userContent) {
        return Arrays.asList(
            Message.builder()
                .role("system")
                .content(systemPrompt)
                .build(),
            Message.builder()
                .role("user")
                .content(userContent)
                .build()
        );
    }

    /**
     * Waits for all futures and collects their results in input order.
     * Shared by {@link #batchSemanticAnalysis} and {@link #analyzeMultipleSegments}.
     */
    private static <T> CompletableFuture<List<T>> joinAll(List<CompletableFuture<T>> futures) {
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                .thenApply(v -> futures.stream()
                        .map(CompletableFuture::join)   // safe: allOf already completed them
                        .collect(Collectors.toList()));
    }

    /**
     * Splits long content into segments no longer than {@code max_context_length},
     * preferring paragraph boundaries, then sentence boundaries ("。").
     *
     * <p>Bug fix: a single sentence longer than the limit is now hard-split into
     * limit-sized chunks; previously it was emitted as one oversize segment,
     * defeating the length limit entirely.
     */
    private List<String> splitContentIntoSegments(String content) {
        List<String> segments = new ArrayList<>();
        // Number.intValue() tolerates setConfig() having stored a Long/Short etc.,
        // where the previous (Integer) cast would throw ClassCastException.
        int maxLength = ((Number) processingConfig.get("max_context_length")).intValue();

        if (content.length() <= maxLength) {
            segments.add(content);
            return segments;
        }

        // Prefer splitting at paragraph boundaries.
        String[] paragraphs = content.split("\\n\\n");
        StringBuilder currentSegment = new StringBuilder();

        for (String paragraph : paragraphs) {
            if (currentSegment.length() + paragraph.length() + 2 <= maxLength) {
                if (currentSegment.length() > 0) {
                    currentSegment.append("\\n\\n");
                }
                currentSegment.append(paragraph);
            } else {
                if (currentSegment.length() > 0) {
                    segments.add(currentSegment.toString());
                    currentSegment = new StringBuilder();
                }

                if (paragraph.length() > maxLength) {
                    // Paragraph itself is too long: fall back to sentence splitting.
                    appendBySentence(paragraph, maxLength, segments, currentSegment);
                } else {
                    currentSegment.append(paragraph);
                }
            }
        }

        if (currentSegment.length() > 0) {
            segments.add(currentSegment.toString());
        }

        return segments;
    }

    /**
     * Splits an over-long paragraph at sentence boundaries, accumulating into
     * {@code currentSegment} (mutated in place) and flushing full segments into
     * {@code segments}. Sentences longer than {@code maxLength} are hard-split.
     */
    private void appendBySentence(String paragraph, int maxLength,
            List<String> segments, StringBuilder currentSegment) {
        String[] sentences = paragraph.split("。");
        for (String sentence : sentences) {
            // Hard-split pathological sentences that alone exceed the limit.
            while (sentence.length() > maxLength) {
                if (currentSegment.length() > 0) {
                    segments.add(currentSegment.toString());
                    currentSegment.setLength(0);
                }
                segments.add(sentence.substring(0, maxLength));
                sentence = sentence.substring(maxLength);
            }
            if (currentSegment.length() + sentence.length() + 1 <= maxLength) {
                if (currentSegment.length() > 0) {
                    currentSegment.append("。");   // restore the separator consumed by split()
                }
                currentSegment.append(sentence);
            } else {
                if (currentSegment.length() > 0) {
                    segments.add(currentSegment.toString());
                    currentSegment.setLength(0);
                }
                currentSegment.append(sentence);
            }
        }
    }

    /**
     * Analyzes a single content segment with one LLM call.
     */
    private CompletableFuture<SemanticAnalysisResult> analyzeSingleSegment(
            String content, String prompt) {

        List<Message> messages = buildMessages(prompt,
            "请分析以下文档内容：\\n\\n" + content);

        return siliconFlowClient.chatCompletionLLM(
            messages,
            ChatCompletionRequest.builder()
                .temperature(0.1)
                .maxTokens(2000)
        ).thenApply(this::parseSemanticAnalysisResult);
    }

    /**
     * Analyzes multiple content segments concurrently and merges the results.
     */
    private CompletableFuture<SemanticAnalysisResult> analyzeMultipleSegments(
            List<String> contentSegments, String prompt) {

        List<CompletableFuture<SemanticAnalysisResult>> futures = new ArrayList<>();
        for (String segment : contentSegments) {
            futures.add(analyzeSingleSegment(segment, prompt));
        }

        return joinAll(futures).thenApply(this::mergeSemanticAnalysisResults);
    }

    /**
     * Merges per-segment analysis results into one: concatenated main analysis,
     * confidence averaged across segments.
     */
    private SemanticAnalysisResult mergeSemanticAnalysisResults(
            List<SemanticAnalysisResult> results) {

        if (results.isEmpty()) {
            return new SemanticAnalysisResult("无分析结果");
        }
        if (results.size() == 1) {
            return results.get(0);
        }

        SemanticAnalysisResult merged = new SemanticAnalysisResult("合并分析结果");

        // Concatenate each segment's main analysis under a numbered header.
        StringBuilder mergedAnalysis = new StringBuilder();
        mergedAnalysis.append("综合语义分析结果：\\n\\n");
        for (int i = 0; i < results.size(); i++) {
            SemanticAnalysisResult result = results.get(i);
            mergedAnalysis.append("段落 ").append(i + 1).append("：\\n");
            mergedAnalysis.append(result.getMainAnalysis()).append("\\n\\n");
        }
        merged.setMainAnalysis(mergedAnalysis.toString());

        // Merged confidence = arithmetic mean of segment confidences.
        double avgConfidence = results.stream()
                .mapToDouble(SemanticAnalysisResult::getConfidence)
                .average()
                .orElse(0.0);
        merged.setConfidence(avgConfidence);

        return merged;
    }

    // ============ Result parsing ============

    private SemanticAnalysisResult parseSemanticAnalysisResult(ChatCompletionResponse response) {
        String content = response.getFirstChoiceContent();
        SemanticAnalysisResult result = new SemanticAnalysisResult(content);

        // Best-effort structured enrichment; the raw analysis text is kept even
        // if keyword/confidence extraction fails.
        try {
            result.setKeywords(extractKeywords(content));
            result.setConfidence(estimateConfidence(content));
        } catch (Exception e) {
            // Diagnostics belong on stderr, not stdout.
            System.err.println("解析语义分析结果时出错: " + e.getMessage());
        }

        return result;
    }

    private DocumentClassificationResult parseClassificationResult(ChatCompletionResponse response) {
        return new DocumentClassificationResult(response.getFirstChoiceContent());
    }

    private ComplianceReasoningResult parseComplianceReasoningResult(ChatCompletionResponse response) {
        return new ComplianceReasoningResult(response.getFirstChoiceContent());
    }

    private LegalElementsResult parseLegalElementsResult(ChatCompletionResponse response) {
        return new LegalElementsResult(response.getFirstChoiceContent());
    }

    private ConsistencyCheckResult parseConsistencyCheckResult(ChatCompletionResponse response) {
        return new ConsistencyCheckResult(response.getFirstChoiceContent());
    }

    private ImprovementSuggestionResult parseImprovementSuggestionResult(ChatCompletionResponse response) {
        return new ImprovementSuggestionResult(response.getFirstChoiceContent());
    }

    private DocumentSummaryResult parseDocumentSummaryResult(ChatCompletionResponse response) {
        return new DocumentSummaryResult(response.getFirstChoiceContent());
    }

    private KeywordTopicResult parseKeywordTopicResult(ChatCompletionResponse response) {
        return new KeywordTopicResult(response.getFirstChoiceContent());
    }

    /**
     * Extracts legal keywords by pattern match, deduplicated in first-seen order.
     * Uses the precompiled {@link #LEGAL_KEYWORD_PATTERN} and a LinkedHashSet
     * instead of the previous per-call Pattern.compile + List.contains scan.
     */
    private List<String> extractKeywords(String content) {
        Set<String> keywords = new LinkedHashSet<>();
        Matcher matcher = LEGAL_KEYWORD_PATTERN.matcher(content);
        while (matcher.find()) {
            keywords.add(matcher.group(1));
        }
        return new ArrayList<>(keywords);
    }

    /**
     * Heuristic confidence score based on hedging vs. assertive wording in the
     * model's answer: assertive terms → 0.9, hedging terms → 0.6, otherwise 0.8.
     */
    private double estimateConfidence(String content) {
        if (content.contains("明确") || content.contains("清楚") || content.contains("确定")) {
            return 0.9;
        } else if (content.contains("可能") || content.contains("似乎") || content.contains("疑似")) {
            return 0.6;
        } else {
            return 0.8;
        }
    }

    /**
     * Sets a processing configuration value (e.g. {@code "max_context_length"}).
     */
    public void setConfig(String key, Object value) {
        processingConfig.put(key, value);
    }

    /**
     * Returns a processing configuration value, or {@code null} if unset.
     */
    public Object getConfig(String key) {
        return processingConfig.get(key);
    }
}