﻿using System.Text;
using Microsoft.Extensions.AI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Qdrant;
using Microsoft.SemanticKernel.Text;
using NLog;
using WYD.Common.AI;
using WYD.Infrastructure.Attribute;
using WYD.Model.AI.OpenAIApi;
using WYD.Model.AI.RAG;
using WYD.Service.AI.IService;

namespace WYD.Service.AI;

#pragma warning disable SKEXP0010, SKEXP0001, SKEXP0050

[AppService(ServiceType = typeof(IRagService), ServiceLifetime = LifeTime.Transient)]
public class RagService : IRagService
{
    /// <summary>
    /// Name of the Qdrant collection holding document chunks. Shared by
    /// ingestion and retrieval so the two sides cannot drift apart.
    /// </summary>
    private const string CollectionName = "doc_chunks";

    /// <summary>
    /// System prompt shared by both chat entry points. Kept verbatim (Chinese)
    /// because it is runtime behavior, not documentation.
    /// </summary>
    private const string SystemPrompt =
        "你是一个小文的AI助手，请帮助大家查找信息和回答问题，注意保留图片信息，图片紧跟操作步骤，直接显示图片,不要点击后跳转";

    private readonly Logger _logger = LogManager.GetCurrentClassLogger();

    private readonly Kernel _kernel;
    private readonly QdrantVectorStore _vectorStore;
    private readonly IChatCompletionService _chatCompletionService;

    public RagService(Kernel kernel, QdrantVectorStore vectorStore)
    {
        _kernel = kernel;
        _vectorStore = vectorStore;
        // (fixed: removed a stray second semicolon here)
        _chatCompletionService = _kernel.GetRequiredService<IChatCompletionService>();
    }

    /// <summary>
    /// Preprocesses Markdown documents and loads them into the vector store:
    /// splits every *.md file under <paramref name="ragDirectory"/> into
    /// paragraph chunks, embeds each chunk, and upserts the batch into Qdrant.
    /// </summary>
    /// <param name="ragDirectory">Directory (searched recursively) containing the Markdown files.</param>
    /// <returns>A task that completes when all chunks have been upserted.</returns>
    public async Task InitializeAsync(string ragDirectory)
    {
        var embeddingGenerator = _kernel.GetRequiredService<IEmbeddingGenerator<string, Embedding<float>>>();
        var collection = _vectorStore.GetCollection<ulong, DocumentChunk>(CollectionName);

        await collection.EnsureCollectionExistsAsync();

        var markdownFiles = Directory.EnumerateFiles(ragDirectory, "*.md", SearchOption.AllDirectories);
        var chunks = new List<DocumentChunk>();
        ulong index = 0;

        foreach (var filePath in markdownFiles)
        {
            var markdownContent = await File.ReadAllTextAsync(filePath);
            // Flatten CRLF to spaces, split into lines of at most 100 tokens,
            // then regroup the lines into paragraphs of at most 512 tokens.
            var paragraphs =
                TextChunker.SplitMarkdownParagraphs(
                    TextChunker.SplitMarkDownLines(markdownContent.Replace("\r\n", " "), 100),
                    512);

            foreach (var para in paragraphs)
            {
                var embedding = await embeddingGenerator.GenerateAsync(para);
                chunks.Add(new DocumentChunk
                {
                    Id = index,
                    SourceFile = Path.GetFileName(filePath),
                    // NOTE(review): StartIndex stores the global chunk index,
                    // not a character offset — confirm that is intended.
                    StartIndex = (int)index,
                    Content = para,
                    Embedding = embedding.Vector,
                    SectionTitle = ExtractSectionTitle(para, markdownContent)
                });
                index++;
            }
            _logger.Info($"已处理文件: {filePath}");
        }

        // Guard against an empty/markdown-free directory: skip the upsert
        // rather than send an empty batch to the store.
        if (chunks.Count > 0)
        {
            await collection.UpsertAsync(chunks);
        }
    }

    /// <summary>
    /// Builds the chat history shared by both chat entry points: retrieves the
    /// top-3 chunks relevant to the latest user message, prepends them as
    /// additional context, then replays the conversation messages.
    /// </summary>
    /// <param name="messages">Full conversation; the last message is used as the retrieval query.</param>
    /// <returns>The populated <see cref="ChatHistory"/> ready for completion.</returns>
    private async Task<ChatHistory> BuildChatHistoryAsync(List<Message> messages)
    {
        var userQuestion = messages.LastOrDefault()?.Content ?? string.Empty;
        var collection = _vectorStore.GetCollection<ulong, DocumentChunk>(CollectionName);
        var additionalInfo = new StringBuilder();

        await foreach (var hit in collection.SearchAsync(userQuestion, top: 3))
        {
            additionalInfo.AppendLine(hit.Record.Content);
        }

        var chatHistory = new ChatHistory(SystemPrompt);

        if (additionalInfo.Length != 0)
        {
            additionalInfo.Insert(0, "以下是一些附加信息：\n");
            chatHistory.AddUserMessage(additionalInfo.ToString());
        }

        // Replay the conversation. Role matching is ordinal case-insensitive to
        // avoid the culture-sensitive ToLower() pitfall (e.g. Turkish 'I').
        foreach (var message in messages)
        {
            if (string.Equals(message.Role, "user", StringComparison.OrdinalIgnoreCase))
            {
                chatHistory.AddUserMessage(message.Content);
            }
            else if (string.Equals(message.Role, "assistant", StringComparison.OrdinalIgnoreCase))
            {
                chatHistory.AddAssistantMessage(message.Content);
            }
            // Other roles (e.g. "system") are intentionally ignored, as before.
        }

        return chatHistory;
    }

    /// <summary>
    /// Uses RAG retrieval to generate a streaming chat reply.
    /// </summary>
    /// <param name="messages">Conversation history; the last message is the current question.</param>
    /// <returns>The AI answer as a stream of content chunks.</returns>
    public async IAsyncEnumerable<string> GetChatResponseStreamAsync(List<Message> messages)
    {
        var chatHistory = await BuildChatHistoryAsync(messages);

        var inputText = string.Join("", chatHistory.Select(c => c.Content));
        _logger.Info($"原始文本: {inputText}");
        var inputTokens = QwenTokenizer.CountTokens(inputText);

        // StringBuilder instead of string += to avoid O(n^2) allocation while streaming.
        var outputBuilder = new StringBuilder();
        // Key point: GetStreamingChatMessageContentsAsync yields the reply incrementally.
        await foreach (var message in _chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            if (string.IsNullOrEmpty(message.Content)) continue;
            // Hand each chunk to the caller as soon as it arrives.
            yield return message.Content;
            // Accumulate the full reply for token accounting below.
            outputBuilder.Append(message.Content);
        }

        var outputTokens = QwenTokenizer.CountTokens(outputBuilder.ToString());
        var totalTokens = inputTokens + outputTokens;
        _logger.Info($"输入的Prompt Token数量：{inputTokens}，输出的Token数量：{outputTokens}，总Token数量：{totalTokens}");
    }

    /// <summary>
    /// Uses RAG retrieval to generate a complete (non-streaming) chat reply.
    /// </summary>
    /// <param name="messages">Conversation history; the last message is the current question.</param>
    /// <returns>The full AI answer.</returns>
    public async Task<string> GetChatResponseAsync(List<Message> messages)
    {
        var chatHistory = await BuildChatHistoryAsync(messages);

        var inputText = string.Join("", chatHistory.Select(c => c.Content));
        _logger.Info($"原始文本: {inputText}");
        var inputTokens = QwenTokenizer.CountTokens(inputText);

        // Drain the stream into a single string (same completion path as the
        // streaming variant, so token accounting stays consistent).
        var chatResponse = new StringBuilder();
        await foreach (var message in _chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            if (string.IsNullOrEmpty(message.Content)) continue;
            chatResponse.Append(message.Content);
        }

        var outputTokens = QwenTokenizer.CountTokens(chatResponse.ToString());
        var totalTokens = inputTokens + outputTokens;
        _logger.Info($"输入的Prompt Token数量：{inputTokens}，输出的Token数量：{outputTokens}，总Token数量：{totalTokens}");
        return chatResponse.ToString();
    }

    /// <summary>
    /// Finds the most recent "## " section heading that precedes the first
    /// occurrence of <paramref name="paragraph"/> (matched by its first 20
    /// characters) in the full Markdown text.
    /// </summary>
    /// <param name="paragraph">Chunk text to locate.</param>
    /// <param name="fullMarkdown">The complete source document.</param>
    /// <returns>The enclosing section title, or null if none is found.</returns>
    private static string? ExtractSectionTitle(string paragraph, string fullMarkdown)
    {
        // An empty probe would match every line via Contains("") and return
        // whatever title (usually none) precedes the very first line; bail out.
        if (string.IsNullOrEmpty(paragraph)) return null;

        // Hoisted out of the loop — the original recomputed this per line.
        var probe = paragraph.Substring(0, Math.Min(20, paragraph.Length));
        string? currentTitle = null;

        foreach (var line in fullMarkdown.Split('\n'))
        {
            if (line.StartsWith("## ", StringComparison.Ordinal))
            {
                currentTitle = line.TrimStart('#').Trim();
            }
            else if (line.Contains(probe))
            {
                return currentTitle;
            }
        }
        return null;
    }
}