using System;
using System.IO;
using System.Linq;
using System.Text;
using Project.Commom;
using Project.Commom.Model;
using Project.Commom.Plugins;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using System.Collections.Generic;
using Microsoft.Extensions.Options;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;

namespace Project.SemanticKernelService
{
    /// <summary>
    /// Chat service: routes each request to the appropriate model (chat, reasoning
    /// or multimodal), keeps an independent conversation history per model type,
    /// and streams the assistant's reply back to the caller.
    /// </summary>
    public class ChatService : IChatService
    {
        // Chat-completion service per model type, resolved from the kernel by service key.
        private readonly Dictionary<ModelType, IChatCompletionService> _modelServices = new();

        // Independent conversation history per model type.
        // NOTE(review): histories are shared across requests and never trimmed —
        // confirm the intended service lifetime and thread-safety expectations.
        private readonly Dictionary<ModelType, ChatHistory> _modelHistories = new();

        private readonly ModelKernels _modelKernels;
        private readonly Kernel _kernel;

        /// <summary>
        /// Registers every configured model with the kernel, adds plugins, and
        /// prepares one service + history pair per model type.
        /// </summary>
        /// <param name="modelKernels">Model configuration (model id, endpoint, API key, model type).</param>
        public ChatService(IOptionsSnapshot<ModelKernels> modelKernels)
        {
            // Snapshot of the model configuration.
            _modelKernels = modelKernels.Value;

            // Builder used to assemble the semantic-kernel instance.
            var builder = Kernel.CreateBuilder();

            // Register every configured model; the ModelType name doubles as the service key.
            foreach (var model in _modelKernels.ModelKernel)
            {
                builder.AddOpenAIChatCompletion(modelId: model.ModelId, endpoint: new Uri(model.Endpoint), apiKey: model.ApiKey, serviceId: model.ModelType.ToString());
            }

            // Register plugins available for function calling.
            builder.Plugins.AddFromType<FunctionTest>();

            // Build the kernel with all services and plugins bound.
            _kernel = builder.Build();

            // Resolve each model's service and seed its history with a system prompt.
            foreach (var model in _modelKernels.ModelKernel)
            {
                // Look up the service registered above by its ModelType key.
                var service = _kernel.GetRequiredService<IChatCompletionService>(serviceKey: model.ModelType.ToString());

                _modelServices[model.ModelType] = service;

                // Each model type gets its own history, seeded with a role-specific prompt.
                ChatHistory history = new ChatHistory();

                history.AddSystemMessage(GetSystemPrompt(model.ModelType));

                _modelHistories[model.ModelType] = history;
            }
        }

        /// <summary>
        /// Returns the system prompt for a given model type
        /// (reasoning, multimodal, or the default conversational prompt).
        /// </summary>
        /// <param name="modelType">Model type, e.g. reasoning or multimodal.</param>
        /// <returns>The formatted system-prompt string.</returns>
        private string GetSystemPrompt(ModelType modelType)
        {
            return modelType switch
            {
                ModelType.Reasoning => GetReasoningPrompt(),
                ModelType.Multimodal => GetMultimodalPrompt(),
                _ => GetDefaultPrompt()
            };
        }

        /// <summary>
        /// Returns the system prompt for the multimodal model:
        /// rules for handling text, images and file inputs.
        /// </summary>
        private string GetMultimodalPrompt()
        {
            return "多模态AI处理规范:\n" +
                   "- 支持接收并处理文本、图像、文件等多类型输入。\n" +
                   "- 文件处理要求：自动解析文档内容，提取关键信息并生成简洁摘要。\n" +
                   "- 所有处理结果需以结构化方式呈现，便于后续调用与分析。\n";
        }

        /// <summary>
        /// Returns the system prompt for the reasoning model:
        /// structured problem decomposition, reasoning chains and conclusion checks.
        /// </summary>
        private string GetReasoningPrompt()
        {
            // Fixed prompt text; no interpolation needed (the original "$" prefix was dead).
            return "高级推理AI工作指南:\n" +
                   "- 接收到问题后，首先进行结构化拆解，识别核心问题与子问题。\n" +
                   "- 对每个子问题应用逻辑推理链，标注推理依据和假设前提。\n" +
                   "- 使用三步分析法：问题定义 → 深度分析 → 结论验证。\n" +
                   "- 确保推理过程可追溯，关键节点需保留中间结论。\n" +
                   "- 遇到模糊信息时，主动请求澄清并等待确认后再继续。";
        }

        /// <summary>
        /// Returns the default system prompt for the general-purpose
        /// conversational assistant (Q&amp;A, chat).
        /// </summary>
        private string GetDefaultPrompt()
        {
            return "智能对话助手服务准则:\n" +
                   "- 采用自然、友好的对话风格与用户交互。\n" +
                   "- 在理解上下文的基础上提供个性化回复。\n" +
                   "- 若需求不明确，应主动提问以获取补充信息。\n" +
                   "- 对模糊或歧义内容进行澄清确认后再执行操作。\n" +
                   "- 回复应简洁明了，避免冗余表达和无关信息。";
        }

        /// <summary>
        /// Streams the model response, selecting the model type automatically
        /// from the request's attachments and reasoning flag.
        /// </summary>
        /// <param name="dto">Incoming message (content, optional file data/path/MIME type, reasoning flag).</param>
        /// <returns>
        /// A stream of partial results. NOTE(review): the same <see cref="MessageResult"/>
        /// instance is mutated and re-yielded, so consumers must read each item before
        /// advancing the enumerator.
        /// </returns>
        async IAsyncEnumerable<MessageResult> IChatService.GetStreamingChatResponseAsync(MessageRequest dto)
        {
            // Reused result envelope for every yielded chunk.
            MessageResult messageResult = new MessageResult();

            // Pick the model based on attachments and the reasoning flag.
            var modelType = SelectModelType(dto.FileData, dto.FilePath, dto.Reasoning);

            var service = _modelServices[modelType];

            var history = _modelHistories[modelType];

            // Build the user message; multimodal requests also carry an image.
            if (modelType == ModelType.Multimodal)
            {
                if (string.IsNullOrEmpty(dto.MimeType))
                {
                    // No MIME type supplied: treat FilePath as an image URI.
                    history.AddMessage(AuthorRole.User, new ChatMessageContentItemCollection
                    {
                        new TextContent(dto.Content),
                        new ImageContent(new Uri(dto.FilePath))
                    });
                }
                else
                {
                    // MIME type supplied: send the raw file bytes inline.
                    history.AddMessage(AuthorRole.User, new ChatMessageContentItemCollection
                    {
                        new TextContent(dto.Content),
                        new ImageContent(new BinaryData(dto.FileData), mimeType: dto.MimeType)
                    });
                }
            }
            else
            {
                history.AddMessage(AuthorRole.User, dto.Content);
            }

            // Execution parameters tuned per model type: reasoning runs cooler,
            // multimodal gets a larger token budget.
            var executionSettings = new OpenAIPromptExecutionSettings
            {
                Temperature = modelType == ModelType.Reasoning ? 0.3 : 0.7,
                MaxTokens = modelType == ModelType.Multimodal ? 1500 : 800,
                TopP = 0.9,
                PresencePenalty = 0.5
            };

            // Function calling is only enabled for non-multimodal models.
            if (modelType != ModelType.Multimodal)
            {
                executionSettings.ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions;
            }

            if (modelType != ModelType.Reasoning)
            {
                // Non-reasoning path: request complete messages, looping so any
                // function calls returned by the model can be invoked before the
                // final text answer is produced.
                while (true)
                {
                    ChatMessageContent result = await service.GetChatMessageContentAsync(history, executionSettings: executionSettings, kernel: _kernel);

                    if (!string.IsNullOrEmpty(result.Content))
                    {
                        // Non-null after the IsNullOrEmpty check above.
                        string assistantContent = result.Content ?? "";

                        // Emit the answer character by character to simulate streaming.
                        foreach (var chat in assistantContent)
                        {
                            messageResult.Content = chat.ToString();

                            yield return messageResult;
                        }

                        history.Add(result);

                        messageResult.AuthorRole = "assistant";

                        messageResult.Content = assistantContent;

                        break;
                    }

                    history.Add(result);

                    // Empty text content: check whether the model requested function calls.
                    IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);

                    if (!functionCalls.Any())
                    {
                        break;
                    }

                    // Invoke each requested function and feed its result back into the history.
                    foreach (var functionCall in functionCalls)
                    {
                        FunctionResultContent resultContent = await functionCall.InvokeAsync(_kernel);

                        history.Add(resultContent.ToChatMessage());
                    }
                }
            }
            else
            {
                // Reasoning path: true token-by-token streaming.
                var result = service.GetStreamingChatMessageContentsAsync(history, executionSettings: executionSettings, kernel: _kernel);

                // Accumulates the full assistant reply for the history.
                var assistantContent = new StringBuilder();

                await foreach (var chatmessage in result)
                {
                    assistantContent.Append(chatmessage.Content);

                    if (!string.IsNullOrEmpty(chatmessage.Content))
                    {
                        messageResult.Content = chatmessage.Content;

                        yield return messageResult;
                    }
                }

                // Persist the complete reply so later turns see it.
                history.AddMessage(AuthorRole.Assistant, assistantContent.ToString());

                messageResult.AuthorRole = "assistant";

                messageResult.Content = assistantContent.ToString();
            }
        }

        /// <summary>
        /// Selects the model type for a request.
        /// </summary>
        /// <param name="fileData">Raw file bytes, if any; presence forces the multimodal model.</param>
        /// <param name="filePath">File path or URI, if any; presence forces the multimodal model.</param>
        /// <param name="reasoning">True to request the deep-thinking (reasoning) model.</param>
        /// <returns>The model type to use for this request.</returns>
        private ModelType SelectModelType(byte[]? fileData, string filePath, bool reasoning = false)
        {
            // Any attachment (bytes or path) routes to the multimodal model.
            if (fileData != null || !string.IsNullOrEmpty(filePath)) return ModelType.Multimodal;

            // Explicit deep-thinking request routes to the reasoning model.
            if (reasoning) return ModelType.Reasoning;

            // Default conversational model.
            return ModelType.Chat;
        }
    }
}
