using CoreManager.AI.Models;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using System.Runtime.CompilerServices;

namespace CoreManager.AI.Services;

/// <summary>
/// Ollama-based implementation of <see cref="IAIService"/>.
/// Streams chat completions from a local Ollama instance through its
/// OpenAI-compatible endpoint, persisting the conversation via
/// <see cref="ISessionHistoryService"/>.
/// </summary>
public class OllamaAIService : IAIService
{
    private readonly ILogger<OllamaAIService> _logger;
    private readonly IConfiguration _configuration;
    private readonly ISessionHistoryService _sessionHistoryService;
    private readonly string _ollamaEndpoint;   // base URL of the Ollama server
    private readonly string _defaultModel;     // model used when the request does not specify one
    private readonly int _maxHistoryMessages;  // cap on history messages sent as context

    public OllamaAIService(
        ILogger<OllamaAIService> logger,
        IConfiguration configuration,
        ISessionHistoryService sessionHistoryService)
    {
        _logger = logger;
        _configuration = configuration;
        _sessionHistoryService = sessionHistoryService;
        _ollamaEndpoint = configuration["AI:Ollama:Endpoint"] ?? "http://localhost:11434";
        _defaultModel = configuration["AI:Ollama:DefaultModel"] ?? "deepseek-r1:14b";
        // Only the most recent N messages are sent as context (default 20) to bound token usage.
        _maxHistoryMessages = int.TryParse(
            configuration["AI:MaxHistoryMessages"],
            out var max) ? max : 20;
    }

    /// <summary>
    /// Streams a chat response chunk by chunk.
    /// </summary>
    /// <param name="request">User message plus optional session id, model and temperature.</param>
    /// <param name="cancellationToken">Cancels the underlying completion stream.</param>
    /// <returns>
    /// Partial <see cref="ChatResponse"/> chunks, terminated by a final response
    /// with <c>IsComplete = true</c> and an empty message.
    /// </returns>
    public async IAsyncEnumerable<ChatResponse> StreamChatAsync(
        ChatRequest request,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var sessionId = request.SessionId ?? Guid.NewGuid().ToString();
        var modelName = request.Model ?? _defaultModel;

        _logger.LogInformation("开始流式聊天，SessionId: {SessionId}, Model: {Model}", sessionId, modelName);

        // The actual work lives in a separate iterator so a try/catch could be added
        // around this loop later without hitting the "no yield inside a try that has
        // a catch clause" language restriction.
        await foreach (var response in StreamChatInternalAsync(sessionId, modelName, request, cancellationToken)
                           .WithCancellation(cancellationToken))
        {
            yield return response;
        }
    }

    /// <summary>
    /// Core streaming implementation: loads session history, appends the user message,
    /// builds the chat context, streams the model output, then persists the assistant reply.
    /// </summary>
    /// <param name="sessionId">Resolved session id (never null).</param>
    /// <param name="modelName">Resolved model name (never null).</param>
    /// <param name="request">Original chat request (used for message text and temperature).</param>
    /// <param name="cancellationToken">Cancels the completion stream.</param>
    private async IAsyncEnumerable<ChatResponse> StreamChatInternalAsync(
        string sessionId,
        string modelName,
        ChatRequest request,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        // Load (or create) the persisted history for this session.
        var sessionHistory = await _sessionHistoryService.GetSessionHistoryAsync(sessionId);

        // Record which model this session uses, first time only.
        if (string.IsNullOrEmpty(sessionHistory.Model))
        {
            sessionHistory.Model = modelName;
        }

        // Persist the incoming user message before calling the model.
        var userMessage = new ChatMessage
        {
            Role = ChatRole.User,
            Content = request.Message,
            CreatedAt = DateTime.UtcNow
        };
        await _sessionHistoryService.AddMessageAsync(sessionId, userMessage);

        _logger.LogInformation("会话 {SessionId} 当前有 {Count} 条消息",
            sessionId, sessionHistory.Messages.Count);

        // Build a kernel that talks to Ollama through its OpenAI-compatible API.
        var builder = Kernel.CreateBuilder();

#pragma warning disable SKEXP0010 // experimental API
        builder.AddOpenAIChatCompletion(
            modelId: modelName,
            apiKey: "not-needed", // Ollama ignores the key, but the parameter must be non-null
            endpoint: new Uri($"{_ollamaEndpoint}/v1")); // Ollama's OpenAI-compatible endpoint
#pragma warning restore SKEXP0010

        var kernel = builder.Build();
        var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

        // Assemble the chat context from an optional system prompt plus recent history.
        var chatHistory = new ChatHistory();

        var systemPrompt = _configuration["AI:SystemPrompt"];
        if (!string.IsNullOrEmpty(systemPrompt))
        {
            chatHistory.AddSystemMessage(systemPrompt);
        }

        // NOTE(review): whether this list already contains the user message saved above
        // depends on whether AddMessageAsync mutates the same Messages collection —
        // confirm against ISessionHistoryService; if it does not, the current turn's
        // message is missing from the model context.
        var recentMessages = sessionHistory.Messages
            .TakeLast(_maxHistoryMessages)
            .ToList();

        _logger.LogDebug("使用 {Count} 条历史消息作为上下文", recentMessages.Count);

        foreach (var msg in recentMessages)
        {
            switch (msg.Role)
            {
                case ChatRole.System:
                    chatHistory.AddSystemMessage(msg.Content);
                    break;
                case ChatRole.User:
                    chatHistory.AddUserMessage(msg.Content);
                    break;
                case ChatRole.Assistant:
                    chatHistory.AddAssistantMessage(msg.Content);
                    break;
            }
        }

        var executionSettings = new OpenAIPromptExecutionSettings
        {
            Temperature = request.Temperature,
            // Configurable output cap; defaults to the previously hard-coded 2000.
            MaxTokens = int.TryParse(_configuration["AI:MaxTokens"], out var maxTokens) ? maxTokens : 2000
        };

        // Accumulate the streamed chunks so the full reply can be persisted afterwards.
        var assistantResponse = new System.Text.StringBuilder();

        await foreach (var content in chatCompletionService.GetStreamingChatMessageContentsAsync(
            chatHistory,
            executionSettings,
            kernel,
            cancellationToken))
        {
            if (!string.IsNullOrEmpty(content.Content))
            {
                assistantResponse.Append(content.Content);

                yield return new ChatResponse
                {
                    Message = content.Content,
                    SessionId = sessionId,
                    IsComplete = false,
                    Model = modelName
                };
            }
        }

        // Persist the assembled assistant reply.
        var assistantMessage = new ChatMessage
        {
            Role = ChatRole.Assistant,
            Content = assistantResponse.ToString(),
            CreatedAt = DateTime.UtcNow
        };
        await _sessionHistoryService.AddMessageAsync(sessionId, assistantMessage);

        // Completion signal: empty message with IsComplete = true.
        yield return new ChatResponse
        {
            Message = string.Empty,
            SessionId = sessionId,
            IsComplete = true,
            Model = modelName
        };

        _logger.LogInformation("流式聊天完成，SessionId: {SessionId}, 当前消息数: {Count}",
            sessionId, sessionHistory.Messages.Count + 2); // +2 for the user and assistant messages just added
    }

    /// <summary>
    /// Gets the list of available model names.
    /// </summary>
    /// <remarks>
    /// TODO: query the Ollama tags API for the real installed-model list;
    /// currently returns a fixed set of model names.
    /// </remarks>
    public Task<List<string>> GetAvailableModelsAsync()
    {
        // Nothing here can throw, so the previous try/catch (and the await on an
        // already-completed Task.FromResult) was dead code.
        return Task.FromResult(new List<string>
        {
            "deepseek-r1:14b",
            "deepseek-r1:7b",
            "deepseek-r1:1.5b"
        });
    }
}

