using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using RAG.Domain.Configuration;
using RAG.Domain.Services;
using System.Text;
using System.Text.Json;
using System.Runtime.CompilerServices;

namespace RAG.Infrastructure.Services;

public class DeepSeekChatService : IChatService
{
    private readonly HttpClient _httpClient;
    private readonly DeepSeekApiSettings _settings;
    private readonly ILogger<DeepSeekChatService> _logger;

    /// <summary>
    /// Wires up the injected HTTP client with the DeepSeek API key from configuration.
    /// </summary>
    public DeepSeekChatService(HttpClient httpClient, IOptions<DeepSeekApiSettings> settings, ILogger<DeepSeekChatService> logger)
    {
        _httpClient = httpClient;
        _settings = settings.Value;
        _logger = logger;

        // Assign the typed Authorization property instead of Headers.Add: assignment is
        // idempotent, so a shared/reused HttpClient instance cannot accumulate duplicate
        // "Authorization" headers when this service is constructed more than once.
        _httpClient.DefaultRequestHeaders.Authorization =
            new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", _settings.ApiKey);
    }

    /// <summary>
    /// Generates a single (non-streaming) chat completion for <paramref name="question"/>,
    /// grounding the answer in the retrieval <paramref name="context"/>, with retry and
    /// latency instrumentation.
    /// </summary>
    /// <param name="question">The user's question.</param>
    /// <param name="context">Retrieved knowledge-base context; sentinel phrases embedded by
    /// the retrieval pipeline switch the system prompt (direct-answer / no-results / grounded).</param>
    /// <param name="chatHistory">Optional prior turns; every entry is sent with the
    /// "assistant" role. NOTE(review): user turns are indistinguishable here — confirm
    /// callers only pass assistant replies.</param>
    /// <returns>The model's answer, or a fixed fallback string when the response cannot be parsed.</returns>
    public async Task<string> GenerateResponseAsync(string question, string context, List<string>? chatHistory = null)
    {
        var messages = new List<object>();

        // Select the system prompt based on sentinel markers embedded in the context.
        string systemPrompt;
        if (context.Contains("请基于你的知识回答以下问题"))
        {
            // Direct-answer mode: answer from model knowledge, not the knowledge base.
            systemPrompt = "你是一个专业的AI助手。请基于你的知识直接回答用户的问题，提供准确、有用的信息。请注意这个回答不是基于用户的知识库，而是基于你的训练数据。";
        }
        else if (context.Contains("没有找到相关的知识库信息"))
        {
            // No knowledge-base hits: politely decline and suggest adding material.
            systemPrompt = "用户的知识库中没有相关信息，请礼貌地说明无法基于知识库回答，并建议用户补充相关资料。";
        }
        else
        {
            // Grounded mode: answer strictly from the provided context.
            systemPrompt = "你是一个专业的AI助手。请严格基于提供的上下文信息来回答用户的问题。如果上下文信息不足以完整回答问题，请明确说明并基于现有信息尽可能回答。";
        }

        messages.Add(new
        {
            role = "system",
            content = systemPrompt
        });

        // Prior turns (all tagged "assistant" — see the NOTE in the doc comment).
        if (chatHistory != null)
        {
            foreach (var history in chatHistory)
            {
                messages.Add(new
                {
                    role = "assistant",
                    content = history
                });
            }
        }

        // Current question, prefixed with the retrieval context.
        var userContent = $"上下文信息：\n{context}\n\n问题：{question}";
        messages.Add(new
        {
            role = "user",
            content = userContent
        });

        var request = new
        {
            model = _settings.Model,
            messages = messages,
            temperature = _settings.Temperature,
            max_tokens = _settings.MaxTokens,
            top_p = _settings.TopP,
            presence_penalty = _settings.PresencePenalty,
            frequency_penalty = _settings.FrequencyPenalty
        };

        var json = JsonSerializer.Serialize(request);

        var totalSw = System.Diagnostics.Stopwatch.StartNew();
        var headerSw = System.Diagnostics.Stopwatch.StartNew();
        // Overall timeout for the whole call (all attempts share the same budget).
        using var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(_settings.TimeoutMs));

        HttpResponseMessage? response = null;
        int attempt = 0;
        for (; attempt <= _settings.MaxRetry; attempt++)
        {
            // BUGFIX: an HttpRequestMessage (and its content) can be sent at most once;
            // reusing one instance across retries throws InvalidOperationException on the
            // second SendAsync. Build a fresh message for every attempt.
            using var requestMessage = new HttpRequestMessage(HttpMethod.Post, _settings.ChatUrl)
            {
                Content = new StringContent(json, Encoding.UTF8, "application/json")
            };
            try
            {
                // ResponseHeadersRead separates time-to-first-byte from body download time.
                response = await _httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, cts.Token);
                break;
            }
            catch (Exception ex) when (attempt < _settings.MaxRetry)
            {
                // The exception filter lets the final failure propagate with its original stack.
                _logger.LogWarning(ex, "[Chat] 调用失败重试 attempt={Attempt}/{Max}", attempt + 1, _settings.MaxRetry + 1);
                // Linear backoff, cancellable by the overall timeout.
                await Task.Delay(300 * (attempt + 1), cts.Token);
            }
        }
        headerSw.Stop();

        // The loop either breaks with a response or rethrows; guard against MaxRetry < 0.
        if (response is null)
            throw new InvalidOperationException("DeepSeek chat request produced no response.");

        string responseJson;
        long bodyMs;
        using (response) // BUGFIX: the response was previously never disposed
        {
            response.EnsureSuccessStatusCode();

            var bodySw = System.Diagnostics.Stopwatch.StartNew();
            await using var stream = await response.Content.ReadAsStreamAsync(cts.Token);
            using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 8192, leaveOpen: false);
            responseJson = await reader.ReadToEndAsync();
            bodySw.Stop();
            bodyMs = bodySw.ElapsedMilliseconds;
        }
        totalSw.Stop();

        DeepSeekChatResponse? result = null;
        try
        {
            result = JsonSerializer.Deserialize<DeepSeekChatResponse>(responseJson, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
        }
        catch (Exception ex)
        {
            // Best-effort: a malformed body falls through to the fallback answer below.
            _logger.LogWarning(ex, "解析Chat响应失败(长度={Len})", responseJson.Length);
        }

        var answer = result?.Choices?.FirstOrDefault()?.Message?.Content ?? "抱歉，无法生成回答。";
        var answerLen = answer.Length;
        // Rough token estimate: ~1 token per CJK character, so answer length is used as a proxy.
        var estTokens = answerLen;
        double tokensPerSec = estTokens > 0 && totalSw.ElapsedMilliseconds > 0 ? estTokens / (totalSw.ElapsedMilliseconds / 1000.0) : 0;
        _logger.LogInformation("[Perf][Chat] HeaderMs={HeaderMs} BodyMs={BodyMs} TotalMs={TotalMs} AnswerLen={AnswerLen} EstTokens={Tokens} TPS={TPS:F2} Temp={Temp} MaxTok={MaxTok} TopP={TopP} Attempt={Attempt}", headerSw.ElapsedMilliseconds, bodyMs, totalSw.ElapsedMilliseconds, answerLen, estTokens, tokensPerSec, _settings.Temperature, _settings.MaxTokens, _settings.TopP, attempt + 1);
        return answer;
    }

    /// <summary>
    /// Streams the chat completion as incremental text fragments: the request is sent with
    /// stream = true and the SSE response body is parsed by ReadSseStreamAsync, yielding
    /// each content delta as it arrives. (An earlier comment described this as a placeholder
    /// returning simulated segments — the code below does call the real endpoint.)
    /// </summary>
    /// <param name="question">The user's question.</param>
    /// <param name="context">Retrieved knowledge-base context; sentinel phrases in it switch
    /// the system prompt (direct-answer / no-results / grounded modes).</param>
    /// <param name="chatHistory">Optional prior turns; every entry is sent with the
    /// "assistant" role. NOTE(review): user turns are indistinguishable here — confirm
    /// callers only pass assistant replies.</param>
    /// <param name="cancellationToken">Cancels both the request and stream enumeration.</param>
    public async IAsyncEnumerable<string> GenerateResponseStreamAsync(string question, string context, List<string>? chatHistory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var messages = new List<object>();
        // Choose the system prompt from sentinel markers embedded in the context.
        string systemPrompt;
        if (context.Contains("请基于你的知识回答以下问题"))
            systemPrompt = "你是一个专业的AI助手。请基于你的知识直接回答用户的问题，提供准确、有用的信息。";
        else if (context.Contains("没有找到相关的知识库信息"))
            systemPrompt = "用户的知识库中没有相关信息，请礼貌地说明无法基于知识库回答，并建议用户补充相关资料。";
        else
            systemPrompt = "你是一个专业的AI助手。请严格基于提供的上下文信息来回答用户的问题。如果上下文信息不足以完整回答，请指明不足并尽量回答。";
        messages.Add(new { role = "system", content = systemPrompt });
        if (chatHistory != null)
        {
            foreach (var h in chatHistory)
                messages.Add(new { role = "assistant", content = h });
        }
        var userContent = $"上下文信息：\n{context}\n\n问题：{question}";
        messages.Add(new { role = "user", content = userContent });

        // stream = true switches the API into server-sent-events mode.
        var req = new
        {
            model = _settings.Model,
            messages = messages,
            temperature = _settings.Temperature,
            max_tokens = _settings.MaxTokens,
            top_p = _settings.TopP,
            presence_penalty = _settings.PresencePenalty,
            frequency_penalty = _settings.FrequencyPenalty,
            stream = true
        };
        var json = JsonSerializer.Serialize(req);
        using var requestMessage = new HttpRequestMessage(HttpMethod.Post, _settings.ChatUrl)
        {
            Content = new StringContent(json, Encoding.UTF8, "application/json")
        };
        var totalSw = System.Diagnostics.Stopwatch.StartNew();
        // Link the caller's token with an overall timeout so either one cancels the stream.
        using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        cts.CancelAfter(TimeSpan.FromMilliseconds(_settings.TimeoutMs));
        HttpResponseMessage response;
        try
        {
            // ResponseHeadersRead lets us start yielding before the full body has arrived.
            response = await _httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, cts.Token);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[Chat][Stream] 发送请求失败");
            throw;
        }
        // A using *statement* (not declaration) so the response is disposed as soon as the
        // enumeration finishes, while still allowing yield inside the block.
        using (response)
        {
            response.EnsureSuccessStatusCode();
            await using var stream = await response.Content.ReadAsStreamAsync(cts.Token);
            await foreach (var piece in ReadSseStreamAsync(stream, cts.Token))
            {
                yield return piece;
            }
        }
        totalSw.Stop();
        _logger.LogInformation("[Perf][ChatStream] TotalMs={Ms} QuestionLen={QLen}", totalSw.ElapsedMilliseconds, question.Length);
    }

    /// <summary>
    /// Parses a DeepSeek server-sent-events body, yielding the text of each "data:" chunk
    /// (streaming "delta.content", with "message.content" as a fallback) until end of
    /// stream or the "[DONE]" sentinel.
    /// </summary>
    /// <param name="stream">The raw HTTP response body (SSE, UTF-8).</param>
    /// <param name="ct">Stops enumeration early when cancelled.</param>
    private async IAsyncEnumerable<string> ReadSseStreamAsync(Stream stream, [EnumeratorCancellation] CancellationToken ct)
    {
        using var reader = new StreamReader(stream, Encoding.UTF8);
        // BUGFIX: the old loop also tested reader.EndOfStream, which blocks *synchronously*
        // on a network stream until data arrives (sync-over-async). ReadLineAsync already
        // returns null at end-of-stream, so that check was redundant and harmful.
        string? line;
        while ((line = await reader.ReadLineAsync()) != null)
        {
            if (ct.IsCancellationRequested) yield break;
            if (string.IsNullOrWhiteSpace(line)) continue;
            if (!line.StartsWith("data:", StringComparison.Ordinal)) continue;
            var payload = line.Substring(5).Trim();
            if (payload == "[DONE]") break; // SSE end-of-stream sentinel
            string? deltaOut = null;
            bool parseOk = false;
            try
            {
                using var doc = JsonDocument.Parse(payload);
                var root = doc.RootElement;
                if (root.TryGetProperty("choices", out var choices) && choices.GetArrayLength() > 0)
                {
                    var choice = choices[0];
                    // Streaming chunks carry "delta"; some payloads carry a full "message".
                    if (choice.TryGetProperty("delta", out var deltaElem) && deltaElem.TryGetProperty("content", out var contentElem))
                        deltaOut = contentElem.GetString();
                    else if (choice.TryGetProperty("message", out var msgElem) && msgElem.TryGetProperty("content", out var contentElem2))
                        deltaOut = contentElem2.GetString();
                    parseOk = true;
                }
            }
            catch (Exception ex)
            {
                // Best-effort: log a truncated snippet of the malformed chunk and keep reading.
                _logger.LogDebug(ex, "[Chat][Stream] 解析失败片段:{Snippet}", payload.Length > 200 ? payload[..200] : payload);
            }
            // yield must stay outside the try block (C# forbids yield inside try/catch).
            if (parseOk && !string.IsNullOrEmpty(deltaOut))
            {
                yield return deltaOut;
            }
        }
    }
}

/// <summary>Top-level shape of a non-streaming DeepSeek chat completion response.</summary>
public class DeepSeekChatResponse
{
    /// <summary>Completion candidates returned by the API; defaults to an empty list.</summary>
    public List<DeepSeekChoice> Choices { get; set; } = new List<DeepSeekChoice>();
}

/// <summary>A single entry of the "choices" array in a DeepSeek chat response.</summary>
public class DeepSeekChoice
{
    /// <summary>The generated message carried by this choice.</summary>
    public DeepSeekMessage Message { get; set; } = null!;
}

/// <summary>A chat message inside a DeepSeek completion choice.</summary>
public class DeepSeekMessage
{
    /// <summary>Author role — the service sends "system", "user" and "assistant".</summary>
    public string Role { get; set; } = null!;

    /// <summary>The message text.</summary>
    public string Content { get; set; } = null!;
}
