﻿using System.Net.Http.Json;

#nullable disable

namespace AiBLSmartEdu.Module.AIMedicalAssistant.API.Services;

using System.Collections.Concurrent;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using FrameworkCore.DI;

/// <summary>
/// Transient service that talks to a local Ollama instance (named client "Ollama")
/// via its <c>/api/generate</c> endpoint, keeping a lightweight per-session chat
/// history so follow-up prompts carry recent context.
/// </summary>
public class OllamaService : ITransientDependency
{
    // Number of trailing history messages folded into each prompt.
    private const int MaxHistoryMessages = 5;

    private readonly HttpClient _client;

    // Thread-safe session map; each List<ChatMessage> value is guarded by
    // locking the list instance itself (see AddUserMessage/AddAssistantMessage/
    // BuildPromptWithLock).
    private readonly ConcurrentDictionary<string, List<ChatMessage>> _sessionHistories;

    public OllamaService(IHttpClientFactory httpClientFactory)
    {
        _client = httpClientFactory.CreateClient("Ollama");
        _sessionHistories = new ConcurrentDictionary<string, List<ChatMessage>>();
    }

    // Minimal role/content pair stored in the per-session history.
    private class ChatMessage
    {
        public string Role { get; set; }
        public string Content { get; set; }
    }

    /// <summary>
    /// Sends a single non-streaming generate request and returns the complete answer.
    /// The user prompt and the model's answer are appended to the session history.
    /// </summary>
    /// <param name="sessionId">Key identifying the conversation whose history is used.</param>
    /// <param name="prompt">Current user prompt.</param>
    /// <param name="model">Ollama model name to run.</param>
    /// <param name="cancellationToken">Optional token to cancel the HTTP call.</param>
    /// <returns>The model's full response text (empty string if the field was null).</returns>
    /// <exception cref="HttpRequestException">If the Ollama endpoint returns a non-success status.</exception>
    public async Task<string> GenerateResponseAsync(string sessionId, string prompt, string model,
        CancellationToken cancellationToken = default)
    {
        var history = GetOrCreateHistory(sessionId);

        // Build the prompt BEFORE recording the user message: BuildPromptWithLock
        // appends the current prompt itself, so adding it to history first would
        // send it twice in the same request (previous behavior was a bug).
        var fullPrompt = BuildPromptWithLock(history, prompt);
        AddUserMessage(history, prompt);

        // Dispose the response to release the connection promptly.
        using var response = await _client.PostAsJsonAsync("/api/generate", new
        {
            model,
            prompt = fullPrompt,
            stream = false
        }, cancellationToken);

        response.EnsureSuccessStatusCode();

        var json = await response.Content.ReadFromJsonAsync<JsonElement>(cancellationToken: cancellationToken);
        // GetString() can return null; never store null in the history.
        var answer = json.GetProperty("response").GetString() ?? string.Empty;

        AddAssistantMessage(history, answer);
        return answer;
    }

    /// <summary>
    /// Sends a streaming generate request and yields response fragments as they
    /// arrive (one per NDJSON line). The accumulated answer is appended to the
    /// session history once the stream completes.
    /// </summary>
    /// <param name="sessionId">Key identifying the conversation whose history is used.</param>
    /// <param name="prompt">Current user prompt.</param>
    /// <param name="model">Ollama model name to run.</param>
    /// <param name="cancellationToken">Optional token; honored between lines and flowed via WithCancellation.</param>
    /// <exception cref="HttpRequestException">If the Ollama endpoint returns a non-success status.</exception>
    public async IAsyncEnumerable<string> StreamResponseAsync(string sessionId, string prompt, string model,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var history = GetOrCreateHistory(sessionId);

        // Same ordering fix as GenerateResponseAsync: build first, then record,
        // to avoid duplicating the current prompt in the request.
        var fullPrompt = BuildPromptWithLock(history, prompt);
        AddUserMessage(history, prompt);

        using var request = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
        {
            Content = new StringContent(JsonSerializer.Serialize(new
            {
                model,
                prompt = fullPrompt,
                stream = true
            }), Encoding.UTF8, "application/json")
        };

        // ResponseHeadersRead is essential here: the default (ResponseContentRead)
        // buffers the ENTIRE body before returning, which defeats streaming.
        using var response = await _client.SendAsync(request,
            HttpCompletionOption.ResponseHeadersRead, cancellationToken);

        response.EnsureSuccessStatusCode();

        using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
        using var reader = new StreamReader(stream);

        var answer = new StringBuilder();

        // ReadLineAsync returns null at end-of-stream. Avoid reader.EndOfStream,
        // which can block synchronously on a network stream when its buffer is empty.
        string line;
        while ((line = await reader.ReadLineAsync()) is not null)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (string.IsNullOrWhiteSpace(line)) continue;

            // Each NDJSON line is a small JSON object; "response" holds the fragment.
            var json = JsonSerializer.Deserialize<JsonElement>(line);
            if (json.TryGetProperty("response", out var responseElement))
            {
                var text = responseElement.GetString();
                if (!string.IsNullOrWhiteSpace(text))
                {
                    answer.Append(text);
                    yield return text;
                }
            }
        }

        AddAssistantMessage(history, answer.ToString());
    }

    // Returns the existing history list for the session, or atomically creates one.
    private List<ChatMessage> GetOrCreateHistory(string sessionId)
    {
        return _sessionHistories.GetOrAdd(sessionId, _ => new List<ChatMessage>());
    }

    // Appends a user turn; lock guards the non-thread-safe List.
    private void AddUserMessage(List<ChatMessage> history, string prompt)
    {
        lock (history)
        {
            history.Add(new ChatMessage { Role = "user", Content = prompt });
        }
    }

    // Appends an assistant turn; lock guards the non-thread-safe List.
    private void AddAssistantMessage(List<ChatMessage> history, string content)
    {
        lock (history)
        {
            history.Add(new ChatMessage { Role = "assistant", Content = content });
        }
    }

    // Builds the full prompt: a header, up to MaxHistoryMessages of prior turns
    // (snapshot taken under the lock), then the current user prompt. Callers must
    // NOT have added the current prompt to history yet, or it would appear twice.
    private string BuildPromptWithLock(List<ChatMessage> history, string currentPrompt)
    {
        List<ChatMessage> recentHistory;
        lock (history)
        {
            recentHistory = history
                .Skip(Math.Max(0, history.Count - MaxHistoryMessages))
                .ToList();
        }

        var promptBuilder = new StringBuilder();
        promptBuilder.AppendLine("对话历史:");
        foreach (var msg in recentHistory)
        {
            promptBuilder.AppendLine($"{msg.Role}: {msg.Content}");
        }
        promptBuilder.AppendLine($"user: {currentPrompt}");
        return promptBuilder.ToString();
    }
}
