using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using LangChainServer.Models;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;

namespace LangChainServer.Services
{
    /// <summary>
    /// Chat-completion service that talks over raw HTTP to either an
    /// OpenAI-compatible endpoint (default: SiliconFlow) or Azure OpenAI.
    /// Provider resolution precedence: request > env LLM_PROVIDER > config LLM:Provider > "OpenAI".
    /// </summary>
    public class ChatService : IChatService
    {
        private readonly IHttpClientFactory _httpClientFactory;
        private readonly IConfiguration _config;
        private readonly ILogger<ChatService> _logger;

        public ChatService(IHttpClientFactory httpClientFactory, IConfiguration config, ILogger<ChatService> logger)
        {
            _httpClientFactory = httpClientFactory;
            _config = config;
            _logger = logger;
        }

        /// <summary>
        /// Sends the chat request to the resolved provider and returns the first choice's content.
        /// </summary>
        /// <param name="request">Chat request; <c>Provider</c>/<c>Model</c> may be null to fall back to env/config.</param>
        /// <param name="ct">Cancellation token, flowed through to the HTTP call.</param>
        /// <exception cref="InvalidOperationException">Required provider configuration is missing/invalid.</exception>
        /// <exception cref="HttpRequestException">The upstream API returned a non-success status.</exception>
        public async Task<ChatResponse> ChatAsync(ChatRequest request, CancellationToken ct = default)
        {
            var provider = GetProvider(request.Provider);
            // Case-insensitive so "azure"/"AZURE" from env vars or config still routes to Azure
            // (the previous ordinal == "Azure" check silently fell through to OpenAI).
            return string.Equals(provider, "Azure", StringComparison.OrdinalIgnoreCase)
                ? await ChatAzureAsync(request, ct)
                : await ChatOpenAIAsync(request, ct);
        }

        /// <summary>Resolves the provider name: request value, then env LLM_PROVIDER, then config, then "OpenAI".</summary>
        private string GetProvider(string? fromRequest)
        {
            if (!string.IsNullOrWhiteSpace(fromRequest)) return fromRequest!;
            var env = Environment.GetEnvironmentVariable("LLM_PROVIDER");
            if (!string.IsNullOrWhiteSpace(env)) return env!;
            var cfg = _config["LLM:Provider"];
            if (!string.IsNullOrWhiteSpace(cfg)) return cfg!;
            return "OpenAI";
        }

        /// <summary>Calls an OpenAI-compatible chat/completions endpoint (default base: SiliconFlow).</summary>
        private async Task<ChatResponse> ChatOpenAIAsync(ChatRequest request, CancellationToken ct)
        {
            var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")
                         ?? _config["LLM:OpenAI:ApiKey"]
                         ?? string.Empty;
            if (string.IsNullOrWhiteSpace(apiKey))
            {
                throw new InvalidOperationException("OpenAI API key is not configured. Set env OPENAI_API_KEY or user-secrets/appsettings LLM:OpenAI:ApiKey.");
            }

            // Resolve BaseUrl treating empty as unset
            string? baseUrl = Environment.GetEnvironmentVariable("OPENAI_BASE_URL");
            if (string.IsNullOrWhiteSpace(baseUrl)) baseUrl = _config["LLM:OpenAI:BaseUrl"];
            if (string.IsNullOrWhiteSpace(baseUrl)) baseUrl = "https://api.siliconflow.cn/v1"; // default to SiliconFlow (OpenAI-compatible)
            baseUrl = baseUrl.Trim().Trim('\"', '\''); // guard against accidental quotes/whitespace

            // BUGFIX: BaseAddress must end with '/' — otherwise RFC 3986 relative resolution
            // drops the last path segment ("/v1"), turning ".../v1" + "chat/completions"
            // into ".../chat/completions" and producing 404s against versioned APIs.
            if (!baseUrl.EndsWith("/", StringComparison.Ordinal)) baseUrl += "/";

            // Resolve Model treating empty as unset
            string? model = request.Model;
            if (string.IsNullOrWhiteSpace(model)) model = Environment.GetEnvironmentVariable("OPENAI_MODEL");
            if (string.IsNullOrWhiteSpace(model)) model = _config["LLM:OpenAI:Model"];
            if (string.IsNullOrWhiteSpace(model))
            {
                throw new InvalidOperationException("Model is not configured. Provide model in request or set env OPENAI_MODEL or config LLM:OpenAI:Model (e.g., a SiliconFlow-available model).");
            }

            // Validate BaseUrl is absolute and well-formed
            if (!Uri.TryCreate(baseUrl, UriKind.Absolute, out var baseUri))
            {
                throw new InvalidOperationException($"Invalid OPENAI_BASE_URL: '{baseUrl}'. Provide an absolute URL like https://api.siliconflow.cn/v1");
            }

            // Prepare HttpClient with BaseAddress, then use relative path.
            // Factory clients are fresh HttpClient instances (pooled handlers), so
            // setting per-call BaseAddress/headers here does not leak across requests.
            var http = _httpClientFactory.CreateClient();
            http.BaseAddress = baseUri; // ensures final URI is absolute
            http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey);

            var endpoint = "chat/completions"; // relative path against BaseAddress
            _logger.LogInformation("HTTP Chat -> BaseUrl: {BaseUrl}, Endpoint: {Endpoint}, Model: {Model}", http.BaseAddress, endpoint, model);

            var payload = new
            {
                model,
                temperature = request.Temperature,
                max_tokens = request.MaxTokens,
                messages = (request.Messages ?? new List<ChatMessage>()).Select(m => new { role = m.Role, content = m.Content })
            };

            var body = await PostChatAsync(http, endpoint, payload, ct);
            return new ChatResponse { Content = ExtractAssistantContent(body), Model = model, Provider = "OpenAI" };
        }

        /// <summary>Calls an Azure OpenAI deployment's chat/completions endpoint.</summary>
        private async Task<ChatResponse> ChatAzureAsync(ChatRequest request, CancellationToken ct)
        {
            var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY")
                         ?? _config["LLM:Azure:ApiKey"]
                         ?? string.Empty;
            var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
                         ?? _config["LLM:Azure:Endpoint"]
                         ?? string.Empty;
            var deployment = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT")
                         ?? _config["LLM:Azure:Deployment"]
                         ?? string.Empty;
            var apiVersion = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_VERSION")
                         ?? _config["LLM:Azure:ApiVersion"]
                         ?? "2024-02-15-preview";

            if (string.IsNullOrWhiteSpace(apiKey) || string.IsNullOrWhiteSpace(endpoint) || string.IsNullOrWhiteSpace(deployment))
            {
                throw new InvalidOperationException("Azure OpenAI requires Endpoint, ApiKey, and Deployment. Set env AZURE_OPENAI_{ENDPOINT,API_KEY,DEPLOYMENT} or user-secrets/appsettings LLM:Azure:{Endpoint,ApiKey,Deployment}.");
            }

            var url = CombineUrl(endpoint.TrimEnd('/'), $"/openai/deployments/{deployment}/chat/completions?api-version={apiVersion}");

            var payload = new
            {
                temperature = request.Temperature,
                max_tokens = request.MaxTokens,
                // BUGFIX: null-guard Messages like the OpenAI path does — a null Messages
                // previously threw NullReferenceException here instead of sending an empty list.
                messages = (request.Messages ?? new List<ChatMessage>()).Select(m => new { role = m.Role, content = m.Content })
            };

            var http = _httpClientFactory.CreateClient();
            http.DefaultRequestHeaders.Add("api-key", apiKey);

            var body = await PostChatAsync(http, url, payload, ct);
            return new ChatResponse { Content = ExtractAssistantContent(body), Model = deployment, Provider = "Azure" };
        }

        /// <summary>
        /// POSTs <paramref name="payload"/> as JSON to <paramref name="url"/> (absolute, or relative
        /// to the client's BaseAddress) and returns the raw response body.
        /// Shared by both providers so error logging/throwing stays consistent.
        /// </summary>
        /// <exception cref="HttpRequestException">Non-success status; message includes status and body.</exception>
        private async Task<string> PostChatAsync(HttpClient http, string url, object payload, CancellationToken ct)
        {
            using var content = new StringContent(JsonSerializer.Serialize(payload), Encoding.UTF8, "application/json");
            using var resp = await http.PostAsync(url, content, ct);
            var body = await resp.Content.ReadAsStringAsync(ct);
            if (!resp.IsSuccessStatusCode)
            {
                _logger.LogError("HTTP Chat request failed. Status: {StatusCode} {ReasonPhrase}. Body: {Body}", (int)resp.StatusCode, resp.ReasonPhrase, body);
                throw new HttpRequestException($"HTTP {(int)resp.StatusCode} {resp.ReasonPhrase}. Body: {body}");
            }
            return body;
        }

        /// <summary>Extracts choices[0].message.content from a chat-completions JSON body ("" if content is null).</summary>
        private static string ExtractAssistantContent(string body)
        {
            using var doc = JsonDocument.Parse(body);
            var choice = doc.RootElement.GetProperty("choices")[0];
            return choice.GetProperty("message").GetProperty("content").GetString() ?? string.Empty;
        }

        /// <summary>Joins two URL parts with exactly one '/' between them.</summary>
        private static string CombineUrl(string baseUrl, string path)
        {
            if (string.IsNullOrEmpty(baseUrl)) return path;
            if (string.IsNullOrEmpty(path)) return baseUrl;
            return baseUrl.TrimEnd('/') + "/" + path.TrimStart('/');
        }
    }
}
