﻿using llm_net.Services.Implementations;
using llm_project.Infrastructure.Helpers;
using llm_project.Models;
using Microsoft.Extensions.Options;
using System.Text;
using System.Text.Json;
using static llm_net.Infrastructure.Extensions.ServiceCollectionExtensions;

namespace llm_project.Services.Implementations
{
    /// <summary>
    /// Thin client for the Tongyi (OpenAI-compatible) chat-completions endpoint.
    /// Endpoint and app key come from <see cref="TongyiApiOptions"/>; requests are
    /// sent through the shared <see cref="HttpClientSingleton"/> instance.
    /// </summary>
    public class LlmApiClient
    {
        private readonly TongyiApiOptions _apiOptions;

        // Cached serializer options: OpenAI-compatible APIs return camelCase JSON
        // ("choices", "message", "content"). Case-insensitive matching makes
        // deserialization work whether or not LlmResponse carries
        // [JsonPropertyName] attributes (attributes still take precedence).
        private static readonly JsonSerializerOptions s_jsonOptions = new()
        {
            PropertyNameCaseInsensitive = true
        };

        /// <summary>
        /// Initializes the client and configures API headers on the shared HttpClient.
        /// </summary>
        /// <remarks>
        /// NOTE(review): <paramref name="httpClient"/>, <paramref name="configuration"/>
        /// and <paramref name="logger"/> are injected but never used — all requests go
        /// through <see cref="HttpClientSingleton"/>. The parameters are kept so
        /// existing DI registrations continue to resolve; consider removing them (or
        /// actually using the injected client via IHttpClientFactory) in a follow-up.
        /// </remarks>
        public LlmApiClient(HttpClient httpClient, IConfiguration configuration, ILogger<TongyiApiService> logger, IOptions<TongyiApiOptions> apiOptions)
        {
            _apiOptions = apiOptions.Value;
            // Configure auth headers once on the shared singleton HttpClient.
            HttpClientSingleton.ConfigureApiHeaders(_apiOptions.AppKey);
        }

        /// <summary>
        /// Calls the LLM chat-completions API with the given prompt and returns the
        /// first choice's message content, or an empty string when none is present.
        /// </summary>
        /// <param name="prompt">Prompt text, sent as the "system" message.</param>
        /// <param name="cancellationToken">Optional token to cancel the HTTP call.</param>
        /// <returns>The model's reply text, or <see cref="string.Empty"/>.</returns>
        /// <exception cref="InvalidOperationException">
        /// Wraps any HTTP or deserialization failure. Cancellation is propagated
        /// as <see cref="OperationCanceledException"/>, not wrapped.
        /// </exception>
        public async Task<string> CallLlmApiAsync(string prompt, CancellationToken cancellationToken = default)
        {
            try
            {
                var requestBody = new
                {
                    model = "qwen-max",
                    messages = new[] { new { role = "system", content = prompt } }
                };
                using var content = new StringContent(
                    JsonSerializer.Serialize(requestBody),
                    Encoding.UTF8,
                    "application/json"
                );

                // Send via the shared singleton HttpClient (headers configured in ctor);
                // dispose the response once the body has been read.
                using var response = await HttpClientSingleton.GetClient()
                    .PostAsync($"{_apiOptions.Endpoint}/compatible-mode/v1/chat/completions", content, cancellationToken);
                response.EnsureSuccessStatusCode();

                var responseBody = await response.Content.ReadAsStringAsync(cancellationToken);
                var result = JsonSerializer.Deserialize<LlmResponse>(responseBody, s_jsonOptions);
                // Null-conditional on Choices too: a response without "choices" must
                // yield string.Empty rather than a NullReferenceException.
                return result?.Choices?.FirstOrDefault()?.Message?.Content ?? string.Empty;
            }
            catch (OperationCanceledException)
            {
                // Propagate cancellation as-is instead of masking it as an API failure.
                throw;
            }
            catch (Exception ex)
            {
                throw new InvalidOperationException("调用LLM API失败", ex);
            }
        }
    }
}
