using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using DocumentCreationSystem.Services;
using DocumentCreationSystem.Models;
using System.Text.Json;
using System.IO;

namespace DocumentCreationSystem;

/// <summary>
/// Manual test harness for LM Studio model configuration: connects to the
/// LM Studio service, lists available models, persists a model selection via
/// <see cref="IAIModelConfigService"/>, and verifies the saved configuration.
/// </summary>
public class TestLMStudioModel
{
    /// <summary>
    /// Entry point. Runs the LM Studio model-setup test sequence end to end:
    /// connect, enumerate models, set a model, save config, reload and verify.
    /// </summary>
    /// <param name="args">Command-line arguments (unused).</param>
    public static async Task Main(string[] args)
    {
        // Wire up DI: console logging and the HttpClient factory.
        var services = new ServiceCollection();
        services.AddLogging(builder => builder.AddConsole().SetMinimumLevel(LogLevel.Information));
        services.AddHttpClient();

        // Configuration is required; fail fast if appsettings.json is missing.
        var configuration = new ConfigurationBuilder()
            .SetBasePath(Directory.GetCurrentDirectory())
            .AddJsonFile("appsettings.json", optional: false)
            .Build();
        services.AddSingleton<IConfiguration>(configuration);

        // AI model configuration service under test.
        services.AddScoped<IAIModelConfigService, AIModelConfigService>();

        // Dispose the provider on exit so logging providers flush and any
        // disposable services are released (the original leaked the provider).
        await using var serviceProvider = services.BuildServiceProvider();
        var logger = serviceProvider.GetRequiredService<ILogger<TestLMStudioModel>>();
        // NOTE(review): resolving a scoped service from the root provider makes it
        // effectively a singleton; acceptable for a one-shot test harness.
        var configService = serviceProvider.GetRequiredService<IAIModelConfigService>();

        try
        {
            logger.LogInformation("=== LM Studio模型设置测试 ===");

            // 1. Test connectivity to the LM Studio service.
            logger.LogInformation("1. 测试LM Studio服务连接...");
            var lmStudioConfig = configuration.GetSection("AI:LMStudio");
            var lmStudioService = new LMStudioService(lmStudioConfig, logger);

            // 2. Enumerate the models LM Studio currently exposes.
            logger.LogInformation("2. 获取LM Studio可用模型...");
            var availableModels = await lmStudioService.GetAvailableModelsAsync();
            // Structured message templates (CA2254) — rendered output is unchanged.
            logger.LogInformation("找到 {ModelCount} 个LM Studio模型:", availableModels.Count);
            foreach (var model in availableModels)
            {
                logger.LogInformation("  - {ModelId} ({ModelName})", model.Id, model.Name);
            }

            // 3. Read the currently persisted configuration.
            logger.LogInformation("\n3. 获取当前配置...");
            var currentConfig = await configService.GetConfigAsync();
            logger.LogInformation("当前平台: {Platform}", currentConfig.Platform);
            logger.LogInformation("LM Studio配置的模型: {SelectedModel}", currentConfig.LMStudioConfig?.SelectedModel ?? "无");

            // 4. Try selecting the first available model.
            if (availableModels.Any())
            {
                var testModel = availableModels.First();
                logger.LogInformation("\n4. 测试设置模型: {ModelId}", testModel.Id);
                var success = await lmStudioService.SetCurrentModelAsync(testModel.Id);
                logger.LogInformation("设置结果: {Result}", success ? "成功" : "失败");

                var currentModel = lmStudioService.GetCurrentModel();
                logger.LogInformation("当前模型: {ModelName}", currentModel?.Name ?? "无");

                // 5. Persist a full configuration with the selected model.
                logger.LogInformation("\n5. 更新配置文件...");
                var updatedConfig = new AIModelConfig
                {
                    Platform = "LMStudio",
                    Temperature = 0.7f,
                    MaxTokens = 2000,
                    EnableThinkingChain = true,
                    Timeout = 30,
                    OllamaConfig = new OllamaConfig
                    {
                        BaseUrl = "http://localhost:11434",
                        SelectedModel = ""
                    },
                    LMStudioConfig = new LMStudioConfig
                    {
                        BaseUrl = "http://localhost:1234",
                        SelectedModel = testModel.Id
                    },
                    ZhipuAIConfig = new ZhipuAIConfig
                    {
                        ApiKey = "",
                        BaseUrl = "https://open.bigmodel.cn/api/paas/v4",
                        Model = "GLM-4-Flash-250414"
                    },
                    DeepSeekConfig = new DeepSeekConfig
                    {
                        ApiKey = "",
                        BaseUrl = "https://api.deepseek.com",
                        Model = "deepseek-chat"
                    }
                };

                await configService.SaveConfigAsync(updatedConfig);
                logger.LogInformation("配置已更新，LM Studio模型设置为: {ModelId}", testModel.Id);

                // 6. Reload and confirm the round-trip preserved our values.
                logger.LogInformation("\n6. 验证配置保存...");
                var reloadedConfig = await configService.GetConfigAsync();
                logger.LogInformation("重新加载的平台: {Platform}", reloadedConfig.Platform);
                logger.LogInformation("重新加载的LM Studio模型: {SelectedModel}", reloadedConfig.LMStudioConfig?.SelectedModel ?? "无");

                if (reloadedConfig.Platform == "LMStudio" &&
                    reloadedConfig.LMStudioConfig?.SelectedModel == testModel.Id)
                {
                    logger.LogInformation("✅ 配置保存和加载测试通过");
                }
                else
                {
                    logger.LogError("❌ 配置保存和加载测试失败");
                }
            }
            else
            {
                logger.LogWarning("没有找到可用的LM Studio模型，请确保LM Studio正在运行并加载了模型");
            }

            // 7. Dump the final on-disk configuration file, if present.
            var configPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData),
                "DocumentCreationSystem", "ai-config.json");
            logger.LogInformation("\n配置文件路径: {ConfigPath}", configPath);

            if (File.Exists(configPath))
            {
                var configContent = await File.ReadAllTextAsync(configPath);
                logger.LogInformation("最终配置文件内容:");
                logger.LogInformation(configContent);
            }

            logger.LogInformation("\n=== LM Studio模型设置测试完成 ===");
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "测试过程中发生错误");
        }

        // Console.ReadKey throws when stdin is redirected (e.g. CI); guard it.
        if (!Console.IsInputRedirected)
        {
            Console.WriteLine("\n按任意键退出...");
            Console.ReadKey();
        }
    }
}
