using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using DocumentCreationSystem.Services;
using DocumentCreationSystem.Models;
using System.Text.Json;
using System.IO;

namespace DocumentCreationSystem;

/// <summary>
/// 测试状态显示
/// </summary>
/// <summary>
/// Manual test harness for the status-bar display logic: saves a known AI model
/// configuration, reloads it, and simulates how SystemMonitorService derives the
/// platform / model-name / availability values shown in the status bar when the
/// AI service reports no live model.
/// </summary>
public class TestStatusDisplay
{
    public static async Task Main(string[] args)
    {
        // Wire up DI: console logging, HttpClient factory, configuration file,
        // and the AI configuration service under test.
        var services = new ServiceCollection();
        services.AddLogging(builder => builder.AddConsole().SetMinimumLevel(LogLevel.Information));
        services.AddHttpClient();

        var configuration = new ConfigurationBuilder()
            .SetBasePath(Directory.GetCurrentDirectory())
            .AddJsonFile("appsettings.json", optional: false)
            .Build();
        services.AddSingleton<IConfiguration>(configuration);

        services.AddScoped<IAIModelConfigService, AIModelConfigService>();

        // FIX: the original never disposed the ServiceProvider, leaking the console
        // logger's background processor and possibly losing buffered log output.
        // `await using` flushes and releases everything when Main exits.
        await using var serviceProvider = services.BuildServiceProvider();
        var logger = serviceProvider.GetRequiredService<ILogger<TestStatusDisplay>>();
        var configService = serviceProvider.GetRequiredService<IAIModelConfigService>();

        try
        {
            logger.LogInformation("=== 状态显示测试 ===");

            // 1. Build a test configuration: LMStudio is the selected platform
            //    and carries a known model name we can assert on later.
            logger.LogInformation("1. 创建测试配置...");
            var testConfig = new AIModelConfig
            {
                Platform = "LMStudio",
                Temperature = 0.7f,
                MaxTokens = 2000,
                EnableThinkingChain = true,
                Timeout = 30,
                OllamaConfig = new OllamaConfig
                {
                    BaseUrl = "http://localhost:11434",
                    SelectedModel = ""
                },
                LMStudioConfig = new LMStudioConfig
                {
                    BaseUrl = "http://localhost:1234",
                    SelectedModel = "my-configured-model"
                },
                ZhipuAIConfig = new ZhipuAIConfig
                {
                    ApiKey = "",
                    BaseUrl = "https://open.bigmodel.cn/api/paas/v4",
                    Model = "GLM-4-Flash-250414"
                },
                DeepSeekConfig = new DeepSeekConfig
                {
                    ApiKey = "",
                    BaseUrl = "https://api.deepseek.com",
                    Model = "deepseek-chat"
                }
            };

            await configService.SaveConfigAsync(testConfig);
            logger.LogInformation("测试配置已保存");

            // 2. Round-trip: reload the configuration and confirm the saved
            //    values survive persistence.
            logger.LogInformation("\n2. 验证配置保存...");
            var reloadedConfig = await configService.GetConfigAsync();
            // FIX(CA2254): use constant message templates with named placeholders
            // instead of interpolated strings, so structured logging works and the
            // template cache isn't polluted. Rendered output is unchanged.
            logger.LogInformation("重新加载的平台: {Platform}", reloadedConfig.Platform);
            logger.LogInformation("重新加载的LM Studio模型: {Model}", reloadedConfig.LMStudioConfig?.SelectedModel ?? "无");

            // 3. Simulate SystemMonitorService: the AI service returns no current
            //    model (LM Studio unreachable), so the display should fall back to
            //    the model selected in configuration, flagged offline.
            logger.LogInformation("\n3. 模拟SystemMonitorService的逻辑...");

            AIModel? currentModel = null; // simulate the AI service returning null

            var actualPlatform = reloadedConfig.Platform ?? "未知平台";
            var modelName = "未选择模型";
            var isAvailable = false;

            if (currentModel != null)
            {
                // Live model info takes priority over the stored configuration.
                modelName = currentModel.Name ?? currentModel.Id ?? "未知模型";
                if (!string.IsNullOrEmpty(currentModel.Provider))
                {
                    actualPlatform = currentModel.Provider;
                }
                isAvailable = true;
            }
            else
            {
                // No live model: fall back to the configured selection, shown as
                // offline (configured but currently unreachable).
                var selectedModel = GetSelectedModelFromConfig(reloadedConfig);
                if (!string.IsNullOrEmpty(selectedModel))
                {
                    modelName = selectedModel;
                    isAvailable = false;
                }
            }

            logger.LogInformation("计算结果:");
            logger.LogInformation("  平台: {Platform}", actualPlatform);
            logger.LogInformation("  模型名称: {ModelName}", modelName);
            logger.LogInformation("  状态: {Status}", isAvailable ? "在线" : "离线");
            logger.LogInformation("  可用性: {IsAvailable}", isAvailable);

            // 4. Assert the expected fallback result.
            if (actualPlatform == "LMStudio" && modelName == "my-configured-model")
            {
                logger.LogInformation("\n✅ 状态显示测试通过");
                logger.LogInformation("配置中的模型信息应该正确显示在状态栏中");
            }
            else
            {
                logger.LogError("\n❌ 状态显示测试失败");
                logger.LogError("期望平台: LMStudio, 实际: {Platform}", actualPlatform);
                logger.LogError("期望模型: my-configured-model, 实际: {ModelName}", modelName);
                // FIX: propagate failure to callers (scripts/CI) via exit code.
                Environment.ExitCode = 1;
            }

            // 5. Dump the persisted configuration file for manual inspection.
            var configPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData),
                "DocumentCreationSystem", "ai-config.json");
            logger.LogInformation("\n配置文件路径: {ConfigPath}", configPath);

            if (File.Exists(configPath))
            {
                var configContent = await File.ReadAllTextAsync(configPath);
                logger.LogInformation("最终配置文件内容:");
                // FIX: pass the JSON as a template argument. The original passed
                // raw file content as the message template, where '{' braces are
                // parsed as placeholders and throw FormatException.
                logger.LogInformation("{ConfigContent}", configContent);
            }

            logger.LogInformation("\n=== 状态显示测试完成 ===");
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "测试过程中发生错误");
            Environment.ExitCode = 1;
        }

        Console.WriteLine("\n按任意键退出...");
        // FIX: Console.ReadKey throws InvalidOperationException when stdin is
        // redirected (e.g. under CI); only wait for a key interactively.
        if (!Console.IsInputRedirected)
        {
            Console.ReadKey();
        }
    }

    /// <summary>
    /// Maps the configured platform name to the model selected for that platform.
    /// </summary>
    /// <param name="config">The reloaded AI model configuration.</param>
    /// <returns>
    /// The selected/configured model name for <c>config.Platform</c>, or an empty
    /// string when the platform is unknown or no model is selected.
    /// </returns>
    private static string GetSelectedModelFromConfig(AIModelConfig config)
    {
        return config.Platform switch
        {
            "Ollama" => config.OllamaConfig?.SelectedModel ?? "",
            "LMStudio" => config.LMStudioConfig?.SelectedModel ?? "",
            "ZhipuAI" => config.ZhipuAIConfig?.Model ?? "",
            "DeepSeek" => config.DeepSeekConfig?.Model ?? "",
            _ => ""
        };
    }
}
