using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using DocumentCreationSystem.Services;
using DocumentCreationSystem.Models;
using System.Text.Json;
using System.IO;

namespace DocumentCreationSystem;

/// <summary>
/// Manual test harness for the system monitor service: wires up the DI container,
/// saves a known LM Studio configuration, and verifies that
/// <see cref="ISystemMonitorService"/> reports the configured platform/model and
/// that system resource (CPU/memory/GPU) probing runs without error.
/// </summary>
public class TestSystemMonitor
{
    public static async Task Main(string[] args)
    {
        // Build the DI container.
        var services = new ServiceCollection();
        services.AddLogging(builder => builder.AddConsole().SetMinimumLevel(LogLevel.Information));
        services.AddHttpClient();

        // Load configuration (appsettings.json is required; throws if missing).
        var configuration = new ConfigurationBuilder()
            .SetBasePath(Directory.GetCurrentDirectory())
            .AddJsonFile("appsettings.json", optional: false)
            .Build();
        services.AddSingleton<IConfiguration>(configuration);

        // Register the AI configuration / AI / monitoring services.
        services.AddScoped<IAIModelConfigService, AIModelConfigService>();
        services.AddScoped<IAIService, AIServiceManager>();
        services.AddScoped<ISystemMonitorService, SystemMonitorService>();

        // FIX: the root provider must be disposed (it owns the console logger
        // provider and HttpClient handler pipeline), and scoped services must be
        // resolved from a scope — resolving scoped services straight from the
        // root provider silently promotes them to singletons and leaks them.
        await using var serviceProvider = services.BuildServiceProvider();
        await using var scope = serviceProvider.CreateAsyncScope();

        var logger = scope.ServiceProvider.GetRequiredService<ILogger<TestSystemMonitor>>();
        var configService = scope.ServiceProvider.GetRequiredService<IAIModelConfigService>();
        var aiService = scope.ServiceProvider.GetRequiredService<IAIService>();
        var monitorService = scope.ServiceProvider.GetRequiredService<ISystemMonitorService>();

        try
        {
            logger.LogInformation("=== 系统监控服务测试 ===");

            // 1. Read the currently persisted configuration.
            logger.LogInformation("1. 获取当前配置...");
            var currentConfig = await configService.GetConfigAsync();
            logger.LogInformation($"当前平台: {currentConfig.Platform}");
            logger.LogInformation($"配置的模型: {GetSelectedModelFromConfig(currentConfig)}");

            // 2. Query the AI service for its current model.
            logger.LogInformation("\n2. 获取当前模型信息...");
            var currentModel = aiService.GetCurrentModel();
            logger.LogInformation($"AI服务当前模型: {currentModel?.Name ?? "无"}");
            logger.LogInformation($"AI服务当前模型ID: {currentModel?.Id ?? "无"}");
            logger.LogInformation($"AI服务当前模型提供者: {currentModel?.Provider ?? "无"}");

            // 3. Ask the monitor service what model it sees before any change.
            logger.LogInformation("\n3. 测试系统监控服务...");
            var modelInfo = await monitorService.GetCurrentModelInfoAsync();
            logger.LogInformation($"监控服务平台: {modelInfo.Platform}");
            logger.LogInformation($"监控服务模型名称: {modelInfo.ModelName}");
            logger.LogInformation($"监控服务状态: {modelInfo.Status}");
            logger.LogInformation($"监控服务可用性: {modelInfo.IsAvailable}");

            // 4. Save a known LM Studio configuration so the expected values below
            //    are deterministic regardless of the machine's prior settings.
            logger.LogInformation("\n4. 测试LM Studio配置场景...");
            var testConfig = new AIModelConfig
            {
                Platform = "LMStudio",
                Temperature = 0.7f,
                MaxTokens = 2000,
                EnableThinkingChain = true,
                Timeout = 30,
                OllamaConfig = new OllamaConfig
                {
                    BaseUrl = "http://localhost:11434",
                    SelectedModel = ""
                },
                LMStudioConfig = new LMStudioConfig
                {
                    BaseUrl = "http://localhost:1234",
                    SelectedModel = "my-test-model"
                },
                ZhipuAIConfig = new ZhipuAIConfig
                {
                    ApiKey = "",
                    BaseUrl = "https://open.bigmodel.cn/api/paas/v4",
                    Model = "GLM-4-Flash-250414"
                },
                DeepSeekConfig = new DeepSeekConfig
                {
                    ApiKey = "",
                    BaseUrl = "https://api.deepseek.com",
                    Model = "deepseek-chat"
                }
            };

            await configService.SaveConfigAsync(testConfig);
            logger.LogInformation("测试配置已保存");

            // 5. Re-query the monitor service; it should now reflect the saved config.
            logger.LogInformation("\n5. 重新测试监控服务...");
            var updatedModelInfo = await monitorService.GetCurrentModelInfoAsync();
            logger.LogInformation($"更新后平台: {updatedModelInfo.Platform}");
            logger.LogInformation($"更新后模型名称: {updatedModelInfo.ModelName}");
            logger.LogInformation($"更新后状态: {updatedModelInfo.Status}");
            logger.LogInformation($"更新后可用性: {updatedModelInfo.IsAvailable}");

            // 6. Probe system resources (CPU / memory / GPU) via the monitor service.
            logger.LogInformation("\n6. 测试系统资源监控...");
            var resourceInfo = await monitorService.GetSystemResourceInfoAsync();

            // CPU information
            logger.LogInformation($"CPU: {resourceInfo.Cpu.Name}");
            logger.LogInformation($"CPU核心数: {resourceInfo.Cpu.CoreCount}");
            logger.LogInformation($"CPU使用率: {resourceInfo.Cpu.UsagePercentage:F1}%");

            // Memory information (values reported in MB; displayed in GB)
            logger.LogInformation($"内存总量: {resourceInfo.Memory.TotalMemoryMB / 1024.0:F1}GB");
            logger.LogInformation($"内存已用: {resourceInfo.Memory.UsedMemoryMB / 1024.0:F1}GB");
            logger.LogInformation($"内存使用率: {resourceInfo.Memory.UsagePercentage:F1}%");
            logger.LogInformation($"当前进程内存: {resourceInfo.Memory.ProcessMemoryMB}MB");

            // GPU information (optional hardware — guard every field)
            logger.LogInformation($"GPU可用: {resourceInfo.Gpu.IsAvailable}");
            if (resourceInfo.Gpu.IsAvailable)
            {
                logger.LogInformation($"GPU名称: {resourceInfo.Gpu.Name}");
                logger.LogInformation($"GPU使用率: {resourceInfo.Gpu.UsagePercentage:F1}%");
                logger.LogInformation($"GPU显存总量: {resourceInfo.Gpu.TotalMemoryMB / 1024.0:F1}GB");
                logger.LogInformation($"GPU显存已用: {resourceInfo.Gpu.UsedMemoryMB / 1024.0:F1}GB");
                // Guard against division by zero when total VRAM is unknown.
                if (resourceInfo.Gpu.TotalMemoryMB > 0)
                {
                    var memoryUsagePercentage = (double)resourceInfo.Gpu.UsedMemoryMB / resourceInfo.Gpu.TotalMemoryMB * 100;
                    logger.LogInformation($"GPU显存占用率: {memoryUsagePercentage:F1}%");
                }
                if (resourceInfo.Gpu.Temperature.HasValue)
                {
                    logger.LogInformation($"GPU温度: {resourceInfo.Gpu.Temperature.Value:F1}°C");
                }
                logger.LogInformation($"GPU驱动版本: {resourceInfo.Gpu.DriverVersion}");
            }
            else
            {
                logger.LogInformation("未检测到可用的GPU");
            }

            // 7. Verify the monitor service reflects the configuration saved in step 4.
            if (updatedModelInfo.Platform == "LMStudio" &&
                updatedModelInfo.ModelName == "my-test-model")
            {
                logger.LogInformation("\n✅ 系统监控服务测试通过");
                logger.LogInformation("配置中的模型信息正确显示在状态栏中");
            }
            else
            {
                logger.LogError("\n❌ 系统监控服务测试失败");
                logger.LogError($"期望平台: LMStudio, 实际: {updatedModelInfo.Platform}");
                logger.LogError($"期望模型: my-test-model, 实际: {updatedModelInfo.ModelName}");
            }

            logger.LogInformation("\n=== 系统监控服务测试完成 ===");
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "测试过程中发生错误");
        }

        Console.WriteLine("\n按任意键退出...");
        // FIX: Console.ReadKey throws InvalidOperationException when stdin is
        // redirected (e.g. running under CI); only wait for a key interactively.
        if (!Console.IsInputRedirected)
        {
            Console.ReadKey();
        }
    }

    /// <summary>
    /// Returns the model identifier selected for the platform named in
    /// <paramref name="config"/>.Platform, or an empty string when the platform
    /// is unknown or its sub-configuration is null.
    /// </summary>
    /// <param name="config">The aggregate AI model configuration to inspect.</param>
    /// <returns>The selected model name/ID for the active platform, never null.</returns>
    private static string GetSelectedModelFromConfig(AIModelConfig config)
    {
        return config.Platform switch
        {
            "Ollama" => config.OllamaConfig?.SelectedModel ?? "",
            "LMStudio" => config.LMStudioConfig?.SelectedModel ?? "",
            "ZhipuAI" => config.ZhipuAIConfig?.Model ?? "",
            "DeepSeek" => config.DeepSeekConfig?.Model ?? "",
            _ => ""
        };
    }
}
