using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text.Json;
using System.Threading.Tasks;

namespace DocumentCreationSystem
{
    /// <summary>
    /// Diagnostic tool for a locally running LM Studio server: verifies
    /// connectivity, lists loaded models, smoke-tests inference, takes a rough
    /// memory reading, and prints a summary report to the console.
    /// </summary>
    public class LMStudioDiagnosticTool : IDisposable
    {
        private readonly HttpClient _httpClient;
        private readonly string _baseUrl;
        private bool _disposed;

        /// <summary>
        /// Creates a tool targeting the given LM Studio base URL.
        /// </summary>
        /// <param name="baseUrl">Root of LM Studio's OpenAI-compatible HTTP API.</param>
        public LMStudioDiagnosticTool(string baseUrl = "http://localhost:1234")
        {
            _httpClient = new HttpClient();
            // Keep probes snappy: a healthy local server responds well within 10s.
            _httpClient.Timeout = TimeSpan.FromSeconds(10);
            _baseUrl = baseUrl;
        }

        /// <summary>
        /// Runs the full diagnostic sequence (connection, model list, inference,
        /// system status) and prints progress plus remediation advice.
        /// </summary>
        /// <returns>
        /// The collected <see cref="DiagnosticResult"/>. Returns early — with the
        /// remaining fields at their defaults — when the service is unreachable
        /// or no models are loaded.
        /// </returns>
        public async Task<DiagnosticResult> RunDiagnosticAsync()
        {
            var result = new DiagnosticResult();

            Console.WriteLine("=== LM Studio 诊断工具 ===");
            Console.WriteLine($"检查地址: {_baseUrl}");
            Console.WriteLine();

            // 1. Service reachability.
            Console.WriteLine("1. 检查LM Studio服务连接...");
            result.ServiceConnected = await CheckServiceConnectionAsync();
            Console.WriteLine($"   结果: {(result.ServiceConnected ? "✓ 连接成功" : "✗ 连接失败")}");

            if (!result.ServiceConnected)
            {
                // Use the configured URL rather than a hard-coded one so the
                // advice stays correct when a custom base URL was supplied.
                Console.WriteLine($"   建议: 请确认LM Studio已启动并运行在 {_baseUrl}");
                return result;
            }

            // 2. Model inventory.
            Console.WriteLine("\n2. 检查可用模型...");
            result.AvailableModels = await GetAvailableModelsAsync();
            Console.WriteLine($"   结果: 发现 {result.AvailableModels.Count} 个模型");

            if (result.AvailableModels.Count == 0)
            {
                Console.WriteLine("   建议: 请在LM Studio中加载至少一个模型");
                return result;
            }

            foreach (var model in result.AvailableModels)
            {
                Console.WriteLine($"     - {model}");
            }

            // 3. Smoke-test inference against the first available model.
            Console.WriteLine("\n3. 测试模型推理...");
            var firstModel = result.AvailableModels[0];
            result.InferenceWorking = await TestInferenceAsync(firstModel);
            Console.WriteLine($"   结果: {(result.InferenceWorking ? "✓ 推理正常" : "✗ 推理失败")}");

            if (!result.InferenceWorking)
            {
                Console.WriteLine("   建议: 模型可能未正确加载或资源不足，请重启LM Studio并重新加载模型");
            }

            // 4. Coarse system-resource check.
            Console.WriteLine("\n4. 检查系统状态...");
            result.SystemStatus = CheckSystemStatus();
            Console.WriteLine($"   内存使用: {result.SystemStatus.MemoryUsagePercent:F1}%");
            Console.WriteLine($"   建议: {(result.SystemStatus.MemoryUsagePercent < 90 ? "内存充足" : "内存使用率较高，可能影响推理")}");

            // 5. Summary and step-by-step remediation advice.
            Console.WriteLine("\n=== 诊断总结 ===");
            if (result.ServiceConnected && result.AvailableModels.Count > 0 && result.InferenceWorking)
            {
                Console.WriteLine("✓ LM Studio状态正常，可以正常使用");
                result.OverallStatus = "正常";
            }
            else
            {
                Console.WriteLine("✗ LM Studio存在问题，需要处理");
                result.OverallStatus = "异常";

                Console.WriteLine("\n建议的解决步骤:");
                if (!result.ServiceConnected)
                    Console.WriteLine("1. 启动LM Studio应用程序");
                if (result.AvailableModels.Count == 0)
                    Console.WriteLine("2. 在LM Studio中加载模型");
                if (!result.InferenceWorking)
                    Console.WriteLine("3. 重启LM Studio并重新加载模型");
                if (result.SystemStatus.MemoryUsagePercent > 90)
                    Console.WriteLine("4. 关闭其他应用程序释放内存");
            }

            return result;
        }

        /// <summary>
        /// Probes GET /v1/models; true when the server answers with a 2xx.
        /// Any transport failure (refused, timeout, DNS) yields false.
        /// </summary>
        private async Task<bool> CheckServiceConnectionAsync()
        {
            try
            {
                var response = await _httpClient.GetAsync($"{_baseUrl}/v1/models");
                return response.IsSuccessStatusCode;
            }
            catch
            {
                // Diagnostic probe: any failure simply means "not connected".
                return false;
            }
        }

        /// <summary>
        /// Fetches GET /v1/models and extracts the "id" of every entry in the
        /// response's "data" array.
        /// </summary>
        /// <returns>Model ids; empty on any failure (an error is printed).</returns>
        private async Task<List<string>> GetAvailableModelsAsync()
        {
            var models = new List<string>();

            try
            {
                var response = await _httpClient.GetAsync($"{_baseUrl}/v1/models");
                if (response.IsSuccessStatusCode)
                {
                    var json = await response.Content.ReadAsStringAsync();
                    // JsonDocument is IDisposable (it rents pooled buffers) —
                    // prefer it over deserializing into a detached JsonElement.
                    using var doc = JsonDocument.Parse(json);

                    if (doc.RootElement.TryGetProperty("data", out var dataElement))
                    {
                        foreach (var modelElement in dataElement.EnumerateArray())
                        {
                            // Tolerate entries without an "id" instead of letting
                            // GetProperty throw and abort the whole listing.
                            if (modelElement.TryGetProperty("id", out var idElement))
                            {
                                var modelId = idElement.GetString();
                                if (!string.IsNullOrEmpty(modelId))
                                {
                                    models.Add(modelId);
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine($"   错误: {ex.Message}");
            }

            return models;
        }

        /// <summary>
        /// Sends a minimal chat completion to verify the model can answer.
        /// </summary>
        /// <param name="modelId">Id of the model to exercise.</param>
        /// <returns>
        /// True when the request succeeds and the response body does not contain
        /// LM Studio's "prediction-error" marker.
        /// </returns>
        private async Task<bool> TestInferenceAsync(string modelId)
        {
            try
            {
                var request = new
                {
                    model = modelId,
                    messages = new[]
                    {
                        new { role = "user", content = "请回复'测试成功'" }
                    },
                    max_tokens = 10,
                    temperature = 0.1f
                };

                var json = JsonSerializer.Serialize(request);
                var content = new StringContent(json, System.Text.Encoding.UTF8, "application/json");

                var response = await _httpClient.PostAsync($"{_baseUrl}/v1/chat/completions", content);

                if (response.IsSuccessStatusCode)
                {
                    var responseJson = await response.Content.ReadAsStringAsync();
                    return !responseJson.Contains("prediction-error");
                }

                return false;
            }
            catch
            {
                // Any transport/serialization failure counts as "inference broken".
                return false;
            }
        }

        /// <summary>
        /// Takes a coarse memory reading: this process's working set as a
        /// percentage of the memory the GC reports as available. Rough signal
        /// only — it ignores other processes' usage.
        /// </summary>
        private SystemStatus CheckSystemStatus()
        {
            var status = new SystemStatus();

            try
            {
                // TotalAvailableMemoryBytes honors machine/container limits and
                // gives a real denominator (the old code divided the working set
                // by a hard-coded 10 GB guess).
                var availableBytes = GC.GetGCMemoryInfo().TotalAvailableMemoryBytes;
                if (availableBytes > 0)
                {
                    status.MemoryUsagePercent =
                        Math.Min((double)Environment.WorkingSet / availableBytes * 100, 100);
                }
            }
            catch
            {
                // Best effort: report 0 rather than failing the whole diagnostic.
                status.MemoryUsagePercent = 0;
            }

            return status;
        }

        /// <summary>
        /// Releases the underlying <see cref="HttpClient"/>. Safe to call twice.
        /// </summary>
        public void Dispose()
        {
            if (_disposed)
            {
                return;
            }

            _disposed = true;
            _httpClient.Dispose();
            GC.SuppressFinalize(this);
        }
    }

    /// <summary>
    /// Aggregated outcome of a single diagnostic run.
    /// </summary>
    public class DiagnosticResult
    {
        /// <summary>Whether the LM Studio HTTP endpoint responded.</summary>
        public bool ServiceConnected { get; set; }

        /// <summary>Model identifiers reported by the server; never null.</summary>
        public List<string> AvailableModels { get; set; } = new List<string>();

        /// <summary>Whether the smoke-test chat completion succeeded.</summary>
        public bool InferenceWorking { get; set; }

        /// <summary>Coarse snapshot of local resource usage; never null.</summary>
        public SystemStatus SystemStatus { get; set; } = new SystemStatus();

        /// <summary>Human-readable verdict; stays "未知" until a run completes.</summary>
        public string OverallStatus { get; set; } = "未知";
    }

    /// <summary>
    /// Coarse system-resource snapshot captured during a diagnostic run.
    /// </summary>
    public class SystemStatus
    {
        /// <summary>Approximate memory usage in percent (0–100); 0 when unknown.</summary>
        public double MemoryUsagePercent { get; set; }
    }
}

// Usage example:
// var diagnostic = new LMStudioDiagnosticTool();
// var result = await diagnostic.RunDiagnosticAsync();
// diagnostic.Dispose();