using DocumentCreationSystem.Models;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.IO;

namespace DocumentCreationSystem.Services;

/// <summary>
/// AI model configuration service. Persists the <see cref="AIModelConfig"/> as JSON under the
/// user's ApplicationData folder, auto-detects locally hosted models (Ollama / LM Studio) and
/// verifies connectivity against all supported AI platforms.
/// </summary>
public class AIModelConfigService : IAIModelConfigService
{
    private readonly ILogger<AIModelConfigService> _logger;
    // NOTE(review): injected but currently unread in this class — confirm whether it can be removed.
    private readonly IConfiguration _configuration;
    private readonly HttpClient _httpClient;
    // Full path of the persisted configuration file: %APPDATA%/DocumentCreationSystem/ai-config.json
    private readonly string _configFilePath;

    public AIModelConfigService(
        ILogger<AIModelConfigService> logger,
        IConfiguration configuration,
        HttpClient httpClient)
    {
        _logger = logger;
        _configuration = configuration;
        _httpClient = httpClient;
        _configFilePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), 
            "DocumentCreationSystem", "ai-config.json");
    }

    /// <summary>
    /// Loads the AI configuration from disk. Falls back to <see cref="GetDefaultConfig"/> when the
    /// file is missing, unreadable or deserializes to null. Never throws.
    /// </summary>
    public async Task<AIModelConfig> GetConfigAsync()
    {
        try
        {
            _logger.LogDebug("尝试加载AI配置文件: {ConfigFilePath}", _configFilePath);

            if (File.Exists(_configFilePath))
            {
                var json = await File.ReadAllTextAsync(_configFilePath);
                var options = new JsonSerializerOptions
                {
                    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
                    PropertyNameCaseInsensitive = true
                };
                var config = JsonSerializer.Deserialize<AIModelConfig>(json, options);
                if (config != null)
                {
                    _logger.LogDebug("成功加载AI配置 - 平台: {Platform}, 模型: {Model}",
                        config.Platform, GetSelectedModelFromConfig(config));
                    return config;
                }
            }
            else
            {
                _logger.LogDebug("AI配置文件不存在，将使用默认配置");
            }
        }
        catch (Exception ex)
        {
            // Deliberately swallowed: a corrupt config file must not prevent startup —
            // the defaults below are used instead.
            _logger.LogError(ex, "加载AI配置失败");
        }

        var defaultConfig = GetDefaultConfig();
        _logger.LogDebug("使用默认AI配置 - 平台: {Platform}", defaultConfig.Platform);
        return defaultConfig;
    }

    /// <summary>
    /// Returns a display string for the model currently selected on the config's active platform.
    /// </summary>
    private string GetSelectedModelFromConfig(AIModelConfig config)
    {
        return config.Platform switch
        {
            "Ollama" => config.OllamaConfig?.SelectedModel ?? "未选择",
            "LMStudio" => config.LMStudioConfig?.SelectedModel ?? "未选择",
            "ZhipuAI" => config.ZhipuAIConfig?.Model ?? "未选择",
            "DeepSeek" => config.DeepSeekConfig?.Model ?? "未选择",
            "OpenAI" => config.OpenAIConfig?.Model ?? "未选择",
            "Alibaba" => config.AlibabaConfig?.Model ?? "未选择",
            _ => "未知平台"
        };
    }

    /// <summary>
    /// Serializes <paramref name="config"/> to the config file (creating the directory if needed)
    /// and verifies the file exists afterwards. Rethrows on failure.
    /// </summary>
    public async Task SaveConfigAsync(AIModelConfig config)
    {
        try
        {
            _logger.LogInformation("开始保存AI配置到: {ConfigFilePath}", _configFilePath);
            _logger.LogInformation("配置内容 - 平台: {Platform}", config.Platform);

            var directory = Path.GetDirectoryName(_configFilePath);
            if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
            {
                Directory.CreateDirectory(directory);
                _logger.LogInformation("创建配置目录: {Directory}", directory);
            }

            var options = new JsonSerializerOptions
            {
                WriteIndented = true,
                PropertyNamingPolicy = JsonNamingPolicy.CamelCase
            };

            var json = JsonSerializer.Serialize(config, options);
            _logger.LogInformation("序列化后的配置长度: {Length}", json.Length);

            await File.WriteAllTextAsync(_configFilePath, json);

            // Sanity check that the write actually reached the file system.
            if (File.Exists(_configFilePath))
            {
                var fileSize = new FileInfo(_configFilePath).Length;
                _logger.LogInformation("配置文件已创建，大小: {FileSize} 字节", fileSize);
            }
            else
            {
                _logger.LogError("配置文件保存后不存在！");
            }

            var selectedModel = GetSelectedModelFromConfig(config);
            _logger.LogInformation("AI配置已保存到: {ConfigFilePath} - 平台: {Platform}, 模型: {Model}",
                _configFilePath, config.Platform, selectedModel);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "保存AI配置失败到: {ConfigFilePath}", _configFilePath);
            throw;
        }
    }

    /// <summary>
    /// Queries the Ollama server's GET /api/tags endpoint and maps each entry to an
    /// <see cref="AIModel"/>. Throws a wrapped <see cref="Exception"/> when the server is unreachable.
    /// </summary>
    public async Task<List<AIModel>> DetectOllamaModelsAsync(string baseUrl)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/api/tags";
            var response = await _httpClient.GetStringAsync(url);

            var options = new JsonSerializerOptions
            {
                PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
                PropertyNameCaseInsensitive = true
            };

            var result = JsonSerializer.Deserialize<OllamaTagsResponse>(response, options);

            var models = new List<AIModel>();
            if (result?.Models != null)
            {
                foreach (var model in result.Models)
                {
                    models.Add(new AIModel
                    {
                        Id = model.Name,
                        Name = model.Name,
                        Provider = "Ollama",
                        Size = model.Size,
                        ModifiedAt = model.ModifiedAt,
                        IsAvailable = true,
                        Description = $"Ollama模型 - {FormatSize(model.Size)}"
                    });
                }
            }

            return models;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "检测Ollama模型失败: {BaseUrl}", baseUrl);
            // Preserve the original exception as InnerException for diagnostics.
            throw new Exception($"无法连接到Ollama服务 ({baseUrl}): {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Queries LM Studio's OpenAI-compatible GET /v1/models endpoint and maps each entry to an
    /// <see cref="AIModel"/>. Throws a wrapped <see cref="Exception"/> when the server is unreachable.
    /// </summary>
    public async Task<List<AIModel>> DetectLMStudioModelsAsync(string baseUrl)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/v1/models";
            var response = await _httpClient.GetStringAsync(url);

            var options = new JsonSerializerOptions
            {
                PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
                PropertyNameCaseInsensitive = true
            };

            var result = JsonSerializer.Deserialize<LMStudioModelsResponse>(response, options);

            var models = new List<AIModel>();
            if (result?.Data != null)
            {
                foreach (var model in result.Data)
                {
                    models.Add(new AIModel
                    {
                        Id = model.Id,
                        Name = model.Id,
                        Provider = "LMStudio",
                        CreatedAt = DateTimeOffset.FromUnixTimeSeconds(model.Created).DateTime,
                        IsAvailable = true,
                        Description = $"LM Studio模型 - {model.OwnedBy}"
                    });
                }
            }

            return models;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "检测LM Studio模型失败: {BaseUrl}", baseUrl);
            // Preserve the original exception as InnerException for diagnostics.
            throw new Exception($"无法连接到LM Studio服务 ({baseUrl}): {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Tests connectivity for the platform selected in <paramref name="config"/>; for local
    /// platforms additionally probes the selected model. Returns true on success, throws otherwise.
    /// </summary>
    public async Task<bool> TestConnectionAsync(AIModelConfig config)
    {
        try
        {
            // ToLowerInvariant: platform ids are ASCII identifiers; the culture-sensitive
            // ToLower() would mis-map 'I' under e.g. the Turkish locale and break matching.
            switch (config.Platform.ToLowerInvariant())
            {
                case "ollama":
                    // Explicit guard: a partially written config file would otherwise NRE here.
                    var ollama = config.OllamaConfig ?? throw new Exception("缺少Ollama平台配置");
                    await TestOllamaConnectionAsync(ollama.BaseUrl);
                    // If a model is selected, also verify it responds.
                    if (!string.IsNullOrEmpty(ollama.SelectedModel))
                    {
                        await TestOllamaModelAsync(ollama.BaseUrl, ollama.SelectedModel);
                    }
                    break;
                case "lmstudio":
                    var lmStudio = config.LMStudioConfig ?? throw new Exception("缺少LM Studio平台配置");
                    await TestLMStudioConnectionAsync(lmStudio.BaseUrl);
                    // If a model is selected, also verify it responds.
                    if (!string.IsNullOrEmpty(lmStudio.SelectedModel))
                    {
                        await TestLMStudioModelAsync(lmStudio.BaseUrl, lmStudio.SelectedModel);
                    }
                    break;
                case "zhipuai":
                    await TestZhipuAIConnectionAsync(config.ZhipuAIConfig ?? throw new Exception("缺少智谱AI平台配置"));
                    break;
                case "deepseek":
                    await TestDeepSeekConnectionAsync(config.DeepSeekConfig ?? throw new Exception("缺少DeepSeek平台配置"));
                    break;
                case "openai":
                    await TestOpenAIConnectionAsync(config.OpenAIConfig ?? throw new Exception("缺少OpenAI平台配置"));
                    break;
                case "alibaba":
                    await TestAlibabaConnectionAsync(config.AlibabaConfig ?? throw new Exception("缺少阿里云平台配置"));
                    break;
                default:
                    throw new Exception($"不支持的平台: {config.Platform}");
            }

            return true;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "测试连接失败: {Platform}", config.Platform);
            throw;
        }
    }

    /// <summary>Checks that the Ollama server answers GET /api/tags within 120 seconds.</summary>
    private async Task TestOllamaConnectionAsync(string baseUrl)
    {
        try
        {
            var tagsUrl = baseUrl.TrimEnd('/') + "/api/tags";
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(120));
            using var response = await _httpClient.GetAsync(tagsUrl, cts.Token);
            response.EnsureSuccessStatusCode();

            _logger.LogInformation("Ollama服务器连接测试成功");
        }
        catch (TaskCanceledException)
        {
            // The CTS timeout surfaces as TaskCanceledException.
            throw new Exception("Ollama连接超时，请检查服务是否启动");
        }
        catch (HttpRequestException ex)
        {
            throw new Exception($"Ollama网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex)
        {
            throw new Exception($"Ollama连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>Checks that the LM Studio server answers GET /v1/models within 10 seconds.</summary>
    private async Task TestLMStudioConnectionAsync(string baseUrl)
    {
        try
        {
            var modelsUrl = baseUrl.TrimEnd('/') + "/v1/models";
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            using var response = await _httpClient.GetAsync(modelsUrl, cts.Token);
            response.EnsureSuccessStatusCode();

            _logger.LogInformation("LM Studio服务器连接测试成功");
        }
        catch (TaskCanceledException)
        {
            throw new Exception("LM Studio连接超时，请检查服务是否启动");
        }
        catch (HttpRequestException ex)
        {
            throw new Exception($"LM Studio网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex)
        {
            throw new Exception($"LM Studio连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Sends a minimal non-streaming /api/generate request to verify the given Ollama model
    /// actually loads and responds (120 s budget — cold model loads can be slow).
    /// </summary>
    private async Task TestOllamaModelAsync(string baseUrl, string modelName)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/api/generate";
            var request = new
            {
                model = modelName,
                prompt = "测试",
                stream = false
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(120));
            using var response = await _httpClient.PostAsync(url, content, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new Exception($"Ollama模型 {modelName} 不可用: {errorContent}");
            }

            _logger.LogInformation("Ollama模型 {ModelName} 测试成功", modelName);
        }
        catch (TaskCanceledException)
        {
            throw new Exception($"Ollama模型 {modelName} 响应超时");
        }
        // Filter avoids double-wrapping our own "Ollama..." exceptions from above.
        catch (Exception ex) when (!ex.Message.Contains("Ollama"))
        {
            throw new Exception($"Ollama模型 {modelName} 测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Sends a minimal /v1/chat/completions request to verify the given LM Studio model responds.
    /// </summary>
    private async Task TestLMStudioModelAsync(string baseUrl, string modelName)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/v1/chat/completions";
            var request = new
            {
                model = modelName,
                messages = new[]
                {
                    new { role = "user", content = "测试" }
                },
                max_tokens = 10,
                temperature = 0.1
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(120));
            using var response = await _httpClient.PostAsync(url, content, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new Exception($"LM Studio模型 {modelName} 不可用: {errorContent}");
            }

            _logger.LogInformation("LM Studio模型 {ModelName} 测试成功", modelName);
        }
        catch (TaskCanceledException)
        {
            throw new Exception($"LM Studio模型 {modelName} 响应超时");
        }
        // Filter avoids double-wrapping our own "LM Studio..." exceptions from above.
        catch (Exception ex) when (!ex.Message.Contains("LM Studio"))
        {
            throw new Exception($"LM Studio模型 {modelName} 测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>Verifies the ZhipuAI API key/model via a minimal chat-completions call.</summary>
    private async Task TestZhipuAIConnectionAsync(ZhipuAIConfig config)
    {
        if (string.IsNullOrWhiteSpace(config.ApiKey))
        {
            throw new Exception("智谱AI API Key不能为空");
        }

        try
        {
            var baseUrl = config.BaseUrl?.TrimEnd('/') ?? "https://open.bigmodel.cn/api/paas/v4";
            var url = $"{baseUrl}/chat/completions";

            var request = new
            {
                model = config.Model ?? "GLM-4-Flash-250414",
                messages = new[]
                {
                    new { role = "user", content = "测试连接" }
                },
                max_tokens = 10,
                temperature = 0.1
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            // Bearer auth must go on a per-request message, not the shared HttpClient.
            var requestMessage = new HttpRequestMessage(HttpMethod.Post, url)
            {
                Content = content
            };
            requestMessage.Headers.Add("Authorization", $"Bearer {config.ApiKey}");

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));
            using var response = await _httpClient.SendAsync(requestMessage, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new Exception($"智谱AI API调用失败 (状态码: {response.StatusCode}): {errorContent}");
            }

            // A well-formed chat response must contain a "choices" array.
            var responseContent = await response.Content.ReadAsStringAsync();
            var responseJson = JsonSerializer.Deserialize<JsonElement>(responseContent);
            if (!responseJson.TryGetProperty("choices", out _))
            {
                throw new Exception($"智谱AI响应格式异常，模型: {config.Model}");
            }

            _logger.LogInformation("智谱AI连接测试成功，模型: {Model}", config.Model);
        }
        catch (TaskCanceledException)
        {
            throw new Exception("智谱AI连接超时，请检查网络连接");
        }
        catch (HttpRequestException ex)
        {
            throw new Exception($"智谱AI网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex) when (!ex.Message.Contains("智谱AI"))
        {
            throw new Exception($"智谱AI连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>Verifies the DeepSeek API key/model via a minimal chat-completions call.</summary>
    private async Task TestDeepSeekConnectionAsync(DeepSeekConfig config)
    {
        if (string.IsNullOrWhiteSpace(config.ApiKey))
        {
            throw new Exception("DeepSeek API Key不能为空");
        }

        try
        {
            var baseUrl = config.BaseUrl?.TrimEnd('/') ?? "https://api.deepseek.com";
            var url = $"{baseUrl}/chat/completions";

            var request = new
            {
                model = config.Model ?? "deepseek-chat",
                messages = new[]
                {
                    new { role = "user", content = "测试连接" }
                },
                max_tokens = 10,
                temperature = 0.1
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            // Bearer auth must go on a per-request message, not the shared HttpClient.
            var requestMessage = new HttpRequestMessage(HttpMethod.Post, url)
            {
                Content = content
            };
            requestMessage.Headers.Add("Authorization", $"Bearer {config.ApiKey}");

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));
            using var response = await _httpClient.SendAsync(requestMessage, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new Exception($"DeepSeek API调用失败 (状态码: {response.StatusCode}): {errorContent}");
            }

            // A well-formed chat response must contain a "choices" array.
            var responseContent = await response.Content.ReadAsStringAsync();
            var responseJson = JsonSerializer.Deserialize<JsonElement>(responseContent);
            if (!responseJson.TryGetProperty("choices", out _))
            {
                throw new Exception($"DeepSeek响应格式异常，模型: {config.Model}");
            }

            _logger.LogInformation("DeepSeek连接测试成功，模型: {Model}", config.Model);
        }
        catch (TaskCanceledException)
        {
            throw new Exception("DeepSeek连接超时，请检查网络连接");
        }
        catch (HttpRequestException ex)
        {
            throw new Exception($"DeepSeek网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex) when (!ex.Message.Contains("DeepSeek"))
        {
            throw new Exception($"DeepSeek连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Verifies an OpenAI-compatible endpoint via a minimal chat-completions call.
    /// The API key is optional to support local/proxy endpoints without auth.
    /// </summary>
    private async Task TestOpenAIConnectionAsync(OpenAIConfig config)
    {
        try
        {
            var baseUrl = config.BaseUrl?.TrimEnd('/') ?? "https://api.openai.com/v1";
            var url = $"{baseUrl}/chat/completions";

            var request = new
            {
                model = config.Model ?? "gpt-3.5-turbo",
                messages = new[]
                {
                    new { role = "user", content = "测试连接" }
                },
                max_tokens = 10,
                temperature = 0.1
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            var requestMessage = new HttpRequestMessage(HttpMethod.Post, url)
            {
                Content = content
            };

            // Only attach Authorization when a key was provided (self-hosted gateways may not need one).
            if (!string.IsNullOrWhiteSpace(config.ApiKey))
            {
                requestMessage.Headers.Add("Authorization", $"Bearer {config.ApiKey}");
            }

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(120));
            using var response = await _httpClient.SendAsync(requestMessage, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new Exception($"OpenAI API调用失败 (状态码: {response.StatusCode}): {errorContent}");
            }

            // A well-formed chat response must contain a "choices" array.
            var responseContent = await response.Content.ReadAsStringAsync();
            var responseJson = JsonSerializer.Deserialize<JsonElement>(responseContent);
            if (!responseJson.TryGetProperty("choices", out _))
            {
                throw new Exception("OpenAI API响应格式不正确");
            }

            _logger.LogInformation("OpenAI连接测试成功，模型: {Model}", config.Model);
        }
        catch (TaskCanceledException)
        {
            throw new Exception("OpenAI连接超时，请检查网络连接");
        }
        catch (HttpRequestException ex)
        {
            throw new Exception($"OpenAI网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex) when (!ex.Message.Contains("OpenAI"))
        {
            throw new Exception($"OpenAI连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Verifies the Alibaba DashScope API key/model via a minimal text-generation call.
    /// Note: DashScope nests messages under "input" and checks for "output" in the response.
    /// </summary>
    private async Task TestAlibabaConnectionAsync(AlibabaConfig config)
    {
        if (string.IsNullOrWhiteSpace(config.ApiKey))
        {
            throw new Exception("阿里云 API Key不能为空");
        }

        try
        {
            var baseUrl = config.BaseUrl?.TrimEnd('/') ?? "https://dashscope.aliyuncs.com/api/v1";
            var url = $"{baseUrl}/services/aigc/text-generation/generation";

            var request = new
            {
                model = config.Model ?? "qwen-turbo",
                input = new
                {
                    messages = new[]
                    {
                        new { role = "user", content = "测试连接" }
                    }
                },
                parameters = new
                {
                    max_tokens = 10,
                    temperature = 0.1
                }
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            // Bearer auth must go on a per-request message, not the shared HttpClient.
            var requestMessage = new HttpRequestMessage(HttpMethod.Post, url)
            {
                Content = content
            };
            requestMessage.Headers.Add("Authorization", $"Bearer {config.ApiKey}");

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(15));
            using var response = await _httpClient.SendAsync(requestMessage, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new Exception($"阿里云API调用失败 (状态码: {response.StatusCode}): {errorContent}");
            }

            // DashScope's generation response carries the result under "output".
            var responseContent = await response.Content.ReadAsStringAsync();
            var responseJson = JsonSerializer.Deserialize<JsonElement>(responseContent);
            if (!responseJson.TryGetProperty("output", out _))
            {
                throw new Exception("阿里云API响应格式不正确");
            }

            _logger.LogInformation("阿里云连接测试成功，模型: {Model}", config.Model);
        }
        catch (TaskCanceledException)
        {
            throw new Exception("阿里云连接超时，请检查网络连接");
        }
        catch (HttpRequestException ex)
        {
            throw new Exception($"阿里云网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex) when (!ex.Message.Contains("阿里云"))
        {
            throw new Exception($"阿里云连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Builds the built-in default configuration used when no config file exists or loading fails.
    /// </summary>
    public AIModelConfig GetDefaultConfig()
    {
        return new AIModelConfig
        {
            Platform = "LMStudio", // LM Studio is the default platform
            Temperature = 0.7f,
            MaxTokens = 16384, // 16K tokens to support long-form generation
            EnableThinkingChain = true,
            EnableSegmentedGeneration = true, // segmented generation avoids long-text failures
            SegmentationThreshold = 3000, // lower threshold triggers segmentation earlier
            SegmentWordCount = 1500, // ~1500 characters per segment, user adjustable
            Timeout = 120, // API request timeout in seconds
            OllamaConfig = new OllamaConfig
            {
                BaseUrl = "http://localhost:11434"
            },
            LMStudioConfig = new LMStudioConfig
            {
                // NOTE(review): this default points at an external tunnel host rather than
                // localhost:1234 — confirm this is intentional and not a leftover dev setting.
                BaseUrl = "http://50844s9656.wocp.fun:42440",
                SelectedModel = "" // populated later by auto-detection
            },
            ZhipuAIConfig = new ZhipuAIConfig
            {
                BaseUrl = "https://open.bigmodel.cn/api/paas/v4",
                Model = "GLM-4-Flash-250414"
            },
            DeepSeekConfig = new DeepSeekConfig
            {
                BaseUrl = "https://api.deepseek.com",
                Model = "deepseek-chat"
            },
            OpenAIConfig = new OpenAIConfig
            {
                BaseUrl = "https://api.openai.com/v1",
                Model = "gpt-3.5-turbo"
            },
            AlibabaConfig = new AlibabaConfig
            {
                BaseUrl = "https://dashscope.aliyuncs.com/api/v1",
                Model = "qwen-turbo"
            },
            RWKVConfig = new RWKVConfig
            {
                BaseUrl = "http://localhost:8000",
                SelectedModel = "RWKV-6-World-3B"
            }
        };
    }

    /// <summary>
    /// Force-creates the default configuration file (used to recover from a lost/corrupt config).
    /// </summary>
    public async Task CreateDefaultConfigFileAsync()
    {
        try
        {
            _logger.LogInformation("开始创建默认配置文件...");
            var defaultConfig = GetDefaultConfig();
            await SaveConfigAsync(defaultConfig);
            _logger.LogInformation("默认配置文件创建成功");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "创建默认配置文件失败");
            throw;
        }
    }

    /// <summary>Formats a byte count as a human-readable size string, e.g. "4.37 GB".</summary>
    private string FormatSize(long bytes)
    {
        string[] sizes = { "B", "KB", "MB", "GB", "TB" };
        double len = bytes;
        int order = 0;
        while (len >= 1024 && order < sizes.Length - 1)
        {
            order++;
            len = len / 1024;
        }
        return $"{len:0.##} {sizes[order]}";
    }
}

// Response model classes

/// <summary>Response payload of Ollama's GET /api/tags endpoint.</summary>
public class OllamaTagsResponse
{
    /// <summary>Models currently available on the Ollama server.</summary>
    public List<OllamaModelDetail> Models { get; set; } = new List<OllamaModelDetail>();
}

/// <summary>One model entry from Ollama's /api/tags listing.</summary>
public class OllamaModelDetail
{
    /// <summary>Model identifier, e.g. "llama3:8b".</summary>
    public string Name { get; set; } = string.Empty;

    /// <summary>On-disk size of the model in bytes.</summary>
    public long Size { get; set; }

    /// <summary>Last modification timestamp; snake_case on the wire.</summary>
    [JsonPropertyName("modified_at")]
    public DateTime ModifiedAt { get; set; }

    /// <summary>Content digest of the model blob.</summary>
    public string Digest { get; set; } = string.Empty;

    /// <summary>Nested model metadata (format, family, quantization, ...).</summary>
    public OllamaModelDetails Details { get; set; } = new OllamaModelDetails();
}

/// <summary>
/// The "details" object returned for each model by Ollama's /api/tags endpoint.
/// </summary>
public class OllamaModelDetails
{
    public string Format { get; set; } = "";
    public string Family { get; set; } = "";
    public List<string> Families { get; set; } = new();

    // Ollama emits these keys in snake_case. Neither the camelCase naming policy nor
    // PropertyNameCaseInsensitive bridges underscores, so without explicit mappings
    // "parameter_size" / "quantization_level" silently never deserialized into these
    // properties (compare the existing [JsonPropertyName("modified_at")] on the parent).
    [JsonPropertyName("parameter_size")]
    public string ParameterSize { get; set; } = "";

    [JsonPropertyName("quantization_level")]
    public string QuantizationLevel { get; set; } = "";
}

/// <summary>Response payload of LM Studio's OpenAI-compatible GET /v1/models endpoint.</summary>
public class LMStudioModelsResponse
{
    /// <summary>OpenAI list-object discriminator.</summary>
    public string Object { get; set; } = string.Empty;

    /// <summary>The models currently served by LM Studio.</summary>
    public List<LMStudioModelDetail> Data { get; set; } = new List<LMStudioModelDetail>();
}

/// <summary>One model entry from LM Studio's /v1/models listing.</summary>
public class LMStudioModelDetail
{
    /// <summary>Model identifier.</summary>
    public string Id { get; set; } = string.Empty;

    /// <summary>OpenAI object discriminator.</summary>
    public string Object { get; set; } = string.Empty;

    /// <summary>Creation time as a Unix timestamp in seconds.</summary>
    public long Created { get; set; }

    /// <summary>Owner of the model; snake_case on the wire.</summary>
    [JsonPropertyName("owned_by")]
    public string OwnedBy { get; set; } = string.Empty;
}
