using DocumentCreationSystem.Models;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.IO;

namespace DocumentCreationSystem.Services;

/// <summary>
/// Vector model configuration service.
/// Loads and persists the embedding-model configuration file
/// (%APPDATA%\DocumentCreationSystem\vector-config.json) and probes
/// Ollama / LM Studio endpoints for embedding-capable models.
/// </summary>
public class VectorModelConfigService : IVectorModelConfigService
{
    // Cached serializer options (CA1869: avoid allocating JsonSerializerOptions per call).
    private static readonly JsonSerializerOptions s_readOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    private static readonly JsonSerializerOptions s_writeOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    // Keywords that identify embedding/vector models by name (matched case-insensitively).
    private static readonly string[] s_vectorKeywords =
        { "embedding", "embed", "vector", "bge", "nomic", "mxbai", "minilm" };

    private readonly ILogger<VectorModelConfigService> _logger;
    private readonly IConfiguration _configuration;
    private readonly HttpClient _httpClient;
    private readonly string _configFilePath;

    public VectorModelConfigService(
        ILogger<VectorModelConfigService> logger,
        IConfiguration configuration,
        HttpClient httpClient)
    {
        _logger = logger;
        _configuration = configuration;
        _httpClient = httpClient;
        // Per-user config file: %APPDATA%\DocumentCreationSystem\vector-config.json
        _configFilePath = Path.Combine(
            Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData),
            "DocumentCreationSystem", "vector-config.json");
    }

    /// <summary>
    /// Loads the vector model configuration from disk. Falls back to the
    /// built-in default configuration when the file is missing, unreadable,
    /// or deserializes to null. Never throws.
    /// </summary>
    public async Task<VectorModelConfig> GetConfigAsync()
    {
        try
        {
            _logger.LogInformation("尝试加载向量模型配置文件: {ConfigFilePath}", _configFilePath);

            if (File.Exists(_configFilePath))
            {
                var json = await File.ReadAllTextAsync(_configFilePath);
                var config = JsonSerializer.Deserialize<VectorModelConfig>(json, s_readOptions);
                if (config != null)
                {
                    _logger.LogInformation("成功加载向量模型配置 - 平台: {Platform}, 模型: {Model}",
                        config.Platform, GetSelectedModelFromConfig(config));
                    return config;
                }
            }
            else
            {
                _logger.LogInformation("向量模型配置文件不存在，将使用默认配置");
            }
        }
        catch (Exception ex)
        {
            // Best-effort load: any IO/parse failure falls through to the default config.
            _logger.LogError(ex, "加载向量模型配置失败");
        }

        var defaultConfig = GetDefaultConfig();
        _logger.LogInformation("使用默认向量模型配置 - 平台: {Platform}", defaultConfig.Platform);
        return defaultConfig;
    }

    /// <summary>
    /// Returns the selected model name for the configured platform, used only
    /// for log output. Platform names here are the exact persisted identifiers
    /// ("Ollama"/"LMStudio"), so the match is intentionally case-sensitive.
    /// </summary>
    private static string GetSelectedModelFromConfig(VectorModelConfig config)
    {
        return config.Platform switch
        {
            "Ollama" => config.OllamaVectorConfig?.SelectedModel ?? "未选择",
            "LMStudio" => config.LMStudioVectorConfig?.SelectedModel ?? "未选择",
            _ => "未知平台"
        };
    }

    /// <summary>
    /// Persists the given configuration as indented camelCase JSON, creating
    /// the config directory when needed. Rethrows on failure so callers can react.
    /// </summary>
    public async Task SaveConfigAsync(VectorModelConfig config)
    {
        try
        {
            _logger.LogInformation("开始保存向量模型配置到: {ConfigFilePath}", _configFilePath);
            _logger.LogInformation("配置内容 - 平台: {Platform}", config.Platform);

            var directory = Path.GetDirectoryName(_configFilePath);
            if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
            {
                Directory.CreateDirectory(directory);
                _logger.LogInformation("创建配置目录: {Directory}", directory);
            }

            var json = JsonSerializer.Serialize(config, s_writeOptions);
            _logger.LogInformation("序列化后的配置长度: {Length}", json.Length);

            await File.WriteAllTextAsync(_configFilePath, json);

            // Sanity check that the file actually landed on disk.
            if (File.Exists(_configFilePath))
            {
                var fileSize = new FileInfo(_configFilePath).Length;
                _logger.LogInformation("向量模型配置文件已创建，大小: {FileSize} 字节", fileSize);
            }
            else
            {
                _logger.LogError("向量模型配置文件保存后不存在！");
            }

            var selectedModel = GetSelectedModelFromConfig(config);
            _logger.LogInformation("向量模型配置已保存到: {ConfigFilePath} - 平台: {Platform}, 模型: {Model}",
                _configFilePath, config.Platform, selectedModel);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "保存向量模型配置失败到: {ConfigFilePath}", _configFilePath);
            throw; // preserve the original stack trace
        }
    }

    /// <summary>
    /// Queries an Ollama server (GET /api/tags) and returns only the models
    /// whose names look like embedding/vector models.
    /// </summary>
    /// <param name="baseUrl">Ollama base URL, e.g. http://localhost:11434.</param>
    /// <exception cref="InvalidOperationException">When the server is unreachable or responds with invalid data.</exception>
    public async Task<List<AIModel>> DetectOllamaVectorModelsAsync(string baseUrl)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/api/tags";
            var response = await _httpClient.GetStringAsync(url);

            var result = JsonSerializer.Deserialize<OllamaTagsResponse>(response, s_readOptions);

            var models = new List<AIModel>();
            if (result?.Models != null)
            {
                foreach (var model in result.Models)
                {
                    // Keep only embedding-capable models (name heuristic).
                    if (IsVectorModel(model.Name))
                    {
                        models.Add(new AIModel
                        {
                            Id = model.Name,
                            Name = model.Name,
                            Provider = "Ollama",
                            Size = model.Size,
                            ModifiedAt = model.ModifiedAt,
                            IsAvailable = true,
                            Description = $"Ollama向量模型 - {FormatSize(model.Size)}"
                        });
                    }
                }
            }

            return models;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "检测Ollama向量模型失败: {BaseUrl}", baseUrl);
            throw new InvalidOperationException($"无法连接到Ollama服务 ({baseUrl}): {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Queries an LM Studio server (GET /v1/models, OpenAI-compatible) and
    /// returns only the models whose ids look like embedding/vector models.
    /// </summary>
    /// <param name="baseUrl">LM Studio base URL.</param>
    /// <exception cref="InvalidOperationException">When the server is unreachable or responds with invalid data.</exception>
    public async Task<List<AIModel>> DetectLMStudioVectorModelsAsync(string baseUrl)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/v1/models";
            var response = await _httpClient.GetStringAsync(url);

            var result = JsonSerializer.Deserialize<LMStudioModelsResponse>(response, s_readOptions);

            var models = new List<AIModel>();
            if (result?.Data != null)
            {
                foreach (var model in result.Data)
                {
                    // Keep only embedding-capable models (id heuristic).
                    if (IsVectorModel(model.Id))
                    {
                        models.Add(new AIModel
                        {
                            Id = model.Id,
                            Name = model.Id,
                            Provider = "LMStudio",
                            // "created" is a Unix epoch timestamp in the OpenAI models schema.
                            CreatedAt = DateTimeOffset.FromUnixTimeSeconds(model.Created).DateTime,
                            IsAvailable = true,
                            Description = $"LM Studio向量模型 - {model.OwnedBy}"
                        });
                    }
                }
            }

            return models;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "检测LM Studio向量模型失败: {BaseUrl}", baseUrl);
            throw new InvalidOperationException($"无法连接到LM Studio服务 ({baseUrl}): {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Heuristic: a model is treated as an embedding/vector model when its
    /// name contains any known embedding-related keyword.
    /// </summary>
    private static bool IsVectorModel(string modelName)
    {
        if (string.IsNullOrEmpty(modelName))
        {
            return false;
        }

        // OrdinalIgnoreCase avoids culture-sensitive ToLower() pitfalls (e.g. Turkish 'I').
        return s_vectorKeywords.Any(keyword =>
            modelName.Contains(keyword, StringComparison.OrdinalIgnoreCase));
    }

    /// <summary>
    /// Tests connectivity for the configured platform and, when a model is
    /// selected, verifies the model can actually serve an embedding request.
    /// Returns true on success; throws on any failure.
    /// </summary>
    public async Task<bool> TestVectorConnectionAsync(VectorModelConfig config)
    {
        try
        {
            // ToLowerInvariant: the platform name is a machine identifier, not UI text.
            switch (config.Platform?.ToLowerInvariant())
            {
                case "ollama":
                    if (config.OllamaVectorConfig == null)
                    {
                        throw new InvalidOperationException("Ollama向量模型配置缺失");
                    }
                    await TestOllamaVectorConnectionAsync(config.OllamaVectorConfig.BaseUrl);
                    // If a model is selected, also verify it is usable.
                    if (!string.IsNullOrEmpty(config.OllamaVectorConfig.SelectedModel))
                    {
                        await TestOllamaVectorModelAsync(
                            config.OllamaVectorConfig.BaseUrl,
                            config.OllamaVectorConfig.SelectedModel);
                    }
                    break;
                case "lmstudio":
                    if (config.LMStudioVectorConfig == null)
                    {
                        throw new InvalidOperationException("LM Studio向量模型配置缺失");
                    }
                    await TestLMStudioVectorConnectionAsync(config.LMStudioVectorConfig.BaseUrl);
                    // If a model is selected, also verify it is usable.
                    if (!string.IsNullOrEmpty(config.LMStudioVectorConfig.SelectedModel))
                    {
                        await TestLMStudioVectorModelAsync(
                            config.LMStudioVectorConfig.BaseUrl,
                            config.LMStudioVectorConfig.SelectedModel);
                    }
                    break;
                default:
                    throw new NotSupportedException($"不支持的向量模型平台: {config.Platform}");
            }

            return true;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "测试向量模型连接失败: {Platform}", config.Platform);
            throw;
        }
    }

    /// <summary>
    /// Verifies the Ollama server answers GET /api/tags within 10 seconds.
    /// </summary>
    private async Task TestOllamaVectorConnectionAsync(string baseUrl)
    {
        try
        {
            var tagsUrl = baseUrl.TrimEnd('/') + "/api/tags";
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            using var response = await _httpClient.GetAsync(tagsUrl, cts.Token);
            response.EnsureSuccessStatusCode();

            _logger.LogInformation("Ollama向量模型服务器连接测试成功");
        }
        catch (TaskCanceledException)
        {
            // HttpClient surfaces both our 10s token and its own timeout this way.
            throw new InvalidOperationException("Ollama向量模型连接超时，请检查服务是否启动");
        }
        catch (HttpRequestException ex)
        {
            throw new InvalidOperationException($"Ollama向量模型网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException($"Ollama向量模型连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Verifies the LM Studio server answers GET /v1/models within 10 seconds.
    /// </summary>
    private async Task TestLMStudioVectorConnectionAsync(string baseUrl)
    {
        try
        {
            var modelsUrl = baseUrl.TrimEnd('/') + "/v1/models";
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            using var response = await _httpClient.GetAsync(modelsUrl, cts.Token);
            response.EnsureSuccessStatusCode();

            _logger.LogInformation("LM Studio向量模型服务器连接测试成功");
        }
        catch (TaskCanceledException)
        {
            // HttpClient surfaces both our 10s token and its own timeout this way.
            throw new InvalidOperationException("LM Studio向量模型连接超时，请检查服务是否启动");
        }
        catch (HttpRequestException ex)
        {
            throw new InvalidOperationException($"LM Studio向量模型网络连接失败: {ex.Message}", ex);
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException($"LM Studio向量模型连接测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Sends a small embedding request (POST /api/embeddings) to verify the
    /// selected Ollama model responds within 30 seconds.
    /// </summary>
    private async Task TestOllamaVectorModelAsync(string baseUrl, string modelName)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/api/embeddings";
            var request = new
            {
                model = modelName,
                prompt = "测试向量化"
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
            using var response = await _httpClient.PostAsync(url, content, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new InvalidOperationException($"Ollama向量模型 {modelName} 不可用: {errorContent}");
            }

            _logger.LogInformation("Ollama向量模型 {ModelName} 测试成功", modelName);
        }
        catch (TaskCanceledException)
        {
            throw new InvalidOperationException($"Ollama向量模型 {modelName} 响应超时");
        }
        catch (Exception ex) when (!ex.Message.Contains("Ollama"))
        {
            // The message filter avoids double-wrapping the "不可用" exception thrown above.
            throw new InvalidOperationException($"Ollama向量模型 {modelName} 测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Sends a small embedding request (POST /v1/embeddings, OpenAI-compatible)
    /// to verify the selected LM Studio model responds within 30 seconds.
    /// </summary>
    private async Task TestLMStudioVectorModelAsync(string baseUrl, string modelName)
    {
        try
        {
            var url = baseUrl.TrimEnd('/') + "/v1/embeddings";
            var request = new
            {
                model = modelName,
                input = "测试向量化"
            };

            var json = JsonSerializer.Serialize(request);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
            using var response = await _httpClient.PostAsync(url, content, cts.Token);

            if (!response.IsSuccessStatusCode)
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                throw new InvalidOperationException($"LM Studio向量模型 {modelName} 不可用: {errorContent}");
            }

            _logger.LogInformation("LM Studio向量模型 {ModelName} 测试成功", modelName);
        }
        catch (TaskCanceledException)
        {
            throw new InvalidOperationException($"LM Studio向量模型 {modelName} 响应超时");
        }
        catch (Exception ex) when (!ex.Message.Contains("LM Studio"))
        {
            // The message filter avoids double-wrapping the "不可用" exception thrown above.
            throw new InvalidOperationException($"LM Studio向量模型 {modelName} 测试失败: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Builds the built-in default configuration (Ollama on localhost).
    /// </summary>
    public VectorModelConfig GetDefaultConfig()
    {
        return new VectorModelConfig
        {
            Platform = "Ollama", // default platform
            Timeout = 120, // seconds; matches API request best practice
            OllamaVectorConfig = new OllamaVectorConfig
            {
                BaseUrl = "http://localhost:11434",
                SelectedModel = "text-embedding-bge-m3" // recommended default embedding model
            },
            LMStudioVectorConfig = new LMStudioVectorConfig
            {
                // NOTE(review): non-local default endpoint looks like a leaked dev URL —
                // confirm this should not be the LM Studio default http://localhost:1234.
                BaseUrl = "http://50844s9656.wocp.fun:42440"
            }
        };
    }

    /// <summary>
    /// Forces creation of the default vector model configuration file on disk.
    /// </summary>
    public async Task CreateDefaultConfigFileAsync()
    {
        try
        {
            _logger.LogInformation("开始创建默认向量模型配置文件...");
            var defaultConfig = GetDefaultConfig();
            await SaveConfigAsync(defaultConfig);
            _logger.LogInformation("默认向量模型配置文件创建成功");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "创建默认向量模型配置文件失败");
            throw;
        }
    }

    /// <summary>
    /// Formats a byte count as a human-readable size string (e.g. "1.5 GB").
    /// </summary>
    private static string FormatSize(long bytes)
    {
        string[] sizes = { "B", "KB", "MB", "GB", "TB" };
        double len = bytes;
        int order = 0;
        while (len >= 1024 && order < sizes.Length - 1)
        {
            order++;
            len /= 1024;
        }
        return $"{len:0.##} {sizes[order]}";
    }
}
