using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Threading;
using DocumentCreationSystem.Models;
using DocumentCreationSystem.Services;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;

namespace DocumentCreationSystem.Views;

/// <summary>
/// 向量模型配置窗口
/// </summary>
public partial class VectorModelConfigWindow : Window
{
    private readonly IServiceProvider _serviceProvider;
    private readonly ILogger<VectorModelConfigWindow> _logger;
    private readonly IConfiguration _configuration;
    private readonly HttpClient _httpClient;
    private VectorModelConfig _currentConfig = null!;

    public VectorModelConfig? Result { get; private set; }

    /// <summary>
    /// Constructs the window, resolves required services and schedules async
    /// initialization for the <see cref="Window.Loaded"/> event.
    /// </summary>
    /// <param name="serviceProvider">Root DI container; must not be null.</param>
    /// <exception cref="ArgumentNullException">serviceProvider is null.</exception>
    public VectorModelConfigWindow(IServiceProvider serviceProvider)
    {
        try
        {
            InitializeComponent();

            _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider));
            _logger = serviceProvider.GetRequiredService<ILogger<VectorModelConfigWindow>>();
            _configuration = serviceProvider.GetRequiredService<IConfiguration>();
            _httpClient = new HttpClient();
            _httpClient.Timeout = TimeSpan.FromSeconds(10);

            // FIX: dispose the window-owned HttpClient when the window closes;
            // previously it leaked its handler/sockets for every window instance.
            this.Closed += (sender, e) => _httpClient.Dispose();

            // Defer config loading until the visual tree is ready so the
            // XAML-generated control fields are available.
            this.Loaded += async (sender, e) => await SafeInitializeAsync();

            _logger.LogInformation("向量模型配置窗口构造函数完成");
        }
        catch (Exception ex)
        {
            // _logger may still be null if InitializeComponent or service resolution threw.
            _logger?.LogError(ex, "初始化向量模型配置窗口失败");
            MessageBox.Show($"初始化向量模型配置窗口失败：{ex.Message}", "错误", MessageBoxButton.OK, MessageBoxImage.Error);
            throw;
        }
    }

    /// <summary>
    /// Loads the saved configuration off the UI thread, then populates the controls.
    /// All failures are caught and surfaced to the user instead of crashing the window.
    /// </summary>
    private async Task SafeInitializeAsync()
    {
        try
        {
            UpdateStatus("正在加载配置...");
            // Configuration loading may touch the file system, so keep it off the UI thread.
            _currentConfig = await Task.Run(LoadCurrentConfig);
            // Marshal the UI population back onto the dispatcher.
            await Dispatcher.InvokeAsync(InitializeUI);
            UpdateStatus("配置加载完成");
            _logger.LogInformation("向量模型配置窗口初始化成功");
        }
        catch (Exception ex)
        {
            _logger?.LogError(ex, "安全初始化失败");
            UpdateStatus($"初始化失败: {ex.Message}");
            MessageBox.Show($"配置加载失败：{ex.Message}", "错误", MessageBoxButton.OK, MessageBoxImage.Error);
        }
    }

    /// <summary>
    /// Returns true once every XAML-generated control this window relies on is non-null.
    /// Controls are null until InitializeComponent/Loaded has completed.
    /// </summary>
    private bool ValidateUIElements()
    {
        try
        {
            return TimeoutTextBox != null
                && EnableVectorCacheCheckBox != null
                && OllamaRadio != null
                && LMStudioRadio != null
                && StatusText != null;
        }
        catch (Exception ex)
        {
            _logger?.LogError(ex, "验证UI元素时发生错误");
            return false;
        }
    }

    /// <summary>
    /// Builds a fallback configuration pointing at the default local endpoints
    /// (Ollama on 11434, LM Studio on 1234) with a 30-second timeout.
    /// </summary>
    private VectorModelConfig CreateDefaultConfig()
    {
        var ollamaDefaults = new OllamaVectorConfig { BaseUrl = "http://localhost:11434" };
        var lmStudioDefaults = new LMStudioVectorConfig { BaseUrl = "http://localhost:1234" };

        return new VectorModelConfig
        {
            Platform = "Ollama",
            Timeout = 30,
            OllamaVectorConfig = ollamaDefaults,
            LMStudioVectorConfig = lmStudioDefaults
        };
    }

    /// <summary>
    /// Loads the vector model configuration in priority order: the persisted config
    /// service, then appsettings ("Vector:*" keys), finally built-in defaults.
    /// Runs synchronously because it is invoked via Task.Run off the UI thread.
    /// </summary>
    private VectorModelConfig LoadCurrentConfig()
    {
        try
        {
            var configService = _serviceProvider.GetRequiredService<IVectorModelConfigService>();
            // FIX: GetAwaiter().GetResult() instead of .Result so a failure surfaces as
            // the original exception type rather than an AggregateException wrapper,
            // which makes the warning log below actually useful.
            var savedConfig = configService.GetConfigAsync().GetAwaiter().GetResult();

            if (savedConfig != null)
            {
                _logger?.LogInformation("从保存的配置文件加载向量模型配置");
                return savedConfig;
            }
        }
        catch (Exception ex)
        {
            // Best-effort: fall through to the appsettings-based configuration.
            _logger?.LogWarning(ex, "加载保存的向量模型配置失败，使用默认配置");
        }

        try
        {
            var config = new VectorModelConfig
            {
                Platform = _configuration?["Vector:DefaultProvider"] ?? "Ollama",
                Timeout = 30
            };

            // Only override the default timeout when the setting parses cleanly.
            if (int.TryParse(_configuration?["Vector:Timeout"], out int timeout))
            {
                config.Timeout = timeout;
            }

            config.OllamaVectorConfig = new OllamaVectorConfig
            {
                BaseUrl = _configuration?["Vector:Ollama:BaseUrl"] ?? "http://localhost:11434",
                SelectedModel = _configuration?["Vector:Ollama:DefaultModel"] ?? "text-embedding-bge-m3"
            };

            config.LMStudioVectorConfig = new LMStudioVectorConfig
            {
                BaseUrl = _configuration?["Vector:LMStudio:BaseUrl"] ?? "http://localhost:1234",
                SelectedModel = _configuration?["Vector:LMStudio:DefaultModel"] ?? ""
            };

            return config;
        }
        catch (Exception ex)
        {
            _logger?.LogError(ex, "加载向量模型配置失败");
            return CreateDefaultConfig();
        }
    }

    /// <summary>
    /// Pushes the loaded configuration into the controls. If the XAML-generated
    /// fields are not ready yet, the call is re-queued on the dispatcher.
    /// </summary>
    private void InitializeUI()
    {
        try
        {
            if (_currentConfig == null)
            {
                _logger?.LogWarning("配置为空，使用默认配置");
                _currentConfig = CreateDefaultConfig();
            }

            if (!ValidateUIElements())
            {
                // Controls not ready (Loaded still in flight) — retry once layout completes.
                _logger?.LogWarning("UI元素尚未完全初始化，延迟重试");
                Dispatcher.BeginInvoke(new Action(InitializeUI), DispatcherPriority.Loaded);
                return;
            }

            TimeoutTextBox.Text = _currentConfig.Timeout.ToString();
            EnableVectorCacheCheckBox.IsChecked = true;

            // Null-conditional access collapses the original if/else pairs:
            // a missing sub-config falls back to the default local endpoint.
            if (OllamaUrlTextBox != null)
            {
                OllamaUrlTextBox.Text = _currentConfig.OllamaVectorConfig?.BaseUrl ?? "http://localhost:11434";
            }

            if (LMStudioUrlTextBox != null)
            {
                LMStudioUrlTextBox.Text = _currentConfig.LMStudioVectorConfig?.BaseUrl ?? "http://localhost:1234";
            }

            if (OllamaRadio != null && LMStudioRadio != null)
            {
                SetPlatformSelection(_currentConfig.Platform ?? "Ollama");
            }

            UpdateStatus("配置已加载");
        }
        catch (Exception ex)
        {
            _logger?.LogError(ex, "初始化UI失败");
            UpdateStatus($"初始化UI失败: {ex.Message}");
        }
    }

    /// <summary>
    /// Checks the radio button matching <paramref name="platform"/> (case-insensitive),
    /// defaulting to Ollama for null/empty/unknown names, then refreshes the panels.
    /// </summary>
    /// <param name="platform">Platform name, e.g. "Ollama" or "LMStudio".</param>
    private void SetPlatformSelection(string platform)
    {
        try
        {
            if (string.IsNullOrEmpty(platform))
                platform = "Ollama";

            if (OllamaRadio == null || LMStudioRadio == null)
            {
                _logger?.LogWarning("平台选择UI元素尚未初始化");
                return;
            }

            // FIX: ToLowerInvariant avoids culture-sensitive casing surprises
            // (e.g. the Turkish dotless-i) for these fixed ASCII identifiers.
            switch (platform.ToLowerInvariant())
            {
                case "ollama":
                    OllamaRadio.IsChecked = true;
                    break;
                case "lmstudio":
                    LMStudioRadio.IsChecked = true;
                    break;
                default:
                    // Unknown platform names fall back to Ollama.
                    OllamaRadio.IsChecked = true;
                    break;
            }

            UpdatePlatformVisibility();
        }
        catch (Exception ex)
        {
            // FIX: structured logging template instead of string interpolation (CA2254).
            _logger?.LogError(ex, "设置平台选择失败: {Platform}", platform);
        }
    }

    /// <summary>
    /// Shows the configuration panel for the checked platform and updates the
    /// title/description text; the other platform's panel is collapsed.
    /// </summary>
    private void UpdatePlatformVisibility()
    {
        try
        {
            bool controlsReady = OllamaConfig != null
                && LMStudioConfig != null
                && PlatformTitle != null
                && PlatformDescription != null;

            if (!controlsReady)
            {
                return;
            }

            // Hide both panels first, then reveal only the selected one.
            OllamaConfig.Visibility = Visibility.Collapsed;
            LMStudioConfig.Visibility = Visibility.Collapsed;

            if (OllamaRadio?.IsChecked == true)
            {
                PlatformTitle.Text = "Ollama 向量模型配置";
                PlatformDescription.Text = "本地开源向量模型平台，用于文档向量化和语义搜索";
                OllamaConfig.Visibility = Visibility.Visible;
            }
            else if (LMStudioRadio?.IsChecked == true)
            {
                PlatformTitle.Text = "LM Studio 向量模型配置";
                PlatformDescription.Text = "本地向量模型运行环境，提供OpenAI兼容的向量API";
                LMStudioConfig.Visibility = Visibility.Visible;
            }
        }
        catch (Exception ex)
        {
            _logger?.LogError(ex, "更新平台可见性失败");
        }
    }

    /// <summary>Radio-button handler: re-syncs the panel visibility with the selection.</summary>
    private void Platform_Checked(object sender, RoutedEventArgs e) => UpdatePlatformVisibility();

    /// <summary>
    /// Queries the selected platform for installed embedding models and fills the
    /// corresponding combo box. async void is acceptable here (top-level event handler).
    /// </summary>
    private async void DetectModels_Click(object sender, RoutedEventArgs e)
    {
        try
        {
            UpdateStatus("正在检测向量模型...", true);

            // Dispatch to the detector matching the checked platform radio button.
            Task? detection = null;
            if (OllamaRadio.IsChecked == true)
            {
                detection = DetectOllamaVectorModels();
            }
            else if (LMStudioRadio.IsChecked == true)
            {
                detection = DetectLMStudioVectorModels();
            }

            if (detection != null)
            {
                await detection;
            }

            UpdateStatus("向量模型检测完成", false);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "检测向量模型失败");
            UpdateStatus($"检测失败: {ex.Message}", false);
        }
    }

    /// <summary>
    /// Fetches the installed model list from Ollama's /api/tags endpoint and adds the
    /// embedding-capable ones to the Ollama model combo box, selecting the first hit.
    /// </summary>
    /// <exception cref="InvalidOperationException">The service could not be reached or the response could not be parsed.</exception>
    private async Task DetectOllamaVectorModels()
    {
        var url = OllamaUrlTextBox.Text.TrimEnd('/') + "/api/tags";

        try
        {
            // Per-request timeout, independent of _httpClient.Timeout.
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            var response = await _httpClient.GetStringAsync(url, cts.Token);

            var options = new JsonSerializerOptions
            {
                PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
                PropertyNameCaseInsensitive = true
            };

            var result = JsonSerializer.Deserialize<VectorOllamaModelsResponse>(response, options);

            OllamaModelComboBox.Items.Clear();

            if (result?.Models != null)
            {
                foreach (var model in result.Models)
                {
                    // Only surface models whose names look like embedding models.
                    if (IsVectorModel(model.Name))
                    {
                        var aiModel = new AIModel
                        {
                            Id = model.Name,
                            Name = $"{model.Name} ({FormatSize(model.Size)})",
                            Provider = "Ollama",
                            IsAvailable = true
                        };
                        OllamaModelComboBox.Items.Add(aiModel);
                    }
                }

                if (OllamaModelComboBox.Items.Count > 0)
                {
                    OllamaModelComboBox.SelectedIndex = 0;
                }
            }
        }
        catch (Exception ex)
        {
            // FIX: throw a specific exception type and keep the original failure as
            // InnerException instead of a bare Exception that discards the stack trace.
            throw new InvalidOperationException($"无法连接到Ollama服务: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Fetches the loaded model list from LM Studio's OpenAI-compatible /v1/models
    /// endpoint and adds the embedding-capable ones to the LM Studio combo box.
    /// </summary>
    /// <exception cref="InvalidOperationException">The service could not be reached or the response could not be parsed.</exception>
    private async Task DetectLMStudioVectorModels()
    {
        var url = LMStudioUrlTextBox.Text.TrimEnd('/') + "/v1/models";

        try
        {
            // Per-request timeout, independent of _httpClient.Timeout.
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            var response = await _httpClient.GetStringAsync(url, cts.Token);

            var options = new JsonSerializerOptions
            {
                PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
                PropertyNameCaseInsensitive = true
            };

            var result = JsonSerializer.Deserialize<VectorLMStudioModelsResponse>(response, options);

            LMStudioModelComboBox.Items.Clear();

            if (result?.Data != null)
            {
                foreach (var model in result.Data)
                {
                    // Only surface models whose ids look like embedding models.
                    if (IsVectorModel(model.Id))
                    {
                        var aiModel = new AIModel
                        {
                            Id = model.Id,
                            Name = $"{model.Id} ({model.OwnedBy})",
                            Provider = "LMStudio",
                            IsAvailable = true
                        };
                        LMStudioModelComboBox.Items.Add(aiModel);
                    }
                }

                if (LMStudioModelComboBox.Items.Count > 0)
                {
                    LMStudioModelComboBox.SelectedIndex = 0;
                }
            }
        }
        catch (Exception ex)
        {
            // FIX: throw a specific exception type and keep the original failure as
            // InnerException instead of a bare Exception that discards the stack trace.
            throw new InvalidOperationException($"无法连接到LM Studio服务: {ex.Message}", ex);
        }
    }

    /// <summary>
    /// Heuristic: a model is treated as a vector/embedding model when its name
    /// contains any well-known embedding keyword (case-insensitive). Null/empty
    /// names are never vector models.
    /// </summary>
    private bool IsVectorModel(string modelName)
    {
        if (string.IsNullOrEmpty(modelName))
        {
            return false;
        }

        // FIX: ordinal case-insensitive Contains instead of ToLower() — avoids a new
        // lowercase string per keyword and culture-dependent casing (Turkish 'I').
        var vectorKeywords = new[] { "embedding", "embed", "vector", "bge", "nomic", "mxbai", "minilm" };
        return vectorKeywords.Any(keyword => modelName.Contains(keyword, StringComparison.OrdinalIgnoreCase));
    }

    /// <summary>
    /// Tests connectivity to the currently selected platform/model and, on success,
    /// automatically saves and applies the configuration.
    /// </summary>
    private async void TestConnection_Click(object sender, RoutedEventArgs e)
    {
        try
        {
            var (platformName, modelName) = GetSelection();

            UpdateStatus($"正在测试 {platformName} - {modelName} 连接...", true);
            TestConnectionButton.IsEnabled = false;

            // Build (and implicitly validate) the config from the UI, then delegate
            // the actual connectivity probe to the config service.
            var testConfig = BuildConfigFromUI();
            var configService = _serviceProvider.GetRequiredService<IVectorModelConfigService>();
            await configService.TestVectorConnectionAsync(testConfig);

            UpdateStatus($"{platformName} - {modelName} 连接测试成功！正在保存配置...", true);
            await AutoSaveConfigAfterTest();
            UpdateStatus($"{platformName} - {modelName} 配置已保存并应用", false);

            // Local helper: resolve the platform label and displayed model name.
            (string, string) GetSelection()
            {
                if (OllamaRadio.IsChecked == true)
                {
                    return ("Ollama", (OllamaModelComboBox?.SelectedItem as AIModel)?.Name ?? "未选择模型");
                }
                if (LMStudioRadio.IsChecked == true)
                {
                    return ("LMStudio", (LMStudioModelComboBox?.SelectedItem as AIModel)?.Name ?? "未选择模型");
                }
                return ("", "");
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "向量模型连接测试失败");
            UpdateStatus($"连接失败: {ex.Message}", false);
        }
        finally
        {
            // Always re-enable the button, even when the test or save failed.
            TestConnectionButton.IsEnabled = true;
        }
    }

    /// <summary>
    /// Persists the tested configuration through the config service, publishes it as
    /// the dialog result, and offers to close the window. Rethrows on failure so the
    /// caller's status reporting sees it.
    /// </summary>
    private async Task AutoSaveConfigAfterTest()
    {
        try
        {
            var config = BuildConfigFromUI();
            var configService = _serviceProvider.GetRequiredService<IVectorModelConfigService>();
            await configService.SaveConfigAsync(config);
            Result = config;

            var choice = MessageBox.Show(
                "向量模型连接测试成功！配置已自动保存并应用。\n\n是否关闭配置窗口？",
                "测试成功",
                MessageBoxButton.YesNo,
                MessageBoxImage.Information);

            if (choice != MessageBoxResult.Yes)
            {
                return;
            }

            DialogResult = true;
            Close();
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "自动保存向量模型配置失败");
            UpdateStatus($"保存配置失败: {ex.Message}");
            // Propagate so TestConnection_Click reports the failure too.
            throw;
        }
    }

    /// <summary>"Refresh" re-runs detection; the shared handler reads the checked platform.</summary>
    private void RefreshModels_Click(object sender, RoutedEventArgs e) => DetectModels_Click(sender, e);

    /// <summary>
    /// Validates and captures the configuration from the UI, then closes the dialog
    /// with a positive result. Validation failures are shown in the status bar.
    /// </summary>
    private void Save_Click(object sender, RoutedEventArgs e)
    {
        try
        {
            var config = BuildConfigFromUI();
            Result = config;
            DialogResult = true;
            Close();
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "保存向量模型配置失败");
            UpdateStatus($"保存失败: {ex.Message}");
        }
    }

    /// <summary>Closes the dialog without applying any changes.</summary>
    private void Cancel_Click(object sender, RoutedEventArgs e)
    {
        DialogResult = false;
        Close();
    }

    /// <summary>
    /// Builds a <see cref="VectorModelConfig"/> from the current control values,
    /// validating the timeout range, platform choice and service URL format.
    /// </summary>
    /// <exception cref="ArgumentException">A field is missing or malformed.</exception>
    private VectorModelConfig BuildConfigFromUI()
    {
        // Timeout must be a whole number of seconds in [5, 300].
        if (!int.TryParse(TimeoutTextBox.Text, out int timeout) || timeout < 5 || timeout > 300)
        {
            throw new ArgumentException("超时时间必须在5-300秒之间");
        }

        var config = new VectorModelConfig
        {
            Timeout = timeout,
            OllamaVectorConfig = new OllamaVectorConfig
            {
                BaseUrl = OllamaUrlTextBox?.Text?.Trim() ?? "http://localhost:11434",
                SelectedModel = (OllamaModelComboBox?.SelectedItem as AIModel)?.Id ?? ""
            },
            LMStudioVectorConfig = new LMStudioVectorConfig
            {
                BaseUrl = LMStudioUrlTextBox?.Text?.Trim() ?? "http://localhost:1234",
                SelectedModel = GetLMStudioSelectedModel()
            }
        };

        if (OllamaRadio?.IsChecked == true)
        {
            ValidateBaseUrl(config.OllamaVectorConfig.BaseUrl, "请输入Ollama服务地址", "Ollama服务地址格式不正确");
            config.Platform = "Ollama";
        }
        else if (LMStudioRadio?.IsChecked == true)
        {
            ValidateBaseUrl(config.LMStudioVectorConfig.BaseUrl, "请输入LM Studio服务地址", "LM Studio服务地址格式不正确");
            config.Platform = "LMStudio";
        }
        else
        {
            throw new ArgumentException("请选择一个向量模型平台");
        }

        return config;

        // Local helper: non-empty + absolute-URI validation with caller-supplied messages.
        static void ValidateBaseUrl(string baseUrl, string emptyMessage, string formatMessage)
        {
            if (string.IsNullOrWhiteSpace(baseUrl))
                throw new ArgumentException(emptyMessage);
            if (!Uri.TryCreate(baseUrl, UriKind.Absolute, out _))
                throw new ArgumentException(formatMessage);
        }
    }

    /// <summary>
    /// Resolves the LM Studio model id: the combo box selection wins, then the
    /// previously loaded configuration, otherwise empty.
    /// </summary>
    private string GetLMStudioSelectedModel()
    {
        var fromComboBox = (LMStudioModelComboBox?.SelectedItem as AIModel)?.Id;
        if (!string.IsNullOrEmpty(fromComboBox))
        {
            return fromComboBox;
        }

        // Fall back to the saved configuration (e.g. detection has not run yet).
        var fromConfig = _currentConfig?.LMStudioVectorConfig?.SelectedModel;
        return string.IsNullOrEmpty(fromConfig) ? "" : fromConfig;
    }

    /// <summary>
    /// Writes a message to the status bar and toggles the indeterminate progress bar.
    /// Safe to call before the controls exist (null checks on every control).
    /// </summary>
    /// <param name="message">Status text; null shows "就绪".</param>
    /// <param name="showProgress">Whether to show the indeterminate progress bar.</param>
    private void UpdateStatus(string message, bool showProgress = false)
    {
        try
        {
            if (StatusText != null)
            {
                StatusText.Text = message ?? "就绪";
            }

            if (StatusProgressBar == null)
            {
                return;
            }

            StatusProgressBar.Visibility = showProgress ? Visibility.Visible : Visibility.Collapsed;
            StatusProgressBar.IsIndeterminate = showProgress;
        }
        catch (Exception ex)
        {
            _logger?.LogError(ex, "更新状态失败");
        }
    }

    /// <summary>
    /// Formats a byte count as a human-readable size (e.g. 1536 -> "1.5 KB"),
    /// dividing by 1024 until under one unit or "TB" is reached.
    /// </summary>
    private string FormatSize(long bytes)
    {
        string[] units = { "B", "KB", "MB", "GB", "TB" };
        double value = bytes;
        int unit = 0;

        for (; value >= 1024 && unit < units.Length - 1; unit++)
        {
            value /= 1024;
        }

        return $"{value:0.##} {units[unit]}";
    }

    /// <summary>
    /// Button handler: downloads the Ollama model named in the button's Tag.
    /// </summary>
    private async void DownloadModel_Click(object sender, RoutedEventArgs e)
    {
        // The model name travels on the button's Tag; ignore clicks from anything else.
        if (sender is not Button { Tag: string modelName })
        {
            return;
        }

        await DownloadOllamaVectorModelAsync(modelName);
    }

    /// <summary>
    /// Downloads the model whose name the user typed into the custom-model text box.
    /// </summary>
    private async void DownloadCustomModel_Click(object sender, RoutedEventArgs e)
    {
        var modelName = CustomModelTextBox.Text?.Trim();

        // Nothing typed — tell the user instead of issuing an empty pull request.
        if (string.IsNullOrEmpty(modelName))
        {
            UpdateStatus("请输入向量模型名称", false);
            return;
        }

        await DownloadOllamaVectorModelAsync(modelName);
    }

    /// <summary>
    /// Pulls a model through Ollama's /api/pull endpoint, reporting progress via the
    /// status bar and download panel, then re-runs model detection on success.
    /// </summary>
    /// <param name="modelName">Name of the Ollama model to pull.</param>
    private async Task DownloadOllamaVectorModelAsync(string modelName)
    {
        try
        {
            UpdateStatus($"正在下载向量模型: {modelName}...", true);
            DownloadStatusText.Text = $"正在下载 {modelName}...";
            DownloadProgressBar.Visibility = Visibility.Visible;
            DownloadProgressBar.IsIndeterminate = true;

            // FIX: TrimEnd('/') keeps the URL well-formed when the user typed a trailing
            // slash (consistent with DetectOllamaVectorModels).
            var ollamaUrl = (OllamaUrlTextBox.Text?.Trim() ?? "http://localhost:11434").TrimEnd('/');
            var pullUrl = $"{ollamaUrl}/api/pull";

            var requestBody = new { name = modelName };
            var json = JsonSerializer.Serialize(requestBody);
            var content = new StringContent(json, System.Text.Encoding.UTF8, "application/json");

            // Dedicated client: model pulls can take far longer than the shared
            // _httpClient's 10-second timeout allows.
            using var httpClient = new HttpClient();
            httpClient.Timeout = TimeSpan.FromMinutes(30);

            var response = await httpClient.PostAsync(pullUrl, content);

            if (response.IsSuccessStatusCode)
            {
                UpdateStatus($"向量模型 {modelName} 下载成功！", false);
                DownloadStatusText.Text = $"向量模型 {modelName} 下载完成";
                // Refresh the model list so the new model shows up immediately.
                DetectModels_Click(null!, null!);
            }
            else
            {
                var errorContent = await response.Content.ReadAsStringAsync();
                UpdateStatus($"下载失败: {response.StatusCode}", false);
                DownloadStatusText.Text = $"下载失败: {errorContent}";
            }
        }
        catch (Exception ex)
        {
            // FIX: structured logging template instead of string interpolation (CA2254).
            _logger?.LogError(ex, "下载向量模型失败: {ModelName}", modelName);
            UpdateStatus($"下载失败: {ex.Message}", false);
            DownloadStatusText.Text = $"下载失败: {ex.Message}";
        }
        finally
        {
            DownloadProgressBar.Visibility = Visibility.Collapsed;
            DownloadProgressBar.IsIndeterminate = false;
        }
    }
}

// 响应模型类
/// <summary>
/// JSON shape of Ollama's GET /api/tags response (deserialized case-insensitively).
/// </summary>
public class VectorOllamaModelsResponse
{
    /// <summary>Installed models reported by the Ollama daemon; never null.</summary>
    public List<VectorOllamaModelInfo> Models { get; set; } = new List<VectorOllamaModelInfo>();
}

/// <summary>
/// One entry of Ollama's /api/tags model list.
/// </summary>
public class VectorOllamaModelInfo
{
    /// <summary>Model identifier, e.g. "bge-m3:latest".</summary>
    public string Name { get; set; } = string.Empty;

    /// <summary>On-disk size in bytes.</summary>
    public long Size { get; set; }

    /// <summary>Timestamp of the model's last modification.</summary>
    public DateTime ModifiedAt { get; set; }
}

/// <summary>
/// JSON shape of LM Studio's OpenAI-compatible GET /v1/models response.
/// </summary>
public class VectorLMStudioModelsResponse
{
    /// <summary>Loaded models reported by LM Studio; never null.</summary>
    public List<VectorLMStudioModelInfo> Data { get; set; } = new List<VectorLMStudioModelInfo>();
}

/// <summary>
/// One entry of LM Studio's /v1/models list (OpenAI model-object shape).
/// </summary>
public class VectorLMStudioModelInfo
{
    /// <summary>Model identifier used for API calls.</summary>
    public string Id { get; set; } = string.Empty;

    /// <summary>OpenAI object tag, typically "model".</summary>
    public string Object { get; set; } = string.Empty;

    /// <summary>Creation time as a Unix timestamp.</summary>
    public long Created { get; set; }

    /// <summary>Owner/organization string reported by the server.</summary>
    public string OwnedBy { get; set; } = string.Empty;
}
