using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using DocumentCreationSystem.Models;

namespace DocumentCreationSystem.Services;

/// <summary>
/// BGE-M3 embedding service — supports LM Studio (/v1/embeddings, OpenAI-compatible)
/// and Ollama (/api/embed, with a fallback to the legacy /api/embeddings endpoint).
/// Vectors are L2-normalized exactly once in <see cref="EmbedTextAsync"/>. When no
/// backend is reachable, a deterministic fallback embedding keeps the in-memory
/// vector store usable in degraded mode.
/// </summary>
public class BGEEmbeddingService : IVectorService
{
    private readonly ILogger<BGEEmbeddingService> _logger;
    private readonly IConfiguration _configuration; // retained for DI compatibility; not read directly here
    private readonly HttpClient _httpClient;
    private readonly IVectorModelConfigService _vectorConfigService;
    private readonly Dictionary<string, VectorDocument> _vectorStore;

    // Runtime configuration snapshot (refreshed by InitializeAsync).
    private string _platform = "LMStudio"; // "Ollama" | "LMStudio"
    private string _baseUrl = "http://localhost:1234";
    private string _modelName = "text-embedding-bge-m3";

    public BGEEmbeddingService(
        ILogger<BGEEmbeddingService> logger,
        IConfiguration configuration,
        HttpClient httpClient,
        IVectorModelConfigService vectorConfigService)
    {
        _logger = logger;
        _configuration = configuration;
        _httpClient = httpClient;
        _vectorConfigService = vectorConfigService;
        _vectorStore = new Dictionary<string, VectorDocument>();
    }

    /// <summary>
    /// Loads the vector-model configuration snapshot and probes the backend.
    /// Returns true even when the probe fails (the service then degrades to
    /// fallback embeddings); returns false only if reading the configuration throws.
    /// </summary>
    public async Task<bool> InitializeAsync()
    {
        try
        {
            // Take a snapshot of the vector-model configuration.
            var cfg = await _vectorConfigService.GetConfigAsync();
            _platform = string.IsNullOrWhiteSpace(cfg.Platform) ? "LMStudio" : cfg.Platform;
            if (string.Equals(_platform, "Ollama", StringComparison.OrdinalIgnoreCase))
            {
                // Guard against empty strings as well as nulls so we never build a relative URL.
                _baseUrl = string.IsNullOrWhiteSpace(cfg.OllamaVectorConfig.BaseUrl)
                    ? "http://localhost:11434"
                    : cfg.OllamaVectorConfig.BaseUrl.TrimEnd('/');
                _modelName = string.IsNullOrWhiteSpace(cfg.OllamaVectorConfig.SelectedModel)
                    ? "text-embedding-bge-m3"
                    : cfg.OllamaVectorConfig.SelectedModel;
            }
            else
            {
                _baseUrl = string.IsNullOrWhiteSpace(cfg.LMStudioVectorConfig.BaseUrl)
                    ? "http://localhost:1234"
                    : cfg.LMStudioVectorConfig.BaseUrl.TrimEnd('/');
                _modelName = string.IsNullOrWhiteSpace(cfg.LMStudioVectorConfig.SelectedModel)
                    ? "text-embedding-bge-m3"
                    : cfg.LMStudioVectorConfig.SelectedModel;
                _platform = "LMStudio"; // normalize any unrecognized platform value
            }

            if (await CheckConnectionAsync())
            {
                _logger.LogInformation("向量服务({Platform})初始化成功，BaseUrl={BaseUrl}，Model={Model}", _platform, _baseUrl, _modelName);
            }
            else
            {
                _logger.LogWarning("向量服务({Platform})连接失败，尝试自动检测可用向量模型并重试", _platform);
                await TryAutoDetectAndRetryAsync();
            }
            return true;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "BGE-M3向量化服务初始化失败");
            return false;
        }
    }

    /// <summary>
    /// Embeds a single text. Tries the configured backend first (for Ollama: the new
    /// /api/embed endpoint, then the legacy /api/embeddings); on failure falls back to a
    /// deterministic locally-generated vector. The result is always L2-normalized.
    /// </summary>
    public async Task<float[]> EmbedTextAsync(string text)
    {
        try
        {
            float[]? embedding;
            if (string.Equals(_platform, "Ollama", StringComparison.OrdinalIgnoreCase))
            {
                // Fall back to the legacy endpoint for older Ollama versions.
                embedding = await GetOllamaEmbeddingAsync(text)
                            ?? await GetOllamaEmbeddingsLegacyAsync(text);
            }
            else
            {
                embedding = await GetLMStudioEmbeddingAsync(text);
            }

            if (embedding != null)
            {
                // Single normalization point: the private getters return raw vectors.
                return NormalizeEmbedding(embedding);
            }

            _logger.LogWarning("向量化失败（{Platform}），使用备用方法", _platform);
            return GenerateFallbackEmbedding(text);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "文本向量化失败");
            return GenerateFallbackEmbedding(text);
        }
    }

    /// <summary>
    /// L2-normalizes an embedding. Returns a copy; the input array is never mutated.
    /// A (near-)zero vector is returned unchanged to avoid division by ~0.
    /// </summary>
    private static float[] NormalizeEmbedding(float[] embedding)
    {
        if (embedding == null || embedding.Length == 0) return Array.Empty<float>();
        double sumSq = 0.0;
        for (int i = 0; i < embedding.Length; i++) sumSq += (double)embedding[i] * embedding[i];
        var norm = Math.Sqrt(sumSq);
        if (norm <= 1e-12) return embedding.ToArray();
        var res = new float[embedding.Length];
        var inv = (float)(1.0 / norm);
        for (int i = 0; i < embedding.Length; i++) res[i] = embedding[i] * inv;
        return res;
    }

    /// <summary>
    /// Requests an embedding from LM Studio's OpenAI-compatible /v1/embeddings endpoint.
    /// Returns the raw (unnormalized) vector, or null on any failure.
    /// </summary>
    private async Task<float[]?> GetLMStudioEmbeddingAsync(string text)
    {
        try
        {
            var url = $"{_baseUrl}/v1/embeddings";
            var requestBody = new
            {
                model = string.IsNullOrWhiteSpace(_modelName) ? "text-embedding-bge-m3" : _modelName,
                input = text,
                encoding_format = "float"
            };

            var json = JsonSerializer.Serialize(requestBody);
            var content = new StringContent(json, Encoding.UTF8, "application/json");

            var response = await _httpClient.PostAsync(url, content);
            if (response.IsSuccessStatusCode)
            {
                var responseJson = await response.Content.ReadAsStringAsync();
                var embeddingResponse = JsonSerializer.Deserialize<EmbeddingResponse>(responseJson);
                if (embeddingResponse?.data?.Length > 0 && embeddingResponse.data[0].embedding != null)
                {
                    return embeddingResponse.data[0].embedding;
                }
            }
            else
            {
                _logger.LogWarning("LM Studio embeddings API 请求失败: {StatusCode}", response.StatusCode);
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "调用 LM Studio embeddings API 失败");
        }
        return null;
    }

    /// <summary>
    /// Requests an embedding from Ollama's newer /api/embed endpoint.
    /// Expected shape: {"model":"...","embeddings":[[...]], ...} — only the first row is used.
    /// Returns the raw (unnormalized) vector, or null on any failure.
    /// </summary>
    private async Task<float[]?> GetOllamaEmbeddingAsync(string text)
    {
        try
        {
            var url = $"{_baseUrl}/api/embed";
            var requestBody = new
            {
                model = string.IsNullOrWhiteSpace(_modelName) ? "text-embedding-bge-m3" : _modelName,
                input = text
            };
            var json = JsonSerializer.Serialize(requestBody);
            var content = new StringContent(json, Encoding.UTF8, "application/json");
            var response = await _httpClient.PostAsync(url, content);
            if (response.IsSuccessStatusCode)
            {
                var responseJson = await response.Content.ReadAsStringAsync();
                // Dispose the JsonDocument so its pooled buffers are returned.
                using var doc = JsonDocument.Parse(responseJson);
                if (doc.RootElement.TryGetProperty("embeddings", out var embeddingsEl) && embeddingsEl.ValueKind == JsonValueKind.Array)
                {
                    var first = embeddingsEl.EnumerateArray().FirstOrDefault();
                    if (first.ValueKind == JsonValueKind.Array)
                    {
                        return ReadFloatArray(first);
                    }
                }
            }
            else
            {
                _logger.LogWarning("Ollama /api/embed 请求失败: {StatusCode}", response.StatusCode);
            }
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "调用 Ollama /api/embed 失败，将尝试兼容旧版 /api/embeddings");
        }
        return null;
    }

    /// <summary>
    /// Requests an embedding from Ollama's legacy /api/embeddings endpoint
    /// (note: it takes "prompt" and returns a flat "embedding" field).
    /// Returns the raw (unnormalized) vector, or null on any failure.
    /// </summary>
    private async Task<float[]?> GetOllamaEmbeddingsLegacyAsync(string text)
    {
        try
        {
            var url = $"{_baseUrl}/api/embeddings";
            var requestBody = new
            {
                model = string.IsNullOrWhiteSpace(_modelName) ? "text-embedding-bge-m3" : _modelName,
                prompt = text
            };
            var json = JsonSerializer.Serialize(requestBody);
            var content = new StringContent(json, Encoding.UTF8, "application/json");
            var response = await _httpClient.PostAsync(url, content);
            if (response.IsSuccessStatusCode)
            {
                var responseJson = await response.Content.ReadAsStringAsync();
                using var doc = JsonDocument.Parse(responseJson);
                if (doc.RootElement.TryGetProperty("embedding", out var embEl) && embEl.ValueKind == JsonValueKind.Array)
                {
                    return ReadFloatArray(embEl);
                }
            }
            else
            {
                _logger.LogWarning("Ollama /api/embeddings 请求失败: {StatusCode}", response.StatusCode);
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "调用 Ollama /api/embeddings 失败");
        }
        return null;
    }

    /// <summary>
    /// Converts a JSON array of numbers into a float[] (doubles are narrowed to float).
    /// Non-numeric elements are skipped.
    /// </summary>
    private static float[] ReadFloatArray(JsonElement arrayElement)
    {
        var list = new List<float>(arrayElement.GetArrayLength());
        foreach (var num in arrayElement.EnumerateArray())
        {
            if (num.TryGetSingle(out var f)) list.Add(f);
            else if (num.TryGetDouble(out var d)) list.Add((float)d);
        }
        return list.ToArray();
    }

    /// <summary>
    /// Generates a deterministic pseudo-embedding from text features. Used only when
    /// no backend is reachable; the result is L2-normalized like real embeddings.
    /// </summary>
    private float[] GenerateFallbackEmbedding(string text)
    {
        const int vectorSize = 1024; // BGE-M3 dimensionality

        // string.GetHashCode() is randomized per process on .NET Core, which would make
        // these "deterministic" vectors differ between runs (stored vectors would never
        // match query vectors from a later run). Use a stable FNV-1a hash as the seed.
        var random = new Random(ComputeStableHash(text));
        var vector = new float[vectorSize];

        // Derive simple text features so different texts separate a little.
        var words = text.Split(' ', StringSplitOptions.RemoveEmptyEntries);
        var wordCount = words.Length;
        var avgWordLength = words.Any() ? words.Average(w => w.Length) : 0;
        var uniqueWords = words.Distinct().Count();
        var chineseChars = text.Count(c => c >= 0x4e00 && c <= 0x9fff);

        for (int i = 0; i < vectorSize; i++)
        {
            // Blend seeded noise with several per-index feature waves.
            var baseValue = (float)(random.NextDouble() * 2 - 1);
            var textFeature = (float)Math.Sin(i * avgWordLength / 100.0) * 0.3f;
            var lengthFeature = (float)Math.Cos(i * wordCount / 50.0) * 0.2f;
            var uniqueFeature = (float)Math.Sin(i * uniqueWords / 30.0) * 0.1f;
            var chineseFeature = (float)Math.Cos(i * chineseChars / 20.0) * 0.15f;

            vector[i] = baseValue + textFeature + lengthFeature + uniqueFeature + chineseFeature;
        }

        return NormalizeEmbedding(vector);
    }

    /// <summary>
    /// Stable 32-bit FNV-1a hash of a string — identical across processes and runs,
    /// unlike string.GetHashCode() on .NET Core.
    /// </summary>
    private static int ComputeStableHash(string text)
    {
        unchecked
        {
            uint hash = 2166136261;
            foreach (char c in text)
            {
                hash ^= c;
                hash *= 16777619;
            }
            return (int)hash;
        }
    }

    /// <summary>
    /// Embeds a document and stores it in the in-memory vector store under <paramref name="vectorId"/>.
    /// Existing entries with the same id are overwritten. Returns the vector id.
    /// </summary>
    public async Task<string> AddDocumentAsync(string vectorId, string content, Dictionary<string, object>? metadata = null)
    {
        try
        {
            var chunks = SplitTextIntoChunks(content, 500, 50);
            var embedding = await EmbedTextAsync(content);

            var vectorDoc = new VectorDocument
            {
                Id = vectorId,
                DocumentId = 0,
                Content = content,
                Chunks = chunks,
                Metadata = metadata ?? new Dictionary<string, object>(),
                CreatedAt = DateTime.Now,
                Embedding = embedding
            };

            _vectorStore[vectorId] = vectorDoc;
            _logger.LogInformation("添加文档向量成功，向量ID: {VectorId}", vectorId);
            return vectorId;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "添加文档向量失败，向量ID: {VectorId}", vectorId);
            throw;
        }
    }

    /// <summary>
    /// Searches the in-memory store by cosine similarity against the query embedding.
    /// Results below <paramref name="threshold"/> are dropped; the rest are returned
    /// best-first, truncated to <paramref name="limit"/>, with content previews capped at 200 chars.
    /// </summary>
    public async Task<List<VectorSearchResult>> SearchAsync(string query, string collectionName = "documents", int limit = 10, float threshold = 0.7f)
    {
        try
        {
            var queryEmbedding = await EmbedTextAsync(query);
            var results = new List<VectorSearchResult>();

            foreach (var kvp in _vectorStore)
            {
                var vectorDoc = kvp.Value;
                if (vectorDoc.Embedding != null)
                {
                    var similarity = CalculateCosineSimilarity(queryEmbedding, vectorDoc.Embedding);

                    if (similarity >= threshold)
                    {
                        results.Add(new VectorSearchResult
                        {
                            Id = vectorDoc.Id,
                            Score = similarity,
                            Content = vectorDoc.Content.Length > 200 ?
                                vectorDoc.Content.Substring(0, 200) + "..." :
                                vectorDoc.Content,
                            DocumentId = vectorDoc.DocumentId,
                            ChunkIndex = 0,
                            Metadata = vectorDoc.Metadata
                        });
                    }
                }
            }

            return results.OrderByDescending(r => r.Score).Take(limit).ToList();
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "向量搜索失败，查询: {Query}", query);
            throw;
        }
    }

    /// <summary>
    /// Cosine similarity of two vectors; returns 0 for mismatched lengths or zero magnitude.
    /// </summary>
    private static float CalculateCosineSimilarity(float[] vector1, float[] vector2)
    {
        if (vector1.Length != vector2.Length)
            return 0f;

        var dotProduct = 0f;
        var magnitude1 = 0f;
        var magnitude2 = 0f;

        for (int i = 0; i < vector1.Length; i++)
        {
            dotProduct += vector1[i] * vector2[i];
            magnitude1 += vector1[i] * vector1[i];
            magnitude2 += vector2[i] * vector2[i];
        }

        magnitude1 = (float)Math.Sqrt(magnitude1);
        magnitude2 = (float)Math.Sqrt(magnitude2);

        if (magnitude1 == 0f || magnitude2 == 0f)
            return 0f;

        return dotProduct / (magnitude1 * magnitude2);
    }

    /// <summary>
    /// Splits text into sentence-aligned chunks of roughly <paramref name="chunkSize"/> characters,
    /// carrying the last <paramref name="overlap"/> characters of each chunk into the next for context.
    /// Sentences are delimited by 。！？ and newlines; a 。 is re-appended to every sentence.
    /// </summary>
    public List<string> SplitTextIntoChunks(string text, int chunkSize = 500, int overlap = 50)
    {
        var chunks = new List<string>();
        var sentences = text.Split(new[] { '。', '！', '？', '\n' }, StringSplitOptions.RemoveEmptyEntries);

        var currentChunk = new StringBuilder();
        var currentLength = 0;

        foreach (var sentence in sentences)
        {
            if (currentLength + sentence.Length > chunkSize && currentChunk.Length > 0)
            {
                chunks.Add(currentChunk.ToString().Trim());

                // Seed the next chunk with the tail of this one (overlap window).
                var overlapText = GetLastCharacters(currentChunk.ToString(), overlap);
                currentChunk.Clear();
                currentChunk.Append(overlapText);
                currentLength = overlapText.Length;
            }

            currentChunk.Append(sentence + "。");
            currentLength += sentence.Length + 1;
        }

        if (currentChunk.Length > 0)
        {
            chunks.Add(currentChunk.ToString().Trim());
        }

        return chunks;
    }

    /// <summary>
    /// Probes the configured backend by embedding a tiny test string.
    /// Never throws; any failure reports false.
    /// </summary>
    public async Task<bool> CheckConnectionAsync()
    {
        try
        {
            var testText = "测试连接";
            float[]? embedding;
            if (string.Equals(_platform, "Ollama", StringComparison.OrdinalIgnoreCase))
            {
                embedding = await GetOllamaEmbeddingAsync(testText) ?? await GetOllamaEmbeddingsLegacyAsync(testText);
            }
            else
            {
                embedding = await GetLMStudioEmbeddingAsync(testText);
            }
            return embedding != null && embedding.Length > 0;
        }
        catch
        {
            return false;
        }
    }

    /// <summary>
    /// Returns the last <paramref name="count"/> characters of <paramref name="text"/>
    /// (or the whole string when it is shorter).
    /// </summary>
    private static string GetLastCharacters(string text, int count)
    {
        if (text.Length <= count) return text;
        return text.Substring(text.Length - count);
    }

    // Simplified no-op implementations of the remaining interface members.
    // Task.FromResult instead of async-without-await avoids CS1998 and a needless state machine.
    public Task<bool> CreateCollectionAsync(string collectionName, int vectorSize = 1024) => Task.FromResult(true);
    public Task<bool> DeleteCollectionAsync(string collectionName) => Task.FromResult(true);
    public Task<List<VectorRecord>> AddDocumentVectorsAsync(int documentId, List<string> textChunks) => Task.FromResult(new List<VectorRecord>());
    public Task<bool> UpdateDocumentVectorsAsync(int documentId, List<string> textChunks) => Task.FromResult(true);
    public Task<bool> DeleteDocumentVectorsAsync(int documentId) => Task.FromResult(true);
    public Task<List<string>> GetRelevantContextAsync(string query, int? projectId = null, int limit = 5) => Task.FromResult(new List<string>());
    public Task<VectorCollectionInfo?> GetCollectionInfoAsync(string collectionName) => Task.FromResult<VectorCollectionInfo?>(null);
    public Task<bool> RebuildIndexAsync(int projectId) => Task.FromResult(true);

    /// <summary>
    /// After a failed connection probe, queries the backend's model list
    /// (Ollama /api/tags or LM Studio /v1/models), picks the first embedding-looking
    /// model, and re-probes once. Never throws.
    /// </summary>
    private async Task TryAutoDetectAndRetryAsync()
    {
        try
        {
            if (string.Equals(_platform, "Ollama", StringComparison.OrdinalIgnoreCase))
            {
                // Scan /api/tags for models whose name looks like an embedding model.
                var tagsUrl = _baseUrl.TrimEnd('/') + "/api/tags";
                var resp = await _httpClient.GetAsync(tagsUrl);
                if (resp.IsSuccessStatusCode)
                {
                    var json = await resp.Content.ReadAsStringAsync();
                    using var doc = JsonDocument.Parse(json);
                    var candidates = new List<string>();
                    if (doc.RootElement.TryGetProperty("models", out var models) && models.ValueKind == JsonValueKind.Array)
                    {
                        foreach (var m in models.EnumerateArray())
                        {
                            var name = m.GetProperty("name").GetString() ?? string.Empty;
                            if (IsVectorName(name)) candidates.Add(name);
                        }
                    }
                    if (candidates.Count > 0)
                    {
                        _modelName = candidates.First();
                        _logger.LogInformation("自动选择可用Ollama向量模型: {Model}", _modelName);
                        if (await CheckConnectionAsync()) return;
                    }
                }
            }
            else
            {
                // Scan /v1/models for ids that look like embedding models.
                var modelsUrl = _baseUrl.TrimEnd('/') + "/v1/models";
                var resp = await _httpClient.GetAsync(modelsUrl);
                if (resp.IsSuccessStatusCode)
                {
                    var json = await resp.Content.ReadAsStringAsync();
                    using var doc = JsonDocument.Parse(json);
                    var candidates = new List<string>();
                    if (doc.RootElement.TryGetProperty("data", out var data) && data.ValueKind == JsonValueKind.Array)
                    {
                        foreach (var d in data.EnumerateArray())
                        {
                            var id = d.GetProperty("id").GetString() ?? string.Empty;
                            if (IsVectorName(id)) candidates.Add(id);
                        }
                    }
                    if (candidates.Count > 0)
                    {
                        _modelName = candidates.First();
                        _logger.LogInformation("自动选择可用LM Studio向量模型: {Model}", _modelName);
                        if (await CheckConnectionAsync()) return;
                    }
                }
            }

            _logger.LogWarning("未能自动修复向量服务连接，后续将使用备用向量化");
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "自动检测可用向量模型失败");
        }
    }

    /// <summary>
    /// Heuristic: does the model name/id look like an embedding model?
    /// Matches common markers (embed/embedding/vector) and well-known family names.
    /// </summary>
    private static bool IsVectorName(string name)
    {
        var n = name.ToLowerInvariant();
        return n.Contains("embedding") || n.Contains("embed") || n.Contains("vector") || n.Contains("bge") || n.Contains("nomic") || n.Contains("mxbai") || n.Contains("minilm");
    }
}

/// <summary>
/// Top-level payload of an OpenAI-compatible /v1/embeddings response (as returned by
/// LM Studio). Property names are lowercase on purpose: System.Text.Json matches
/// JSON fields case-sensitively by default, so they must mirror the wire format.
/// </summary>
public class EmbeddingResponse
{
    /// <summary>Name of the model that produced the embeddings.</summary>
    public string? model { get; set; }

    /// <summary>One entry per input; callers read the first entry's vector.</summary>
    public EmbeddingData[]? data { get; set; }

    /// <summary>Token accounting reported by the backend, if any.</summary>
    public Usage? usage { get; set; }
}

/// <summary>
/// A single embedding entry inside an OpenAI-compatible embeddings response.
/// Lowercase property names mirror the JSON wire format (case-sensitive deserialization).
/// </summary>
public class EmbeddingData
{
    /// <summary>Object type tag as sent by the API.</summary>
    public string? @object { get; set; }

    /// <summary>Zero-based position of this entry in the request batch.</summary>
    public int index { get; set; }

    /// <summary>The raw embedding vector.</summary>
    public float[]? embedding { get; set; }
}

/// <summary>
/// Token usage statistics attached to an embeddings response.
/// Lowercase/snake_case property names mirror the JSON wire format.
/// </summary>
public class Usage
{
    /// <summary>Tokens consumed by the input text.</summary>
    public int prompt_tokens { get; set; }

    /// <summary>Total tokens reported for the request.</summary>
    public int total_tokens { get; set; }
}


