using System.Text;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using RabbitMQ.Client;
using RabbitMQ.Client.Events;
using UniversalAdminSystem.Domian.Core.ValueObjects;
using UniversalAdminSystem.Domian.knowledge.Aggregates;
using UniversalAdminSystem.Domian.knowledge.IRepository;
using UniversalAdminSystem.Domian.knowledge.ValueObj;
using UniversalAdminSystem.Infrastructure.FileStorage.Parsers;
using UniversalAdminSystem.Infrastructure.RabbitMQ.Models.Messages;
using UniversalAdminSystem.Infrastructure.Services;
using UniversalAdminSystem.Application.Common.Interfaces;


namespace UniversalAdminSystem.Infrastructure.RabbitMQ.Consumers;

/// <summary>
/// Hosted background service that consumes file-processing jobs from RabbitMQ.
/// For each job it extracts the document text, chunks it (SpaCy + K2), computes
/// embeddings and bulk-stores the resulting <see cref="DocumentChunk"/> rows in
/// a single transaction. Runs several competing consumers, each on its own channel.
/// </summary>
public class FileProcessingJobConsumer : BackgroundService
{
    private readonly IConnection _conn;
    private readonly ILogger<FileProcessingJobConsumer> _logger;
    private readonly IServiceScopeFactory _scopeFactory;

    // Number of parallel consumer channels competing on the same queue.
    private readonly int _consumerCount = 5;

    // Topology/retry constants kept in one place instead of repeated string literals.
    private const string Exchange = "file-processing";
    private const string Queue = "file-processing-queue";
    private const string RoutingKey = "file-processing";
    private const int MaxRetryCount = 3;
    private const int MaxRetryDelaySeconds = 300;

    public FileProcessingJobConsumer(IConnection conn, ILogger<FileProcessingJobConsumer> logger, IServiceScopeFactory scopeFactory)
    {
        _conn = conn;
        _logger = logger;
        _scopeFactory = scopeFactory;
    }

    /// <summary>
    /// Starts <see cref="_consumerCount"/> consumers and waits until the host shuts down.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("正在连接到 RabbitMQ...");
        try
        {
            var consumerTasks = new List<Task>();
            for (int i = 0; i < _consumerCount; i++)
            {
                consumerTasks.Add(AssignConsumerAsync(stoppingToken, i));
            }
            await Task.WhenAll(consumerTasks);
        }
        catch (OperationCanceledException)
        {
            // Normal shutdown path — not an error, so do not log it as one.
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "文件处理任务消费失败");
        }
    }

    /// <summary>
    /// Declares the exchange/queue topology on a dedicated channel and consumes
    /// messages until <paramref name="stoppingToken"/> is cancelled.
    /// Failed messages are retried with exponential backoff (capped) by
    /// re-publishing them with updated retry metadata; poison messages are dropped.
    /// </summary>
    private async Task AssignConsumerAsync(CancellationToken stoppingToken, int consumerIndex)
    {
        using var ch = _conn.CreateModel();
        ch.ExchangeDeclare(Exchange, "topic", durable: true, autoDelete: false);
        ch.QueueDeclare(Queue, durable: true, exclusive: false, autoDelete: false, arguments: null);
        ch.QueueBind(Queue, Exchange, RoutingKey);
        // prefetch=1: one unacked message per consumer at a time.
        ch.BasicQos(0, 1, false);

        _logger.LogInformation("消费者{consumerIndex}正在启动并等待消息...", consumerIndex);

        // NOTE: AsyncEventingBasicConsumer requires DispatchConsumersAsync=true on the
        // ConnectionFactory — assumed to be configured where IConnection is registered.
        var consumer = new AsyncEventingBasicConsumer(ch);
        consumer.Received += async (sender, ea) =>
        {
            var message = Encoding.UTF8.GetString(ea.Body.ToArray());
            _logger.LogInformation("消费者 {consumerIndex} 收到文件处理任务: {message}", consumerIndex, message);

            try
            {
                var fpm = JsonSerializer.Deserialize<FileProcessingMessage>(message);

                if (fpm == null)
                {
                    _logger.LogError("消息格式错误，跳过处理: {message}", message);
                    ch.BasicNack(ea.DeliveryTag, false, false);
                    return;
                }

                if (fpm.NextAttemptAt.HasValue && fpm.NextAttemptAt > DateTime.UtcNow)
                {
                    // Recompute the wait once and guard against it going negative while
                    // we were checking — Task.Delay throws on negative TimeSpans.
                    var wait = fpm.NextAttemptAt.Value - DateTime.UtcNow;
                    if (wait > TimeSpan.Zero)
                    {
                        _logger.LogInformation("等待重试，当前时间：{UtcNow}，下一次尝试时间：{NextAttemptAt}", DateTime.UtcNow, fpm.NextAttemptAt.Value);
                        await Task.Delay(wait, stoppingToken);
                    }
                }

                await ProcessMessageAsync(fpm, stoppingToken);

                // Single ack lives here only — ProcessMessageAsync never acks,
                // otherwise the same delivery tag would be acked twice and the
                // broker would close the channel (PRECONDITION_FAILED).
                ch.BasicAck(ea.DeliveryTag, false);
                _logger.LogInformation("消费者 {consumerIndex} 文件处理任务处理成功: {message}", consumerIndex, message);
            }
            catch (Exception ex)
            {
                // Deserialization may throw again here; if it does, the message must
                // still be nacked or it would stay unacked forever.
                FileProcessingMessage? fpm = null;
                try
                {
                    fpm = JsonSerializer.Deserialize<FileProcessingMessage>(message);
                }
                catch (JsonException)
                {
                    // Unparseable payload — treated as poison below.
                }

                if (fpm == null)
                {
                    ch.BasicNack(ea.DeliveryTag, false, requeue: false);
                    return;
                }

                fpm.RetryCount++;
                if (fpm.RetryCount >= MaxRetryCount)
                {
                    _logger.LogError("消息 {JobId} 达到最大重试次数，丢弃消息: {message}", fpm.Id, message);
                    ch.BasicNack(ea.DeliveryTag, false, false);
                    return;
                }

                // Exponential backoff: 2^retry seconds, capped.
                var retryDelay = Math.Min(MaxRetryDelaySeconds, Math.Pow(2, fpm.RetryCount));
                fpm.NextAttemptAt = DateTime.UtcNow.AddSeconds(retryDelay);

                // Nack first, then re-publish with updated retry metadata (same channel).
                ch.BasicNack(ea.DeliveryTag, false, requeue: false);
                ch.BasicPublish(Exchange, RoutingKey, null, Encoding.UTF8.GetBytes(JsonSerializer.Serialize(fpm)));

                _logger.LogError(ex, "消费者 {consumerIndex} 文件处理任务失败: {message}, 重试次数: {RetryCount}, 下次重试延迟: {retryDelay} 秒", consumerIndex, message, fpm.RetryCount, retryDelay);
            }
        };

        var consumerTag = ch.BasicConsume(queue: Queue, autoAck: false, consumer: consumer);

        try
        {
            // Park this task until shutdown is requested.
            await Task.Delay(Timeout.Infinite, stoppingToken);
        }
        catch (OperationCanceledException)
        {
            // Expected on shutdown — fall through to cleanup.
        }
        finally
        {
            try { ch.BasicCancel(consumerTag); } catch { /* channel may already be closed */ }
            try { ch.Close(); } catch { /* best effort */ }
        }
    }

    /// <summary>
    /// Processes one job end-to-end: parse → dedupe → chunk → embed → persist.
    /// Never acks/nacks — acknowledgement is the caller's sole responsibility.
    /// Throws on failure so the caller's retry logic kicks in.
    /// </summary>
    private async Task ProcessMessageAsync(FileProcessingMessage fpm, CancellationToken stoppingToken)
    {
        try
        {
            if (!File.Exists(fpm.FilePath))
            {
                // Nothing to do; returning lets the caller ack once and the
                // broker drops the message.
                _logger.LogWarning("文件不存在，丢弃任务: {Path}", fpm.FilePath);
                return;
            }

            // 文本提取
            _logger.LogInformation("开始处理文件: {FilePath}", fpm.FilePath);
            var parser = new DocParserFactory().GetParser(fpm.ContentType);
            var text = await parser.ParseAsync(fpm.FilePath);
            _logger.LogInformation("文件处理完成: {FilePath}", fpm.FilePath);

            using var scope = _scopeFactory.CreateScope();
            var spacy = scope.ServiceProvider.GetRequiredService<SpaCyService>();
            var k2 = scope.ServiceProvider.GetRequiredService<K2ModelService>();
            var embedding = scope.ServiceProvider.GetRequiredService<EmbeddingService>();
            var docChunkRepo = scope.ServiceProvider.GetRequiredService<IDocumentChunkRepository>();
            var work = scope.ServiceProvider.GetRequiredService<IUnitOfWork>();

            // 去重：already-processed files are idempotently skipped.
            var exists = await docChunkRepo.ExistsByFileIdAsync(fpm.FileId);
            if (exists)
            {
                _logger.LogInformation("文件 {FileId} 已处理，跳过", fpm.FileId);
                return;
            }

            // 文本分片
            _logger.LogInformation("开始分片文件: {Text}", text);
            var preprocessResult = await spacy.AnalyzeTextAsync(text);
            var preprocessResultJson = JsonSerializer.Serialize(preprocessResult, new JsonSerializerOptions
            {
                PropertyNamingPolicy = JsonNamingPolicy.CamelCase
            });
            var chunks = await k2.SendChunkingRequestAsync(preprocessResultJson);
            _logger.LogInformation("分片完成，生成 {Count} 个 chunk", chunks.Count);

            // Embedding
            _logger.LogInformation("开始Embedding");
            var embeddings = new List<float[]>();
            embeddings.AddRange(await embedding.GetEmbeddingAsync(chunks));
            _logger.LogInformation("Embedding 完成: {Count} 个向量", embeddings.Count);

            // Fail loudly (and retryably) instead of with a bare IndexOutOfRangeException.
            if (embeddings.Count != chunks.Count)
            {
                throw new InvalidOperationException($"Embedding 数量 ({embeddings.Count}) 与 chunk 数量 ({chunks.Count}) 不一致");
            }

            try
            {
                await work.BeginTransactionAsync();

                // Vector Store: one transaction covers the whole bulk insert.
                _logger.LogInformation("开始Vector Store文件: {FilePath}", fpm.FilePath);
                var docChunks = new List<DocumentChunk>(chunks.Count);
                for (int i = 0; i < chunks.Count; i++)
                {
                    var docChunk = DocumentChunk.CreateDocumentChunk(
                        ChunkId.Create(Guid.NewGuid()),
                        fpm.FileId,
                        chunks[i],
                        TextEmbedding.Create(embeddings[i])
                    );

                    docChunks.Add(docChunk);
                }

                await docChunkRepo.BulkAddDocumentChunkAsync(docChunks);
                await work.CommitAsync();

                // 文件处理完成
                _logger.LogInformation("文件处理完成: {FilePath}", fpm.FilePath);
            }
            catch (Exception)
            {
                await work.RollbackAsync();
                throw;
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "文件处理任务失败: {FilePath}", fpm.FilePath);
            throw;
        }
    }
}