import aio_pika
from sqlmodel import Session
import json
from threading import Timer, Lock
from database.mysql import engine
from core.Config import settings
import models.admin.system.Admin as AdminModel
from core.Log import log_admin_event
from database.rabbitmq import create_rabbitmq_channel

# Global in-memory log buffer and the lock that guards it.
# flush_log_cache() runs on a Timer *thread*, so all access to log_cache
# must hold log_cache_lock.
log_cache = []
log_cache_lock = Lock()
BATCH_SIZE = 500  # flush to DB once this many log entries are buffered
FLUSH_INTERVAL = 10  # periodic flush interval, in seconds
flush_timer = None  # holds the currently scheduled threading.Timer instance
channelName = "user_add_log"  # RabbitMQ queue name / routing key for log messages

# Lazily initialized RabbitMQ connection state (see setup_rabbitmq).
rabbitmq_connection = None
rabbitmq_channel = None

async def setup_rabbitmq():
    """Establish the RabbitMQ connection/channel at application startup.

    Also attaches the log-queue consumer and starts the periodic
    cache-flush timer.
    """
    global rabbitmq_connection, rabbitmq_channel
    connection, channel = await create_rabbitmq_channel()
    rabbitmq_connection = connection
    rabbitmq_channel = channel
    await start_rabbitmq_consumer()
    await start_flush_timer()

async def close_rabbitmq():
    """Shut down cleanly at application exit.

    Cancels the periodic flush timer first (so it cannot reschedule or run
    during teardown), persists any logs still buffered in the cache so they
    are not lost, closes the RabbitMQ connection, and resets the module
    globals so a later publish_log() re-initializes instead of reusing a
    closed connection.
    """
    global rabbitmq_connection, rabbitmq_channel, flush_timer
    # Stop the timer before closing anything it might touch.
    if flush_timer:
        flush_timer.cancel()
        flush_timer = None
    # Flush whatever is still buffered; otherwise those logs are dropped.
    with log_cache_lock:
        pending = log_cache.copy()
        log_cache.clear()
    if pending:
        process_log_batch(pending)
    if rabbitmq_connection:
        await rabbitmq_connection.close()
    # Reset so publish_log()/get_rabbitmq_channel() lazily reconnect.
    rabbitmq_connection = None
    rabbitmq_channel = None

async def publish_log(log_data):
    """Serialize log_data to JSON and publish it to the log queue.

    Lazily initializes the RabbitMQ connection on first use.
    """
    global rabbitmq_channel
    if rabbitmq_channel is None:
        await setup_rabbitmq()

    payload = json.dumps(log_data).encode('utf-8')
    message = aio_pika.Message(body=payload)
    await rabbitmq_channel.default_exchange.publish(
        message,
        routing_key=channelName,
    )

async def get_rabbitmq_channel(queue_name):
    """Open a new channel on the shared connection and ensure queue_name exists.

    The connection is lazily established on first call; the queue is declared
    durable so it survives broker restarts.
    """
    global rabbitmq_connection
    if rabbitmq_connection is None:
        await setup_rabbitmq()
    new_channel = await rabbitmq_connection.channel()
    await new_channel.declare_queue(queue_name, durable=True)
    return new_channel

def process_log_batch(log_batch):
    """Bulk-insert a batch of log dicts into the Admin_Record table.

    Best-effort: on any failure the transaction is rolled back and the
    error is recorded via log_admin_event instead of propagating.
    """
    with Session(engine) as session:
        try:
            session.bulk_insert_mappings(AdminModel.Admin_Record, log_batch)
            session.commit()
        except Exception as exc:
            # Roll back the failed transaction and record the error.
            session.rollback()
            log_admin_event(f"Error during bulk insert: {str(exc)}")

async def add_logs(message: aio_pika.IncomingMessage):
    """Consume one log message into the cache; flush to DB at BATCH_SIZE.

    flush_log_cache() mutates log_cache from a threading.Timer *thread*
    under log_cache_lock, so this handler must hold the same lock —
    the original appended and cleared without it (data race).
    """
    entry = json.loads(message.body.decode())
    with log_cache_lock:
        log_cache.append(entry)
        if len(log_cache) < BATCH_SIZE:
            return
        # Snapshot and clear under the lock; insert outside it so the
        # (blocking) DB write doesn't serialize other cache users.
        batch = log_cache.copy()
        log_cache.clear()
    process_log_batch(batch)


async def start_rabbitmq_consumer():
    """Attach the add_logs consumer to the durable log queue."""
    log_channel = await get_rabbitmq_channel(channelName)
    log_queue = await log_channel.declare_queue(channelName, durable=True)

    # no_ack=True: messages are auto-acknowledged on delivery, so the
    # broker will not redeliver if processing fails.
    await log_queue.consume(add_logs, no_ack=True)
    log_admin_event("Started RabbitMQ Consumer for log queue.")

def flush_log_cache():
    """Timer callback: persist any buffered logs, then reschedule itself.

    Runs on a threading.Timer thread, so log_cache is only touched while
    holding log_cache_lock.
    """
    global log_cache, flush_timer
    with log_cache_lock:
        if log_cache:
            # Bulk-insert the buffered entries, then start a fresh list.
            process_log_batch(log_cache)
            log_cache = []
    # Reschedule the next flush. daemon=True so a timer that was not
    # cancelled (e.g. close_rabbitmq never ran) cannot keep the process
    # alive after the main thread exits — the original non-daemon Timer
    # could block interpreter shutdown.
    flush_timer = Timer(FLUSH_INTERVAL, flush_log_cache)
    flush_timer.daemon = True
    flush_timer.start()


async def start_flush_timer():
    """Schedule the first periodic cache flush.

    The timer is marked daemon so an uncancelled timer cannot keep the
    interpreter alive after the main thread exits (the original
    non-daemon Timer could block shutdown).
    """
    global flush_timer
    flush_timer = Timer(FLUSH_INTERVAL, flush_log_cache)
    flush_timer.daemon = True
    flush_timer.start()