import asyncio
import redis.asyncio as aioredis
from sqlmodel import  Session
import json

from database.mysql import engine
from core.Config import settings
import models.admin.system.Admin as AdminModel
from core.Log import log_admin_event

REDIS_QUEUE = "LOG_REDIS_QUEUE"  # Redis list key used as the log queue
BATCH_SIZE = 10  # number of log entries inserted per batch
SLEEP_TIME = 3  # seconds the consumer backs off after an error
MAX_WAIT_TIME = 5  # max seconds BLPOP blocks; a timeout flushes any partial batch
LOG_REDIS = None  # shared async Redis client, set by create_log_queue()
LOG_REDIS_POOL= None  # connection pool backing LOG_REDIS, set by create_log_queue()
async def create_log_queue():
    """Initialize the shared Redis client/pool and start the log consumer task.

    Populates the module globals ``LOG_REDIS`` / ``LOG_REDIS_POOL`` and
    launches :func:`log_consumer` as a background task.
    """
    global LOG_REDIS, LOG_REDIS_POOL, LOG_CONSUMER_TASK
    LOG_REDIS_POOL = aioredis.ConnectionPool.from_url(
        f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}",
        db=settings.REDIS_QUEUE_DB,
        encoding='utf-8',
        decode_responses=True,
    )
    LOG_REDIS = aioredis.Redis(connection_pool=LOG_REDIS_POOL)
    # Keep a strong reference to the consumer task: the event loop only holds
    # a weak reference to tasks, so a task whose handle is discarded can be
    # garbage-collected before it finishes (per asyncio.create_task docs).
    LOG_CONSUMER_TASK = asyncio.create_task(log_consumer())

async def close_log_queue():
    """Close the shared Redis client and disconnect its connection pool.

    Resets ``LOG_REDIS`` / ``LOG_REDIS_POOL`` to ``None`` afterwards so
    later callers cannot accidentally use a closed client or pool.
    """
    global LOG_REDIS, LOG_REDIS_POOL
    if LOG_REDIS:
        await LOG_REDIS.close()
        LOG_REDIS = None
    if LOG_REDIS_POOL:
        await LOG_REDIS_POOL.disconnect()
        LOG_REDIS_POOL = None

async def log_consumer():
    """Continuously drain log entries from the Redis queue in batches.

    Each iteration blocks up to ``MAX_WAIT_TIME`` seconds on BLPOP. A batch
    is flushed to the database either when it reaches ``BATCH_SIZE`` entries
    or when a poll times out while entries are still pending. On any error
    the current batch is retained and the loop sleeps ``SLEEP_TIME`` seconds
    before retrying.
    """
    pending = []
    while True:
        try:
            # BLPOP returns (key, value) or None on timeout.
            item = await LOG_REDIS.blpop(REDIS_QUEUE, timeout=MAX_WAIT_TIME)
            timed_out = item is None
            if not timed_out:
                pending.append(json.loads(item[1]))

            batch_full = len(pending) >= BATCH_SIZE
            if batch_full or (timed_out and pending):
                # Flush the accumulated batch and start a fresh one.
                await process_log_batch(pending)
                pending = []

        except Exception as e:
            log_admin_event(f"Error in log_consumer: {str(e)}")
            # Back off briefly so a persistent failure doesn't spin the loop.
            await asyncio.sleep(SLEEP_TIME)
        
async def log_producer(log_data):
    """Push one log entry onto the tail of the Redis queue.

    Args:
        log_data: the log payload. The consumer decodes each entry with
            ``json.loads``, so this is expected to be a JSON-encoded
            string — TODO confirm callers serialize before calling.
    """
    await LOG_REDIS.rpush(REDIS_QUEUE, log_data)

async def process_log_batch(log_batch):
    """Bulk-insert a batch of log mappings into the Admin_Record table.

    Args:
        log_batch: list of dicts whose keys match Admin_Record columns.

    Errors are logged and swallowed (the failed batch is dropped) so the
    consumer loop keeps running — deliberate best-effort logging.
    """
    # Nothing to do for an empty batch; avoid opening a session and
    # emitting a misleading "Inserting 0 logs" entry.
    if not log_batch:
        return
    # NOTE(review): Session(engine) is a synchronous DB call inside an async
    # function, so the insert blocks the event loop — consider offloading
    # via run_in_executor if batches grow large.
    try:
        with Session(engine) as session:
            log_admin_event(f"Inserting {len(log_batch)} logs into the database")
            session.bulk_insert_mappings(AdminModel.Admin_Record, log_batch)
            session.commit()
    except Exception as e:
        log_admin_event(f"Error during bulk insert: {str(e)}")


