import asyncio
import json
import traceback
import logging
from ..common.chat import RoleType
from ..server.mapper._UserChatHistoryMapper import _UserChatHistoryMapper
from ..server.mapper._ChatHistoryCacheMapper import _ChatHistoryCacheMapper
from ..pojo.entity.UserChatHistory import UserChatHistory


async def _sync_cache_to_db(
    db_mapper: _UserChatHistoryMapper, cache_mapper: _ChatHistoryCacheMapper
):
    """Periodic task: flush new chat messages from the Redis cache to the DB.

    For each ``persisted:<session_id>`` key, compares the cached history
    length against the already-persisted count, batch-inserts only the new
    messages, and then advances the persisted marker.

    Args:
        db_mapper: mapper used to batch-insert ``UserChatHistory`` rows.
        cache_mapper: mapper exposing the Redis-backed chat history cache.

    Errors are caught and logged so the periodic job keeps running.
    """
    logging.info("定时任务：将 Redis 缓存中的数据同步到数据库...")
    try:
        all_keys = await cache_mapper.keys("persisted:*")
        if not all_keys:
            return

        # Parse each session id once, then fetch all persisted counters
        # concurrently (the original re-split every key a second time).
        session_ids = [int(key.split(":")[1]) for key in all_keys]
        persisted_counts = await asyncio.gather(
            *(cache_mapper.get_persisted(sid) for sid in session_ids)
        )

        batch_data = []
        # (session_id, new_marker) pairs to commit only AFTER the DB insert
        # succeeds. The original advanced the marker before inserting, so a
        # failed insert silently lost those messages forever. Deferring means
        # a retry after failure may duplicate rows, but never drops them.
        persist_updates = []
        for session_id, persisted in zip(session_ids, persisted_counts):
            if persisted is None:
                # Key vanished between keys() and get_persisted(); skip.
                continue

            length = await cache_mapper.get_cache_length_by_session_id(session_id)
            pending = length - persisted
            if pending <= 0:
                continue

            history = await cache_mapper.get_cache_by_session_id(session_id, pending)
            batch_data.extend(
                UserChatHistory(
                    sessionId=session_id,
                    content=json.dumps(msg[1], ensure_ascii=False),
                    isAi=msg[0] == RoleType.AI,
                )
                for msg in history
            )
            persist_updates.append((session_id, length))

        if batch_data:
            await db_mapper.insert_batch(batch_data)
            # Rows are safely stored — now it is safe to move the markers.
            for session_id, new_marker in persist_updates:
                await cache_mapper.set_persisted(session_id, new_marker)
        logging.info("同步到数据库成功...")

    except Exception:
        # logging.exception attaches the stack trace automatically; no need
        # for manual traceback.format_exc() + f-string interpolation.
        logging.exception("同步缓存到数据库时发生错误")
