# Adds the StreamProcessor class and the related modifications to PersistenceService.
import asyncio
from datetime import datetime

from llm_直播弹幕 import ormOB
from llm_直播弹幕.DatabaseManager import DatabaseManager
from llm_直播弹幕.redis.RedisManager import RedisManager


class StreamProcessor:
    """Generic Redis-stream consumer that batch-persists messages to the DB.

    :meth:`start` spawns two cooperating coroutines:

    * a fetch loop that XREADs raw entries into an in-memory buffer, and
    * a flush loop that commits buffered entries in batches, triggered by
      either the buffer filling up or a timeout elapsing.
    """

    def __init__(self, service: 'PersistenceService', stream_key: str,
                 buffer_size=200, timeout=10.0, orm_class=None,
                 formatter=None, delete_after=True):
        """
        Generic stream processor.

        :param service: persistence service instance (supplies ``redis`` and ``db``)
        :param stream_key: Redis stream key to consume
        :param buffer_size: batch size that triggers a flush
        :param timeout: max seconds to wait before flushing a partial batch
        :param orm_class: ORM model class for bulk inserts (optional)
        :param formatter: callable mapping a raw message dict to ORM kwargs (optional)
        :param delete_after: XDEL messages from the stream after a successful flush
        """
        self.service = service
        self.stream_key = stream_key
        self.buffer = []        # pending (message_id, data_dict) pairs
        self.last_id = "0-0"    # XREAD cursor: last message id fetched
        self.buffer_size = buffer_size
        self.timeout = timeout
        self.orm_class = orm_class
        self.formatter = formatter
        self.delete_after = delete_after
        self._running = True
        self._tasks = []        # background tasks created by start()

    async def start(self):
        """Spawn the fetch/flush coroutines; return an awaitable gathering both.

        Tasks are created explicitly (rather than letting ``gather`` wrap bare
        coroutines) so strong references are kept and :meth:`stop` / callers
        can observe their exceptions.
        """
        self._tasks = [
            asyncio.create_task(self._fetch_loop()),
            asyncio.create_task(self._flush_loop()),
        ]
        return asyncio.gather(*self._tasks)

    async def stop(self):
        """Request a graceful stop; both loops exit at their next iteration."""
        self._running = False

    async def _fetch_loop(self):
        """Continuously pull messages until stop() is called."""
        while self._running:
            try:
                await self._fetch_messages()
            except Exception as e:
                # A transient Redis error must not silently kill the consumer;
                # log it and back off briefly before retrying.
                print(f"[{self.stream_key}] fetch error: {e}")
                await asyncio.sleep(1)

    async def _flush_loop(self):
        """Continuously commit batches until stop() is called."""
        while self._running:
            await self._auto_flush()

    async def _fetch_messages(self):
        """XREAD up to ``2 * buffer_size`` entries (blocks at most 2 s)."""
        messages = await self.service.redis.client.xread(
            streams={self.stream_key: self.last_id},
            count=self.buffer_size * 2,
            block=2000
        )
        if messages:
            stream_messages = messages[0][1]
            self.buffer.extend(
                (msg_id, dict(data)) for msg_id, data in stream_messages
            )
            # Advance the cursor past everything just read.
            self.last_id = stream_messages[-1][0]

    async def _auto_flush(self):
        """Flush when the buffer fills OR the timeout elapses, whichever first."""
        # asyncio.wait() requires Tasks: passing bare coroutines has been
        # deprecated since 3.8 and raises TypeError on Python 3.11+.
        triggers = [
            asyncio.create_task(self._wait_buffer()),
            asyncio.create_task(self._wait_timeout()),
        ]
        done, pending = await asyncio.wait(
            triggers, return_when=asyncio.FIRST_COMPLETED
        )
        for task in pending:
            task.cancel()

        await self._safe_batch_insert()

    async def _wait_buffer(self):
        """Return once the buffer has reached ``buffer_size`` (or we stop)."""
        while len(self.buffer) < self.buffer_size and self._running:
            await asyncio.sleep(0.1)

    async def _wait_timeout(self):
        """Return after the flush timeout elapses."""
        await asyncio.sleep(self.timeout)

    async def _safe_batch_insert(self):
        """Persist the current batch, ack (XDEL) on success, drop it from the buffer.

        The batch is snapshotted up front: the fetch loop may append to
        ``self.buffer`` while we await the DB/Redis calls below, and those
        late arrivals must be neither deleted from the stream nor discarded —
        they stay buffered for the next flush.
        """
        if not self.buffer:
            return

        # Snapshot: everything after this point operates on a fixed batch.
        batch = list(self.buffer)
        success_ids = []
        try:
            # Format conversion.
            if self.formatter:
                objects = [self.formatter(data) for _, data in batch]
            else:
                # No formatter: the raw message dicts are the insert payload.
                objects = [data for _, data in batch]

            # Database insert.
            if self.orm_class:
                async with self.service.db.get_session() as session:
                    session.add_all([self.orm_class(**data) for data in objects])
                    await session.commit()

            success_ids = [msg_id for msg_id, _ in batch]
        except Exception as e:
            print(f"[{self.stream_key}] 批处理失败: {str(e)}")
        finally:
            # Message acknowledgement.
            if success_ids and self.delete_after:
                await self.service.redis.client.xdel(self.stream_key, *success_ids)

            # Drop exactly the snapshotted batch — on failure too (drop-on-error
            # policy, as before), but never the late arrivals behind it.
            # The XREAD cursor is NOT rewound here: the fetch loop already
            # advanced it past every buffered entry, and rewinding would
            # re-read (and duplicate) messages still sitting in the buffer.
            del self.buffer[:len(batch)]


class PersistenceService:
    """Coordinates Redis-stream consumers that persist live-room data to the DB."""

    def __init__(self, redis: RedisManager, db: DatabaseManager):
        self.redis = redis
        self.db = db
        self.processors = {}

        # Register stream configurations (extend here to persist new streams).
        self._init_stream_processors()

    def _init_stream_processors(self):
        """Build one StreamProcessor per configured Redis stream (config-driven)."""
        stream_configs = (
            dict(
                stream_key="STREAM:DANMAKU",
                buffer_size=200,
                timeout=10,
                orm_class=ormOB.Danmaku,
                formatter=self._format_danmaku,
                delete_after=True,
            ),
            dict(
                stream_key="STREAM:ONLINE_RANK",
                buffer_size=100,
                timeout=15,  # online counts change slowly, so the timeout is longer
                orm_class=ormOB.OnlineRankCount,
                formatter=self._format_online_rank,
                delete_after=True,
            ),
        )

        self.processors = {
            cfg["stream_key"]: StreamProcessor(service=self, **cfg)
            for cfg in stream_configs
        }

    async def start_consuming(self):
        """Start the background fetch/flush tasks of every registered processor."""
        # Each start() call spawns that processor's two background tasks.
        await asyncio.gather(
            *(processor.start() for processor in self.processors.values())
        )

    # Formatting helpers (kept from the original implementation).
    def _format_danmaku(self, data: dict) -> dict:
        """Map a raw danmaku stream entry onto Danmaku ORM column kwargs."""
        record = {
            'danmu_user': data['danmu_user'],
            'content': data['content'],
            'create_time': datetime.fromtimestamp(int(data['create_time']) / 1000),
            'danmu_time': datetime.fromtimestamp(int(data['danmu_time'])),
            'user_mid': data['uid'],
        }
        # Optional fields fall back to sentinel defaults when absent.
        optional_defaults = {
            'medal_level': -1,
            'medal_name': '',
            'medal_from_user': '',
            'medal_from_uid': 0,
            'honor_level': -1,
            'guard_level': -1,
            'room_real_id': 0,
        }
        for field, default in optional_defaults.items():
            record[field] = data.get(field, default)
        return record

    def _format_online_rank(self, data: dict) -> dict:
        """Map a raw online-rank stream entry onto OnlineRankCount ORM kwargs."""
        created = datetime.fromtimestamp(int(data['create_time']) / 1000)
        return {
            'online_count': data['online_count'],
            'room_real_id': data['room_real_id'],
            'create_time': created,
        }

    async def stop_consuming(self):
        """Ask every registered processor to stop, one at a time."""
        for processor in self.processors.values():
            await processor.stop()