"""时序数据缓冲服务

处理带时间戳数据的有序存储
解决网络传输导致的数据包乱序问题
"""

import asyncio
import threading
import time
from collections import defaultdict, deque
from datetime import datetime, timedelta
from typing import Dict, List, Any, Optional, Callable

from loguru import logger


class TimeOrderedBuffer:
    """Time-ordered buffer for timestamped data.

    Compensates for network-induced packet reordering by:
    1. Accepting data points that carry a ``timestamp`` field (epoch seconds).
    2. Buffering them per session id.
    3. Periodically delivering each session's data, sorted by timestamp,
       to a user-supplied flush callback.
    4. Filtering duplicate timestamps and dropping overly delayed data.

    Thread-safe for concurrent ``add_data`` callers; flushing happens on a
    daemon ``threading.Timer`` thread.
    """

    def __init__(self,
                 flush_interval: int = 30,
                 max_delay_seconds: int = 60,
                 max_buffer_size: int = 1000):
        """Initialize the buffer.

        Args:
            flush_interval: Period (seconds) between automatic flushes.
            max_delay_seconds: Maximum tolerated data delay (seconds);
                older data points are dropped on arrival.
            max_buffer_size: Per-session buffer size beyond which an
                immediate flush is forced.
        """
        self.flush_interval = flush_interval
        self.max_delay_seconds = max_delay_seconds
        self.max_buffer_size = max_buffer_size

        # Per-session data buffers, keyed by session_id.
        self._buffers: Dict[str, deque] = defaultdict(deque)
        # Guards _buffers, _processed_timestamps and _stats. Non-reentrant,
        # so nothing below may call a lock-taking method while holding it.
        self._buffer_lock = threading.Lock()

        # Timestamps already accepted per session (used for de-duplication).
        self._processed_timestamps: Dict[str, set] = defaultdict(set)

        # Periodic flush timer; armed once a callback is registered.
        self._timer: Optional[threading.Timer] = None
        self._flush_callback: Optional[Callable] = None
        # Set by cleanup() so the final flush cannot re-arm the timer.
        self._stopped = False

        # Counters exposed via get_stats().
        self._stats = {
            'total_received': 0,
            'total_processed': 0,
            'duplicates_filtered': 0,
            'delayed_data_count': 0
        }

        logger.info(f"时序缓冲器初始化完成: flush_interval={flush_interval}s, "
                   f"max_delay={max_delay_seconds}s, max_buffer_size={max_buffer_size}")

    def set_flush_callback(self, callback: Callable[[str, List[Dict]], None]):
        """Register the flush callback and start the periodic timer.

        Args:
            callback: Called as ``callback(session_id, sorted_data_list)``.
                May be a plain function or an ``async`` coroutine function.
        """
        self._flush_callback = callback
        self._start_timer()
        logger.info("时序缓冲器回调函数已设置，定时器已启动")

    def add_data(self, session_id: str, data_point: Dict[str, Any]) -> bool:
        """Add a data point to the session's buffer.

        Args:
            session_id: Session identifier.
            data_point: Data point; must contain a ``timestamp`` field
                (epoch seconds, comparable to ``time.time()``).

        Returns:
            bool: True if accepted; False for missing-timestamp, overly
            delayed, or duplicate data.
        """
        if 'timestamp' not in data_point:
            logger.warning(f"数据点缺少timestamp字段: {data_point}")
            return False

        timestamp = data_point['timestamp']
        current_time = time.time()

        # Drop data that arrives later than the tolerated delay window.
        if current_time - timestamp > self.max_delay_seconds:
            logger.warning(f"数据延迟过大，丢弃: session_id={session_id}, "
                          f"delay={current_time - timestamp:.2f}s")
            with self._buffer_lock:  # keep stats mutations lock-protected
                self._stats['delayed_data_count'] += 1
            return False

        needs_flush = False
        with self._buffer_lock:
            # De-duplicate by timestamp within the session.
            if timestamp in self._processed_timestamps[session_id]:
                logger.debug(f"重复数据，跳过: session_id={session_id}, timestamp={timestamp}")
                self._stats['duplicates_filtered'] += 1
                return False

            self._buffers[session_id].append(data_point)
            self._processed_timestamps[session_id].add(timestamp)
            self._stats['total_received'] += 1

            # Only note the overflow here; the actual flush happens below.
            if len(self._buffers[session_id]) > self.max_buffer_size:
                logger.warning(f"缓冲区超过最大大小，强制刷新: session_id={session_id}")
                needs_flush = True

        # Flush OUTSIDE the lock: _flush_session_data re-acquires
        # _buffer_lock, and threading.Lock is not reentrant — flushing
        # while holding the lock would deadlock.
        if needs_flush:
            self._flush_session_data(session_id)

        logger.debug(f"数据点已添加到缓冲区: session_id={session_id}, timestamp={timestamp}")
        return True

    def _start_timer(self):
        """(Re)arm the periodic flush timer, unless the buffer is stopped."""
        if self._stopped:
            return
        if self._timer:
            self._timer.cancel()
        self._timer = threading.Timer(self.flush_interval, self._flush_all_data)
        self._timer.daemon = True  # do not keep the process alive
        self._timer.start()

    def _flush_all_data(self):
        """Flush every session's buffered data, then re-arm the timer."""
        try:
            # Snapshot the session ids under the lock; flush each one
            # without holding the lock across callback invocations.
            with self._buffer_lock:
                session_ids = list(self._buffers.keys())

            for session_id in session_ids:
                self._flush_session_data(session_id)

            logger.info(f"定时刷新完成，处理了 {len(session_ids)} 个会话的数据")

        except Exception as e:
            logger.error(f"定时刷新失败: {str(e)}")
        finally:
            # Re-arm for the next cycle (no-op after cleanup()).
            self._start_timer()

    def _flush_session_data(self, session_id: str):
        """Flush one session: sort its buffered data and invoke the callback.

        Best-effort: exceptions are logged, not propagated (the buffered
        data for this cycle is lost if the callback raises).

        Args:
            session_id: Session identifier.
        """
        try:
            with self._buffer_lock:
                if session_id not in self._buffers or not self._buffers[session_id]:
                    return

                # Take ownership of the buffered points and reset the buffer.
                data_list = list(self._buffers[session_id])
                self._buffers[session_id].clear()

                # Expire de-duplication records older than one hour so the
                # per-session set cannot grow without bound.
                current_time = time.time()
                cutoff_time = current_time - 3600
                self._processed_timestamps[session_id] = {
                    ts for ts in self._processed_timestamps[session_id]
                    if ts > cutoff_time
                }

            # Restore time order (the whole point of this buffer).
            sorted_data = sorted(data_list, key=lambda x: x['timestamp'])

            if self._flush_callback and sorted_data:
                if asyncio.iscoroutinefunction(self._flush_callback):
                    try:
                        loop = asyncio.get_running_loop()
                    except RuntimeError:
                        # No running loop in this thread (the usual case on
                        # the timer thread): drive the coroutine ourselves.
                        asyncio.run(self._flush_callback(session_id, sorted_data))
                    else:
                        # Already inside a running loop: schedule as a task.
                        loop.create_task(self._flush_callback(session_id, sorted_data))
                else:
                    # Plain function: call synchronously.
                    self._flush_callback(session_id, sorted_data)
                with self._buffer_lock:
                    self._stats['total_processed'] += len(sorted_data)
                logger.info(f"会话数据已刷新: session_id={session_id}, count={len(sorted_data)}")

        except Exception as e:
            logger.error(f"刷新会话数据失败: session_id={session_id}, error={str(e)}")

    def force_flush(self, session_id: Optional[str] = None):
        """Flush buffered data immediately.

        Args:
            session_id: Session to flush; None flushes every session.
                (An empty string is treated as a real session id.)
        """
        if session_id is not None:
            self._flush_session_data(session_id)
        else:
            self._flush_all_data()

    def get_stats(self) -> Dict[str, Any]:
        """Return a consistent snapshot of buffer statistics.

        Returns:
            dict: Counters plus per-session buffer and timestamp-record sizes.
        """
        with self._buffer_lock:
            # Snapshot everything under the lock for a consistent view.
            stats_snapshot = dict(self._stats)
            buffer_counts = {sid: len(buf) for sid, buf in self._buffers.items()}
            timestamp_records = {sid: len(ts_set)
                                 for sid, ts_set in self._processed_timestamps.items()}

        return {
            **stats_snapshot,
            'buffer_counts': buffer_counts,
            'total_sessions': len(buffer_counts),
            'timestamp_records': timestamp_records
        }

    def cleanup(self):
        """Stop the timer and flush all remaining data."""
        # Mark stopped FIRST so the final flush cannot re-arm the timer
        # (the original cancel-then-flush order leaked a live timer).
        self._stopped = True
        if self._timer:
            self._timer.cancel()

        # Deliver whatever is still buffered.
        self._flush_all_data()

        logger.info("时序缓冲器已清理")