"""流式处理器 - 处理LangChain流式输出."""

import asyncio
import logging
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Any, AsyncGenerator, Dict, List, Optional

logger = logging.getLogger(__name__)


@dataclass
class StreamBuffer:
    """Accumulates streamed tokens and releases them in chunks.

    A flush is triggered either when the accumulated text reaches
    ``chunk_size`` characters, or when an incoming token contains one of
    ``punctuation_marks`` (while ``flush_on_punctuation`` is enabled).
    """

    content: str = ""        # text accumulated since the last flush
    token_count: int = 0     # total tokens ever added (not reset by flush)
    chunk_size: int = 10     # character threshold that forces a flush
    flush_on_punctuation: bool = True
    punctuation_marks: List[str] = field(default_factory=lambda: ['.', '!', '?', '。', '！', '？', '\n'])

    def add_token(self, token: str) -> Optional[str]:
        """Append *token* to the buffer.

        Args:
            token: Token text to accumulate.

        Returns:
            The buffered content if a flush is due, otherwise None.
        """
        self.content += token
        self.token_count += 1

        hit_punctuation = self.flush_on_punctuation and any(
            mark in token for mark in self.punctuation_marks
        )
        if hit_punctuation or len(self.content) >= self.chunk_size:
            return self.flush()
        return None

    def flush(self) -> str:
        """Empty the buffer.

        Returns:
            Whatever text the buffer held before flushing.
        """
        pending, self.content = self.content, ""
        return pending

    def has_content(self) -> bool:
        """Return True while unflushed text remains."""
        return self.content != ""


class StreamProcessor:
    """Stream processor for token streams.

    Converts an async token generator into a stream of structured event
    dictionaries (start / token / complete / error / cancelled), keeping
    per-task statistics while the stream is active, and applying simple
    flow control between emitted chunks.
    """

    def __init__(self):
        # Streams currently in flight, keyed by task_id; entries are removed
        # in process_token_stream's `finally` block.
        self.active_tasks: Dict[str, Dict[str, Any]] = {}
        self.buffer_size = 10            # StreamBuffer chunk size, in characters
        self.flow_control_delay = 0.01   # base inter-chunk delay, seconds

    @staticmethod
    def _utc_now() -> datetime:
        """Return the current time as a timezone-aware UTC datetime.

        datetime.utcnow() is deprecated (Python 3.12) and yields naive
        timestamps; aware datetimes keep isoformat() output unambiguous.
        """
        return datetime.now(timezone.utc)

    def _token_event(
        self,
        task_id: str,
        content: str,
        task_info: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Build a "token" event carrying *content* and the current counters."""
        return {
            "type": "token",
            "content": content,
            "task_id": task_id,
            "timestamp": self._utc_now().isoformat(),
            "metadata": {
                "token_count": task_info["token_count"],
                "char_count": task_info["char_count"]
            }
        }

    async def process_token_stream(
        self,
        task_id: str,
        token_generator: AsyncGenerator[str, None],
        metadata: Optional[Dict[str, Any]] = None
    ) -> AsyncGenerator[Dict[str, Any], None]:
        """Process a token stream into structured events.

        Args:
            task_id: Unique identifier for this stream.
            token_generator: Async generator yielding raw token strings.
            metadata: Optional caller metadata echoed in the start event.

        Yields:
            Event dictionaries: one "start", zero or more "token" chunks,
            then exactly one of "complete", "error", or "cancelled".
        """
        # Register per-task bookkeeping for the lifetime of this stream.
        task_info = {
            "task_id": task_id,
            "start_time": self._utc_now(),
            "token_count": 0,
            "char_count": 0,
            "metadata": metadata or {},
            "status": "processing"
        }
        self.active_tasks[task_id] = task_info

        # Buffer coalesces individual tokens into reasonably sized chunks.
        buffer = StreamBuffer(chunk_size=self.buffer_size)

        try:
            # Announce the stream start.
            yield {
                "type": "start",
                "task_id": task_id,
                "timestamp": self._utc_now().isoformat(),
                "metadata": metadata
            }

            async for token in token_generator:
                task_info["token_count"] += 1
                task_info["char_count"] += len(token)

                # Buffer returns accumulated text only when a flush is due.
                content = buffer.add_token(token)
                if content:
                    yield self._token_event(task_id, content, task_info)
                    # Flow control: pace chunk emission.
                    await asyncio.sleep(self.flow_control_delay)

            # Emit whatever remains buffered after the generator ends.
            if buffer.has_content():
                yield self._token_event(task_id, buffer.flush(), task_info)

            task_info["status"] = "completed"
            task_info["end_time"] = self._utc_now()

            # Terminal event with final statistics.
            yield {
                "type": "complete",
                "task_id": task_id,
                "timestamp": self._utc_now().isoformat(),
                "metadata": {
                    "token_count": task_info["token_count"],
                    "char_count": task_info["char_count"],
                    "duration": (task_info["end_time"] - task_info["start_time"]).total_seconds()
                }
            }

        except asyncio.CancelledError:
            # Cooperative cancellation: report it, then re-raise so the
            # surrounding asyncio task actually stops.
            task_info["status"] = "cancelled"
            yield {
                "type": "cancelled",
                "task_id": task_id,
                "timestamp": self._utc_now().isoformat()
            }
            raise

        except Exception as e:
            # Deliberately swallow the exception after reporting so that
            # consumers always receive a terminal "error" event.
            task_info["status"] = "error"
            task_info["error"] = str(e)
            logger.error(f"流处理错误: task_id={task_id}, error={e}")

            yield {
                "type": "error",
                "task_id": task_id,
                "error": str(e),
                "timestamp": self._utc_now().isoformat()
            }

        finally:
            # Always drop the bookkeeping entry, however the stream ended.
            self.active_tasks.pop(task_id, None)

    async def batch_process(
        self,
        task_id: str,
        messages: List[Dict[str, Any]],
        batch_size: int = 10
    ) -> AsyncGenerator[List[Dict[str, Any]], None]:
        """Yield *messages* in batches of at most *batch_size*.

        Args:
            task_id: Task identifier (kept for interface symmetry; unused here).
            messages: Messages to batch, in order.
            batch_size: Maximum number of messages per emitted batch.

        Yields:
            Lists of up to batch_size messages, preserving input order.
        """
        batch: List[Dict[str, Any]] = []

        for message in messages:
            batch.append(message)

            if len(batch) >= batch_size:
                yield batch
                batch = []
                # Flow control between full batches.
                await asyncio.sleep(self.flow_control_delay)

        # Emit the final, possibly partial batch.
        if batch:
            yield batch

    def apply_backpressure(self, task_id: str, queue_size: int) -> float:
        """Suggest an inter-chunk delay from the consumer's queue depth.

        Stepwise-linear policy: the fuller the queue, the larger the
        multiple of the base flow-control delay.

        Args:
            task_id: Task identifier (not used by the current policy).
            queue_size: Number of pending items in the consumer's queue.

        Returns:
            Suggested delay in seconds.
        """
        base_delay = self.flow_control_delay

        if queue_size < 100:
            return base_delay
        elif queue_size < 500:
            return base_delay * 2
        elif queue_size < 1000:
            return base_delay * 5
        else:
            return base_delay * 10

    def cancel_task(self, task_id: str) -> bool:
        """Mark an active task as cancelled.

        NOTE(review): this only flips the bookkeeping status; nothing in
        process_token_stream reads it, so the stream keeps running unless
        the consuming asyncio task itself is cancelled — confirm intent.

        Args:
            task_id: Task identifier.

        Returns:
            True if the task was active and marked, False otherwise.
        """
        if task_id in self.active_tasks:
            self.active_tasks[task_id]["status"] = "cancelled"
            logger.info(f"任务已取消: task_id={task_id}")
            return True
        return False

    def get_task_info(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Return the bookkeeping dict for *task_id*, or None if inactive.

        Args:
            task_id: Task identifier.

        Returns:
            Task info dictionary, or None when the task is not active.
        """
        return self.active_tasks.get(task_id)

    def get_active_tasks(self) -> List[str]:
        """Return the IDs of all currently active tasks.

        Returns:
            List of active task IDs.
        """
        return list(self.active_tasks.keys())

    def get_statistics(self) -> Dict[str, Any]:
        """Aggregate statistics over all currently active tasks.

        Returns:
            Dictionary with active-task count, token/char totals, and a
            per-task status/counter breakdown.
        """
        total_tokens = sum(t.get("token_count", 0) for t in self.active_tasks.values())
        total_chars = sum(t.get("char_count", 0) for t in self.active_tasks.values())

        return {
            "active_tasks": len(self.active_tasks),
            "total_tokens": total_tokens,
            "total_chars": total_chars,
            "tasks": {
                task_id: {
                    "status": info["status"],
                    "token_count": info.get("token_count", 0),
                    "char_count": info.get("char_count", 0)
                }
                for task_id, info in self.active_tasks.items()
            }
        }


# Module-level singleton shared by importers of this module.
stream_processor = StreamProcessor()