import asyncio
from typing import AsyncGenerator, Any, Dict, Optional
from .stream_config import StreamConfig
from data_handler.data_input import get_input_handler
from data_handler.data_output import get_output_handler
from data_handler.messageQueue import MessageQueue, Message, StreamPipeline, create_model_processor

from utils.logger import setup_logger
logger = setup_logger()

from models.model_manager import ModelManager
from data_handler.data_model_process import ModelHandler

class StreamOrchestrator:
    """Coordinates the input -> model -> output streaming pipeline for one session.

    A message-queue based pipeline (``StreamPipeline``) is attempted first; if it
    fails before any output has been produced, processing falls back to the
    plain input/model/output handler chain.
    """

    # Class-level cache of model processors, shared across all sessions, so a
    # model stays warm instead of only working on the first call.
    # NOTE(review): unbounded and not lock-guarded — fine for a single event
    # loop; revisit if orchestrators are created from multiple threads.
    _processor_cache = {}

    def __init__(self, config: "StreamConfig"):
        self.config = config
        self.input_handler = get_input_handler(config.input_type)
        self.model_handler = ModelHandler(config.model_name, config.input_type)
        self.output_handler = get_output_handler(config.output_type)
        self.message_queue = MessageQueue(maxsize=100)
        self.connection_active = False
        # Unique enough for log correlation while this object is alive;
        # NOTE(review): id() values can be reused after garbage collection.
        self.session_id = id(self)

    def _get_processor_key(self) -> str:
        """Build the cache key for a (model, input type, params) combination."""
        return f"{self.config.model_name}_{self.config.input_type.value}_{str(hash(str(self.config.params)))}"

    def _get_or_create_processor(self):
        """Return a cached model processor for this config, creating it on first use."""
        key = self._get_processor_key()

        if key not in self._processor_cache:
            logger.info(f"创建新的模型处理器: {self.config.model_name}")
            self._processor_cache[key] = create_model_processor(
                self.config.model_name, self.config.input_type.value, self.config.params
            )
        else:
            logger.info(f"复用现有的模型处理器: {self.config.model_name}")

        return self._processor_cache[key]

    async def process(self, input_data: Any) -> AsyncGenerator[bytes, None]:
        """Run *input_data* through the pipeline, yielding encoded byte chunks.

        The message-queue pipeline is tried first. If it raises before any
        chunk was yielded, processing falls back to the standard handler
        chain. If it raises *after* output has already been sent, the error
        is re-raised instead — falling back at that point would replay the
        stream from the beginning and duplicate data the consumer already
        received.
        """
        logger.info(f"Starting stream processing with message queue integration for session {self.session_id}")
        self.connection_active = True
        produced_output = False  # guards the fallback against duplicating sent data

        processor = self._get_or_create_processor()
        pipeline = StreamPipeline([processor])

        try:
            input_stream = self.input_handler.process(input_data)

            async def to_bytes(stream):
                # Normalize every pipeline item to bytes for the transport layer.
                async for item in stream:
                    if isinstance(item, str):
                        yield item.encode('utf-8')
                    elif isinstance(item, bytes):
                        yield item
                    else:
                        yield str(item).encode('utf-8')

                    # Stop promptly once the consumer marked the connection closed.
                    if not self.connection_active:
                        logger.info(f"Connection closed for session {self.session_id}, stopping stream processing")
                        break

            async for output in to_bytes(pipeline.process(input_stream)):
                produced_output = True
                yield output

        except Exception as e:
            if produced_output:
                # Mid-stream failure: the fallback would restart from scratch
                # and duplicate chunks already delivered — propagate instead.
                logger.error(f"Error in message queue processing for session {self.session_id}: {e}")
                raise
            logger.error(f"Error in message queue processing for session {self.session_id}: {e}, falling back to standard processing")
            # Best-effort fallback: standard input -> model -> output chain.
            input_stream = self.input_handler.process(input_data)
            model_stream = self.model_handler.process(input_stream)
            output_stream = self.output_handler.process(model_stream, self.config.output_type)
            async for output in output_stream:
                yield output
                if not self.connection_active:
                    logger.info(f"Connection closed for session {self.session_id}, stopping stream processing")
                    break
        finally:
            self.connection_active = False
            logger.info(f"Stream processing completed for session {self.session_id}")

    def close_connection(self):
        """Mark the connection closed so any in-flight stream loop stops."""
        self.connection_active = False
        logger.info("Connection marked for closing")