"""
工作流管理器模块
管理所有工作流实例，提供缓存、复用和性能监控功能
"""

import asyncio
import logging
import threading
import time
from collections import OrderedDict, defaultdict, deque
from typing import Any, Dict, List, Optional
from langgraph.graph.state import CompiledStateGraph

from src.utils.metrics import track_performance
from src.research_core.workflow_registry import workflow_registry

logger = logging.getLogger(__name__)


class WorkflowManager:
    """Workflow manager: creates, caches and reuses compiled workflow instances.

    Provides an LRU cache of compiled LangGraph workflows, per-workflow
    execution-time tracking, basic hit/miss/error metrics, and a concurrency
    limit for workflow execution.
    """

    def __init__(self, cache_size: int = 10):
        """Initialize the workflow manager.

        Args:
            cache_size: Maximum number of compiled workflows kept in the cache.
        """
        # Name resolved by get_current_workflow_instance().
        self.current_workflow = "optimized-multi-agent"
        # RLock because public methods call each other while holding the lock
        # (e.g. get_workflow_stats() -> get_execution_time_stats()).
        self.lock = threading.RLock()
        self.cache_size = cache_size
        # OrderedDict as an LRU cache: most recently used entries live at the end.
        self.workflow_cache: OrderedDict[str, CompiledStateGraph] = OrderedDict()
        # Per-workflow ring buffer of the most recent 100 execution times (seconds);
        # the deque's maxlen enforces the cap automatically.
        self.workflow_execution_times: Dict[str, deque] = defaultdict(lambda: deque(maxlen=100))
        self.metrics = {
            "cache_hits": 0,
            "cache_misses": 0,
            "errors": 0
        }
        # Caps the number of workflows executing concurrently.
        # NOTE(review): the module-level singleton creates this at import time;
        # on Python < 3.10 asyncio.Semaphore binds to the event loop current at
        # creation time -- confirm the target Python version.
        self.semaphore = asyncio.Semaphore(10)

    def preload_workflows(self, workflow_names: List[str]):
        """Warm the cache by loading the given workflows up front.

        Failures are logged and skipped so one bad workflow does not abort
        the rest of the preload.

        Args:
            workflow_names: Names of workflows to load into the cache.
        """
        for name in workflow_names:
            try:
                self.get_workflow(name)
                logger.info(f"预加载工作流: {name}")
            except Exception as e:
                logger.warning(f"预加载工作流 {name} 失败: {e}")

    def list_workflows(self) -> Dict[str, Dict]:
        """Return all workflows registered in the global registry."""
        return workflow_registry.list_workflows()

    def get_workflow(self, name: str, version: str = "latest") -> CompiledStateGraph:
        """Return a compiled workflow instance, using the LRU cache.

        Args:
            name: Workflow name.
            version: Workflow version. NOTE(review): the version is part of the
                cache key but is not forwarded to
                workflow_registry.get_workflow(), so all versions currently
                resolve to the same instance -- verify against the registry API.

        Returns:
            CompiledStateGraph: the compiled LangGraph workflow.

        Raises:
            ValueError: if the workflow cannot be obtained or compiled.
        """
        cache_key = f"{name}:{version}"
        with self.lock:
            try:
                # Cache hit: mark the entry as most recently used (LRU).
                if cache_key in self.workflow_cache:
                    self.metrics["cache_hits"] += 1
                    self.workflow_cache.move_to_end(cache_key)
                    logger.debug(f"工作流缓存命中: {cache_key}")
                    return self.workflow_cache[cache_key]

                # Cache miss: fetch (and compile if needed) a new instance.
                self.metrics["cache_misses"] += 1
                workflow = workflow_registry.get_workflow(name)

                # The registry may hand back an uncompiled graph; compile lazily.
                if not isinstance(workflow, CompiledStateGraph):
                    workflow = workflow.compile()

                self.workflow_cache[cache_key] = workflow

                # Evict the least recently used entry once over capacity.
                # OrderedDict.popitem(last=False) pops from the front, so no
                # fallback is required.
                if len(self.workflow_cache) > self.cache_size:
                    self.workflow_cache.popitem(last=False)

                logger.debug(f"创建新的工作流实例: {cache_key}")
                return workflow

            except Exception as e:
                self.metrics["errors"] += 1
                logger.error(f"获取工作流失败 {name}: {str(e)}")
                # Chain the cause so the original traceback is preserved.
                raise ValueError(f"获取工作流失败 {name}: {str(e)}") from e

    def get_current_workflow_instance(self) -> CompiledStateGraph:
        """Return the instance for the currently selected workflow.

        Returns:
            CompiledStateGraph: instance for ``self.current_workflow``.
        """
        return self.get_workflow(self.current_workflow)

    def clear_cache(self):
        """Drop every cached workflow instance."""
        with self.lock:
            self.workflow_cache.clear()
            logger.info("工作流缓存已清空")

    def get_metrics(self) -> Dict[str, Any]:
        """Return a snapshot of the hit/miss/error counters.

        Returns:
            Dict[str, Any]: shallow copy of the metrics dict.
        """
        with self.lock:
            return self.metrics.copy()

    def record_execution_time(self, workflow_name: str, execution_time: float):
        """Record one execution duration for a workflow.

        The underlying deque keeps only the most recent 100 samples.

        Args:
            workflow_name: Workflow name.
            execution_time: Duration in seconds.
        """
        with self.lock:
            self.workflow_execution_times[workflow_name].append(execution_time)

    def get_average_execution_time(self, workflow_name: str) -> float:
        """Return the mean recorded execution time, or 0.0 if none recorded.

        Args:
            workflow_name: Workflow name.

        Returns:
            float: average execution time in seconds.
        """
        with self.lock:
            # .get() avoids the defaultdict side effect of materialising an
            # empty deque for names that were never executed.
            times = self.workflow_execution_times.get(workflow_name)
            if times:
                return sum(times) / len(times)
            return 0.0

    def get_execution_time_stats(self) -> Dict[str, Dict[str, float]]:
        """Return execution-time statistics for every workflow with samples.

        Returns:
            Dict[str, Dict[str, float]]: per-workflow average/min/max/count.
        """
        with self.lock:
            stats = {}
            for workflow_name, times in self.workflow_execution_times.items():
                if times:
                    stats[workflow_name] = {
                        "average": sum(times) / len(times),
                        "min": min(times),
                        "max": max(times),
                        "count": len(times)
                    }
            return stats

    async def execute_with_limit(self, workflow_func, *args, **kwargs):
        """Run a workflow coroutine function under the concurrency semaphore.

        Args:
            workflow_func: Async callable executing the workflow.
            *args: Positional arguments forwarded to the callable.
            **kwargs: Keyword arguments forwarded to the callable.

        Returns:
            Whatever the callable returns.
        """
        async with self.semaphore:
            return await workflow_func(*args, **kwargs)

    async def execute_workflow(self, workflow_name: str, inputs: Dict[str, Any], 
                              timeout: int = 300, max_iterations: int = 50,
                              version: str = "latest") -> Dict[str, Any]:
        """Execute a workflow with a timeout and the concurrency limit.

        Args:
            workflow_name: Workflow name.
            inputs: Input payload handed to the workflow.
            timeout: Maximum execution time in seconds.
            max_iterations: LangGraph recursion limit.
            version: Workflow version.

        Returns:
            The workflow's result dict.

        Raises:
            asyncio.TimeoutError: if execution exceeds ``timeout`` seconds.
            ValueError: if the workflow cannot be obtained.
        """
        start_time = time.time()

        try:
            # Obtain (possibly cached) workflow instance.
            workflow = self.get_workflow(workflow_name, version)

            async def _execute():
                # NOTE(review): CompiledStateGraph.ainvoke does not document a
                # timeout kwarg, so passing timeout= to it does not reliably
                # enforce anything; enforce the limit here with
                # asyncio.wait_for instead.
                return await asyncio.wait_for(
                    workflow.ainvoke(
                        input=inputs,
                        config={
                            "recursion_limit": max_iterations,
                        },
                    ),
                    timeout=timeout,
                )

            result = await self.execute_with_limit(_execute)

            # Record the wall-clock duration of the successful run.
            self.record_execution_time(workflow_name, time.time() - start_time)

            return result

        except Exception as e:
            # Mutate shared metrics under the lock, consistent with the rest
            # of the class.
            with self.lock:
                self.metrics["errors"] += 1
            self.record_execution_time(workflow_name, time.time() - start_time)
            logger.error(f"工作流执行失败 {workflow_name}: {str(e)}")
            raise

    def get_workflow_stats(self) -> Dict[str, Any]:
        """Return combined execution, cache and error statistics.

        Returns:
            Dict[str, Any]: execution_stats / cache_stats / error_stats.
        """
        with self.lock:
            # RLock allows this nested acquisition.
            execution_stats = self.get_execution_time_stats()
            cache_stats = {
                "cache_size": len(self.workflow_cache),
                "cache_capacity": self.cache_size,
                "cache_hits": self.metrics["cache_hits"],
                "cache_misses": self.metrics["cache_misses"]
            }
            error_stats = {
                "error_count": self.metrics["errors"]
            }

            return {
                "execution_stats": execution_stats,
                "cache_stats": cache_stats,
                "error_stats": error_stats
            }

    def get_performance_stats(self) -> Dict[str, Any]:
        """Return per-workflow execution-time statistics.

        Returns:
            Dict[str, Any]: same shape as get_execution_time_stats().
        """
        with self.lock:
            return self.get_execution_time_stats()


# Module-level singleton workflow manager shared by the whole application.
workflow_manager = WorkflowManager()


def get_workflow_manager() -> WorkflowManager:
    """Return the shared module-level :class:`WorkflowManager` singleton."""
    return workflow_manager


# Preload commonly used workflows
def initialize_workflow_manager():
    """Initialize the workflow manager by preloading common workflows.

    Only workflows actually present in the registry are preloaded, so a
    missing optional workflow does not produce preload warnings at startup.
    """
    # workflow_registry is already imported at module level (top of file);
    # the previous function-local re-import was redundant and has been removed.
    candidates = ("multi-agent", "optimized-multi-agent")
    preload_list = [name for name in candidates
                    if workflow_registry.workflow_exists(name)]
    workflow_manager.preload_workflows(preload_list)


# Public API of this module.
__all__ = [
    "WorkflowManager", 
    "workflow_manager", 
    "get_workflow_manager",
    "initialize_workflow_manager"
]
