"""Model routing and load balancing."""

import asyncio
import logging
import random
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, Any, Optional, List

from .base import BaseModelAdapter, ModelConfig, ModelType
from .openai_adapter import OpenAIAdapter
from .claude_adapter import ClaudeAdapter
from .anthropic_adapter import AnthropicAdapter  # Anthropic native-API adapter
from .qwen_adapter import QwenAdapter
from .wenxin_adapter import WenxinAdapter
from .spark_adapter import SparkAdapter
from .kimi_adapter import KimiAdapter
from .deepseek_adapter import DeepSeekAdapter
from .doubao_adapter import DoubaoAdapter
from .hunyuan_adapter import HunyuanAdapter
from .config_manager import ConfigManager


class ModelRouter:
    """Routes requests to appropriate model adapters with load balancing."""
    
    def __init__(self, config_manager: Optional[ConfigManager] = None):
        """Initialize model router.
        
        Args:
            config_manager: Configuration manager instance; a default
                ConfigManager is created when omitted.
        """
        self.config_manager = config_manager or ConfigManager()
        # Model name -> initialized adapter (only models whose config has an API key).
        self._adapters: Dict[str, BaseModelAdapter] = {}
        # Model name -> {"requests": int, "tokens": int}, updated on every routed call.
        self._usage_stats: Dict[str, Dict[str, int]] = defaultdict(lambda: {"requests": 0, "tokens": 0})
        # Model name -> bool; set False when a model errors, refreshed by health_check_all().
        self._health_status: Dict[str, bool] = {}
        # Primary model -> ordered fallback list used by route_request().
        self._fallback_map: Dict[str, List[str]] = self._init_fallback_map()
        self._initialize_adapters()
    
    def _init_fallback_map(self) -> Dict[str, List[str]]:
        """Build the static fallback table consulted when a model fails.

        Returns:
            Dictionary of primary to fallback models, each fallback list
            ordered by preference.
        """
        chains = (
            ("gpt-4", ["gpt-3.5-turbo", "claude-3", "qwen-max"]),
            ("claude-3", ["gpt-4", "qwen-max", "gpt-3.5-turbo"]),
            ("claude-sonnet-4-20250514", ["claude-3-5-sonnet-20241022", "claude-3", "gpt-4"]),
            ("claude-3-5-sonnet-20241022", ["claude-3-5-haiku-20241022", "gpt-3.5-turbo"]),
            ("claude-3-5-haiku-20241022", ["claude-3-5-sonnet-20241022", "gpt-3.5-turbo"]),
            ("qwen-max", ["qwen-plus", "qwen-turbo", "gpt-3.5-turbo"]),
            ("ernie-bot-4", ["ernie-bot", "qwen-max", "gpt-3.5-turbo"]),
        )
        return {primary: alternates for primary, alternates in chains}
    
    def _initialize_adapters(self) -> None:
        """Instantiate adapters for every model that has usable credentials.

        Walks a static model-name -> adapter-class table, asks the config
        manager for each model's config, and registers an adapter only when
        both a config and an API key are present.  A model whose adapter
        constructor raises is logged and marked unhealthy instead of
        aborting initialization of the remaining models.
        """
        adapter_classes = {
            "gpt-4": OpenAIAdapter,
            "gpt-3.5-turbo": OpenAIAdapter,
            "claude-3": ClaudeAdapter,
            "claude-3-opus": ClaudeAdapter,
            "claude-3-sonnet": ClaudeAdapter,
            # Anthropic native-API models (taken from the API's model list).
            "claude-3-5-sonnet-20241022": AnthropicAdapter,
            "claude-3-5-haiku-20241022": AnthropicAdapter,
            "claude-sonnet-4-20250514": AnthropicAdapter,
            "qwen-max": QwenAdapter,
            "qwen-plus": QwenAdapter,
            "qwen-turbo": QwenAdapter,
            "ernie-bot-4": WenxinAdapter,
            "ernie-bot": WenxinAdapter,
            "spark-3.5": SparkAdapter,
            "spark-3.0": SparkAdapter,
            "moonshot-v1-8k": KimiAdapter,
            "moonshot-v1-32k": KimiAdapter,
            "moonshot-v1-128k": KimiAdapter,
            "deepseek-coder": DeepSeekAdapter,
            "deepseek-chat": DeepSeekAdapter,
            "skylark-chat": DoubaoAdapter,
            "skylark-lite": DoubaoAdapter,
            "skylark-pro": DoubaoAdapter,
            "hunyuan-standard": HunyuanAdapter,
            "hunyuan-lite": HunyuanAdapter,
            "hunyuan-pro": HunyuanAdapter,
        }

        for model_name, adapter_class in adapter_classes.items():
            config = self.config_manager.get_config(model_name)
            # Skip models that are not provisioned (no config or no API key).
            if not (config and config.api_key):
                continue
            try:
                self._adapters[model_name] = adapter_class(config)
                self._health_status[model_name] = True
            except Exception as e:
                # Log (not print) so library consumers control the output channel.
                logging.getLogger(__name__).warning(
                    "Failed to initialize %s: %s", model_name, e
                )
                self._health_status[model_name] = False
    
    def select_model(
        self,
        task_type: str = "general",
        language: str = "zh",
        max_tokens: int = 2000,
        optimize_for: str = "quality",  # quality, cost, speed
        preferred_models: Optional[List[str]] = None
    ) -> Optional[str]:
        """Select best model for the task.

        Args:
            task_type: Type of task (general, creative, technical, analysis)
            language: Target language
            max_tokens: Required max tokens
            optimize_for: Optimization goal; "quality" and "cost" are
                explicit, any other value (e.g. "speed") load-balances by
                the lowest request count
            preferred_models: Optional whitelist of acceptable model names

        Returns:
            Selected model name, or None when no healthy model exists
        """
        # Static capability table; models missing here can only be picked
        # via the "any healthy model" relaxation below.
        capabilities = {
            "gpt-4": {
                "languages": ["en", "zh", "ja", "ko"],
                "specialties": ["creative", "technical", "analysis"],
                "max_tokens": 8192,
                "cost": 0.03
            },
            "gpt-3.5-turbo": {
                "languages": ["en", "zh", "ja", "ko"],
                "specialties": ["general", "creative"],
                "max_tokens": 4096,
                "cost": 0.002
            },
            "claude-3": {
                "languages": ["en", "zh", "ja", "ko"],
                "specialties": ["analysis", "technical", "creative"],
                "max_tokens": 100000,
                "cost": 0.015
            },
            # Capability profiles for the latest Anthropic models.
            "claude-3-5-sonnet-20241022": {
                "languages": ["en", "zh", "ja", "ko"],
                "specialties": ["analysis", "technical", "creative", "general"],
                "max_tokens": 100000,
                "cost": 0.003
            },
            "claude-3-5-haiku-20241022": {
                "languages": ["en", "zh", "ja", "ko"],
                "specialties": ["general", "creative"],
                "max_tokens": 100000,
                "cost": 0.001
            },
            "claude-sonnet-4-20250514": {
                "languages": ["en", "zh", "ja", "ko"],
                "specialties": ["analysis", "technical", "creative", "general"],
                "max_tokens": 100000,
                "cost": 0.012
            },
            "qwen-max": {
                "languages": ["zh", "en"],
                "specialties": ["chinese_content", "technical"],
                "max_tokens": 6000,
                "cost": 0.01
            },
            "ernie-bot-4": {
                "languages": ["zh"],
                "specialties": ["chinese_content", "search"],
                "max_tokens": 4096,
                "cost": 0.012
            }
        }

        candidates = []

        # Keep only models that are healthy, preferred (when a whitelist is
        # given) and capable of the requested language/size/task.
        for model_name in self._adapters:
            if not self._health_status.get(model_name, False):
                continue

            if preferred_models and model_name not in preferred_models:
                continue

            model_caps = capabilities.get(model_name, {})

            # Language support
            if language not in model_caps.get("languages", []):
                continue

            # Token limit
            if max_tokens > model_caps.get("max_tokens", 0):
                continue

            # Task type ("general" is accepted by every model)
            if task_type in model_caps.get("specialties", []) or task_type == "general":
                candidates.append(model_name)

        if not candidates:
            # Relax all capability constraints: any healthy model beats none.
            candidates = [m for m in self._adapters if self._health_status.get(m, False)]

        if not candidates:
            return None

        # Select based on optimization goal
        if optimize_for == "cost":
            # Cheapest known model; models absent from the table rank last.
            return min(candidates, key=lambda m: capabilities.get(m, {}).get("cost", float('inf')))
        if optimize_for == "quality":
            # Fixed preference order for quality-sensitive work.
            quality_order = [
                "gpt-4",
                "claude-sonnet-4-20250514",  # Claude 4 Sonnet
                "claude-3-5-sonnet-20241022",  # Claude 3.5 Sonnet
                "claude-3",
                "qwen-max",
                "claude-3-5-haiku-20241022",  # Claude 3.5 Haiku (fast)
                "gpt-3.5-turbo"
            ]
            for model in quality_order:
                if model in candidates:
                    return model
            return candidates[0]
        # "speed" or any other value: load-balance by picking the candidate
        # with the fewest routed requests so far (least-used, not LRU).
        return min(candidates, key=lambda m: self._usage_stats[m]["requests"])
    
    def get_adapter(self, model_name: str) -> Optional[BaseModelAdapter]:
        """Look up the adapter registered for a model.

        Args:
            model_name: Name of the model

        Returns:
            The model's adapter, or None when the model was never initialized
        """
        try:
            return self._adapters[model_name]
        except KeyError:
            return None
    
    async def route_request(
        self,
        messages: List[Dict[str, str]],
        model_name: str = None,
        **kwargs
    ) -> Any:
        """Route request to appropriate model with fallback.
        
        Args:
            messages: Conversation messages
            model_name: Preferred model name
            **kwargs: Additional parameters
            
        Returns:
            Model response
        """
        # Select model if not specified
        if not model_name:
            model_name = self.select_model()
        
        if not model_name:
            raise Exception("No available models")
        
        # Try primary model
        adapter = self.get_adapter(model_name)
        if adapter:
            try:
                # Update stats
                self._usage_stats[model_name]["requests"] += 1
                
                response = await adapter.generate(messages, **kwargs)
                
                # Update token usage
                self._usage_stats[model_name]["tokens"] += response.total_tokens
                
                return response
            except Exception as e:
                print(f"Primary model {model_name} failed: {e}")
                # Mark as unhealthy
                self._health_status[model_name] = False
        
        # Try fallback models
        fallbacks = self._fallback_map.get(model_name, [])
        for fallback_model in fallbacks:
            if fallback_model in self._adapters and self._health_status.get(fallback_model, False):
                adapter = self._adapters[fallback_model]
                try:
                    print(f"Using fallback model: {fallback_model}")
                    self._usage_stats[fallback_model]["requests"] += 1
                    
                    response = await adapter.generate(messages, **kwargs)
                    
                    self._usage_stats[fallback_model]["tokens"] += response.total_tokens
                    return response
                except Exception as e:
                    print(f"Fallback model {fallback_model} failed: {e}")
                    self._health_status[fallback_model] = False
        
        raise Exception("All models failed")
    
    async def health_check_all(self) -> Dict[str, bool]:
        """Check health of all models.
        
        Returns:
            Health status dictionary
        """
        tasks = {}
        for model_name, adapter in self._adapters.items():
            tasks[model_name] = adapter.health_check()
        
        results = await asyncio.gather(*tasks.values(), return_exceptions=True)
        
        for model_name, result in zip(tasks.keys(), results):
            if isinstance(result, Exception):
                self._health_status[model_name] = False
            else:
                self._health_status[model_name] = result
        
        return self._health_status
    
    def get_usage_stats(self) -> Dict[str, Any]:
        """Return a plain-dict snapshot of per-model usage counters.

        Returns:
            Mapping of model name to its usage record.
        """
        return {model: stats for model, stats in self._usage_stats.items()}
    
    def get_available_models(self) -> List[str]:
        """List the models that are initialized and currently healthy.

        Returns:
            Names of models safe to route to right now.
        """
        available = []
        for name in self._adapters:
            if self._health_status.get(name, False):
                available.append(name)
        return available