from enum import Enum
from typing import Dict, Any
from functools import lru_cache

from contentforge.core.model_interface import AIModelInterface, ModelConfig
from contentforge.core.models.openai_model import OpenAIModel
from contentforge.core.models.claude_model import ClaudeModel
from contentforge.core.models.kimi_model import KimiModel

class ModelType(Enum):
    """Enumeration of the AI model backends supported by the factory.

    The member value is the lowercase provider identifier used in
    configuration files and lookup tables.
    """

    OPENAI = "openai"
    CLAUDE = "claude"
    KIMI = "kimi"

class ModelFactory:
    """Factory that builds concrete :class:`AIModelInterface` instances.

    Maps a :class:`ModelType` to its implementation class and fills in a
    per-provider default model name when the caller did not choose one.
    """

    @staticmethod
    def create_model(
        model_type: ModelType, 
        config: ModelConfig
    ) -> AIModelInterface:
        """
        Create an AI model instance.

        NOTE: when ``config.model_name`` is empty/falsy or the literal
        string ``"default"``, this method mutates the caller's ``config``
        in place, replacing ``model_name`` with the provider default.

        Args:
            model_type (ModelType): which backend to instantiate
            config (ModelConfig): model configuration (possibly mutated,
                see note above)

        Returns:
            AIModelInterface: the constructed model instance

        Raises:
            ValueError: if ``model_type`` is not a supported type
        """
        # Per-provider fallback model names, applied only when the caller
        # left model_name unset or as the "default" placeholder.
        default_models = {
            ModelType.OPENAI: "gpt-3.5-turbo",
            ModelType.CLAUDE: "claude-3-opus-20240229",
            ModelType.KIMI: "moonshot-v1-8k"
        }

        if not config.model_name or config.model_name == "default":
            config.model_name = default_models.get(model_type, "default")

        # Delegate to the single source of truth for the type->class map
        # (previously duplicated here); raises ValueError for unknown types.
        model_class = ModelFactory.get_model_class(model_type)
        return model_class(config)

    @staticmethod
    @lru_cache(maxsize=None)
    def get_model_class(model_type: ModelType) -> type:
        """
        Resolve a :class:`ModelType` to its implementation class.

        Results are memoized via ``lru_cache`` (the key space is bounded
        by the enum, so an unbounded cache cannot grow without limit).

        Args:
            model_type (ModelType): the backend to look up

        Returns:
            type: the concrete model class for ``model_type``

        Raises:
            ValueError: if ``model_type`` is not a supported type
        """
        model_map: Dict[ModelType, type] = {
            ModelType.OPENAI: OpenAIModel,
            ModelType.CLAUDE: ClaudeModel,
            ModelType.KIMI: KimiModel
        }

        if model_type not in model_map:
            raise ValueError(f"不支持的模型类型: {model_type}")

        return model_map[model_type]