"""模型加载工具模块 - 提供统一的模型加载功能。"""

from langchain_core.language_models import BaseChatModel
from langchain_openai import ChatOpenAI
import os
from dotenv import load_dotenv
from langchain.chat_models import init_chat_model
import logging

# Load environment variables from .env, overriding any already set in the process.
load_dotenv(override=True)
# Module-level cache for the chat model singleton; populated lazily by load_chat_model().
llm = None
logger = logging.getLogger(__name__)

def load_chat_model() -> BaseChatModel:
    """Load and cache a chat model, falling back to DeepSeek on failure.

    First attempts to connect to the OpenAI-configured model (settings read
    from ``OPENAI_*`` environment variables via ``init_chat_model``). If that
    fails, falls back to DeepSeek-V3 served through the DashScope
    OpenAI-compatible endpoint. Each candidate connection is smoke-tested
    with a trivial ``invoke`` call before being accepted. The successful
    model is cached in the module-level ``llm`` global, so subsequent calls
    return the same instance without reconnecting.

    Returns:
        BaseChatModel: The loaded and connection-verified chat model.

    Raises:
        RuntimeError: If both the OpenAI and DeepSeek connections fail.
            The DeepSeek error is chained as the direct cause.
    """
    global llm
    if llm is not None:
        return llm

    try:
        # Try the primary (OpenAI-configured) provider first.
        logger.info("尝试连接 OpenAI 模型...")
        llm = init_chat_model(
            model=os.getenv("OPENAI_MODEL_ID"),
            model_provider=os.getenv("OPENAI_MODEL_PROVIDER"),
            base_url=os.getenv("OPENAI_BASE_URL"),
            api_version=os.getenv("OPENAI_API_VERSION"),
            api_key=os.getenv("OPENAI_API_KEY"),
            temperature=0.5,
        )
        # Smoke-test the connection; failure here triggers the fallback.
        llm.invoke("Hello")
        logger.info("OpenAI 模型连接成功")

    except Exception as openai_error:
        # Lazy %-style args avoid formatting cost when the level is disabled.
        logger.warning("OpenAI 连接失败: %s", openai_error)
        logger.info("切换到 DeepSeek 模型...")
        try:
            # Fallback: DeepSeek-V3 via the DashScope OpenAI-compatible API.
            llm = ChatOpenAI(
                model="deepseek-ai/DeepSeek-V3",
                api_key=os.getenv("DASHSCOPE_API_KEY"),
                base_url=os.getenv("DASHSCOPE_BASE_URL"),
                temperature=0.3,
                max_tokens=1024,
                timeout=60,
            )
            # Smoke-test the fallback connection as well.
            llm.invoke("Hello")
            logger.info("DeepSeek 模型连接成功")

        except Exception as deepseek_error:
            logger.error("DeepSeek 连接也失败: %s", deepseek_error)
            # Reset the cache: without this, a later call would return the
            # broken, unverified model instead of retrying the connection.
            llm = None
            # Chain the cause so the full traceback of the fallback failure
            # is preserved for debugging.
            raise RuntimeError(
                f"所有模型连接都失败。OpenAI错误: {openai_error}, "
                f"DeepSeek错误: {deepseek_error}"
            ) from deepseek_error

    return llm
