import os
from typing import Optional

from langchain_community.chat_models import ChatOpenAI

class QwenChatModel(ChatOpenAI):
    """Qwen (Tongyi Qianwen) chat model behind an OpenAI-compatible API.

    Defaults to a local Ollama server. To use Alibaba's hosted models
    instead, pass ``api_base="https://dashscope.aliyuncs.com/compatible-mode/v1"``,
    a real ``api_key`` (or set the ``QWEN_API_KEY`` environment variable),
    and a hosted model name such as ``"qwen-max"``.
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
        model: str = "qwen2.5:3b",  # e.g. "qwen-max" when using DashScope
        **kwargs,
    ):
        """Initialize the model.

        Args:
            api_key: API key for the backend. Falls back to the
                ``QWEN_API_KEY`` environment variable, then to the dummy
                value ``"ollama"`` (local Ollama ignores the key, but the
                OpenAI client library requires a non-empty one).
            api_base: Base URL of the OpenAI-compatible server. Defaults to
                a local Ollama instance at ``http://localhost:11434``.
            model: Model identifier understood by the backend.
            **kwargs: Forwarded unchanged to ``ChatOpenAI``.
        """
        # NOTE(review): Ollama's OpenAI-compatible endpoint is normally
        # served under "/v1" (http://localhost:11434/v1) — confirm against
        # the openai client version in use before changing this default.
        api_base = api_base or "http://localhost:11434"

        # Bug fix: the api_key parameter was previously ignored and the key
        # was always hard-coded to "ollama". Resolve it explicitly, with the
        # documented fallbacks.
        resolved_key = api_key or os.getenv("QWEN_API_KEY") or "ollama"

        # The environment variable is still set (not just passed as a kwarg)
        # for backward compatibility: other code may read OPENAI_API_KEY.
        os.environ["OPENAI_API_KEY"] = resolved_key

        super().__init__(
            model_name=model,
            openai_api_base=api_base,
            **kwargs,
        )