from typing import Literal, get_args

from langchain_core.language_models.chat_models import BaseChatModel

# Supported LLM backends.
# LLMType is the single source of truth; SUPPORTED_LLMS is derived from it
# so the runtime validation in get_llm() can never drift out of sync with
# the static type.
LLMType = Literal["zhipu", "ollama"]
SUPPORTED_LLMS = list(get_args(LLMType))


def get_llm(llm_name: LLMType = "zhipu") -> BaseChatModel:
    """Return a chat-model instance for the requested backend.

    Backend modules are imported lazily inside each branch so that
    importing this module does not require every provider's dependencies
    to be installed.

    Args:
        llm_name: Name of the language model backend; currently "zhipu"
            (default) and "ollama" are supported.

    Returns:
        The chat-model instance for the requested backend.

    Raises:
        ValueError: If ``llm_name`` is not a supported backend.
    """
    if llm_name == "zhipu":
        from .zhipu import zhipu_llm_plus

        return zhipu_llm_plus
    if llm_name == "ollama":
        from .ollama import get_ollama_client

        return get_ollama_client()

    # Unknown backend: fail loudly here instead of falling through to an
    # implicit ``return None`` (which the original if/elif chain allowed
    # structurally, contradicting the return annotation).
    raise ValueError(
        f"Unsupported LLM: {llm_name}. Supported LLMs are: {', '.join(SUPPORTED_LLMS)}"
    )


llm = get_llm()
