import dotenv
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import ConfigurableField
from langchain_openai import ChatOpenAI
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint

# Load environment variables (API keys for the OpenAI-compatible Kimi
# endpoint and the Baidu Qianfan endpoint).
dotenv.load_dotenv()

# 1. Create the prompt template (passes the user query straight through).
prompt = ChatPromptTemplate.from_template("{query}")

# 2. Define the default LLM and register runtime-selectable alternatives.
#    Select one at invoke time via config={"configurable": {"llm": "<key>"}}.
llm = ChatOpenAI(model="kimi-k2-0711-preview").configurable_alternatives(
    ConfigurableField(id="llm"),
    # Key that selects the default ChatOpenAI model defined above.
    default_key="kimi-k2",
    # Alternative: Baidu Qianfan (Wenxin/ERNIE) chat endpoint.
    # FIX: removed the original `gpt4_key="kimi-k2"` kwarg — alternatives
    # must be Runnables (or callables returning one); a bare string raises
    # a ValueError when the alternative is configured.
    wenxin=QianfanChatEndpoint(),
)

# 3. Assemble the chain: prompt -> configurable LLM -> plain-string output.
chain = prompt | llm | StrOutputParser()

# Route this invocation to the "wenxin" (Qianfan) alternative at runtime
# by passing the configurable field id defined on the LLM above.
run_config = {"configurable": {"llm": "wenxin"}}
content = chain.invoke({"query": "你好，你是什么模型？"}, config=run_config)

print(content)