import os
from langchain.chat_models import init_chat_model
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableBranch
from dotenv import load_dotenv
load_dotenv()

# Chat model: Qwen3-8B served through SiliconFlow's OpenAI-compatible API.
# The key is read from the environment (register with SiliconFlow to obtain one);
# load_dotenv() above lets it come from a local .env file.
_api_key = os.environ.get('OPENAI_API_KEY')
model = init_chat_model(
    model="Qwen/Qwen3-8B",
    model_provider="openai",
    base_url="https://api.siliconflow.cn/v1/",
    api_key=_api_key,
)

# Two candidate chains: one phrased for technical questions, one for the rest.
tech_chain = PromptTemplate.from_template("技术问题：{question} → 解答：...") | model
non_tech_chain = PromptTemplate.from_template("非技术问题：{question} → 解答：...") | model


def _mentions_tech(payload):
    """Routing predicate: True when the question text contains the keyword "技术"."""
    return "技术" in payload["question"]


# RunnableBranch evaluates each (predicate, chain) pair in order and falls
# back to the last positional argument when no predicate matches.
branch = RunnableBranch(
    (_mentions_tech, tech_chain),  # keyword match -> tech_chain
    non_tech_chain,                # default -> non_tech_chain
)

# Demo invocation: the question contains "技术", so it is routed to tech_chain.
result = branch.invoke({"question": "什么是Python技术？"})
print(result)