import os

from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langgraph.graph import StateGraph, START, END, MessagesState

#定义状态结构体
class ChatState(MessagesState):
    user_question:str
    llm_response:str

def llm_node(sate):
    template = "You are a helpful assistant. User: {question}"
    prompt = ChatPromptTemplate.from_template(template)  # 正确
    model = ChatOpenAI(
        model="qwen-plus",  # 阿里云千问-plus模型
        openai_api_key='sk-965dc39b016c49ecbe29de180f4db2b6',
        openai_api_base='https://dashscope.aliyuncs.com/compatible-mode/v1',
        temperature=0.7,  # 控制生成文本的随机性
        max_tokens=2048,  # 最大生成长度
    )
    chain = prompt | model
    response = chain.invoke({"question":sate["user_question"]}).content
    return {"llm_response":response}

#构建图
builder = StateGraph(ChatState)
builder.add_node("llm_node",llm_node)
builder.add_edge(START,"llm_node")
builder.add_edge("llm_node",END)
graph = builder.compile()

#调用图
result= graph.invoke({"user_question":"你好，LangGraph！"})
print(result)