"""
@Author: Bright
@File: hello_langgraph.py
@Time: 2025/10/9
@Desc:  LangGraph 入门
"""
import os
from typing import TypedDict
from dotenv import load_dotenv
from langchain_openai import AzureChatOpenAI
from langgraph.graph import StateGraph, END

load_dotenv()

# Azure OpenAI chat client; all connection settings are read from the
# environment (populated by load_dotenv() above).
llm = AzureChatOpenAI(
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    azure_deployment=os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME"),
    api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
    temperature=0.7,  # moderately creative sampling
)


class GraphState(TypedDict):
    """Shared state dict passed between the graph's nodes."""
    # The user's question (set in the initial state, read by both nodes).
    question: str
    # The generated answer (written by the nodes, read at the end).
    answer: str


# 定义第二个节点函数 - 处理问题
def process_question(state: GraphState) -> GraphState:
    """处理问题并生成回答的节点"""
    question = state.get("question", "")
    answer = f"关于'{question}'，这是一个很好的问题！LangGraph是用于构建AI工作流的工具。"
    print(f"处理问题: {answer}")
    return {**state, "answer": answer}


def process_answer(state: GraphState) -> GraphState:
    """LLM node ("llm_response"): ask Azure OpenAI and store the reply text.

    Fix: ``llm.invoke`` returns an ``AIMessage`` object, not a string, so
    storing it directly violated ``GraphState``'s ``answer: str`` contract
    and printed the message repr. Extract ``.content`` instead.
    """
    question = state.get("question", "")
    response = llm.invoke(question)
    # AIMessage.content holds the plain-text reply.
    answer = response.content
    print(f"处理问题: {answer}")
    return {**state, "answer": answer}


# Build the two-node workflow: ask_user -> llm_response -> END.
graph = StateGraph(GraphState)

graph.add_node("ask_user", process_question)
graph.add_node("llm_response", process_answer)

graph.set_entry_point("ask_user")
graph.add_edge("ask_user", "llm_response")
graph.add_edge("llm_response", END)

# Compile the graph definition into a runnable application.
app = graph.compile()

# Initial state: the question to process; "answer" is filled in by the nodes.
initial_state = {
    "question": "请告诉我你需要什么帮助？",
    "answer": "",
}

# Run the compiled graph from the entry point through to END.
result = app.invoke(initial_state)

print("\n=== 最终执行结果 ===")
for key, value in result.items():
    print(f"{key}: {value}")
