from dotenv import load_dotenv
from langgraph.checkpoint.memory import MemorySaver, logger
from langgraph.graph import StateGraph, MessagesState, START, END

from com.wp.langGraph.utils import save_graph_visualization

# Load environment variables (e.g. DEEPSEEK_API_KEY) from a .env file
load_dotenv()

from langchain.chat_models import init_chat_model

# Initialize the chat model; temperature=0 makes replies deterministic.
# The DeepSeek API key is read from the environment (loaded via load_dotenv above).
llm = init_chat_model(
    model="deepseek-chat",
    temperature=0,
    model_provider="deepseek",
    # deepseek_api_key=os.getenv('DEEPSEEK_API_KEY'),
)

# Create the graph.
# MessagesState is a built-in state schema; its add_messages reducer
# appends newly returned messages to the existing message list.
graph_builder = StateGraph(MessagesState)


# 定义一个执行节点
# 输入是 State，输出是系统回复
def chatbot(state: MessagesState):
    """Run the LLM over the conversation history and return its reply.

    The returned dict is merged into the graph state; the add_messages
    reducer on MessagesState appends the reply to the message list.
    """
    reply = llm.invoke(state["messages"])
    return {"messages": [reply]}


# Register the single node and wire it between START and END.
graph_builder.add_node("chatbot", chatbot)
graph_builder.add_edge(START, "chatbot")
graph_builder.add_edge("chatbot", END)

# To add persistence, pass a checkpointer when compiling the graph.
# MemorySaver keeps prior messages in memory, keyed by thread_id.
graph = graph_builder.compile(checkpointer=MemorySaver())


# Save a visualization of the state graph
save_graph_visualization(graph)


def _chat(content: str, config: dict) -> None:
    """Stream one user turn through the graph and pretty-print each update.

    Deduplicates the stream-and-print loop that was repeated for every turn:
    wraps `content` as a user message, streams it through `graph` under the
    given `config` (whose thread_id selects the checkpointed conversation),
    and pretty-prints the latest message of each streamed state.
    """
    input_message = {"role": "user", "content": content}
    for chunk in graph.stream({"messages": [input_message]}, config, stream_mode="values"):
        chunk["messages"][-1].pretty_print()


config = {"configurable": {"thread_id": "1"}}
_chat("hi! I'm kevin", config)
print("==================")
# Same thread_id: the checkpointer restores history, so the model recalls the name.
_chat("what's my name?", config)
print("==================")
# Different thread_id: a fresh conversation with no prior history,
# demonstrating that persistence is scoped per thread.
_chat("what's my name?", {"configurable": {"thread_id": "2"}})
