import os
from typing import Annotated, TypedDict

from langchain_community.chat_models.zhipuai import ChatZhipuAI
from langchain_core.messages import BaseMessage
from langchain_core.runnables import RunnableConfig
from langchain_core.tools import tool
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import add_messages, StateGraph, END
from langgraph.prebuilt import ToolNode

# SECURITY: a live API key is hard-coded and committed to source — rotate this
# key and load it from the environment or a secrets manager instead.
# setdefault (vs plain assignment) keeps an externally supplied
# ZHIPUAI_API_KEY from being silently clobbered by the baked-in value.
os.environ.setdefault("ZHIPUAI_API_KEY", "97738d4998b8732d707daf91a2b1c56d.2y6VKEuOlidwHDpI")


@tool
def get_weather(location: str) -> int:
    """调用该工具获取某个城市的天气温度"""
    # The docstring above is the tool description sent to the LLM
    # ("call this tool to get the weather temperature of a city"),
    # so it is kept verbatim. Demo stub: `location` is ignored and a
    # fixed reading is reported.
    fixed_temperature = 20
    return fixed_temperature


@tool
def square(num: str) -> int:
    """调用该工具获取某个数值的平方值"""
    # The docstring above is the tool description sent to the LLM
    # ("call this tool to get the square of a numeric value"), so it is
    # kept verbatim. The model passes the number as a string: parse it
    # once, then square. ValueError propagates for non-integer input,
    # matching the original behavior.
    value = int(num)
    return value * value


class AgentState(TypedDict):
    """The state of the agent."""
    # Conversation history. The `add_messages` reducer merges each node's
    # returned message(s) into the existing list instead of replacing it,
    # so the transcript accumulates across model and tool turns.
    messages: Annotated[list[BaseMessage], add_messages]


# Tools exposed to the agent; ToolNode executes whichever tool calls the
# model emits and feeds the results back as ToolMessages.
tools = [get_weather, square]
tool_node = ToolNode(tools)
# GLM-4 chat model with the tool schemas bound so it can emit tool calls.
# NOTE(review): temperature 0.95 is very exploratory for a tool-calling
# agent — confirm this is intentional.
model = ChatZhipuAI(
    model="glm-4",
    temperature=0.95
).bind_tools(tools)


def call_model(state: AgentState, config: RunnableConfig):
    """Invoke the chat model on the accumulated transcript.

    Returns a partial state update; the `add_messages` reducer appends the
    model's reply to the existing message history.
    """
    response = model.invoke(state["messages"], config)
    return {"messages": response}


def should_continue(state: AgentState):
    """Route after a model turn: run requested tools, or finish.

    The last message is the model's reply; an empty/absent `tool_calls`
    means it answered directly, so the graph should terminate.
    """
    latest = state["messages"][-1]
    return "tools" if latest.tool_calls else "end"


# Wire the agent loop: call_model -> (tool_node -> call_model)* -> END.
builder = StateGraph(AgentState)
builder.add_node("call_model", call_model)
builder.add_node("tool_node", tool_node)

builder.set_entry_point("call_model")
# After each model turn, either execute the requested tools or stop.
builder.add_conditional_edges("call_model", should_continue, {"end": END, "tools": "tool_node"})
builder.add_edge("tool_node", "call_model")

# MemorySaver checkpoints state in process memory, keyed by the
# `thread_id` in the run config — enables multi-turn conversations.
graph = builder.compile(checkpointer=MemorySaver())

# Example usage (kept for reference — uncomment to run interactively):
# def print_stream(stream):
#     for s in stream:
#         message = s["messages"][-1]
#         if isinstance(message, tuple):
#             print(message)
#         else:
#             message.pretty_print()
#
#
# inputs = {"messages": [("user", "帮我查询下北京的温度，并计算温度的平方值")]}
# config = {"configurable": {"thread_id": "1"}}
# print_stream(graph.stream(inputs, config, stream_mode="values"))
