import os

from langchain_community.chat_models.zhipuai import ChatZhipuAI
from langchain_core.messages import HumanMessage
from langchain_core.tools import tool
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.prebuilt import ToolNode

# NOTE(review): hard-coded API credential committed to source. This key is now
# exposed and should be revoked; load it from the environment or a secrets
# manager instead of assigning it here.
os.environ["ZHIPUAI_API_KEY"] = "97738d4998b8732d707daf91a2b1c56d.2y6VKEuOlidwHDpI"

@tool
def play_song_on_spotify(song: str):
    """Play a song on Spotify"""
    # The docstring above doubles as the tool description shown to the model,
    # so it is kept exactly as-is. This stub just confirms the action.
    confirmation = f"Successfully played {song} on Spotify!"
    return confirmation

@tool
def play_song_on_apple(song: str):
    """Play a song on Apple Music"""
    # Fix: the @tool decorator was missing, unlike the sibling Spotify tool.
    # The bare function was being passed to ToolNode/bind_tools, which expect
    # registered tools; decorating it keeps both entries consistent.
    return f"Successfully played {song} on Apple Music!"

# Expose both music players to the agent and wrap them for graph execution.
tools = [play_song_on_spotify, play_song_on_apple]
tool_node = ToolNode(tools)

# GLM-4 chat model with the tools bound so it can emit tool calls.
model = ChatZhipuAI(model="glm-4", temperature=0.95).bind_tools(tools)

def should_continue(state: MessagesState):
    """Route the graph after a model turn.

    Returns "continue" when the latest message requests tool calls (so the
    tool node runs next), otherwise "end" to finish the conversation turn.
    """
    if state["messages"][-1].tool_calls:
        return "continue"
    return "end"

def call_model(state: MessagesState):
    """Run the bound chat model on the conversation and append its reply."""
    response = model.invoke(state["messages"])
    return {"messages": [response]}

# Wire the agent loop: model turn -> (tools -> model turn)* -> END.
builder = StateGraph(MessagesState)
builder.add_node("call_model", call_model)
builder.add_node("tool_node", tool_node)
builder.add_edge(START, "call_model")
builder.add_conditional_edges(
    "call_model",
    should_continue,
    {"continue": "tool_node", "end": END},
)
builder.add_edge("tool_node", "call_model")

# Compile with an in-memory checkpointer so per-thread state/history is kept.
graph = builder.compile(checkpointer=MemorySaver())


# Fix: the HumanMessage import previously sat mid-file; per PEP 8 it now lives
# in the top-of-file import block with the other langchain imports.

# Run one conversation turn on thread "1", streaming each state snapshot.
config = {"configurable": {"thread_id": "1"}}
input_message = HumanMessage(content="Can you play Taylor Swift's most popular song?")
for event in graph.stream({"messages": [input_message]}, config, stream_mode="values"):
    event["messages"][-1].pretty_print()

# Inspect the final accumulated messages and every checkpoint in the history.
print(graph.get_state(config).values["messages"])
for state in graph.get_state_history(config):
    print(state)