import os
from IPython.display import Image, display
from langchain_core.tools import tool
from langchain_core.messages import HumanMessage, ToolMessage
from langgraph.graph import START, END, StateGraph, MessagesState
from langgraph.prebuilt import ToolNode
from langgraph.checkpoint.memory import MemorySaver
from langchain_community.chat_models.zhipuai import ChatZhipuAI
from pydantic import BaseModel

# SECURITY: hard-coded API key committed to source — rotate this key and
# load it from the environment or a secrets manager instead of the literal.
# `setdefault` (instead of `=`) also avoids clobbering a key the user has
# already configured in their environment.
os.environ.setdefault("ZHIPUAI_API_KEY", "97738d4998b8732d707daf91a2b1c56d.2y6VKEuOlidwHDpI")

@tool
def search(query: str):
    """Call to surf the web."""
    # Stub tool: returns a canned string instead of doing a real web search.
    # NOTE: the docstring above doubles as the tool description the LLM sees,
    # so it is runtime-relevant text, not just documentation.
    return f"I looked up: {query}. Result: It's sunny in San Francisco, but you better look out if you're a Gemini 😈."

# Tools the model may call; `tools` is also reused below when binding the model.
tools = [search]
# ToolNode executes whatever tool calls appear on the latest AI message.
tool_node = ToolNode(tools)

class AskHuman(BaseModel):
    """Ask the human a question"""
    # Pseudo-tool: bound to the model like a real tool so the LLM can "call"
    # it, but it is never executed — the graph interrupts instead and a human
    # supplies the answer. The docstring above is the description the LLM sees.
    question: str

# GLM-4 chat model with both the real search tool and the AskHuman
# pseudo-tool bound, so the LLM can emit tool calls for either.
model = ChatZhipuAI(
    model="glm-4",
    temperature=0.95  # high temperature — presumably for varied phrasing; TODO confirm
).bind_tools(tools + [AskHuman])

def should_continue(state: MessagesState):
    """Route after a model call: finish, ask the human, or run a tool.

    Returns one of "end", "ask_human", or "continue", matching the
    path map on the "call_model" conditional edge.
    """
    last = state["messages"][-1]
    # No tool calls means the model produced a final answer.
    if not last.tool_calls:
        return "end"
    # AskHuman is the pseudo-tool handled by the interruptible node;
    # anything else is a real tool routed to the ToolNode.
    first_call = last.tool_calls[0]
    return "ask_human" if first_call["name"] == "AskHuman" else "continue"

def call_model(state: MessagesState):
    """Invoke the LLM on the running conversation and append its reply."""
    ai_message = model.invoke(state["messages"])
    # MessagesState merges this one-element list into the existing history.
    return {"messages": [ai_message]}

def ask_human(state: MessagesState):
    """Placeholder node: execution is interrupted *before* this node runs.

    The actual answer is injected from outside the graph via
    graph.update_state(..., as_node="ask_human"), so this body is a no-op.
    """
    return None

# Wire the graph: model -> (end | human | tools), with both the human
# answer and tool results feeding back into the model.
builder = StateGraph(MessagesState)
builder.add_node("call_model", call_model)
builder.add_node("ask_human", ask_human)
builder.add_node("tool_node", tool_node)

builder.add_edge(START, "call_model")
route_map = {
    "end": END,
    "ask_human": "ask_human",
    "continue": "tool_node",
}
builder.add_conditional_edges("call_model", should_continue, route_map)
builder.add_edge("ask_human", "call_model")
builder.add_edge("tool_node", "call_model")

# interrupt_before pauses the run just before "ask_human" executes, so a
# human can supply the answer via update_state before resuming.
graph = builder.compile(checkpointer=MemorySaver(), interrupt_before=["ask_human"])

# display(Image(graph.get_graph().draw_mermaid_png()))

# The thread id scopes the checkpointer's saved state; resuming below
# reuses the same id to pick up where the interrupt left off.
config = {"configurable": {"thread_id": "2"}}
input_message = HumanMessage(
    content="First ask the user where they are, then use the search tool to look up the weather there"
)
# First run: streams until the graph interrupts before the "ask_human" node.
for event in graph.stream({"messages": [input_message]}, config, stream_mode="values"):
    event["messages"][-1].pretty_print()

# Answer the pending AskHuman tool call manually: grab its tool_call id and
# inject a ToolMessage as if the "ask_human" node itself had produced it.
tool_call_id = graph.get_state(config).values["messages"][-1].tool_calls[0]["id"]
tool_message = ToolMessage(
    content="san francisco", tool_call_id=tool_call_id
)
graph.update_state(config, {"messages": [tool_message]}, as_node="ask_human")
# print(graph.get_state(config).values)
# Resume: stream(None) continues from the checkpoint past the interrupt.
for event in graph.stream(None, config, stream_mode="values"):
    event["messages"][-1].pretty_print()