import asyncio
import os
from typing import Literal, TypedDict

from langchain_community.chat_models.zhipuai import ChatZhipuAI
from langchain_core.messages import AIMessageChunk, HumanMessage
from langchain_core.runnables import RunnableConfig
from langchain_core.tools import tool
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode
from typing_extensions import Annotated

# SECURITY NOTE(review): hard-coded API credential committed to source — this key
# should be rotated and loaded from the environment or a secret manager instead
# of being embedded in the file.
os.environ["ZHIPUAI_API_KEY"] = "97738d4998b8732d707daf91a2b1c56d.2y6VKEuOlidwHDpI"

class State(TypedDict):
    """Graph state: a running message history shared by all nodes."""

    # add_messages is a reducer: node return values are appended to (or merged
    # by id into) the existing list rather than replacing it wholesale.
    messages: Annotated[list, add_messages]

@tool
def search(query: str):
    """Call to surf the web."""
    # Stub tool: ignores `query` and always returns the same canned forecast.
    # NOTE: the docstring above doubles as the tool description sent to the
    # model via bind_tools, so it is part of runtime behavior.
    return ["Cloudy with a chance of hail."]

# Tools exposed to the model; ToolNode executes whichever tool calls appear
# on the last AI message and emits the results as ToolMessages.
tools = [search]

tool_node = ToolNode(tools)

# GLM-4 chat model with the tool schemas bound so it can emit tool calls.
model = ChatZhipuAI(
    model="glm-4",
    temperature=0.95
).bind_tools(tools)

def should_continue(state: State):
    """Route after the agent node.

    Returns "tools" when the most recent AI message requested tool calls,
    otherwise END to finish the run.
    """
    latest = state["messages"][-1]
    return "tools" if latest.tool_calls else END

async def call_model(state: State, config: RunnableConfig):
    """Invoke the chat model on the accumulated message history.

    Bug fix: the original returned the un-awaited coroutine from
    ``model.ainvoke(...)``, so a coroutine object — not an AIMessage — was
    appended to the state. Awaiting it yields the actual model response.
    """
    response = await model.ainvoke(state["messages"], config)
    return {"messages": [response]}

# Graph shape: START -> agent -> (tools -> agent)* -> END.
# The agent node loops through the tools node until it stops emitting tool calls.
workflow = StateGraph(State)

workflow.add_node("agent", call_model)
workflow.add_node("tools", tool_node)

workflow.add_edge(START, "agent")
# should_continue returns either "tools" or END; listing both here lets
# LangGraph validate the conditional edge targets up front.
workflow.add_conditional_edges(
    "agent",
    should_continue,
    ["tools", END],
)
workflow.add_edge("tools", "agent")

app = workflow.compile()

async def _stream_demo() -> None:
    """Run the agent once and stream its output token by token.

    Bug fix: the original used ``async for`` at module top level, which is a
    SyntaxError in a regular Python script (it is only legal inside an async
    function, or in environments with top-level await such as notebooks).
    The demo now lives in an async function driven by ``asyncio.run``.
    """
    inputs = [HumanMessage(content="what is the weather in sf")]
    config = {"configurable": {"thread_id": "1"}}
    first = True
    gathered = None  # accumulated AIMessageChunk; explicit init avoids NameError
    async for msg, metadata in app.astream(
        {"messages": inputs}, config, stream_mode="messages"
    ):
        # Print streamed content from AI/tool messages, skipping the echoed input.
        if msg.content and not isinstance(msg, HumanMessage):
            print(msg.content, end="|", flush=True)

        if isinstance(msg, AIMessageChunk):
            # AIMessageChunk supports `+` to merge partial chunks (including
            # partial tool-call fragments) into one growing message.
            gathered = msg if first else gathered + msg
            first = False

            if msg.tool_call_chunks:
                # tool_calls on the merged chunk shows the best-effort parsed
                # tool call assembled from the fragments seen so far.
                print(gathered.tool_calls)


asyncio.run(_stream_demo())