from langgraph_sdk import get_client
import asyncio
from langgraph.types import Command

# SDK client pointing at the local LangGraph dev server (default port 2024).
# The commented-out lines are alternative targets (LAN host / other port)
# kept for quick switching during development.
client = get_client(url="http://127.0.0.1:2024")
# client = get_client(url="http://192.168.0.101:2024")
# client = get_client(url="http://127.0.0.1:2025")

async def main():
    """Drive a hotel-booking request through the "human" assistant, then resume it.

    Creates a thread, streams an initial run on that thread (printing the last
    message of each data chunk), then resumes the interrupted run on the same
    thread with an "accept" command and prints the resulting stream.

    Side effects: network calls to the LangGraph server configured in `client`;
    output is printed to stdout.
    """
    # Create a thread up front so both runs below share conversation state.
    thread = await client.threads.create()
    thread_id = thread["thread_id"]

    async for chunk in client.runs.stream(
        thread_id,  # run on the thread created above (NOT a threadless run)
        "human",  # name of the assistant, defined in langgraph.json
        input={
            "messages": [{
                "role": "human",
                "content": "book a stay at McKittrick hotel",
            }],
        },
    ):
        if chunk.event == "metadata":
            print(chunk)
            print("\n")
        elif chunk.data:
            messages = chunk.data.get("messages")
            if messages:
                last_msg = messages[-1]
                print(last_msg)
                # True when the last message carries tool calls (truthy
                # non-empty "tool_calls" entry), False otherwise.
                has_tools = bool(last_msg.get("tool_calls"))
                print(f"type: {last_msg['type']}, content: {last_msg['content']},  has-tools: {has_tools}")
                print("\n")

    print("==================================================================================")

    # Resume the run on the same thread — presumably past a human-in-the-loop
    # interrupt raised by the assistant (TODO confirm against the graph) — by
    # sending an "accept" command, and print everything it streams back.
    async for chunk in client.runs.stream(
        thread_id,
        "human",  # same assistant as above
        command=Command(resume=[{"type": "accept"}]),
    ):
        print(chunk)
        print("\n")

if __name__ == "__main__":
    # Guard the entry point so importing this module does not immediately
    # kick off a network run against the LangGraph server.
    asyncio.run(main())