from langgraph_sdk import get_client
import asyncio

# LangGraph dev server endpoint (default `langgraph dev` host/port).
# Point this at the deployment you want to target.
client = get_client(url="http://127.0.0.1:2024")

async def main():
    """Create a thread, stream one run of the "review2" assistant on it,
    and print the most recent message from each streamed state update.

    Note: this is a thread-bound run (a thread is created and its id is
    passed to ``client.runs.stream``), not a threadless run.
    """
    thread = await client.threads.create()
    thread_id = thread["thread_id"]
    print("Thread ID:", thread_id)

    async for chunk in client.runs.stream(
        thread_id,
        "review2",  # Assistant name, as declared in langgraph.json.
        input={
            "messages": [{
                "role": "human",
                "content": "book a stay at McKittrick hotel",
            }],
        },
    ):
        # Each chunk carries an (partial) updated graph state; when it
        # includes a "messages" list, show only the newest message.
        if chunk.data:
            messages = chunk.data.get("messages")
            if messages:
                print("============================================")
                print(messages[-1])

# Guard the entry point so importing this module does not trigger network I/O.
if __name__ == "__main__":
    asyncio.run(main())
