from langgraph_sdk import get_client
import asyncio

# SDK client pointed at a locally running LangGraph server.
# 2024 is presumably the default `langgraph dev` port — confirm against your setup.
client = get_client(url="http://127.0.0.1:2024")
# Alternate endpoints (LAN host / different port), kept for quick switching:
# client = get_client(url="http://192.168.0.101:2024")
# client = get_client(url="http://127.0.0.1:2025")

async def main():
    """Stream a threadless run of the "agent" assistant and print each event.

    Sends a single human message asking about the weather, then iterates the
    server-sent event stream: metadata events are printed in full, and for any
    chunk carrying a ``messages`` list the last message's type, content, and
    whether it requested tool calls are summarized on one line.
    """
    async for chunk in client.runs.stream(
        None,     # Threadless run (no persistent thread is created).
        "agent",  # Name of assistant. Defined in langgraph.json.
        input={
            "messages": [
                {
                    "role": "human",
                    "content": "What is weather like today in New York City?",
                },
            ],
        },
    ):
        print(f"Receiving new event of type: <{chunk.event}>")
        if chunk.event == "metadata":
            print(chunk)
            print("\n")

        if chunk.data:
            messages = chunk.data.get("messages")
            if messages:
                last_msg = messages[-1]
                # Truthy "tool_calls" means the model asked to invoke tools.
                has_tools = bool(last_msg.get("tool_calls"))
                print(f"type: {last_msg['type']}, content: {last_msg['content']},  has-tools: {has_tools}")
                print("\n")

# Guard the entry point so importing this module doesn't trigger a network run.
if __name__ == "__main__":
    asyncio.run(main())