from langgraph_sdk import get_client
import asyncio

# Module-level SDK client pointed at a locally running LangGraph dev server.
# NOTE(review): host/port are hard-coded — assumes `langgraph dev` default URL.
client = get_client(url="http://localhost:2024")


async def main():
    """Stream a threadless run from the "agent" assistant and print AI tokens.

    Sends one human message (asking for a blessing containing the configured
    username) and incrementally prints the content of each AIMessageChunk as
    it arrives over the "messages-tuple" stream.
    """
    async for chunk in client.runs.stream(
            None,  # Threadless run
            "agent",  # Name of assistant. Defined in langgraph.json
            stream_mode="messages-tuple",
            config={"configurable": {
                "username": "alm"
            }},
            input={
                "messages": [
                    {
                        "role": "human",
                        "content": "请为我说一句祝福语，其中需要含有当前的username"
                    }
                ]
            }
    ):
        # In "messages-tuple" mode, data is a (message, metadata) pair;
        # data[0] is the message dict carrying "type" and "content".
        try:
            msg_type = chunk.data[0].get("type")  # avoid shadowing builtin `type`
            if msg_type == "AIMessageChunk":
                print(chunk.data[0].get("content"), end="", flush=True)
        except (AttributeError, IndexError, KeyError, TypeError):
            # Non-message events (e.g. metadata/error chunks) lack this shape;
            # emit a newline and keep streaming instead of crashing.
            # Narrow catch so CancelledError/KeyboardInterrupt still propagate.
            print("")


# Guard the entry point so importing this module does not start the stream.
if __name__ == "__main__":
    asyncio.run(main())
