import asyncio
from typing import TypedDict

from langchain_community.chat_models import ChatTongyi
from langgraph.graph import START, StateGraph


import os

# ChatTongyi authenticates via the DASHSCOPE_API_KEY environment variable; this
# script reuses the key stored under OPENAI_API_KEY. Assigning None into
# os.environ raises an opaque "TypeError: str expected, not NoneType", so check
# up front and fail with an actionable message instead.
_api_key = os.getenv("OPENAI_API_KEY")
if _api_key is None:
    raise RuntimeError(
        "OPENAI_API_KEY is not set; it is required to populate DASHSCOPE_API_KEY"
    )
os.environ["DASHSCOPE_API_KEY"] = _api_key

# Module-level chat model shared by the graph node below.
llm = ChatTongyi(model="qwen-plus")

# Graph state schema: the caller supplies "topic"; the model node fills "joke".
State = TypedDict("State", {"topic": str, "joke": str})

async def call_model(state, config):
    """Ask the module-level LLM for a joke about ``state["topic"]``.

    Returns a partial state update: ``{"joke": <model output text>}``.
    The ``config`` is forwarded to ``ainvoke`` so callbacks/streaming from the
    graph runtime propagate to the model call.
    """
    subject = state["topic"]
    print("Generating joke...")
    prompt = [{"role": "user", "content": f"Write a joke about {subject}"}]
    response = await llm.ainvoke(prompt, config)
    return {"joke": response.content}

# Build a single-node graph: START -> call_model, then compile it into a
# runnable. add_node with no explicit name registers the node under the
# function's name, "call_model".
_builder = StateGraph(State)
_builder.add_node(call_model)
_builder.add_edge(START, "call_model")
graph = _builder.compile()

async def main():
    """Stream the graph's LLM tokens and print them separated by '|'."""
    stream = graph.astream(
        {"topic": "ice cream"},
        stream_mode="messages",  # yields (message chunk, metadata) pairs
    )
    async for token, _metadata in stream:
        # Skip empty chunks (e.g. tool/metadata-only messages).
        if token.content:
            print(token.content, end="|", flush=True)


if __name__ == "__main__":
    asyncio.run(main())