from dataclasses import dataclass

from langchain_community.chat_models import ChatTongyi
from langgraph.graph import StateGraph, START


@dataclass
class MyState:
    """Graph state for the joke-generation workflow.

    ``topic`` is the required input; ``joke`` is filled in by the
    ``call_model`` node and defaults to an empty string before the
    model has run.
    """

    # Subject the LLM should joke about (input).
    topic: str
    # Generated joke text (output); empty until the model node runs.
    joke: str = ""


import os

# Route the DashScope-backed ChatTongyi client through the key stored in
# OPENAI_API_KEY. Guard against the variable being unset: os.environ only
# accepts str values, so assigning None would raise TypeError at import time.
_openai_key = os.getenv("OPENAI_API_KEY")
if _openai_key is not None:
    os.environ["DASHSCOPE_API_KEY"] = _openai_key

# Shared chat-model client used by the graph's model node.
llm = ChatTongyi(model="qwen-plus")

def call_model(state: MyState):
    """Generate a joke about ``state.topic`` via the shared LLM client.

    Returns a partial state update: ``{"joke": <generated text>}``.
    """
    prompt = f"Generate a joke about {state.topic}"
    response = llm.invoke([{"role": "user", "content": prompt}])
    return {"joke": response.content}

# Assemble the one-node workflow: START -> call_model.
# The node name "call_model" matches what add_node would infer from the
# function's __name__, so downstream edge wiring is unchanged.
builder = StateGraph(MyState)
builder.add_node("call_model", call_model)
builder.add_edge(START, "call_model")
graph = builder.compile()

# Stream the run in "messages" mode, which yields (chunk, metadata) pairs
# as the LLM produces tokens, and print each pair for inspection.
stream = graph.stream(
    {"topic": "ice cream"},
    stream_mode="messages",
)
for chunk, meta in stream:
    print(chunk)  # token/message chunk emitted by the model
    print(meta)   # run metadata (e.g. which node produced the chunk)
