import os
from typing import TypedDict

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, START
from openai import OpenAI
# OpenAI-compatible client for Alibaba Cloud DashScope (Qwen models).
# SECURITY FIX: the API key was hard-coded in source (a leaked secret);
# read it from the environment instead. How to get a key:
# https://help.aliyun.com/zh/model-studio/developer-reference/get-api-key
model = OpenAI(
        api_key=os.getenv("DASHSCOPE_API_KEY"),
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    )

class State(TypedDict):
    """Shared graph state passed between LangGraph nodes."""
    topic:str  # joke subject; refine_topic appends " and cats" to it
    joke:str  # joke text produced by the LLM in generate_joke

def refine_topic(state:State):
    """Node: refine the topic by appending " and cats" to it."""
    refined = f"{state['topic']} and cats"
    return {"topic": refined}

def generate_joke(state:State):
    """Node: ask the Qwen model for a joke about state["topic"].

    Returns a partial state update: {"joke": <model response text>}.
    """
    completion = model.chat.completions.create(
        # Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
        # qwen-plus is a qwen3 model; for "thinking mode" see:
        # https://help.aliyun.com/zh/model-studio/deep-thinking
        model="qwen-plus",
        messages=[
            # No placeholders, so the f-prefix was unnecessary (same string).
            {'role': 'system', 'content': '人工助手'},
            # BUGFIX: typo "aboue" -> "about" in the user prompt.
            {'role': 'user', 'content': f'generate a joke about {state["topic"]}'}
        ]
    )

    llm_response = completion.choices[0].message.content
    return {"joke":llm_response}

# Assemble the two-node pipeline: START -> refine_topic -> generate_joke.
# A MemorySaver checkpointer records a snapshot after every super-step.
builder = StateGraph(State)
builder.add_node(refine_topic)
builder.add_node(generate_joke)
builder.add_edge(START, "refine_topic")
builder.add_edge("refine_topic", "generate_joke")
graph = builder.compile(checkpointer=MemorySaver())
# Every invocation under the same thread_id shares one checkpoint history.
config = {"configurable": {"thread_id": "my_thread_1"}}

# Stream per-node state updates as the graph executes.
for update in graph.stream({"topic": "ice cream"}, config=config, stream_mode="updates"):
    print(update)

# Final accumulated state for this thread.
print(graph.get_state(config).values)

# Walk the checkpoint history (most recent first) and print each snapshot id.
for snapshot in graph.get_state_history(config):
    checkpoint_id = snapshot.config['configurable']['checkpoint_id']
    print(f"存档点 ID:{checkpoint_id}")