import uuid
from typing import Any, Annotated
from typing_extensions import TypedDict, NotRequired
from langgraph.graph import StateGraph, START, END
# from langchain.chat_models import init_chat_model
from langgraph.checkpoint.memory import InMemorySaver
from langchain_core.runnables import RunnableConfig
from langgraph.runtime import Runtime
from dataclasses import dataclass
from langchain_core.tools import tool
from langgraph.runtime import get_runtime
from langgraph.prebuilt import ToolNode
from langchain_core.messages import AnyMessage
from langgraph.graph import add_messages

@tool
def add(a: int, b: int) -> int:
    """计算两个数字相加的和"""
    runtime = get_runtime(ContextSchema)
    user_name = runtime.context["user_name"]
    print(f"invoke tool for {user_name}")
    return a + b

@dataclass
class ContextSchema:
    user_name: str

class State(TypedDict):
    topic: NotRequired[str]
    joke: NotRequired[str]
    tool_calls: Any
    messages: Annotated[list[AnyMessage], add_messages]


# llm = init_chat_model(
#     "anthropic:claude-3-7-sonnet-latest",
#     temperature=0,
# )
from langchain_ollama import ChatOllama
llm = ChatOllama(model="qwen3:8b", temperature=0.5, reasoning=False)

tool_node = ToolNode([add])
model_with_tools = llm.bind_tools([add])

def generate_topic(state: State, config: RunnableConfig, runtime: Runtime[ContextSchema]):
    """通过调用大语言模型(LLM)来生成一个适合作为笑话的主题"""
    # print(f"{runtime.context}")
    user_name = runtime.context["user_name"]
    print(f"Generating topic for {user_name}")
    # for key in config.keys():
    #     print(f"- {key}")
    # msg = llm.invoke("给我一个20字以内的、有趣的笑话主题")
    msg = llm.invoke("生成一首10以内的数学加法题，格式为：a+b=?，其中a和b都是小于10的整数。")
    return {"topic": msg.content, "messages": [msg]}


def write_joke(state: State, runtime: Runtime[ContextSchema]):
    """调用大语言模型(LLM)根据主题编写笑话"""
    user_name = runtime.context["user_name"]
    print(f"Generating joke for {user_name}")
    messages = state["messages"]
    last_message = messages[-1]
    # print(f"last message: {last_message.content}, {last_message.type}")
    calculate_result = ""
    if last_message.type == "tool":
        calculate_result = f"计算结果为：{last_message.content}。"
    # msg = llm.invoke(f"用30个字以内写一个关于{state['topic']}的简短笑话")
    msg = model_with_tools.invoke(f"计算两个数字的和，题目是：{state['topic']}。{calculate_result} 如果已经得到结果，则跳过计算过程直接总结并输出计算结果。")
    # print(f"msg:{msg}")
    return {"joke": msg.content, "messages": [msg]}

def should_continue(state: State):
    messages = state["messages"]
    last_message = messages[-1] # 获取列表最后一个元素的写法
    if last_message.tool_calls:
    # if state["tool_calls"]:
        return "tools"
    return END

# Build workflow
workflow = StateGraph(State)

# Add nodes
workflow.add_node("generate_topic", generate_topic)
workflow.add_node("write_joke", write_joke)
workflow.add_node("tools", tool_node)

# Add edges to connect nodes
workflow.add_edge(START, "generate_topic")
workflow.add_edge("generate_topic", "write_joke")
workflow.add_conditional_edges("write_joke", should_continue, ["tools", END])
workflow.add_edge("tools", "write_joke")

# workflow.add_edge("generate_topic", "write_joke")
# workflow.add_edge("write_joke", END)

# Compile
checkpointer = InMemorySaver()
graph = workflow.compile(checkpointer=checkpointer)
graph

# Run workflow and print results
config = {
    "configurable": {
        "thread_id": uuid.uuid4(),
    }
}
state = graph.invoke(input={}, config=config, context={"user_name": "John Smith"})

print("--- topic output ---")
# print(state)
print(state["topic"])
print(state["messages"][-1].content)
print("--- Joke output ---")
# print(state["joke"])

# Get history of states for the run
print("--- graph state history ---")
states = list(graph.get_state_history(config))
for state in states:
    # print(state.next, state.config["configurable"]["checkpoint_id"], state.values)
    print(state.next, state.config["configurable"]["checkpoint_id"])

# This is the state before last (states are listed in chronological order)
print("--- selected state ---")
selected_state = states[1]
# print(selected_state.next, selected_state.values)
print(selected_state.next, selected_state.config["configurable"]["checkpoint_id"])

# Update state with new values and re-run the workflow from that point onwards
# new_config = graph.update_state(selected_state.config, values={"topic": "为周末总在下雨"})
new_config = graph.update_state(selected_state.config, values={"topic": "9+8=?"})
print("--- updated state ---")
print(new_config)

# Re-run the workflow from the selected state onwards
result = graph.invoke(input=None, config=new_config, context={"user_name": "hhwang"})
print("--- updated state output ---")
# print(result)
print(result["messages"][-1].content)
