from typing import TypedDict
from langgraph.graph import StateGraph, START, END
from langchain_core.runnables import RunnableConfig
import json
from uuid import UUID
import uuid
from langgraph.runtime import Runtime
from langgraph.typing import ContextT

class State(TypedDict):
  """Shared graph state flowing between the two nodes."""
  # Subject of the joke; refine_topic appends " and cats" to it.
  topic: str
  # Generated one-liner; written by generate_joke.
  joke: str

def refine_topic(state: State, config: RunnableConfig):
    """Refine the topic by appending " and cats".

    Prints the top-level keys of the per-invocation RunnableConfig for
    debugging, then returns a partial state update with the new topic.

    Args:
        state: Current graph state; must contain "topic".
        config: Runtime configuration injected by LangGraph for this run.

    Returns:
        A state update dict with the refined "topic".
    """
    # RunnableConfig is a mapping; iterating it yields its keys directly.
    for key in config:
        print(f"- {key}")
    return {"topic": state["topic"] + " and cats"}


def generate_joke(state: State, runtime: Runtime[ContextT]):
    """Generate a joke about the current topic.

    Reads the optional "r" value from the runtime context (the dict passed
    as ``context=`` to ``graph.stream``) and returns a state update with
    the joke text.

    Args:
        state: Current graph state; must contain "topic".
        runtime: LangGraph runtime wrapper exposing the invocation context.

    Returns:
        A state update dict with the generated "joke".
    """
    # Fixed: the original looped over `config.keys()`, but no `config`
    # parameter exists in this node (its parameter is `runtime`), which
    # raised NameError when the node ran. The broken loop is removed.
    print(runtime)
    # NOTE(review): assumes runtime.context is a mapping (a plain dict is
    # passed via graph.stream(context=...)); confirm against ContextT.
    r = runtime.context.get("r", 1.0)
    print(f"r: {r}")
    return {"joke": f"This is a joke about {state['topic']}"}

# Assemble the two-node pipeline: START -> refine_topic -> generate_joke -> END.
builder = StateGraph(State)
builder.add_node(refine_topic)
builder.add_node(generate_joke)
builder.add_edge(START, "refine_topic")
builder.add_edge("refine_topic", "generate_joke")
builder.add_edge("generate_joke", END)
graph = builder.compile()


# run_id must be an actual UUID instance (not a string) at the top level of
# the config; values under "configurable" would be user-defined instead.
config = {"run_id": uuid.uuid4()}
print(config)

# stream_mode="updates" yields, after each step, only the state delta each
# node returned, keyed by the node name. Use stream_mode="values" to receive
# the full accumulated graph state after each step instead.
for chunk in graph.stream(
    {"topic": "ice cream"},
    config=config,
    context={"r": 3.0},  # read by generate_joke via runtime.context
    stream_mode="updates",
):
    print("-----------------------")
    print(chunk)