from langdev_helper.llm.lcex import llm_lcex as llm
from tool import tools, sense_tools, CmdNode, BasicToolNode, cmd_condition, State, route_tools


from typing import Annotated
from typing_extensions import TypedDict

from langgraph.graph import StateGraph, START, END
from langgraph.prebuilt import ToolNode, tools_condition
from langgraph.checkpoint.memory import MemorySaver

# Node-name constants used when registering graph nodes and edges below.
CMD = 'CMD'
AGENT = 'AGENT'

# Graph builder over the project-defined State schema (imported from tool).
graph_builder = StateGraph(State)
# LLM with the full tool set bound, so the model can emit tool calls.
llm_with_tools = llm.bind_tools(tools)

def chatbot(state: State):
    """Invoke the tool-bound LLM on the conversation so far.

    Returns a partial state update whose ``messages`` list holds the
    model's single reply (appended via the graph's messages channel —
    add-messages semantics assumed; confirm in ``State``'s definition).
    """
    reply = llm_with_tools.invoke(state["messages"])
    return {"messages": [reply]}
# ---- Nodes -------------------------------------------------------------
graph_builder.add_node(AGENT, chatbot)
graph_builder.add_node(CMD, CmdNode())

# Custom tool-executing nodes (project-local BasicToolNode, not the
# prebuilt ToolNode — see the commented alternative below).
tool_node = BasicToolNode(tools=tools)
# NOTE(review): sense_tool_node is constructed but only used by the
# commented-out wiring below — confirm whether it is staged work or dead.
sense_tool_node = BasicToolNode(tools=sense_tools)
# tool_node = ToolNode(tools=tools)
graph_builder.add_node("tools", tool_node)
# graph_builder.add_node("sense_tools", sense_tool_node)

# ---- Edges -------------------------------------------------------------
# After AGENT: prebuilt tools_condition routes to "tools" when the last
# message contains tool calls, otherwise to END.
graph_builder.add_conditional_edges(AGENT, tools_condition)
# Experimental alternative routing via route_tools (currently disabled):
# graph_builder.add_conditional_edges(
#     AGENT, route_tools, ["tools", "sense_tools", END]
# )
# graph_builder.add_conditional_edges(AGENT, sense_tools_condition)

# Tool results loop back to the agent for the next LLM turn.
graph_builder.add_edge("tools", AGENT)
# graph_builder.add_edge("sense_tools", AGENT)
# Entry point: every run starts at the CMD node.
graph_builder.add_edge(START, CMD)

# cmd_condition (project-defined) decides where CMD hands off next.
graph_builder.add_conditional_edges(CMD, cmd_condition)
# graph_builder.add_edge(CMD, "chatbot")

# ---- Compile -----------------------------------------------------------
# In-memory checkpointer: conversation state persists per thread_id for
# the life of the process only.
memory = MemorySaver()
graph = graph_builder.compile(
    checkpointer=memory,
    # interrupt_before=["tools"],  # enable for human-in-the-loop before tool runs
)