from typing import TypedDict, Annotated, Literal, Optional

from langchain_core.messages import AnyMessage
from langgraph.constants import END
from langgraph.graph import add_messages, StateGraph
from nl2sql import nl2sql_graph
from choice_api import choice_api_graph

from chat_utils import create_chat

# Shared LLM client used by the supervisor router and the fallback chat node.
llm = create_chat()

# Worker names the supervisor can route an incoming message to.
members = ["researcher", "chat"]
# options = members + ["None"]

class CyberState(TypedDict):
    """Shared state flowing through every node of the supervisor graph."""

    # Main conversation history; add_messages appends instead of overwriting.
    messages: Annotated[list[AnyMessage], add_messages]
    # Message channel consumed by the nl2sql subgraph.
    nl2sql_messages: Annotated[list[AnyMessage], add_messages]
    # Message channel consumed by the choice_api subgraph.
    api_messages: Annotated[list[AnyMessage], add_messages]
    # Message channel fed to the analysis node.
    analysis_messages: Annotated[list[AnyMessage], add_messages]
    # Analysis model name; when unset or None the analysis node is skipped
    # (see should_analysis).
    analysis_model: Optional[str]

class Router(TypedDict):
    """Worker to route to next. If no workers needed, route to FINISH."""

    # Structured-output field: constrained to one of `members`.
    # NOTE: `Literal[*members]` (star-unpacking inside a subscript)
    # requires Python 3.11+.
    next: Literal[*members]


def router(state: CyberState) -> str:
    """Supervisor node: ask the LLM which worker handles the latest message.

    Returns the chosen worker name (one of ``members``), used directly as
    the routing key by ``add_conditional_edges``.  The original annotated
    the return type as ``Router``, but the function actually returns the
    plain string ``response["next"]`` — the annotation is fixed to ``str``.
    """
    # Prompt (Chinese): "You are a supervisor; when the user sends a message,
    # pick the worker from `members` that best matches the request."
    # Runtime text — kept verbatim.
    system_prompt = (
        f"你是一个监督人,当用户发来一个信息,请从{members}的工作者中选出一个符合用户需求的worker"
    )
    messages = [
        {"role": "system", "content": system_prompt},
    ] + state["messages"]
    # Constrain the LLM output to the Router schema and return only the
    # `next` field (the routing key).
    response = llm.with_structured_output(Router).invoke(messages)
    return response["next"]

def should_analysis(state: CyberState):
    """Conditional edge: "analysis" when an analysis model is configured, else "END"."""
    return "analysis" if state.get("analysis_model") else "END"
# nodes
def entry_point(state: CyberState):
    """Entry node backing the "router" vertex: forwards the state unchanged."""
    return state

def nl2sql(state: CyberState):
    """Run the nl2sql subgraph over the main conversation and return its output."""
    return nl2sql_graph.invoke({"nl2sql_messages": state["messages"]})

def researcher(state: CyberState):
    """Fan-out marker node: returns state as-is; nl2sql and choice_api follow via edges."""
    return state

def choice_api(state: CyberState):
    """Run the choice_api subgraph over the main conversation and return its output."""
    return choice_api_graph.invoke({"api_messages": state["messages"]})

def analysis(state: CyberState):
    """Analysis node.

    NOTE(review): this invokes `choice_api_graph` — the same graph the
    choice_api node uses — but feeds it `analysis_messages`. Looks like a
    copy-paste; confirm whether a dedicated analysis graph was intended.
    """
    return choice_api_graph.invoke({"analysis_messages": state["messages"]})

def map_reduce(state: CyberState):
    """Join node where the parallel nl2sql/choice_api branches converge; no-op for now."""
    return state

def call_model(state: CyberState):
    """Fallback chat node: send the conversation to the LLM and append its reply."""
    reply = llm.invoke(state["messages"])
    return {"messages": [reply]}

# --- Graph wiring -----------------------------------------------------------
builder = StateGraph(CyberState)
builder.add_node("router", entry_point)
builder.add_node("researcher", researcher)
builder.add_node("nl2sql", nl2sql)
builder.add_node("choice_api", choice_api)
builder.add_node("analysis", analysis)
builder.add_node("map_reduce", map_reduce)
builder.add_node("chat", call_model)

builder.set_entry_point("router")
# The supervisor LLM picks the next worker; its string result is the key here.
builder.add_conditional_edges("router", router,
                              {"researcher": "researcher",
                               "chat": "chat"})
builder.add_edge("chat", END)
# researcher fans out to both subgraphs in parallel; they re-join at map_reduce.
builder.add_edge("researcher", "nl2sql")
builder.add_edge("researcher", "choice_api")
builder.add_edge("nl2sql", "map_reduce")
builder.add_edge("choice_api", "map_reduce")
# BUG FIX: the original also added an unconditional edge
# `builder.add_edge("map_reduce", "analysis")` alongside the conditional
# edges below, which forced the analysis node to run on every pass and made
# `should_analysis` pointless. Only the conditional edge now decides whether
# analysis runs (when `analysis_model` is set) or the graph ends.
builder.add_conditional_edges("map_reduce", should_analysis,
                              {"analysis": "analysis", "END": END})
builder.add_edge("analysis", END)

graph = builder.compile()

if __name__ == "__main__":
    # Example query: "Get yesterday's upstream and downstream inflow for
    # Sanjiang Chemical and Transfar Synthetic."  Guarded by __main__ so
    # merely importing this module no longer fires an LLM call as a side
    # effect.  Alternate test query (presumably routed to "chat"):
    # "我明天要去阿里，想买件衣服，有什么建议吗"
    response = graph.invoke(
        {"messages": [("human", "获取三江化工和传化合成的昨日上游来水和下游来水")]},
        {"recursion_limit": 10},
        # NOTE(review): `subgraphs=True` is a `stream()` option; forwarded
        # through `invoke()` the streamed chunks become (namespace, value)
        # tuples, so the aggregated return value may not be the plain state
        # dict — confirm this is intended before using `response`.
        subgraphs=True
    )
    # print(response)
    # for message in response["messages"]:
    #     print(f"{message.type.upper()}: {message.content}\n")

    # Streaming variant:
    # for chunk in graph.stream(
    #     {"messages": [("human", "获取三江化工和传化合成的昨日上游来水和下游来水")]},
    #     {"recursion_limit": 10},
    #     subgraphs=True):
    #     print(chunk)