import os
from typing import Literal

from typing_extensions import TypedDict

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_openai import ChatOpenAI
from langgraph.graph import END, START, MessagesState, StateGraph

# NOTE(security): never commit real API keys to source control — this key is
# exposed in the repository history and should be revoked/rotated. The
# environment variables take precedence; the inline literals are kept only as
# a backward-compatible fallback so the script keeps running unchanged.
api_key = os.environ.get("OPENAI_API_KEY", "sk-6S0PtpNia71gjcfwSsDPsJ9mGqsVPr2XRQzAx1dHbJS7RW4t")
api_base = os.environ.get("OPENAI_API_BASE", "https://chatapi.littlewheat.com/v1")

# Shared chat model for all nodes; temperature=0 for deterministic routing.
llm = ChatOpenAI(model="gpt-4o", api_key=api_key, base_url=api_base, temperature=0)

class AgentState(MessagesState):
    """Graph state: the inherited `messages` list plus the supervisor's routing choice."""
    next:str  # name of the next node to visit (one of the workers, or END when finished)

# Worker node names the supervisor can delegate to.
members = ["chat","coder","sqler"]
# Routing choices offered to the LLM: every worker plus the FINISH sentinel.
# (Debug `print(options)` removed — it ran as an import-time side effect.)
options = members + ["FINISH"]

class Router(TypedDict):
    """Structured-output schema for the supervisor's routing decision."""

    # Spelled out explicitly instead of `Literal[*options]`: Literal requires
    # literal values for static type checkers, and the unpacking form only
    # parses on Python 3.11+. Keep in sync with `members` above.
    next: Literal["chat", "coder", "sqler", "FINISH"]

def supervisor(state: AgentState):
    """Ask the LLM which worker should act next.

    Returns ``{"next": <worker name>}`` for one of the workers in ``members``,
    or ``{"next": END}`` once the model answers FINISH.
    """
    system_prompt = (
        "You are a supervisor tasked with managing a conversation between the"
        f" following workers: {members}.\n\n"
        "Each worker has a specific role:\n"
        "- chat: Responds directly to user inputs using natural language.\n"
        "- coder: Activated for tasks that require mathematical calculations or specific coding needs.\n"
        "- sqler: Used when database queries or explicit SQL generation is needed.\n\n"
        "Given the following user request, respond with the worker to act next."
        " Each worker will perform a task and respond with their results and status."
        " When finished, respond with FINISH."
    )
    # Prepend the routing instructions to the running conversation.
    conversation = [{"role": "system", "content": system_prompt}, *state["messages"]]

    decision = llm.with_structured_output(Router).invoke(conversation)
    chosen = decision["next"]
    # Map the FINISH sentinel onto the graph's END node; otherwise route as-is.
    return {"next": END if chosen == "FINISH" else chosen}

def chat(state: AgentState):
    """Worker node: answer the latest message in plain natural language."""
    latest = state["messages"][-1]
    reply = llm.invoke(latest.content)
    # Tag the reply with the worker's name so the supervisor can attribute it.
    return {"messages": [HumanMessage(content=reply.content, name="chat")]}

def coder(state: AgentState):
    """Worker node: handle math and coding requests via the shared LLM."""
    latest = state["messages"][-1]
    reply = llm.invoke(latest.content)
    # Tag the reply with the worker's name so the supervisor can attribute it.
    return {"messages": [HumanMessage(content=reply.content, name="coder")]}

def sqler(state: AgentState):
    """Worker node: handle database / SQL-generation requests via the shared LLM."""
    latest = state["messages"][-1]
    reply = llm.invoke(latest.content)
    # Tag the reply with the worker's name so the supervisor can attribute it.
    return {"messages": [HumanMessage(content=reply.content, name="sqler")]}

# Assemble the supervisor/worker graph.
builder = StateGraph(AgentState)
builder.add_node("supervisor", supervisor)

worker_nodes = {"chat": chat, "coder": coder, "sqler": sqler}
for node_name, node_fn in worker_nodes.items():
    builder.add_node(node_name, node_fn)
    # Every worker reports back to the supervisor once its task is done.
    builder.add_edge(node_name, "supervisor")

# The supervisor chooses the next hop dynamically via state["next"]
# (a worker name, or END once it decided to finish).
builder.add_conditional_edges("supervisor", lambda state: state["next"])
builder.add_edge(START, "supervisor")

graph = builder.compile()

# Demo run: stream the full state snapshot after each node executes.
for chunk in graph.stream({"messages": "什么是A2A"}, stream_mode="values"):
    print(chunk)
