# Standard library
import asyncio
import os
from operator import add
from typing import Annotated, TypedDict

# Third-party
from langchain.chat_models import init_chat_model
from langchain_core.messages import AnyMessage, HumanMessage
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.config import get_stream_writer
from langgraph.constants import START, END
from langgraph.graph import StateGraph
from langgraph.prebuilt import create_react_agent
llm =init_chat_model("deepseek-chat",model_provider="DeepSeek",base_url="https://api.deepseek.com1",api_key="sk-1074763df8f14b378cda21513e24f839")

nodes=["supervisor","travel","joke","other"]

class State(TypedDict):
    messages:Annotated[list[AnyMessage],add]
    type:str

def other_node(state:State):
    print(">>>other_node")
    writer=get_stream_writer()
    writer({"node":">>>>other_node"})

    return{"messages":[HumanMessage(content="我暂时无法回答这个问题")],"type":"other"}

def supervisor_node(state:State):
    print(">>>supervisor_node")
    writer=get_stream_writer()
    writer({"node":">>>>supervisor_node"})
    #根据用户问题，对问题进行分类。分类结果保存到type中
    prompt="""
    你是一个专业的客服助手，你的任务是对用户问题进行分类，并将任务分给其他Agent执行。
        如果用户问题是和旅游路线相关的，那就返回 travel 。
        如果用户问题是和笑话相关的，那就返回 joke 。
        如果用户问题是和对联相关的，那就返回 couplet 。
        如果是其他问题，那就返回 other 。
        除了这几个选项外不要返回任何内容。
    """
    prompts=[
        {"role":"system","content":prompt},
        {"role":"user","content":state["messages"][0]}
    ]
    #如果有type，说明问题交给其他节点处理完成后返回了，可以直接返回
    if "type" in state:
        writer({"supervisor_step",f"已获得{state['type']}智能体返回结果"})
        return {"type":END}
    else:
        response=llm.invoke(prompts)
        typeRes=response.content
        writer({"supervisor_step",f"问题分类结果为{typeRes}"})
        if typeRes in nodes:
            return {"type":typeRes}#如果大模型给的结果不对，直接抛出异常
        else:
            raise ValueError(f"大模型给的结果不在节点列表中")

def travel_node(state:State):
    print(">>>travel_node")
    writer=get_stream_writer()
    writer({"node":">>>>travel_node"})
    systemPrompt="你是一个专业的旅行规划助手，根据用户问题生成旅游路径规划，请用中文回答，并返回一个不超过100字的结果"
    prompts=[
        {"role":"system","content":systemPrompt},
        {"role":"user","content":state["messages"][0]}
    ]
    #高德MCP配置信息
    client=MultiServerMCPClient(
        {
            "amap-maps": {
                "command": "npx",
                "args": [
                    "-y",
                    "@amap/amap-maps-mcp-server"
                ],
                "env": {
                    "AMAP_MAPS_API_KEY": "ae77f4ecc3aa387ca59c8b1d08faa796"
                },
                "transport":"stdio"
            }
        }
    )
    tools=asyncio.run(client.get_tools())#异步运行此地的异步方法，从而异步方法同步实现
    agent=create_react_agent(
        model=llm,
        tools=tools
    )
    response=asyncio.run(agent.ainvoke({
        "messages":prompts
    }))
    writer({"travel_result":response["messages"][-1].content})
    return{"messages":[HumanMessage(content=response["messages"][-1].content)],"type":"travel"}

def joke_node(state:State):
    print(">>>joke_node")
    writer=get_stream_writer()
    writer({"node":">>>>joke_node"})

    systemPrompt="你是一个笑话大师，根据用户问题写一个不超过100字的笑话。"
    prompts=[
        {"role":"system","content":systemPrompt},
        {"role":"user","content":state["messages"][0]}
    ]
    response=llm.invoke(prompts)
    writer({"joke_result":response.content})
    return{"messages":[HumanMessage(content=response.content)],"type":"joke"}




#条件路由
def routing_func(state:State):
    if state["type"]=="travel":
        return"travel_node"
    elif state["type"]=="joke":
        return"joke_node"
    elif state["type"]==END:
        return END
    else:
        return"other_node"

#构建图
builder=StateGraph(State)
#添加node
builder.add_node("supervisor_node",supervisor_node)
builder.add_node("travel_node",travel_node)
builder.add_node("joke_node",joke_node)
builder.add_node("other_node",other_node)
#添加edge
builder.add_edge(START,"supervisor_node")
builder.add_conditional_edges("supervisor_node",routing_func,["travel_node","joke_node","other_node",END])
builder.add_edge("travel_node","supervisor_node")
builder.add_edge("joke_node","supervisor_node")
builder.add_edge("other_node","supervisor_node")

#构建Graph
checkpointer=InMemorySaver()
graph=builder.compile(checkpointer=checkpointer)

#执行任务的测试代码
if __name__=="__main__":
    config={
        "configurable":{
            "thread_id":"1"
        }
    }
    for chunk in graph.stream({"messages":["规划一条从长沙到北京的旅游路线"]}
                 ,config
                 ,stream_mode="custom"):
        print(chunk)
    # res=graph.invoke({"messages":["今天天气怎么样"]}
    #                  ,config
    #                   ,stream_mode="values")
    # print(res["messages"][-1].content)
