import os
from dotenv import load_dotenv
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain.chat_models import init_chat_model
from langchain.agents import create_openai_tools_agent
from langchain.agents import AgentExecutor
from langchain import hub
import asyncio

# Langchain agent 接入Mcp Server作为tool, 需先安装pip3 install langchain_mcp_adapters
# 调用Mcp Server是一个异步过程，所以整体也必须封装为异步
# A LangChain agent using MCP Server tools. Requires: pip3 install langchain_mcp_adapters
# Calling an MCP server is asynchronous, so the whole flow must be wrapped as async.
async def test_agent_with_mcp():
    """Run an OpenAI-tools agent whose tools are discovered from an MCP server.

    Connects to a locally running MCP server (start mcp_server.py first),
    fetches its tools, builds a DeepSeek-backed agent with a prompt pulled
    from LangChain Hub, and executes one sample query, printing the output
    and the intermediate tool-call steps.
    """
    # Dict keyed by server name; multiple MCP servers may be configured at once.
    mcp_connection_config = {
        "my-mcp": {
            "url": "http://localhost:8080/mcp",
            "transport": "streamable_http",
        }
    }
    mcp_client = MultiServerMCPClient(mcp_connection_config)
    # Fetch the tools from every configured MCP server (async, so await).
    tools = await mcp_client.get_tools()
    print(f"!!! MCP available tools: {tools}")
    model = init_chat_model(model="deepseek-chat", model_provider="deepseek")
    # NOTE: no explicit model.bind_tools(tools) here — bind_tools returns a NEW
    # runnable (it does not mutate the model), and create_openai_tools_agent
    # below performs the binding itself, so a bare bind_tools call was a no-op.
    # Pull the agent prompt template from LangChain Hub.
    prompt = hub.pull("hwchase17/openai-tools-agent")
    agent = create_openai_tools_agent(model, tools, prompt)
    # verbose=True prints intermediate reasoning; return_intermediate_steps=True
    # exposes the tool-call trace in response['intermediate_steps'].
    agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, return_intermediate_steps=True)
    # The executor call is asynchronous as well — use ainvoke and await it.
    response = await agent_executor.ainvoke({"input":"成都和上海的天气怎么样，哪里更热，并将结果保存下来"})
    print(f"!!! Langchain agent with MCP tools, response:{response}, output:{response['output']}, \n intermediate_steps:{response['intermediate_steps']}")

if __name__ == '__main__':
    # Load .env, letting it override any pre-existing environment variables.
    load_dotenv(override=True)
    DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
    # Never print the raw secret: report only whether the key is present,
    # showing a masked suffix so a wrong/stale key is still diagnosable.
    if DEEPSEEK_API_KEY:
        print(f"DEEPSEEK_API_KEY loaded (****{DEEPSEEK_API_KEY[-4:]})")
    else:
        print("DEEPSEEK_API_KEY is not set; the DeepSeek model will fail to authenticate")

    asyncio.run(test_agent_with_mcp())









