import json
import asyncio
import os
from pathlib import Path

from dotenv import load_dotenv
from langchain_deepseek import ChatDeepSeek
from langchain_mcp import MCPAdapter      # 负责把 MCP 服务转成工具
from langchain.agents import initialize_agent, AgentType
from langchain.chat_models import ChatOpenAI   # 任意 LLM 均可

CONFIG_FILE = Path("weather_mcp_config.json")    # JSON config describing the MCP servers (put your server JSON snippet here)

async def main():
    """Connect to the MCP weather server, expose its tools to a
    DeepSeek-backed LangChain agent, and ask one sample question.

    Reads server settings from ``CONFIG_FILE`` and the DeepSeek API key
    from the environment (via ``.env``).

    Raises:
        RuntimeError: if ``DEEPSEEK_API_KEY`` is not set.
        KeyError: if the config JSON lacks ``mcpServers.weather``.
    """
    # 1. Read the MCP server configuration.
    cfg = json.loads(CONFIG_FILE.read_text(encoding="utf-8"))
    weather_server_cfg = cfg["mcpServers"]["weather"]

    # Load .env and validate the credential *before* opening the SSE
    # connection, so a missing key fails fast with a clear message
    # instead of an opaque downstream auth error.
    load_dotenv(override=True)
    deepseek_api_key = os.getenv("DEEPSEEK_API_KEY")
    if not deepseek_api_key:
        raise RuntimeError("DEEPSEEK_API_KEY is not set (check your .env file)")

    # 2. Open the SSE connection and fetch the tool list.
    async with MCPAdapter(
        server_params={
            "transport": weather_server_cfg["transport"],   # e.g. "sse"
            "url": weather_server_cfg["url"].strip(),       # e.g. "http://127.0.0.1:8000/sse"
        }
    ) as adapter:
        # Tools arrive already wrapped as LangChain Tool objects.
        tools = await adapter.aload_tools()

        # 3. Initialize the DeepSeek chat model.
        pop_llm = ChatDeepSeek(
            model="deepseek-chat",
            temperature=0,
            max_tokens=None,
            timeout=None,
            max_retries=2,
            api_key=deepseek_api_key,
        )
        # Attach the MCP tools to a structured-chat ReAct agent.
        agent = initialize_agent(
            tools=tools,
            llm=pop_llm,
            agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
        )

        # 4. Ask a sample question ("What's the weather in Beijing right now?").
        ans = await agent.arun("请问北京现在天气怎么样？")
        print(ans)

if __name__ == "__main__":
    # Script entry point: drive the async workflow to completion.
    asyncio.run(main())