import json
import asyncio
import os
from pathlib import Path

from dotenv import load_dotenv
from langchain_deepseek import ChatDeepSeek
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain.agents import initialize_agent, AgentType
from langchain_openai import ChatOpenAI   # 或任意 LLM

# Path to the JSON file that lists the MCP servers to connect to
# (expected shape: {"mcpServers": {...}} — see usage in main()).
CONFIG_FILE = Path("weather_mcp_config.json")

async def main():
    """Connect to the configured MCP servers, wire their tools into a
    LangChain agent backed by DeepSeek, and run a sample weather query.

    Raises:
        RuntimeError: if DEEPSEEK_API_KEY is not available in the environment.
        FileNotFoundError / json.JSONDecodeError: if the config file is
            missing or malformed.
    """
    cfg = json.loads(CONFIG_FILE.read_text(encoding="utf-8"))

    # Load .env and validate credentials BEFORE opening any MCP connections,
    # so a missing key fails fast instead of surfacing later as an opaque
    # auth error inside ChatDeepSeek.
    load_dotenv(override=True)
    deepseek_api_key = os.getenv("DEEPSEEK_API_KEY")
    if not deepseek_api_key:
        raise RuntimeError(
            "DEEPSEEK_API_KEY is not set; add it to your environment or .env file"
        )

    # 1. Establish connections to the configured MCP servers.
    async with MultiServerMCPClient(cfg["mcpServers"]) as client:
        # Automatically converts MCP tools into LangChain Tool objects.
        tools = await client.get_tools()

        # 2. Initialize the DeepSeek chat model and attach the tools
        #    through a structured-chat ReAct agent.
        pop_llm = ChatDeepSeek(
            model="deepseek-chat",
            temperature=0,
            max_tokens=None,
            timeout=None,
            max_retries=2,
            api_key=deepseek_api_key,
        )
        agent = initialize_agent(
            tools=tools,
            llm=pop_llm,
            agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
        )

        # 3. Run a sample query ("What is the weather in Beijing right now?").
        ans = await agent.arun("北京现在天气如何？")
        print(ans)

# Script entry point: drive the async main() coroutine to completion.
if __name__ == "__main__":
    asyncio.run(main())