import os

from agents import set_default_openai_client, OpenAIChatCompletionsModel, Agent, Runner, set_tracing_disabled
from agents.mcp import MCPServerStdio
from dotenv import load_dotenv
from openai import AsyncOpenAI
import asyncio

# --- Environment & client setup ---
# Load variables from .env, overriding any pre-existing process env vars.
load_dotenv(override=True)

# Fail fast with a clear message if required configuration is missing.
# The original assigned os.getenv(...) straight into os.environ, which
# raises an opaque TypeError when the variable is absent (value is None).
_api_key = os.getenv("LLM_API_KEY")
_base_url = os.getenv("LLM_BASE_URL")
if not _api_key or not _base_url:
    raise RuntimeError("LLM_API_KEY and LLM_BASE_URL must be set (e.g. in .env)")

# Mirror the credentials into the variable names the OpenAI SDK reads by default.
os.environ["OPENAI_API_KEY"] = _api_key
os.environ["OPENAI_BASE_URL"] = _base_url

# Optional: route traffic through a local proxy.
# os.environ['HTTP_PROXY'] = 'http://127.0.0.1:10809'
# os.environ['HTTPS_PROXY'] = 'http://127.0.0.1:10809'

# Agent plumbing: point the SDK at the configured OpenAI-compatible backend
# and disable tracing (no telemetry export for this demo script).
client = AsyncOpenAI(api_key=_api_key, base_url=_base_url)
set_default_openai_client(client)
set_tracing_disabled(True)
deepseek_model = OpenAIChatCompletionsModel(
    openai_client=client,
    model=os.getenv("LLM_MODEL")
)

async def run(baiduMapServer: MCPServerStdio, user_input: str) -> None:
    """Run a one-shot search agent against the given MCP server and print the result.

    Args:
        baiduMapServer: an already-started MCP server providing the search tools.
            NOTE(review): despite the name, the caller passes the Tavily server.
        user_input: the user's search request, forwarded verbatim to the agent.
    """
    # Prompt bug fix: the original said "以免搜索到更完整的相关内容"
    # ("to AVOID finding more complete content"), the opposite of the stated
    # query-expansion goal; "以便" ("so as to") matches the intent.
    # Also dropped the pointless f-prefix (the string has no placeholders).
    agent = Agent(
        name="Tavily Search Agent",
        instructions="你是一个搜索助手，你会根据用户的搜索要求对搜索词进行扩展，以便搜索到更完整的相关内容。",
        model=deepseek_model,
        mcp_servers=[baiduMapServer],
    )
    print("Running:", user_input)
    result = await Runner.run(agent, user_input)
    print(result.final_output)
async def mcp_run() -> None:
    """Launch the Tavily MCP server over stdio and run one demo search query.

    The server process is started via `npx` (Windows `cmd /c` invocation) and
    torn down automatically when the async context manager exits.
    """
    # Security fix: the Tavily API key was hard-coded in source here.
    # Read it from the environment (e.g. .env) so the secret never lives
    # in version control.
    tavily_key = os.getenv("TAVILY_API_KEY")
    if not tavily_key:
        raise RuntimeError("TAVILY_API_KEY must be set in the environment")
    async with MCPServerStdio(
        name="Tavily Search Server",
        cache_tools_list=True,  # tool list is static for this server; skip re-fetching
        client_session_timeout_seconds=60,
        params={
            "command": "cmd",
            "args": ["/c", "npx", "-y", "tavily-mcp@0.1.4"],
            "env": {
                "TAVILY_API_KEY": tavily_key,
            },
        },
    ) as server:
        await run(server, "帮我搜索一下最近的多智能体项目")

def _main() -> None:
    """Script entry point: drive the async MCP demo to completion."""
    asyncio.run(mcp_run())


if __name__ == "__main__":
    _main()