import os

from agents import set_default_openai_client, OpenAIChatCompletionsModel, Agent, Runner, set_tracing_disabled
from agents.mcp import MCPServerStdio
from dotenv import load_dotenv
from openai import AsyncOpenAI
import asyncio

# NOTE(review): the original author marked this script "failed" (失败)
load_dotenv(override=True)

# Mirror the custom LLM_* env vars into the names the OpenAI SDK reads.
# NOTE(review): os.getenv returns None when a variable is unset, and assigning
# None into os.environ raises TypeError — confirm .env always defines
# LLM_API_KEY and LLM_BASE_URL before this runs.
os.environ["OPENAI_API_KEY"] = os.getenv("LLM_API_KEY")
os.environ["OPENAI_BASE_URL"] = os.getenv("LLM_BASE_URL")

# Optional: route traffic through a local proxy
# os.environ['HTTP_PROXY'] = 'http://127.0.0.1:10809'
# os.environ['HTTPS_PROXY'] = 'http://127.0.0.1:10809'
# Agent wiring: async OpenAI-compatible client pointed at the custom base URL
client = AsyncOpenAI(api_key=os.getenv("LLM_API_KEY"), base_url=os.getenv("LLM_BASE_URL"))
set_default_openai_client(client)
set_tracing_disabled(True)  # don't send traces to the OpenAI platform
# Chat-completions model wrapper; the model id comes from the LLM_MODEL env var
deepseek_model = OpenAIChatCompletionsModel(
    openai_client=client,
    model=os.getenv("LLM_MODEL")
)

async def run(weatherMCPServer: MCPServerStdio):
    """Build an agent wired to the given MCP server and run a single query.

    Args:
        weatherMCPServer: an already-started MCPServerStdio whose tools the
            agent is allowed to call.

    Side effects:
        Prints the query and the agent's final output to stdout.
    """
    # NOTE(review): the agent name says "Weather" and the instructions mention
    # a git repository, while the actual query targets GitHub repos — the
    # labels are kept byte-for-byte to preserve behavior, but should be
    # reconciled by the author.
    agent = Agent(
        name="Weather Agent",
        # Plain string literal: the original used an f-string with no
        # placeholders, which is a no-op and misleading.
        instructions="Answer questions about the git repository",
        model=deepseek_model,
        mcp_servers=[weatherMCPServer],
    )
    message = "帮我搜索一下，我在GitHub中有哪些仓库，我在github中的账户名是：zhaoganglxh"
    print("Running:", message)
    result = await Runner.run(agent, message)
    print(result.final_output)
async def mcp_run():
    """Launch the GitHub MCP server as a subprocess and run the agent on it.

    Starts ``@modelcontextprotocol/server-github`` via ``cmd /c npx`` (Windows
    launch shape) and tears the server down when the async context exits.
    """
    # SECURITY FIX: a GitHub personal access token was hard-coded here in
    # source control. Read it from the environment instead; the leaked token
    # ("ghp_JOPp...") must be revoked on GitHub regardless.
    github_token = os.getenv("GITHUB_PERSONAL_ACCESS_TOKEN", "")
    async with MCPServerStdio(
        # NOTE(review): label says "Weather Server" but this serves GitHub
        # tools — kept as-is to preserve behavior.
        name="Weather Server",
        cache_tools_list=True,
        params={
            "command": "cmd",
            "args": ["/c", "npx", "-y", "@modelcontextprotocol/server-github"],
            "env": {
                "GITHUB_PERSONAL_ACCESS_TOKEN": github_token,
            },
        },
        client_session_timeout_seconds=120,
    ) as server:
        await run(server)

if __name__ == "__main__":
    # Script entry point: drive the whole async workflow on a fresh event loop.
    asyncio.run(mcp_run())