import os

from agents import set_default_openai_client, OpenAIChatCompletionsModel, Agent, Runner, set_tracing_disabled
from agents.mcp import MCPServerStdio
from dotenv import load_dotenv
from openai import AsyncOpenAI
import asyncio

# Load environment variables from .env, letting the file override any values
# already present in the process environment.
load_dotenv(override=True)

# os.environ values must be strings; fail fast with a clear message when the
# required variables are missing instead of raising an opaque TypeError on
# the assignment below.
_api_key = os.getenv("LLM_API_KEY")
_base_url = os.getenv("LLM_BASE_URL")
if not _api_key or not _base_url:
    raise RuntimeError("LLM_API_KEY and LLM_BASE_URL must be set (e.g. in .env)")

# Mirror the credentials into the variables the OpenAI SDK reads by default.
os.environ["OPENAI_API_KEY"] = _api_key
os.environ["OPENAI_BASE_URL"] = _base_url

# Agent/client setup: route all Agents-SDK traffic through the custom
# OpenAI-compatible endpoint and disable tracing (no tracing backend is
# configured for this endpoint).
client = AsyncOpenAI(api_key=_api_key, base_url=_base_url)
set_default_openai_client(client)
set_tracing_disabled(True)

# Chat-completions model wrapper for the configured LLM (e.g. DeepSeek).
# NOTE(review): LLM_MODEL is not validated — if unset, model is None; confirm
# the variable is always present in .env.
deepseek_model = OpenAIChatCompletionsModel(
    openai_client=client,
    model=os.getenv("LLM_MODEL"),
)

async def run(weatherMCPServer: MCPServerStdio):
    """Build a weather agent backed by the given MCP server and ask it one question.

    Args:
        weatherMCPServer: An already-connected stdio MCP server exposing
            weather tools; its tools are made available to the agent.
    """
    # NOTE: the original passed trace=True to Agent, but Agent is a dataclass
    # with no such field (the kwarg raises TypeError), and tracing is globally
    # disabled via set_tracing_disabled(True) anyway — so it is removed here.
    agent = Agent(
        name="Weather Agent",
        instructions="""你是一个天气助手，你可以回答用户的天气相关问题。""",
        model=deepseek_model,
        mcp_servers=[weatherMCPServer],
    )
    message = "请查询北京的今日天气"
    print("Running:", message)
    result = await Runner.run(agent, message)
    print(result.final_output)
async def mcp_run():
    """Launch the weather MCP server over stdio and run the agent against it.

    The server script path can be overridden with the WEATHER_SERVER_PATH
    environment variable; it defaults to the original hard-coded location so
    existing setups keep working.
    """
    # NOTE(review): the default filename is spelled "weahter_server.py" — it
    # presumably matches the actual file on disk; confirm before renaming.
    server_script = os.getenv(
        "WEATHER_SERVER_PATH",
        "D:/sources/AI Program/MCPProjects/JiuTian/WeatherInfo/weather_server/src/weahter_server.py",
    )
    async with MCPServerStdio(
        name="Weather Server",
        cache_tools_list=True,  # tool list is static; avoid re-fetching on every agent turn
        params={"command": "uv", "args": ["run", server_script]},
    ) as server:
        await run(server)

if __name__ == "__main__":
    # Script entry point: start the MCP server and run the weather agent once.
    asyncio.run(mcp_run())