import os

from agents import set_default_openai_client, OpenAIChatCompletionsModel, Agent, Runner, ModelBehaviorError, \
    MessageOutputItem, ItemHelpers, HandoffOutputItem, ToolCallItem, ToolCallOutputItem, set_tracing_disabled
from agents.mcp import MCPServerStdio
from dotenv import load_dotenv
from openai import AsyncOpenAI
import asyncio

# Load configuration from .env, overriding any pre-existing env vars.
load_dotenv(override=True)

# Mirror the LLM_* settings into the names the OpenAI SDK reads by default.
# Fail fast with a clear message if either is missing — os.environ only
# accepts strings, so assigning the None from a missing var would otherwise
# raise a cryptic TypeError.
_api_key = os.getenv("LLM_API_KEY")
_base_url = os.getenv("LLM_BASE_URL")
if not _api_key or not _base_url:
    raise RuntimeError("LLM_API_KEY and LLM_BASE_URL must be set (e.g. in .env)")
os.environ["OPENAI_API_KEY"] = _api_key
os.environ["OPENAI_BASE_URL"] = _base_url

# Optional: route traffic through a local proxy.
# os.environ['HTTP_PROXY'] = 'http://127.0.0.1:10809'
# os.environ['HTTPS_PROXY'] = 'http://127.0.0.1:10809'

# Agent/model setup: point the SDK at an OpenAI-compatible endpoint and
# disable tracing (no platform key available for trace upload).
client = AsyncOpenAI(api_key=_api_key, base_url=_base_url)
set_default_openai_client(client)
set_tracing_disabled(True)
deepseek_model = OpenAIChatCompletionsModel(
    openai_client=client,
    model=os.getenv("LLM_MODEL"),
)

async def mcp_run():
    """Launch the AMap and Tavily MCP servers, then run the interactive chat.

    Both servers are spawned as child processes via ``npx`` through ``cmd /c``
    (i.e. this expects a Windows host) and are shut down automatically when
    the ``async with`` block exits.
    """
    # SECURITY: these API keys used to be hard-coded literals in source.
    # Prefer the environment; the old literals remain only as fallbacks so
    # existing setups keep working — rotate the leaked keys and remove the
    # defaults.
    amap_key = os.getenv("AMAP_MAPS_API_KEY", "f910a3dfb6a6a325e1637f16444a92ec")
    tavily_key = os.getenv("TAVILY_API_KEY", "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")

    async with (
        MCPServerStdio(
            name="AMap MCP Server",
            cache_tools_list=True,
            client_session_timeout_seconds=60,
            params={
                "command": "cmd",
                "args": ["/c", "npx", "-y", "@amap/amap-maps-mcp-server"],
                "env": {"AMAP_MAPS_API_KEY": amap_key},
            },
        ) as amap_server,
        MCPServerStdio(
            name="Tavily Search Server",
            cache_tools_list=True,
            client_session_timeout_seconds=60,
            params={
                "command": "cmd",
                "args": ["/c", "npx", "-y", "tavily-mcp@0.1.4"],
                "env": {"TAVILY_API_KEY": tavily_key},
            },
        ) as tavily_server,
    ):
        await chat_assistant([amap_server, tavily_server])

async def chat_assistant(mcpServers: list[MCPServerStdio]):
    """Run an interactive console chat loop against an MCP-tool-using agent.

    Args:
        mcpServers: already-started MCP servers whose tools the agent may call.

    The full conversation is carried between turns via ``result.to_input_list()``
    so every request sees prior context. Type ``exit`` or ``quit`` to leave.
    """
    agent = Agent(
        name="Multi MCP Agent",
        # Plain string literal — the previous f-string had no placeholders.
        instructions="你是一助人为乐的助手，你会根据用户需求调用相应的mcp工具解决问题",
        model=deepseek_model,
        mcp_servers=mcpServers,
    )
    input_items = []

    while True:
        user_input = input("💬 请输入你的消息：")
        if user_input.lower() in ["exit", "quit"]:
            print("✅ 对话已结束")
            break

        input_items.append({"content": user_input, "role": "user"})
        try:
            result = await Runner.run(agent, input_items)

            # Pretty-print every item the run produced, by item type.
            for new_item in result.new_items:
                agent_name = new_item.agent.name
                if isinstance(new_item, MessageOutputItem):
                    print(f"🧠 {agent_name}: {ItemHelpers.text_message_output(new_item)}")
                elif isinstance(new_item, HandoffOutputItem):
                    print(f"🔀 Handed off from {new_item.source_agent.name} to {new_item.target_agent.name}")
                elif isinstance(new_item, ToolCallItem):
                    print(f"🔧 {agent_name}: Calling a tool {new_item.raw_item.name}...")
                elif isinstance(new_item, ToolCallOutputItem):
                    print(f"📦 {agent_name}: Tool call output: {new_item.output}")
                else:
                    print(f"🤷 {agent_name}: Skipping item: {new_item.__class__.__name__}")

            # Replace local history with the run's canonical input list so the
            # next turn includes tool calls/outputs from this one.
            input_items = result.to_input_list()
        except Exception as e:
            # Top-level REPL boundary: report the error and keep the session
            # alive. NOTE(review): on failure the just-appended user message
            # remains in input_items and is re-sent next turn — confirm that
            # retry behavior is intended.
            print(f"❌ 发生错误：{e}")

# Script entry point: start the MCP servers and chat until the user exits.
if __name__ == "__main__":
    asyncio.run(mcp_run())