import os
import asyncio
import logging

from agents import (
    Agent,
    Model,
    ModelProvider,
    OpenAIChatCompletionsModel,
    ModelSettings,
    RunConfig,
    Runner,
    function_tool,
    set_tracing_disabled,
)
from agents.mcp.server import (
    MCPServerSse,
    MCPServerStdio,
)

from func_utils import (
    decode_json,
    get_current_time,
    search_google,
    search_searxng,
    fetch_url,
)

from custom_providers import CustomProvider
from dotenv import load_dotenv
load_dotenv()


# Alternative providers, kept for quick switching during development:
# llm_provider = CustomProvider('ollama')
# llm_provider.set_base_url('http://192.168.0.10:11434/v1')
# llm_provider.set_model('qwen2.5:7b')

# llm_provider = CustomProvider('deepseek')
# llm_provider.set_model('deepseek-reasoner')

# Active provider: Alibaba DashScope (OpenAI-compatible endpoint).
llm_provider = CustomProvider('test')
llm_provider.set_base_url('https://dashscope.aliyuncs.com/compatible-mode/v1')
# llm_provider.set_model('qwen-turbo')
# llm_provider.set_model('qwen-plus-latest')
llm_provider.set_model('qwen-max-latest')
# SECURITY: never hard-code API keys in source (the previous key was committed
# here and must be rotated). load_dotenv() above populates os.environ from a
# local .env file, so read the key from the environment instead.
llm_provider.set_api_key(os.getenv('DASHSCOPE_API_KEY', ''))


# Create the agent that answers user requests.
# NOTE(review): the concrete model is supplied at run time via
# RunConfig(model_provider=...) in run_agent(), which is presumably why the
# explicit model= line below is commented out — confirm before re-enabling it.
news_crawler_agent = Agent(
    name="NewsCrawlerAgent",
    # model=llm_provider.get_model(),
    model_settings=ModelSettings(temperature=0),  # temperature 0 for deterministic replies
    instructions=(
        "你是一个AI助手"
    ),
    # tools=[get_current_time,search_searxng,fetch_url]
)


async def run_agent():
    """Run news_crawler_agent once with a sample prompt and print the result.

    The model is resolved at run time through RunConfig(model_provider=...),
    using the module-level llm_provider configured above.
    """
    run_result = await Runner.run(
        starting_agent=news_crawler_agent,
        input="讲个笑话",
        run_config=RunConfig(model_provider=llm_provider),
    )
    print(run_result)


# Script entry point. Guarded so importing this module (e.g. from tests or
# other scripts) does not immediately start the event loop and call the LLM.
if __name__ == "__main__":
    asyncio.run(run_agent())






