from fastmcp import Client
import mcp
import json
from openai import OpenAI
from typing import List, Dict, Any
import pprint

from mia_app.infrastructure.external.llm_config import get_provider, get_qwen_api_key, get_qwen_model, get_qwen_base_url
def init_openai_client(api_key: str, base_url: str = "https://api.openai.com/v1"):
    """Create an OpenAI client instance.

    Args:
        api_key: API key for the OpenAI-compatible endpoint.
        base_url: Base URL of the API; defaults to the official OpenAI endpoint.

    Returns:
        A configured ``OpenAI`` client instance.
    """
    client_kwargs = {"api_key": api_key, "base_url": base_url}
    return OpenAI(**client_kwargs)

def get_llm():
    """Build an OpenAI-compatible client configured for the Qwen endpoint.

    Credentials and endpoint come from the project's LLM configuration
    helpers (``get_qwen_api_key`` / ``get_qwen_base_url``).
    """
    return init_openai_client(get_qwen_api_key(), get_qwen_base_url())

# Extract a function call from the user's query via the LLM.
def extract_function_call_with_openai(
        client: OpenAI,
        user_query: str,
        available_tools: List[mcp.types.Tool],
        model: str = "gpt-3.5-turbo"
) -> Dict[str, Any]:
    """Ask the model to choose a tool call for *user_query*.

    Args:
        client: An initialized OpenAI client.
        user_query: Raw user query text sent as the sole user message.
        available_tools: MCP tools the model may choose from; each tool's
            name, description and input schema are exposed to the model.
        model: Model name to use for the chat completion.

    Returns:
        A dict with keys ``"function"`` (tool name) and ``"parameters"``
        (arguments parsed from the model's JSON). Falls back to a
        ``bing_search`` call with the original query when the model makes
        no tool call or the API call/parsing fails.
    """
    # Translate MCP tool metadata into the OpenAI function-calling schema.
    tool_specs = [
        {
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description,
                "parameters": tool.inputSchema,
            },
        }
        for tool in available_tools
    ]

    # Default result used when no tool is chosen or anything goes wrong.
    fallback = {
        "function": "bing_search",
        "parameters": {"query": user_query}
    }

    try:
        completion = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": user_query}],
            tools=tool_specs,
            tool_choice="auto"
        )
        message = completion.choices[0].message

        # No tool call in the response: fall back to a plain search.
        if not message.tool_calls:
            return fallback

        # Only the first tool call is honored.
        chosen = message.tool_calls[0]
        return {
            "function": chosen.function.name,
            "parameters": json.loads(chosen.function.arguments)
        }
    except Exception as e:
        # Best-effort: report the failure and fall back rather than raise.
        print(f"OpenAI API调用错误: {e}")
        return fallback


async def main():
    """Connect to the remote MCP server over SSE and run a few test queries.

    For each query, the LLM picks a tool call, the tool is invoked through
    the MCP client, and the result is pretty-printed.
    """
    # Connect to the MCP server via SSE.
    async with Client("https://mcp.api-inference.modelscope.net/9ba024603fdd42/sse") as client:
        tools = await client.list_tools()
        print(f"Available tools: {tools}")

        llm = get_llm()

        queries = [
            "搜索aiagent",
            "查询mcp client开发",
            "请帮我搜索人工智能技术",
            "用必应搜索一下机器学习",
        ]

        for query in queries:
            print(f"输入查询: {query}")
            # Let the LLM decide which tool to call and with what arguments.
            result = extract_function_call_with_openai(llm, query, tools, model=get_qwen_model())
            print(f"提取结果: func = '{result['function']}', para = {result['parameters']}")
            # Invoke the chosen tool on the MCP server and show the outcome.
            result = await client.call_tool(result['function'], result['parameters'])
            pprint.pprint(result)
            print("-" * 60)


if __name__ == "__main__":
    # Script entry point: drive the async workflow on a fresh event loop.
    import asyncio

    asyncio.run(main())