from openai import OpenAI
from openai.types.chat import ChatCompletion
from loguru import logger

# --- DeepSeek API smoke test ------------------------------------------------
# Sends a single "你好" prompt to a locally hosted OpenAI-compatible endpoint
# and logs the response id and serialized body.
api_key = "123456"
base_url = "http://172.20.50.49:10000/v1"
# Alternative remote endpoint (key redacted):
# NOTE(security): a real-looking secret key was previously committed in this
# comment — rotate that key and never commit credentials, even commented out.
# api_key = "sk-<redacted>"
# base_url = "https://api.deepseek.com"

deepseek = OpenAI(api_key=api_key, base_url=base_url)
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你好"},
]

try:
    response: ChatCompletion = deepseek.chat.completions.create(
        # model="deepseek-reasoner",
        model="deepseek-r1:7b",
        messages=messages,
        max_tokens=1000,
        stream=False,
    )
    logger.info(f"API调用成功，响应ID: {response.id}")
    # BUG FIX: `response.json` was a bound-method reference (logged as
    # "<bound method ...>"); call model_dump_json() to log the actual payload.
    logger.debug(f"响应内容: {response.model_dump_json()}")
except Exception as e:
    # Broad catch is acceptable for a one-shot smoke test: log and move on.
    logger.error(f"请求失败: {str(e)}")



import asyncio
import json
import traceback
from contextlib import AsyncExitStack
from typing import Optional

from dotenv import load_dotenv
from loguru import logger
from mcp import ClientSession, StdioServerParameters, ListToolsResult
from mcp.client.stdio import stdio_client
from mcp.types import CallToolResult
from openai import OpenAI
from openai.types.chat import ChatCompletion

# Load environment variables from a local .env file, if one exists.
load_dotenv()

# NOTE(review): these hard-coded values override anything loaded from .env;
# "sk-123456" looks like a placeholder key — confirm before deploying.
api_key = "sk-123456"
base_url = "https://api.deepseek.com"
print(f"base_url: {base_url}")

class MCPClient:
    """MCP client that exposes an MCP server's tools to a DeepSeek chat model.

    Flow: spawn a stdio-based MCP server, advertise its tools to the model as
    OpenAI-style function tools, execute any tool calls the model requests,
    and feed the results back for a follow-up completion.
    """

    def __init__(self):
        # Active MCP session; established by connect_to_server().
        self.session: Optional[ClientSession] = None
        # Owns the stdio transport and session lifetimes; closed in cleanup().
        self.exit_stack = AsyncExitStack()
        # OpenAI-compatible client using the module-level DeepSeek credentials.
        self.deepseek = OpenAI(api_key=api_key, base_url=base_url)

    async def connect_to_server(self, server_script_path: str):
        """Spawn the server script (.py or .js) and open an MCP session over stdio.

        Args:
            server_script_path: Path to a Python or JavaScript MCP server script.

        Raises:
            ValueError: If the path ends in neither ".py" nor ".js".
        """
        is_python = server_script_path.endswith(".py")
        is_js = server_script_path.endswith(".js")
        if not is_python and not is_js:
            raise ValueError("Invalid server script path")

        # Choose the interpreter from the file extension.
        command = "python" if is_python else "node"
        server_params = StdioServerParameters(
            command=command, args=[server_script_path], env=None
        )

        # Enter transport and session on the exit stack so cleanup() tears
        # both down in reverse order.
        stdio_transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(self.stdio, self.write)
        )

        await self.session.initialize()

        response = await self.session.list_tools()

        tools = response.tools
        logger.info("Connected to server with tools: {}", [tool.name for tool in tools])

    async def process_query(self, query: str) -> str:
        """Run one user query through the model, executing requested tool calls.

        Args:
            query: The user's natural-language query.

        Returns:
            A newline-joined transcript of model text and tool-call results.
        """
        messages = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": query},
        ]

        # Fetch the server's current tool list and convert each tool into an
        # OpenAI-style function-calling spec.
        response: ListToolsResult = await self.session.list_tools()
        available_tools = [
            {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": tool.inputSchema,
                },
            }
            for tool in response.tools
        ]

        # First completion: the model may answer directly or request tools.
        # noinspection PyTypeChecker
        response: ChatCompletion = self.deepseek.chat.completions.create(
            model="deepseek-chat",
            messages=messages,
            max_tokens=1000,
            tools=available_tools,
            stream=False,
        )

        # Pieces of the final transcript returned to the caller.
        final_text = []

        # Accumulated assistant-side text, re-sent as conversation history.
        assistant_message_content = []

        logger.info(f"{response}")

        for choice in response.choices:
            if choice.finish_reason != "tool_calls":
                # Plain text answer — make sure content is serializable.
                if hasattr(choice.message.content, "to_json"):
                    content = choice.message.content.to_json()
                else:
                    content = str(choice.message.content)
                final_text.append(content)
                assistant_message_content.append(content)
            else:
                for tool_call in choice.message.tool_calls:
                    tool_name = tool_call.function.name
                    tool_args = tool_call.function.arguments
                    # Invoke the tool on the MCP server with the model's args.
                    result: CallToolResult = await self.session.call_tool(
                        tool_name, json.loads(tool_args)
                    )

                    logger.info(f"tool call result:{result}")

                    final_text.append(
                        f"Calling tool {tool_name} with args {tool_args} returned: {result}"
                    )
                    # Make sure content is serializable (may be None here,
                    # which str() renders as "None").
                    if hasattr(choice.message.content, "to_json"):
                        content = choice.message.content.to_json()
                    else:
                        content = str(choice.message.content)
                    assistant_message_content.append(content)
                    # Join the accumulated list into a single string.
                    messages.append(
                        {
                            "role": "assistant",
                            "content": "\n".join(assistant_message_content),
                        }
                    )
                    # NOTE(review): tool output is fed back as a "user"
                    # message rather than a role="tool" message, and this
                    # assumes result.content[0] is a text item — confirm
                    # against the DeepSeek API and the server's tools.
                    messages.append(
                        {
                            "role": "user",
                            "content": result.content[0].text,
                        }
                    )

                # Make sure content is serializable.
                if hasattr(response.choices[0].message.content, "to_json"):
                    content = response.choices[0].message.content.to_json()
                else:
                    content = str(response.choices[0].message.content)
                final_text.append(content)

                # Call the model again with the tool result
                # noinspection PyTypeChecker
                logger.debug(f"messages: {messages}")

                # Follow-up completion; note this rebinds `response` while
                # iterating response.choices (the iterator still walks the
                # original object).
                # noinspection PyTypeChecker
                response = self.deepseek.chat.completions.create(
                    model="deepseek-chat",
                    messages=messages,
                    max_tokens=1000,
                    tools=available_tools,
                    stream=False,
                )
                final_text.append(response.choices[0].message.content)
                logger.info(f"{response}")

        return "\n".join(final_text)

    async def chat_loop(self):
        """Interactive REPL: read queries from stdin until the user types 'quit'."""
        print("\nMCP Client Started!")
        print("Type your queries or 'quit' to exit.")

        while True:
            try:
                query = input("\nYour Query: ").strip()
                if query.lower() == "quit":
                    print("Exiting...")
                    break

                response = await self.process_query(query)
                print("\n" + response)
            except Exception as e:
                # Broad catch keeps the REPL alive after any per-query failure.
                print(f"Error: {e}")
                traceback.print_exc()

    async def cleanup(self):
        """Close the exit stack, tearing down the MCP session and stdio transport."""
        await self.exit_stack.aclose()
        print("MCP Client Cleaned Up!")


async def main():
    """CLI entry point: connect to the MCP server script named on argv, run the REPL.

    Usage: python client.py <path_to_server_script>
    """
    # BUG FIX: `sys` was only imported inside the __main__ guard, so calling
    # main() from an importing module raised NameError. Import it locally so
    # main() is self-contained.
    import sys

    if len(sys.argv) < 2:
        print("Usage: python client.py <path_to_server_script>")
        sys.exit(1)

    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        # Always release the session/transport, even if connect or the
        # chat loop failed.
        await client.cleanup()


if __name__ == "__main__":
    # NOTE(review): `sys` is imported here, not at the top of the file, so it
    # only exists as a module global when the script is run directly.
    import sys

    asyncio.run(main())

