#!/usr/bin/env python

# https://gitee.com/williamzjc/mcp-llm/

import asyncio

from mcp.client.stdio import stdio_client
from mcp import ClientSession, StdioServerParameters

import json
from openai import AsyncOpenAI, OpenAIError

from dotenv import dotenv_values
# Load the DeepSeek API key from a local env file and build the async client.
config = dotenv_values(".env.key")
# Fail fast with a clear message instead of a bare KeyError when the key
# is missing or empty (also drops the original's pointless f-string key).
api_key = config.get('DEEPSEEK_API_KEY')
if not api_key:
    raise RuntimeError("DEEPSEEK_API_KEY is missing from .env.key")
ds = AsyncOpenAI(api_key=api_key, base_url="https://api.deepseek.com")


# System prompt steering the assistant's persona and tool usage.
instructions = "You are a math assistant, also can help me search information by Baidu."
system_message = {"role": "system", "content": instructions}

# Set the list of tools
# OpenAI-style function-calling schema advertised to the model on every request.
# BUG FIX: per the OpenAI tools spec, "description" belongs INSIDE the
# "function" object. The original placed it at the tool's top level, where the
# API ignores it — the model never saw any tool description.
tools = [
    {
        "type": "function",
        "function": {
            "name": "add",
            "description": "Addition operation",
            "parameters": {
                "type": "object",
                "properties": {
                    "a": {"type": "number"},
                    "b": {"type": "number"}
                },
                "required": ["a", "b"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "sub",
            "description": "Subtraction operation",
            "parameters": {
                "type": "object",
                "properties": {
                    "a": {"type": "number"},
                    "b": {"type": "number"}
                },
                "required": ["a", "b"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "multiply",
            "description": "Multiplication operation",
            "parameters": {
                "type": "object",
                "properties": {
                    "a": {"type": "number"},
                    "b": {"type": "number"}
                },
                "required": ["a", "b"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "search_baidu",
            "description": "Search by Baidu",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {"type": "string"}
                },
                "required": ["query"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "search_baike",
            "description": "Search by Baidu pedia",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {"type": "string"}
                },
                "required": ["query"]
            }
        }
    }
]


# Names of every tool the model may call, used to validate tool_calls in main().
tool_names = [tool["function"]["name"] for tool in tools]


# How to spawn the MCP tool server: run "server.py" as a child process and
# talk to it over stdio.
params = StdioServerParameters(command="python3", args=["server.py"])


# Shared conversation transcript, seeded with the system prompt; call_deepseek
# and main() both append turns to this list.
history = [system_message]


async def call_deepseek(user_input: str, history=None, n_loop=10, flag=True):
    """Send one user turn to DeepSeek and return the raw ChatCompletion.

    Retries up to `n_loop` times on OpenAIError (API/transport failures).
    When `flag` is true, the user message is appended to `history` only
    after a successful request, so failed attempts do not pollute it.

    Args:
        user_input: the user's message text.
        history: conversation so far; defaults to the module-level `history`.
        n_loop: maximum number of attempts before re-raising the API error.
        flag: whether to record the user message in `history` on success.
    """
    if history is None:
        history = globals()['history']

    message = {"role": "user", "content": user_input}
    attempts = 0

    while True:
        try:
            resp = await ds.chat.completions.create(
                model="deepseek-chat",
                messages=history + [message],
                tools=tools)  # :ChatCompletion
            if flag:
                history.append(message)
            return resp
        except OpenAIError:
            attempts += 1
            if attempts >= n_loop:
                # Bare raise preserves the original traceback (the original
                # `raise e` re-raised from here instead).
                raise
        except Exception as e:
            # BUG FIX: the original did `raise f"..."`, which raises a str and
            # itself fails with TypeError. Wrap in a real exception and chain.
            raise RuntimeError(f"An unexpected error occurred: {e}") from e


async def main():
    """Interactive chat loop: connect to the MCP tool server over stdio,
    forward each user turn to DeepSeek, and execute any tool the model
    requests before printing the assistant's reply. Type 'bye' to exit.
    """

    global history

    async with stdio_client(params) as (stdio, write):
        async with ClientSession(stdio, write) as session:

            # MCP handshake with the spawned server.py process.
            await session.initialize()

            while True:
                # chat-loop: read one user turn from stdin; 'bye' ends the session.
                user_input = input("User: ")
                if user_input == 'bye':
                    print('Bye!')
                    break
                response = await call_deepseek(user_input, history)

                tool_calls = response.choices[0].message.tool_calls  # call the tools based on LLM

                if tool_calls:
                    # Get the name of the function and its arguments.
                    # NOTE(review): only the first tool call is honored; any
                    # additional parallel calls are silently dropped — confirm intended.
                    fn_name = tool_calls[0].function.name
                    fn_args = json.loads(tool_calls[0].function.arguments)

                    if fn_name in tool_names:
                        # Call the function and get the result.
                        result = await session.call_tool(fn_name, fn_args)
                        print(f"System: Model decides to call tool: {fn_name}.")
                        # Render the tool output through the server's 'show' prompt
                        # and record it in the transcript as an assistant turn.
                        prompt = await session.get_prompt('show', {'value': result.content[0].text})
                        assistant_message = {"role": "assistant", "content": prompt.messages[0].content.text}
                        history.append(assistant_message)
                        # Second round-trip so the model can phrase a final answer
                        # based on the tool result just added to history.
                        response = await call_deepseek(f"Yes! (summarize the text if it is too long)",
                            history=history)
                    else:
                        print("Assistant: Function not found!")
                
                # NOTE(review): after a response that contained only a tool call,
                # `content` may be None and this prints "Assistant: None." — verify.
                reply = response.choices[0].message.content
                print(f"Assistant: {reply}.")

                assistant_message = {"role": "assistant", "content": reply}
                history.append(assistant_message)

            # for a in history:
            #     print(a["role"], ":", a["content"])


if __name__ == "__main__":
    # Script entry point: run the interactive chat loop on the asyncio event loop.
    asyncio.run(main())

