from openai import OpenAI
import requests

from core.xiaoyu_qqbot._dto import AgentUsePlate


class _DeepSeek:
    """Thin wrapper around the DeepSeek chat-completion endpoint."""

    def __init__(self, api_key):
        # DeepSeek exposes an OpenAI-compatible API, so the OpenAI SDK
        # is reused with a custom base URL.
        self.client = OpenAI(api_key=api_key, base_url="https://api.deepseek.com")

    def chat(self, message):
        """Send a single user message and return the model's reply text.

        Args:
            message (str): User input forwarded as the user turn.

        Returns:
            str: Content of the first completion choice.
        """
        conversation = [
            {"role": "system", "content": "你是一个非常有用的小助手。"},
            {"role": "user", "content": message},
        ]
        completion = self.client.chat.completions.create(
            model="deepseek-chat",
            messages=conversation,
            stream=False,
        )
        return completion.choices[0].message.content


class _OllamaBySDK:
    """Ollama backend using the official Python SDK.

    The previous implementation ran a hard-coded demo request inside
    ``__init__`` (printing "Why is the sky blue?" output) and exposed no
    ``chat`` method, so it could not be used from ``AIAgent``.  It now
    mirrors the ``chat(message) -> str`` interface of the sibling
    backend classes.
    """

    def __init__(self, model='llama3.2'):
        """Store the SDK module and the model name to chat with.

        Args:
            model (str): Ollama model name, defaults to 'llama3.2'.
        """
        # Imported lazily so the dependency is only required when this
        # backend is actually selected.
        import ollama

        self._ollama = ollama
        self.model = model

    def chat(self, message):
        """Send *message* to the configured model and return the reply.

        Args:
            message (str): User input.

        Returns:
            str: Content of the model's reply message.
        """
        response = self._ollama.chat(model=self.model, messages=[
            {
                'role': 'user',
                'content': message,
            },
        ])
        # Fields are accessible directly on the response object.
        return response.message.content


class _OllamaAPI:
    def __init__(self, base_url, model):
        """初始化Ollama API客户端

        Args:
            base_url (str): Ollama服务的基础URL，默认为本地服务地址
        """
        self.base_url = base_url.rstrip('/')

    def chat(self, message, model="llama3.2:latest"):
        """使用Ollama进行对话

        Args:
            message (str): 用户输入的消息
            model (str): 使用的模型名称，默认为llama3.2:latest

        Returns:
            str: 模型的回复内容
        """
        url = f"{self.base_url}/api/generate"

        payload = {
            "model": model,
            "prompt": message,
            "stream": False
        }

        response = requests.post(url, json=payload)
        response.raise_for_status()
        result = response.json()
        return result.get('response', '')

    def list_models(self):
        """获取可用模型列表

        Returns:
            list: 可用模型列表
        """
        url = f"{self.base_url}/api/tags"

        response = requests.get(url)
        response.raise_for_status()
        result = response.json()
        return [model['name'] for model in result.get('models', [])]


class AIAgent:
    """Facade that dispatches chat requests to a selected backend agent."""

    def __init__(self,
                 agent_use_plate: AgentUsePlate,
                 deep_seek_api_key: str = None,
                 ollama_model: str = 'llama3.2',
                 ollama_api_url: str = "http://localhost:11434",
                 ):
        """Build the backend selected by *agent_use_plate*.

        Args:
            agent_use_plate: Which backend to use
                (DeepSeek / OllamaSDK / OllamaAPI).
            deep_seek_api_key: API key, required for the DeepSeek backend.
            ollama_model: Model name for the Ollama backends.
            ollama_api_url: Base URL of the Ollama server.

        Raises:
            SystemExit: If *agent_use_plate* is not a supported backend.
        """
        # Bug fix: the annotation was ``AgentUsePlate()`` — a *call*
        # evaluated at definition time — instead of the type itself.
        self._ai_agent = None
        if agent_use_plate == AgentUsePlate.DeepSeek:
            self._ai_agent = _DeepSeek(deep_seek_api_key)
        elif agent_use_plate == AgentUsePlate.OllamaSDK:
            # TODO: SDK backend not wired up yet; _ai_agent stays None,
            # so chat()/list_models() would raise AttributeError.
            ...
            # self._ai_agent = _OllamaBySDK()
        elif agent_use_plate == AgentUsePlate.OllamaAPI:
            self._ai_agent = _OllamaAPI(ollama_api_url, ollama_model)
        else:
            print('目前不支持该模型请求方式，程序退出')
            # Same exception type as the original ``exit()`` call, but
            # without depending on the site module and with a non-zero
            # status to signal the error.
            raise SystemExit(1)

    def chat(self, message):
        """Delegate a chat message to the active backend and return its reply."""
        return self._ai_agent.chat(message)

    def generate(self):
        ...  # not implemented yet

    def list_models(self):
        """Return the backend's available models (OllamaAPI backend only)."""
        return self._ai_agent.list_models()

    def pull_model(self):
        ...  # not implemented yet

    def embed(self):
        ...  # not implemented yet

    def ps_model(self):
        ...  # not implemented yet
