import ollama
import requests
from ollama import ChatResponse, Message

from infrastructure.utils import pprint


class OllamaServer:
    """Thin wrapper around a local Ollama service (expected on port 11434).

    Provides a health check, best-effort model pulling, and a chat entry
    point with both streaming and synchronous variants.
    """

    @staticmethod
    def check_ollama_service() -> bool:
        """Return True if the local Ollama service answers GET /api/tags with 200."""
        try:
            # Bounded timeout so a hung service cannot block the caller forever.
            response = requests.get('http://localhost:11434/api/tags', timeout=5)
            return response.status_code == 200
        except requests.exceptions.RequestException:
            # Covers connection refusal and timeouts alike.
            return False

    @staticmethod
    def ensure_model_exists(model_name: str) -> None:
        """Ensure *model_name* is available locally, pulling it if missing.

        Best-effort: failures are reported on stdout and swallowed; the
        subsequent chat call will surface a hard failure if the model is
        genuinely unusable.
        """
        try:
            print(f"正在检查模型 {model_name} 的状态...")
            # Query the models already present in the local Ollama store.
            list_response = ollama.list()
            model_exists = any(item.model == model_name for item in list_response.models)
            if not model_exists:
                print(f"模型 {model_name} 不存在，开始拉取（这可能需要几分钟时间）...")
                pull_result = ollama.pull(model_name)
                print(f"模型 {model_name} 拉取完成！")
                print("详细信息:", pull_result)
            else:
                print(f"模型 {model_name} 已存在，可以直接使用。")
        except Exception as e:
            print(f"检查/拉取模型时出错: {str(e)}")

    def _chat(
            self,
            model_name: str = None,
            messages: list[Message] = None,
            stream: bool = False,
            options: dict = None
    ) -> ChatResponse:
        """Send *messages* to the local Ollama service.

        :param model_name: name of the model to chat with (pulled if absent)
        :param messages: conversation history as ollama Message objects
        :param stream: when True, return the chunk iterator from ollama.chat
        :param options: extra model options passed through to ollama.chat
        :raises Exception: if the service is not running or the call fails
        """
        # Fail fast if the service is unreachable.
        if not self.check_ollama_service():
            raise Exception("错误: Ollama服务未运行，请确保服务在端口11434上运行")
        # Make sure the requested model is available (best effort).
        self.ensure_model_exists(model_name)

        try:
            # Connect to the local Ollama service.
            response = ollama.chat(
                model=model_name,
                stream=stream,
                messages=messages,
                options=options
            )
        except Exception as e:
            # Chain the original exception so the traceback is preserved.
            raise Exception(f"与Ollama服务交互时出错: {e}") from e

        # Log the request payload.
        print("聊天历史:")
        pprint.pretty_print_json([msg.model_dump() for msg in messages])

        if stream:
            # Streaming mode returns an iterator of chunks, which has no
            # model_dump(); the caller must consume it.
            return response

        # Log the full non-streaming response.
        print("Ollama响应:")
        pprint.pretty_print_json(response.model_dump())
        return response

    def stream(self, model_name: str = None,
               messages: list[Message] = None,
               options: dict = None):
        """Streaming response: returns the chunk iterator from the service."""
        return self._chat(model_name=model_name, messages=messages, stream=True, options=options)

    def invoke(self, model_name: str = None,
               messages: list[Message] = None,
               options: dict = None):
        """Synchronous call: returns the assistant message text."""
        raw_response = self._chat(model_name=model_name, messages=messages, options=options)
        return raw_response.message.content
