from langchain_core.language_models import BaseChatModel

from domain.model.platform.base import BasePlatformChatModel


class OllamaChatModel(BasePlatformChatModel):
    """Platform adapter that creates and caches Ollama chat models.

    ``init_model`` validates every requested model name against
    ``SUPPORT_MODEL_LIST`` before constructing any ``ChatOllama``
    instance, so a bad name never leaves the cache partially populated.
    """

    PLATFORM_NAME: str = "ollama"
    # Models this adapter accepts; presumably the set pulled onto the
    # local Ollama server — confirm against deployment config.
    SUPPORT_MODEL_LIST: list[str] = ['qwen2.5:1.5b', 'deepseek-r1:1.5b', 'deepseek-coder:1.3b', 'llama3.1:latest',
                                     'llama3.2:latest', 'glm4:9b', 'qwen2.5-coder:3b', 'llama2:latest', 'gemma3:1b']
    # NOTE: class-level mutable mapping — shared by every instance of this
    # class (the module exposes a single ``ollama_chat_model`` singleton,
    # so in practice there is one cache).
    MODEL_INSTANCE_MAP: dict[str, BaseChatModel] = {}

    def init_model(self, *model_names: str, **options) -> BasePlatformChatModel:
        """Create and cache a ``ChatOllama`` instance for each model name.

        Args:
            *model_names: One or more names from ``SUPPORT_MODEL_LIST``.
            **options: Optional ``base_url``, ``temperature`` and
                ``max_token`` (forwarded to Ollama as ``num_ctx``);
                absent keys are passed through as ``None``.

        Returns:
            ``self``, to allow fluent chaining.

        Raises:
            ValueError: If no model name is given or a name is not in
                ``SUPPORT_MODEL_LIST``. (``ValueError`` subclasses
                ``Exception``, so callers catching ``Exception`` still work.)
        """
        if not model_names:
            raise ValueError(f"Illegal model name: {model_names}")
        # Validate everything up front: fail before any instance is built.
        for model in model_names:
            if model not in self.SUPPORT_MODEL_LIST:
                raise ValueError(f"{self.PLATFORM_NAME} platform does not support model: {model}")
        # Imported lazily so this module stays importable even when
        # langchain_ollama is not installed and this platform is unused.
        from langchain_ollama import ChatOllama
        for model in model_names:
            self.MODEL_INSTANCE_MAP[model] = ChatOllama(
                model=model,
                base_url=options.get("base_url"),
                temperature=options.get('temperature'),
                num_ctx=options.get('max_token'),
            )
        return self


# Module-level singleton; importers share this one instance (and therefore
# the class-level MODEL_INSTANCE_MAP cache).
ollama_chat_model = OllamaChatModel()
