from langchain_ollama import OllamaLLM, ChatOllama, OllamaEmbeddings


class GetOllama:
    """Factory for langchain-ollama model objects pointed at an Ollama server.

    ``model_type`` selects the wrapper: 0 -> OllamaLLM (text completion),
    1 -> ChatOllama (chat messages), anything else -> OllamaEmbeddings.
    Calling the instance returns the constructed model.
    """

    # Convenience endpoints ("host:port") for known Ollama servers.
    home_desktop_ip = "172.26.167.51:11434"
    ailab_linux_ip = "10.12.8.21:11434"

    def __init__(self, ip="127.0.0.1:11434", model_type=0, model_name="qwen2.5", num_ctx=4096, temperature=0.7, streaming=False):
        """Eagerly build the underlying model wrapper.

        Args:
            ip: Ollama server address as "host:port".
            model_type: 0 for a completion LLM, 1 for a chat model,
                any other value for an embeddings model.
            model_name: Name of the model on the Ollama server.
            num_ctx: Context window size (not used for embeddings).
            temperature: Sampling temperature (not used for embeddings).
            streaming: Forwarded to the LLM/chat wrappers.
                NOTE(review): `streaming` is not a declared field on the
                langchain-ollama wrappers — confirm it has an effect, or use
                `.stream()` at call time instead.
        """
        self.ip = ip
        self.type = model_type
        self.model_name = model_name
        self.model = None
        if model_type == 0:
            # Fix: num_ctx was previously forwarded only to ChatOllama;
            # OllamaLLM supports it too, so honor the caller's value here.
            self.model = OllamaLLM(base_url=self.ip, model=model_name, num_ctx=num_ctx, temperature=temperature, streaming=streaming)
        elif model_type == 1:
            self.model = ChatOllama(base_url=self.ip, model=model_name, num_ctx=num_ctx, temperature=temperature, streaming=streaming)
        else:
            # Fix: OllamaEmbeddings has no `temperature` parameter —
            # embedding generation is deterministic, so the kwarg was
            # meaningless (and version-dependently rejected). Dropped.
            self.model = OllamaEmbeddings(base_url=self.ip, model=model_name)

    def __call__(self):
        """Return the model object built in __init__."""
        return self.model


if __name__ == '__main__':
    from langchain.schema import HumanMessage

    # Manual smoke test: build a chat model and send it one prompt.
    print(GetOllama.home_desktop_ip)
    chat_model = GetOllama(model_type=1, model_name="deepseek-r1:14b")()
    prompt = "给生产杯子的公司取一个名字。"
    messages = [HumanMessage(content=prompt)]
    print(chat_model.invoke(messages))