import ollama
from ollama import Options

from repository.repository_abc import Repository, Model
class OllamaRepository(Repository):
    """Repository implementation backed by a locally running Ollama model."""

    def __init__(self, model_info: Model, system_msg: str):
        self.model_info: Model = model_info
        self.system_msg: str = system_msg
        # Seed the conversation with the system message so every chat call
        # carries the same instructions.
        self.message_history: list[dict[str, str]] = [
            {"role": self.model_info.roles.system_role, "content": system_msg}]

    def send_prompt(self, prompt: str, add_to_history: bool = True) -> dict[str, str]:
        print("Prompt to be sent: " + prompt)
        # Temperature 0 keeps the model's answers deterministic.
        options: Options = Options(temperature=0)
        self.message_history.append({"role": self.model_info.roles.user_role, "content": prompt})
        response = ollama.chat(self.model_info.name, self.message_history, options=options)
        answer = {"role": self.model_info.roles.ai_role, "content": response["message"]["content"]}
        if add_to_history:
            self.message_history.append(answer)
        else:
            # Discard the prompt that was just appended so the exchange leaves
            # no trace in the conversation history.
            self.message_history.pop()
        return answer

    def get_model_info(self) -> Model:
        return self.model_info
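

if __name__ == "__main__":
    # Illustrative sketch only, not part of the Space: it shows the raw
    # ollama.chat exchange that OllamaRepository wraps. The model name
    # "llama3" is an assumption and must already be pulled into the local
    # Ollama instance; constructing an OllamaRepository itself needs a Model
    # instance from repository.repository_abc, whose constructor is not
    # shown in this file.
    demo_history = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Say hello in one short sentence."},
    ]
    demo_response = ollama.chat("llama3", demo_history, options=Options(temperature=0))
    print(demo_response["message"]["content"])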