# metisllm-dashboard / llm_handler / mock_llm_handler.py
from typing import List, Optional, Dict
import enum
import llm_handler.llm_interface as llm_interface
class MockLLMHandler(llm_interface.LLMInterface):
    """Mock LLMInterface implementation that returns canned responses.

    Intended for tests: configure it with a queue of chat completions
    and/or a single embedding vector, and it replays them without making
    any network calls.
    """

    # FIFO queue of canned chat replies; each get_chat_completion call
    # consumes one from the front.
    _chat_completion: Optional[List[str]]
    # Fixed embedding vector returned by every get_text_embedding call.
    _text_embedding: Optional[List[float]]

    def __init__(self,
                 chat_completion: Optional[List[str]] = None,
                 text_embedding: Optional[List[float]] = None):
        self._chat_completion = chat_completion
        self._text_embedding = text_embedding

    def get_chat_completion(self,
                            messages: List[Dict],
                            model: Optional[enum.Enum] = None,
                            temperature: float = 0.2,
                            **kwargs) -> str:
        """Return the next canned completion from the configured queue.

        All arguments mirror the real interface but are ignored by
        the mock.

        Raises:
            ValueError: if no completions were configured, or every
                configured completion has already been consumed (the
                falsy check covers both `None` and an empty list).
        """
        if not self._chat_completion:
            # was f'...' with no placeholders (ruff F541); plain literal
            raise ValueError('_chat_completion not set')
        return self._chat_completion.pop(0)

    def get_text_embedding(
        self,
        input: str,  # name kept to match LLMInterface signature; shadows builtin
        model: Optional[enum.Enum] = None,
    ) -> List[float]:
        """Return the configured embedding vector, ignoring the inputs.

        Unlike the chat queue, the same vector is returned on every call.

        Raises:
            ValueError: if no embedding vector was configured.
        """
        if not self._text_embedding:
            # was f'...' with no placeholders (ruff F541); plain literal
            raise ValueError('_text_embedding not set')
        return self._text_embedding