from openai import OpenAI
import tomli


class Model:
    """Thin wrapper around an OpenAI-compatible API for chat and embeddings.

    Reads connection settings (API key, base URL, model names) from the
    ``[llm]`` table of a TOML configuration file.
    """

    def __init__(self, config_path: str = './config/config.toml'):
        """Parse the config file and build the API client.

        Args:
            config_path: Path to a TOML file containing an ``[llm]`` table
                with ``api_key``, ``base_url``, ``embed_model`` and
                ``chat_model`` keys.
        """
        with open(config_path, 'rb') as fh:
            parsed = tomli.load(fh)

        self.config = parsed['llm']
        self.client = OpenAI(
            base_url=self.config['base_url'],
            api_key=self.config['api_key'],
        )

    def embed(self, text: str) -> list[float]:
        """Return the embedding vector for *text* using the configured embed model."""
        response = self.client.embeddings.create(
            model=self.config['embed_model'],
            input=text,
        )
        return response.data[0].embedding

    def chat(self, prompt: str) -> str:
        """Send *prompt* as a single user message and return the reply text."""
        message = {'role': 'user', 'content': prompt}
        completion = self.client.chat.completions.create(
            model=self.config['chat_model'],
            messages=[message],
        )
        return completion.choices[0].message.content


