from openai import OpenAI

class LLM_client:
    """Thin wrapper around the OpenAI chat-completions API.

    Holds a configured ``OpenAI`` client together with the model name that
    is used for every request.
    """

    def __init__(self, api_key, base_url, model_name=""):
        """Remember the model name and build the underlying API client.

        Args:
            api_key: Secret key passed straight to the ``OpenAI`` constructor.
            base_url: Endpoint URL of the (possibly self-hosted) API server.
            model_name: Identifier of the model/endpoint to query; may be
                left empty and relied upon by ``get_contex`` as-is.
        """
        self.Model_name = model_name
        self.client = self.activate_LLM_client(api_key=api_key, base_url=base_url)

    def activate_LLM_client(self, api_key, base_url):
        """Construct and return an ``OpenAI`` client for the given endpoint."""
        return OpenAI(api_key=api_key, base_url=base_url)

    def get_contex(self, prompt_role, prompt_content):
        """Run one system+user exchange and return the assistant's reply text.

        Args:
            prompt_role: Text sent as the ``system`` message.
            prompt_content: Text sent as the ``user`` message.

        Returns:
            The content string of the first choice in the completion.
        """
        conversation = [
            {"role": "system", "content": prompt_role},
            {"role": "user", "content": prompt_content},
        ]
        # model is the endpoint/model ID configured at construction time
        response = self.client.chat.completions.create(
            model=self.Model_name,
            messages=conversation,
        )
        return response.choices[0].message.content
