from openai import OpenAI


class LLMModule:
    """Thin wrapper around the OpenAI chat-completions API.

    NOTE(review): the original subclassed ``OpenAI`` but never called
    ``super().__init__()``, which would leave every inherited method in a
    broken, half-initialized state. The class already uses composition via
    ``self.client``, so the inheritance is dropped.
    """

    def __init__(self, config):
        """Build the client from a config mapping.

        Args:
            config: mapping with keys ``api_key``, ``model_name`` and
                ``url`` (the API base URL). Missing keys yield ``None``.
        """
        self.api_key = config.get("api_key")
        self.model_name = config.get("model_name")
        self.url = config.get("url")
        # Composition instead of inheritance: one dedicated client instance.
        self.client = OpenAI(api_key=self.api_key, base_url=self.url)

    def gen_response(self, input_text):
        """Send *input_text* as a single user message and return the reply.

        Args:
            input_text: the raw user utterance (a plain string — this method
                builds the chat message list itself).

        Returns:
            The assistant reply text (``str``).
        """
        dialogue = [
            {"role": "user", "content": input_text},
        ]
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=dialogue,
        )
        return response.choices[0].message.content

def run():
    """Smoke-test the LLM wrapper with a single greeting.

    Fixes two bugs in the original: ``LLMModule()`` was called without its
    required ``config`` argument (an immediate ``TypeError``), and a
    pre-built message *list* was passed to ``gen_response``, which expects
    the raw user string and wraps it in a user message itself (the original
    would have sent a nested list as ``content``).
    """
    import os  # local import keeps this fix self-contained

    # Pull credentials/endpoint from the environment rather than hard-coding.
    # NOTE(review): model/url key names match what LLMModule reads; confirm
    # the expected env-var names against the deployment configuration.
    config = {
        "api_key": os.environ.get("OPENAI_API_KEY"),
        "model_name": os.environ.get("LLM_MODEL_NAME"),
        "url": os.environ.get("OPENAI_BASE_URL"),
    }
    llm = LLMModule(config)
    # gen_response expects the raw user text, not a message list.
    response = llm.gen_response("你好")
    print(response)
