from loguru import logger
from openai import OpenAI

import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


class LLM():
    """Thin wrapper around an OpenAI-compatible chat-completions endpoint
    (Alibaba DashScope compatible mode)."""

    def __init__(self, model="qwen-plus"):
        """Configure the API client.

        Args:
            model: Model identifier forwarded to the chat-completions API.
                   Defaults to "qwen-plus" so ``LLM()`` works without arguments
                   (the script entry point below constructs it that way).
        """
        try:
            self.tokenizer = None
            # SECURITY: never commit a real API key to source control.
            # Prefer the environment variable; the literal fallback keeps old
            # behavior, but this key should be rotated and the fallback removed.
            self.api_key = os.environ.get(
                "DASHSCOPE_API_KEY", 'sk-6bc589928d1e4b7289a8c85f5fe72a23')
            self.base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
            self.temperature = 0.0
            self.top_p = 0.95
            self.max_tokens = 8000
            self.model = model
            self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
        except Exception as e:
            logger.error("Error loading configuration: llm.key or llm.api, please check the configuration file.")
            logger.error(f'The exception is {e}')
            # sys.exit (not the interactive builtin exit) is correct in scripts;
            # exit code 1 signals failure portably.
            sys.exit(1)

    def get_response(self, messages):
        """Send ``messages`` to the chat-completions API and return the reply.

        Args:
            messages: List of ``{"role": ..., "content": ...}`` dicts in the
                      OpenAI chat format.

        Returns:
            The assistant's reply text, or ``None`` if the request failed.
        """
        try:
            response = self.client.chat.completions.create(
                model=self.model,
                messages=messages,
                temperature=self.temperature,
                top_p=self.top_p,
                max_tokens=self.max_tokens,
                stream=False,
            )
        except Exception as e:
            # Best-effort: log (consistently with __init__, instead of print)
            # and signal failure to the caller rather than crashing.
            logger.error(f"API request failed: {str(e)}")
            return None
        return response.choices[0].message.content

if __name__ == '__main__':
    # Bug fix: LLM.__init__ declares a required `model` parameter, so the
    # original bare `LLM()` call raised TypeError before any request was made.
    # Pass the model name explicitly.
    llm = LLM("qwen-plus")
    response = llm.get_response(messages=[
        {"role": "user", "content": "Give me a quick sort code."}
    ])
    print(response)
