import os

from nl2sql.prompt.entity_extract_prompt import entity_extract_prompt
from nl2sql.model.llm import LLM


class EntityExtraction:
    """Extract entities from a candidate list by prompting an LLM.

    Fills ``entity_extract_prompt`` with the caller-supplied entity list and
    delegates the actual inference to the project's ``LLM`` chat wrapper.
    """

    def __init__(self,
                 model: str = None,
                 api_key: str = None,
                 base_url: str = None,
                 temperature: float = 0) -> None:
        """
        :param model: model name; defaults to "qwen-turbo"
        :param api_key: API key; if omitted, read from the DASHSCOPE_API_KEY
                        environment variable (never hardcode secrets in source)
        :param base_url: OpenAI-compatible endpoint; defaults to DashScope's
        :param temperature: sampling temperature, 0 for deterministic output
        :raises ValueError: if no API key is supplied and DASHSCOPE_API_KEY is unset
        """
        self.model = model or "qwen-turbo"
        # SECURITY: the key must come from the caller or the environment —
        # previously a live key was committed to source; that key should be revoked.
        self.api_key = api_key or os.environ.get("DASHSCOPE_API_KEY")
        if not self.api_key:
            raise ValueError(
                "No API key provided: pass api_key or set the "
                "DASHSCOPE_API_KEY environment variable."
            )
        self.base_url = base_url or "https://dashscope.aliyuncs.com/compatible-mode/v1"

        self.prompt = entity_extract_prompt

        self.llm = LLM(api_key=self.api_key,
                       base_url=self.base_url,
                       model_name=self.model,
                       temperature=temperature)

    def extract_entities(self, entity_list: list):
        """Ask the LLM to extract entities given a list of candidates.

        :param entity_list: candidate entities used to fill the prompt template
        :return: the raw LLM response string (no post-processing is applied yet;
                 the model's output format is not guaranteed — TODO: parse/validate)
        """
        # Fill the prompt template with the candidate entity list.
        filled_prompt = self.prompt.format(entity_list=entity_list)
        # Run inference on the filled prompt.
        llm_resp: str = self.llm.chat(prompt=filled_prompt)

        # Post-processing hook: currently a pass-through, since the LLM's
        # output format cannot be guaranteed and no parser is implemented yet.
        result = llm_resp

        return result


