import ollama
from configparser import ConfigParser


class TranslateLlamaOllama:
    """Singleton translator backed by a locally running Ollama LLM.

    The model name is read once from an INI config file
    (section ``default``, key ``llama_ollama_name``); all subsequent
    constructions return the same initialized instance.
    """

    # The single shared instance, created lazily in __new__.
    _instance = None

    # Ollama model name; filled in on first successful __init__.
    _model_name = None


    def __new__(cls, path_config):
        # Create the instance exactly once.  Do NOT call __init__ here:
        # Python invokes __init__ automatically on the object returned
        # from __new__.  (The original called it explicitly as well,
        # running initialization twice on first construction.)
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance


    def __init__(self, path_config):
        """Load the model name from *path_config* on first construction.

        __init__ is re-invoked by Python on every TranslateLlamaOllama(...)
        call even though __new__ returns the cached singleton, so guard
        against re-initialization.
        """
        if self._model_name is not None:
            return
        config = ConfigParser()
        config.read(path_config, encoding='utf-8')
        self._model_name = config.get('default', 'llama_ollama_name')
        print("init model llama_ollama_name: " + self._model_name)


    def translate_text(self, text, source_language, target_language):
        """Translate *text* from *source_language* to *target_language*.

        Sends a single chat request to the configured Ollama model and
        returns the cleaned-up translation string.
        """
        response = ollama.chat(model=self._model_name, messages=[
                {"role": "system", "content": "You will be provided with a sentence in " + source_language
                                              + ", and your task is to translate it into " + target_language
                                              + ". Note that only the translated results are output"},
                {"role": "user", "content": text},
                {"role": "assistant", "content": "Translate the text to " + target_language
                                                 + ". Note that only the translated results are output"}
            ])
        trans_res = response['message']['content']
        # Collapse newlines, then strip surrounding whitespace.  The
        # original did trans_res[1:], unconditionally dropping the first
        # character to remove a leading space — which also truncated
        # answers that had no leading space.
        trans_res = trans_res.replace("\n", "")
        return trans_res.strip()


    def translateList(self, zh_list):
        """Translate a list of Chinese strings to English.

        Prints each source/translation pair and returns the list of
        English translations in input order.
        """
        en_list = []
        for zh in zh_list:
            translated_output = self.translate_text(zh, "Chinese", "English")
            en_list.append(translated_output)
            print(f"中文原文: {zh}")
            print(f"英文翻译: {translated_output}")
            print("---------------------------------------")
        return en_list



    def testOllamaTranslate(self):
        """Smoke test: translate one fixed Chinese phrase and print it."""
        # Delegate to translate_text instead of duplicating the chat call.
        trans_res = self.translate_text("青桔柠檬百香果茶", "Chinese", "English")
        print("trans_res="+trans_res)


    def testTranslate(self):
        """Smoke test: translate a small fixed list of Chinese dish names."""
        zh_list = ['柠檬芝士蛋糕', '巧克力树莓卷', '巴斯克芝士蛋糕', '蛋黄肉松青团', '双皮奶']
        # Delegate to translateList instead of duplicating its loop.
        return self.translateList(zh_list)





if __name__ == '__main__':
    # Manual smoke run: build the singleton from the local config file,
    # then exercise both self-test helpers against the live Ollama model.
    translator = TranslateLlamaOllama('../files/config.inf')
    translator.testOllamaTranslate()
    translator.testTranslate()
