
import openai
import pandas as pd
from prompt_template_parser import PromptTemplateParser
from prompts import TRANSLATE_PROMPT
import json_repair
from loguru import logger


class OpenAITranslator(object):
    """Translator backed by an OpenAI-compatible chat-completions endpoint.

    Sends a translation prompt (built from ``TRANSLATE_PROMPT``) to a locally
    hosted model and extracts the ``'translated'`` field from its JSON reply.
    """

    # OpenAI-compatible server endpoint and model name.
    LLM_URI = 'http://113.12.102.164:9998/v1'
    LLM_MODEL = 'gemma3:12b'

    def __init__(self):
        # The api_key is a placeholder; the local server presumably does not
        # validate it — confirm if the endpoint changes.
        self.openai_client = openai.Client(
            base_url=OpenAITranslator.LLM_URI,
            api_key='123',
        )

    def llm(self, prompt: str) -> str:
        """Send a single-turn user prompt to the LLM and return the raw reply text."""
        messages = [
            {'role': 'user', 'content': prompt}
        ]
        openai_args = {
            'model': OpenAITranslator.LLM_MODEL,
            'messages': messages,
            # Near-deterministic decoding, appropriate for translation.
            'temperature': 0,
            'top_p': 0.1,
            'max_tokens': None,
            'stop': None
        }
        response = self.openai_client.chat.completions.create(**openai_args)
        response_content = response.choices[0].message.content
        return response_content

    def translate(self, text: str, from_lan: str = "Chinese", to_lan: str = "Thai") -> str:
        """Translate *text* from *from_lan* to *to_lan*.

        Returns the ``'translated'`` field of the model's JSON reply, or the
        raw reply text when the reply cannot be parsed into a dict.
        """
        prompt = PromptTemplateParser(TRANSLATE_PROMPT)
        prompt = prompt.format(
            inputs={
                "from_lan": from_lan,
                "to_lan": to_lan,
                "text": text
            },
            remove_template_variables=True
        )
        response = self.llm(prompt)
        # Keep only the line that can raise inside the try block.
        try:
            data = json_repair.repair_json(response, return_objects=True)
        except Exception as e:
            logger.error(e)
            return response
        # repair_json may return a list/str/primitive instead of a dict; the
        # original `.get` call then raised AttributeError, which was silently
        # swallowed. Check the type explicitly and fall back to the raw reply.
        if isinstance(data, dict):
            return data.get('translated', '')
        logger.warning(f"Unexpected JSON structure from LLM: {type(data).__name__}")
        return response


if __name__ == '__main__':

    # NOTE(review): the workbook/sheet is Chinese->Malay ('中文翻译马来语'), but
    # the target language passed below is "Thai" — confirm this is intentional.
    df = pd.read_excel(r'D:\datasets\中文-马来语互译100句.xlsx', sheet_name='中文翻译马来语')
    # Derive the result column from the class constant instead of duplicating
    # the model name as a string literal.
    result_col = OpenAITranslator.LLM_MODEL
    df[result_col] = None
    translator = OpenAITranslator()
    for idx, row in df.iterrows():
        translated = translator.translate(row['中文'], "Chinese", "Thai")
        logger.info(f"{idx} --- {row['中文']} --- {translated}")
        # Write directly into the DataFrame by label. The original code mutated
        # `row` (a copy) and wrote it back with `df.iloc[idx] = row`, which
        # addresses by *position* while `idx` is a *label* — correct only by
        # accident on a default RangeIndex.
        df.at[idx, result_col] = translated
    print(df)