import os
import logging
from pathlib import Path
from external.llm_service import LlmService

def translate_with_llm(sentence, word):
    """
    Translate an English sentence with an LLM and explain a word's meaning in context.

    Args:
        sentence: the English sentence to translate.
        word: the word whose in-context meaning should be explained.

    Returns:
        dict: on success, the LLM response {"word_meaning": "...", "sentence_zh": "..."}.
        On any failure (service unavailable, malformed response, validation
        failure, or exception) returns {"word_meaning": "", "sentence_zh": ""}
        so callers can always subscript the result safely.
    """
    # Single fallback value for every failure path, matching the documented contract.
    default_result = {"word_meaning": "", "sentence_zh": ""}

    try:
        llm_service = LlmService()
        if not llm_service.available:
            logging.warning("aliyun服务不可用，无法进行翻译")
            return default_result

        # System prompt lives at <package_root>/prompt/sentence_translate.txt
        # (one level above this file's directory). Missing file is tolerated:
        # we simply omit the system message rather than sending content=None.
        prompt_path = Path(__file__).resolve().parent.parent / 'prompt' / 'sentence_translate.txt'
        system_prompt = None
        if prompt_path.exists():
            system_prompt = prompt_path.read_text(encoding='utf-8').strip()

        user_prompt = f"word: \"{word}\"\nsentence: \"{sentence}\""

        messages = []
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})
        messages.append({"role": "user", "content": user_prompt})

        response = llm_service.llm_chat_json(messages)

        # Guard the type before membership tests: a non-dict JSON payload
        # (e.g. a list) would otherwise raise on the 'in' checks below.
        if isinstance(response, dict) and "word_meaning" in response and "sentence_zh" in response:
            # Sanity check: the extracted word meaning must appear verbatim
            # inside the translated sentence, otherwise treat it as a failure.
            if response['word_meaning'].strip() in response['sentence_zh']:
                return response
            logging.error(f"翻译失败: {sentence}:{response['sentence_zh']}\n{word}:{response['word_meaning']}")
            return default_result

        logging.error(f"翻译失败: {sentence}, {word}")
        return default_result

    except Exception as e:
        # Best-effort API: never propagate; log and return the safe default.
        logging.error(f"翻译过程中发生错误: {str(e)}")
        return default_result