import json
import os
import hashlib
import requests
import redis
import logging
from config import TRANSLATIONS_DIR, REDIS_HOST, REDIS_PORT, REDIS_DB
from utils import normalize_url

class TranslationProcessor:
    """Batch text translator backed by a GPT HTTP API with a Redis cache.

    Translations are stored in Redis under ``global_trans:<text_id>`` keys,
    each value being a JSON object mapping target language -> translated text.
    ``text_id`` is derived from the MD5 of the source text, so identical
    strings share one cache entry across files.
    """

    # Seconds to wait for the translation API. Without a timeout a hung
    # connection would block the whole pipeline indefinitely.
    REQUEST_TIMEOUT = 60

    def __init__(self):
        # Initialize the Redis connection (decode_responses=True so we get
        # str back instead of bytes).
        self.redis_client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
        self.api_url = "https://test-plugin.longpean.com/gpt/createCompletion"

        # Configure logging. Handlers are attached only once so repeated
        # instantiation does not duplicate log output.
        self.logger = logging.getLogger('TranslationProcessor')
        self.logger.setLevel(logging.INFO)

        if not self.logger.handlers:
            # Console handler: INFO and above.
            console_handler = logging.StreamHandler()
            console_handler.setLevel(logging.INFO)
            console_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            console_handler.setFormatter(console_formatter)
            self.logger.addHandler(console_handler)

            # File handler: DEBUG and above for a more detailed trail.
            log_dir = os.path.join(os.path.dirname(TRANSLATIONS_DIR), 'logs')
            os.makedirs(log_dir, exist_ok=True)
            log_file = os.path.join(log_dir, 'translator.log')
            file_handler = logging.FileHandler(log_file, encoding='utf-8')
            file_handler.setLevel(logging.DEBUG)
            file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            file_handler.setFormatter(file_formatter)
            self.logger.addHandler(file_handler)

            self.logger.info(f"Log file initialized at: {log_file}")

    def get_global_dict(self, text_id, target_lang):
        """Return the cached translation of *text_id* for *target_lang*.

        Returns ``None`` on a cache miss (either no Redis entry for the text
        or no translation for the requested language).
        """
        redis_key = f"global_trans:{text_id}"
        trans_data = self.redis_client.get(redis_key)
        if trans_data:
            trans_dict = json.loads(trans_data)
            self.logger.debug("Cache hit for text_id: %s, target_lang: %s", text_id, target_lang)
            return trans_dict.get(target_lang)
        self.logger.debug("Cache miss for text_id: %s, target_lang: %s", text_id, target_lang)
        return None

    def set_global_dict(self, text_id, target_lang, translated_text):
        """Store *translated_text* for (*text_id*, *target_lang*) in Redis.

        NOTE(review): this is a non-atomic read-modify-write; concurrent
        writers to the same key could lose a language entry. Acceptable for a
        single-process pipeline — revisit if workers are parallelized.
        """
        redis_key = f"global_trans:{text_id}"
        trans_data = self.redis_client.get(redis_key)
        trans_dict = json.loads(trans_data) if trans_data else {}

        trans_dict[target_lang] = translated_text
        self.redis_client.set(redis_key, json.dumps(trans_dict))

    def _fill_with_originals(self, results, text_indices, texts):
        """Fallback after a failed API call: put the untranslated source text
        at every index that was awaiting a translation."""
        for idx, _ in text_indices:
            self.logger.warning("Using original text for failed translation at index %s", idx)
            results[idx] = texts[idx]

    def translate_text_batch(self, texts, target_lang='zh-cn'):
        """Translate *texts* into *target_lang*, consulting the cache first.

        Cached entries are reused; the remaining texts are sent to the API in
        a single request and the results cached. On any API/parse failure the
        original text is returned in place of a translation, so the returned
        list always has the same length and order as *texts*.
        """
        self.logger.info("Starting batch translation for %s texts to %s", len(texts), target_lang)

        # Position-indexed results: cache hits are filled immediately; the
        # rest are filled after the API call. This avoids the O(n^2)
        # list.insert dance and cannot double-fill on partial failure.
        translated_texts = [None] * len(texts)
        to_translate = []
        text_indices = []  # (original index, cache key id) per uncached text

        for idx, text in enumerate(texts):
            text_id = f"text_{hashlib.md5(text.encode()).hexdigest()}"
            cached_translation = self.get_global_dict(text_id, target_lang)
            # "is not None" so a legitimately-empty cached translation is
            # still treated as a hit rather than re-requested forever.
            if cached_translation is not None:
                self.logger.debug("Using cached translation for text[%s]: %s...", idx, text[:30])
                translated_texts[idx] = cached_translation
            else:
                to_translate.append(text)
                text_indices.append((idx, text_id))

        if to_translate:
            self.logger.info("Translating %s uncached texts", len(to_translate))

            text_to_send = json.dumps(to_translate)
            # Request body for the GPT completion endpoint. The message asks
            # for a JSON array of translated strings so the reply can be
            # parsed with json.loads().
            payload = {
                "operatorId": 1360,
                "operatorName": "陈涛涛",
                "message": "请将以下字符串形式的数组中的各个文本翻译成中文，返回时只返回数组形式的翻译好的文本，不要包含其他任何内容(注意翻译好的文本是字符串，所以用引号标识，我能通过python的json.loads()方法解析)：\n" + text_to_send,
                "model": "gpt-4o",
                "messageList": [],
                "vip": "longpean"
            }
            try:
                self.logger.debug("Sending translation request with payload: %s", text_to_send)
                self.logger.debug("Sending POST request to %s", self.api_url)
                # Bounded timeout so a dead endpoint cannot hang the pipeline.
                response = requests.post(self.api_url, json=payload, timeout=self.REQUEST_TIMEOUT)

                # Log status and a truncated body (first 500 chars) for diagnosis.
                self.logger.debug("API Response status: %s", response.status_code)
                self.logger.debug("API Response content: %s", response.text[:500])

                response.raise_for_status()

                try:
                    response_data = response.json()
                    self.logger.debug("Parsed response data: %s", response_data)

                    translated_batch = response_data.get('data', {}).get('content', '')
                    if not translated_batch:
                        raise ValueError("Empty translation result from API")

                    # The model is asked to return a JSON array of strings.
                    translated_list = json.loads(translated_batch)
                    if len(translated_list) != len(to_translate):
                        self.logger.error(
                            "Translation response length mismatch. Expected: %s, Got: %s, Content: %s",
                            len(to_translate), len(translated_list), translated_batch[:200]
                        )
                        raise ValueError("Translation response length mismatch")

                    # Cache each translation and slot it back into position.
                    for (idx, text_id), translated_text in zip(text_indices, translated_list):
                        self.logger.debug("Caching translation for text_id: %s", text_id)
                        self.set_global_dict(text_id, target_lang, translated_text)
                        translated_texts[idx] = translated_text

                    self.logger.info("Successfully translated and cached %s texts", len(to_translate))

                except json.JSONDecodeError as e:
                    self.logger.error("Failed to parse API response: %s", e)
                    self.logger.error("Raw response: %s", response.text)
                    raise  # handled by the generic fallback below

            except requests.RequestException as e:
                self.logger.error("API request failed: %s", e)
                self.logger.error("Request URL: %s", self.api_url)
                self.logger.error("Request payload: %s", payload)
                # Fall back to the original text on transport-level failure.
                self._fill_with_originals(translated_texts, text_indices, texts)

            except Exception as e:
                self.logger.error("Unexpected error during translation: %s", e, exc_info=True)
                # Fall back to the original text on any other failure.
                self._fill_with_originals(translated_texts, text_indices, texts)

        return translated_texts

    def process_dictionary(self, pre_trans_file, target_lang='zh-cn'):
        """Process one pre-translation dictionary file and write the result.

        Reads a JSON file of the form ``{"hash", "type", "texts": [{"id",
        "text"}, ...]}``, batch-translates every text, and writes
        ``trans_<hash>.json`` into TRANSLATIONS_DIR with the original and
        translated text per entry. Returns the output file path.
        """
        self.logger.info("Processing dictionary file: %s", pre_trans_file)

        with open(pre_trans_file, 'r', encoding='utf-8') as f:
            pre_dict = json.load(f)

        hash_value = pre_dict['hash']
        texts = [item['text'] for item in pre_dict['texts']]
        text_ids = [item['id'] for item in pre_dict['texts']]

        self.logger.info("Found %s texts to translate in %s", len(texts), pre_trans_file)

        # Batch-translate (cache-aware; order is preserved).
        translated_texts = self.translate_text_batch(texts, target_lang)

        # Re-associate each translation with its id and original text.
        trans_texts = [
            {"id": text_id, "original": original_text, "translated": translated_text}
            for text_id, original_text, translated_text in zip(text_ids, texts, translated_texts)
        ]

        # Persist the translation dictionary.
        trans_dict = {
            "hash": hash_value,
            "type": pre_dict['type'],
            "texts": trans_texts
        }
        output_file = os.path.join(TRANSLATIONS_DIR, f"trans_{hash_value}.json")
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(trans_dict, f, ensure_ascii=False, indent=2)

        self.logger.info("Successfully saved translation dictionary to: %s", output_file)
        return output_file