import hashlib
import json
import os
import shutil

from openai import OpenAI


# Path of the JSON configuration file (auto-generated on first run).
CONFIG_FILE = 'translate.config.json'
# Path of the JSON file holding per-file hashes from the previous run.
HASH_FILE = 'translate.hash.json'
# The four values below are placeholders; they are populated from the
# config file inside main() before any other function reads them.
SOURCE_DIR = ''
TARGET_DIR = ''
IGNORE_FILES = []
IGNORE_DIRS = []
# OpenAI-compatible API client; constructed in main() with the configured key.
client: OpenAI

# Template written to CONFIG_FILE when it does not exist yet, so the user
# has a skeleton to fill in.
config_template = {
    "source_dir": "./docs",
    "target_dir": "./i18n/en",
    "ignore_files": ["tags.yml"],
    "ignore_dirs": ["apis"],
    "aliyun_qwen-turbo_api_key": "your-api-key",
}


def validate_config(config):
    """Check that every required config entry has a value.

    Prints a hint listing each missing/empty key and returns False when
    any are absent; returns True when the config is complete.
    """
    required = ('source_dir', 'target_dir', 'aliyun_qwen-turbo_api_key')
    missing = [key for key in required if not config.get(key)]
    if not missing:
        return True
    print("以下配置项没有值，请检查配置文件：")
    for key in missing:
        print(f" - {key}")
    return False


def calculate_file_hash(file_path, hash_algorithm='sha256'):
    """Return the hex digest of the file at *file_path*.

    The file is streamed in 4 KiB chunks so arbitrarily large files can be
    hashed without loading them fully into memory.
    """
    digest = hashlib.new(hash_algorithm)
    with open(file_path, 'rb') as stream:
        while chunk := stream.read(4096):
            digest.update(chunk)
    return digest.hexdigest()


def scan_folder(folder_path: str):
    """Walk *folder_path* and map each file's relative path to its hash.

    Directories whose path relative to *folder_path* appears in IGNORE_DIRS
    are pruned from the walk, so their entire subtrees are skipped.

    Returns a dict of {relative_file_path: hex_digest}.
    """
    global IGNORE_DIRS
    file_hashes = {}
    folder_abs_path = os.path.abspath(folder_path)
    if not folder_abs_path.endswith(os.sep):
        folder_abs_path += os.sep
    ignored = set(IGNORE_DIRS)
    for root, dirs, files in os.walk(folder_abs_path):
        # Prune ignored directories in-place so os.walk never descends into
        # them. The previous `continue`-only approach skipped an ignored
        # directory's own files but still visited all of its subdirectories.
        dirs[:] = [
            d for d in dirs
            if os.path.relpath(os.path.join(root, d), folder_abs_path) not in ignored
        ]
        if os.path.relpath(root, folder_abs_path) in ignored:
            continue
        for file_name in files:
            file_abs_path = os.path.join(root, file_name)
            file_rel_path = os.path.relpath(file_abs_path, folder_abs_path)
            file_hashes[file_rel_path] = calculate_file_hash(file_abs_path)
    return file_hashes


def save_json_to_file(data, output_file):
    """Serialize *data* as pretty-printed JSON into *output_file*."""
    serialized = json.dumps(data, indent=4)
    with open(output_file, 'w') as handle:
        handle.write(serialized)


def load_json_from_file(input_file):
    """Deserialize and return the JSON document stored in *input_file*."""
    with open(input_file) as handle:
        return json.load(handle)


def remove_ignore_dir_from_hashes(file_hashes):
    """Return a copy of *file_hashes* without entries under any IGNORE_DIRS dir.

    A file is dropped when the absolute form of its path shares a common
    prefix path with the absolute form of an ignored directory (i.e. it lies
    inside that directory).
    """
    global IGNORE_DIRS

    def _is_ignored(path):
        # Both sides are resolved against the current working directory,
        # mirroring how the hashes and IGNORE_DIRS entries are stored.
        abs_path = os.path.abspath(path)
        for dir_path in IGNORE_DIRS:
            dir_abs = os.path.abspath(dir_path)
            if os.path.commonpath([abs_path, dir_abs]) == dir_abs:
                return True
        return False

    return {
        path: digest
        for path, digest in file_hashes.items()
        if not _is_ignored(path)
    }


def compare_hashes(old_hashes, new_hashes):
    """Diff two {path: hash} maps.

    Returns (modified_files, added_files, deleted_files):
    - modified: present in both, hash differs
    - added:    only in new_hashes
    - deleted:  only in old_hashes
    """
    added_files = [path for path in new_hashes if path not in old_hashes]
    modified_files = [
        path for path, digest in new_hashes.items()
        if path in old_hashes and old_hashes[path] != digest
    ]
    deleted_files = [path for path in old_hashes if path not in new_hashes]
    return modified_files, added_files, deleted_files


def check_file_exist(file_path):
    """Return True if *file_path* exists (file or directory)."""
    return os.path.exists(file_path)


# NOTE(review): dead code — superseded by scan_folder(); consider deleting.
# def get_all_files(folder_path: str):
#     global IGNORE_DIRS
#     all_files = []
#     folder_path = os.path.abspath(folder_path)
#     if not folder_path.endswith(os.sep):
#         folder_path += os.sep
#     for root, _, files in os.walk(folder_path):
#         c_dir = os.path.relpath(root, folder_path)
#         if c_dir in IGNORE_DIRS:
#             continue
#         for file_name in files:
#             file_abs_path = os.path.join(root, file_name)
#             file_rel_path = os.path.relpath(file_abs_path, folder_path)
#             all_files.append(file_rel_path)
#     return all_files


def translate(text):
    """Translate markdown/MDX *text* to English via the qwen-turbo model.

    Returns the model's translation, the input unchanged when it is empty
    or whitespace-only, or None when the API call fails (the caller records
    the file for retry on the next run).
    """
    global client
    if not text or text.isspace():
        return text
    try:
        completion = client.chat.completions.create(
            model="qwen-turbo",
            messages=[
                {'role': 'system', 'content': 'You are a helpful assistant.'},
                {'role': 'user', 'content': f'把以下md或mdx内容翻译成英文（只要输出译文！保持原文的结构！）：\n{text}'}],
        )
        return completion.choices[0].message.content
    except Exception as e:
        # Best-effort by design (caller retries on None), but surface the
        # reason instead of silently discarding it.
        print(f"\r翻译时出错 {e}")
        return None


def mock_translate(text):
    """Fake translator for dry runs: return the input with a 'mock' marker."""
    marker = '\n\nmock'
    return text + marker


def delete_file(file_path):
    """Delete *file_path*; return True on success, False otherwise.

    EAFP: attempting the removal directly avoids the check-then-act race
    of testing os.path.exists() first, and narrows the handler to the
    OSError family that os.remove actually raises.
    """
    try:
        os.remove(file_path)
    except FileNotFoundError:
        print(f"\r不存在，无法删除 '{file_path}' ")
        return False
    except OSError as e:
        print(f"\r删除时出错 '{file_path}' {e}")
        return False
    print(f"\r删除 '{file_path}'")
    return True


def read_file(file_path, mode='r', encoding='utf-8'):
    """Read and return the contents of *file_path*, or None on any failure.

    EAFP: opening directly avoids the check-then-act race of testing
    os.path.exists() before the open.
    """
    try:
        with open(file_path, mode, encoding=encoding) as f:
            return f.read()
    except FileNotFoundError:
        print(f"\r不存在，无法读取 '{file_path}'")
        return None
    except Exception as e:
        # Kept broad on purpose: also covers bad mode/encoding combinations,
        # matching the original best-effort contract (caller handles None).
        print(f"\r读取时出错 '{file_path}' {e}")
        return None


def write_file(file_path, content, mode='w', encoding='utf-8'):
    """Write *content* to *file_path*, creating parent dirs; True on success.

    Uses os.makedirs(..., exist_ok=True) instead of an exists() pre-check,
    which is both simpler and free of the check-then-act race.
    """
    try:
        dir_path = os.path.dirname(file_path)
        if dir_path:
            os.makedirs(dir_path, exist_ok=True)
        with open(file_path, mode, encoding=encoding) as f:
            f.write(content)
        print(f"\r写入 '{file_path}'")
        return True
    except Exception as e:
        print(f"\r写入时出错 '{file_path}' {e}")
        return False


def copy_file(src_path, dst_path):
    """Copy *src_path* to *dst_path* (metadata too); True on success.

    Parent directories of the destination are created with
    os.makedirs(..., exist_ok=True), avoiding the exists()/makedirs race.
    """
    if not os.path.exists(src_path):
        print(f"\r不存在，无法复制 '{src_path}'")
        return False
    try:
        dst_dir = os.path.dirname(dst_path)
        if dst_dir:
            os.makedirs(dst_dir, exist_ok=True)
        # copy2 preserves file metadata (mtime etc.), not just contents.
        shutil.copy2(src_path, dst_path)
        print(f"\r复制 '{dst_path}'")
        return True
    except Exception as e:
        print(f"\r复制时出错 '{dst_path}' {e}")
        return False


def handle_diff(modified_files, added_files, deleted_files):
    """Apply the computed diff to TARGET_DIR.

    Deletes removed files, translates added/modified .md/.mdx files, and
    copies everything else, skipping names listed in IGNORE_FILES.
    Returns the list of relative paths that failed to process.

    Fix: the progress counter is now incremented exactly once per file,
    including error and ignored paths — previously the `continue` branches
    skipped the increment, so the "x / total" display stalled and could
    never reach the total.
    """
    global SOURCE_DIR
    global TARGET_DIR
    total_files = len(modified_files) + len(added_files) + len(deleted_files)
    processed_files = 0
    error_file_list = []
    for file_path in deleted_files:
        processed_files += 1
        print(f"\r{processed_files} / {total_files} 删除中...", end="", flush=True)
        if not delete_file(os.path.join(TARGET_DIR, file_path)):
            error_file_list.append(file_path)
    for file_path in added_files + modified_files:
        processed_files += 1
        if file_path in IGNORE_FILES:
            continue
        src_file_path = os.path.join(SOURCE_DIR, file_path)
        tgt_file_path = os.path.join(TARGET_DIR, file_path)
        file_ext = os.path.splitext(file_path)[1].lower()
        if file_ext in {'.md', '.mdx'}:
            # Markdown content goes through the LLM translator.
            print(f"\r{processed_files} / {total_files} 翻译中...", end="", flush=True)
            src_file_content = read_file(src_file_path)
            if src_file_content is None:
                error_file_list.append(file_path)
                continue
            trans_content = translate(src_file_content)
            if trans_content is None:
                error_file_list.append(file_path)
                continue
            if not write_file(tgt_file_path, trans_content):
                error_file_list.append(file_path)
        else:
            # Any other file type is mirrored verbatim.
            print(f"\r{processed_files} / {total_files} 复制中...", end="", flush=True)
            if not copy_file(src_file_path, tgt_file_path):
                error_file_list.append(file_path)
    print()
    return error_file_list


def main():
    """Entry point: load config, diff source against stored hashes, apply.

    Fix: validate_config() is now called BEFORE the required keys are
    indexed — previously `config['source_dir']` etc. ran first, so a
    missing key raised a raw KeyError and the friendly missing-key report
    was unreachable. Optional keys fall back to empty lists via .get().
    """
    global CONFIG_FILE
    global HASH_FILE
    global SOURCE_DIR
    global TARGET_DIR
    global IGNORE_FILES
    global IGNORE_DIRS
    global client
    if not check_file_exist(CONFIG_FILE):
        print(f"找不到配置文件：'{CONFIG_FILE}'")
        save_json_to_file(config_template, CONFIG_FILE)
        print(f"已自动生成'{CONFIG_FILE}'，请配置后重试")
        return
    config = load_json_from_file(CONFIG_FILE)
    if not validate_config(config):
        print("配置不完整，程序退出")
        exit(1)
    SOURCE_DIR = config['source_dir']
    TARGET_DIR = config['target_dir']
    # ignore_files / ignore_dirs are optional: default to no ignores.
    IGNORE_FILES = config.get('ignore_files', [])
    IGNORE_DIRS = config.get('ignore_dirs', [])
    api_key = config['aliyun_qwen-turbo_api_key']

    client = OpenAI(
        api_key=api_key,
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    )
    # isdir() already implies existence, so one check suffices per dir.
    if not os.path.isdir(SOURCE_DIR):
        print(f"Source 文件夹 '{SOURCE_DIR}' 不存在")
        exit(1)
    if not os.path.isdir(TARGET_DIR):
        print(f"Target 文件夹 '{TARGET_DIR}' 不存在")
        exit(1)
    if not check_file_exist(HASH_FILE):
        # No hash file yet: mark everything currently in TARGET_DIR as
        # stale and treat all of SOURCE_DIR as new (full translation).
        old_hash = scan_folder(TARGET_DIR)
        old_hash = {key: 'to_be_deleted' for key in old_hash}
        new_hash = scan_folder(SOURCE_DIR)
        modified_files, added_files, deleted_files = compare_hashes(old_hash, new_hash)
        print(f'全量翻译，将处理 {len(modified_files) + len(added_files) + len(deleted_files)} 个文件\n')
    else:
        # Incremental run: diff SOURCE_DIR against the stored hashes.
        old_hash = remove_ignore_dir_from_hashes(load_json_from_file(HASH_FILE))
        new_hash = scan_folder(SOURCE_DIR)
        modified_files, added_files, deleted_files = compare_hashes(old_hash, new_hash)
        print(f'增量翻译，将处理 {len(modified_files) + len(added_files) + len(deleted_files)} 个文件\n')
    err_file_list = handle_diff(modified_files, added_files, deleted_files)
    err_file_list = list(set(err_file_list))
    # Roll back hash entries for failed files so the next run retries them.
    for file_path in err_file_list:
        if file_path in deleted_files:
            new_hash[file_path] = "to_be_deleted"
        elif file_path in added_files:
            new_hash.pop(file_path)
        elif file_path in modified_files:
            new_hash[file_path] = old_hash[file_path]
    if err_file_list:
        print(f'在处理以下文件时出错了：\n{err_file_list}')
        print(f'已记录，重试将进行修复\n')
    save_json_to_file(new_hash, HASH_FILE)
    print(f"哈希文件已更新：{HASH_FILE}")


# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
