import codecs
import glob
import json
from datetime import datetime
import os
import sys

from file_copy import replace_directory_with_copy, get_total_size
from file_comparison import compare_directories
from util_slash import f_win, f_unix
from util_volume_label import map_drive_letter
import util_store

from logger import Logger
import config_setting

fake_copy = False  # passed through to replace_directory_with_copy; presumably a dry-run switch — TODO confirm in file_copy

global_source_vault = {}  # filename -> [source_abs_path, backup_abs_path]; backup slot filled in Step 3, may stay None
global_backup_plan_list = []  # merged plan dicts loaded from all backup_plan_*.json files in Step 1

global_output_error_list = []  # directory-comparison errors collected in Step 4
global_output_not_found_list = []  # backed-up names with no matching source entry
global_output_replaced_list = []  # "source -> target" pairs that were (or should be) copied

global_manual_mappings = {}  # target_name -> [source_name, target_path, size_diff]

if __name__ == "__main__":
    # Ensure the per-batch asset directory tree exists before any logging starts.
    # makedirs creates missing parents, so the two leaf paths cover all four
    # directories the original code created one by one.
    os.makedirs(f"assets/{config_setting.batch_name}/log", exist_ok=True)
    os.makedirs(f"assets/{config_setting.batch_name}/index", exist_ok=True)
    # Mirror all stdout into a timestamped log file for this run.
    sys.stdout = Logger(datetime.now().strftime(f"assets/{config_setting.batch_name}/log/%Y%m%d_%H%M%S") + ".txt")
    # Step 1: read every local backup_plan_*.json file and merge all plan
    # entries into global_backup_plan_list.
    print("--------------------------------------------------------------------")
    print("Step1: 读取本地backup_plan_*.json文件中所有描述备份计划的内容，形成backup_plan_list。")
    backup_plan_file_list = glob.glob("backup_plan_*.json")

    for backup_plan_file in backup_plan_file_list:
        # Built-in open() with an explicit encoding replaces legacy codecs.open().
        with open(backup_plan_file, mode='r', encoding='utf-8') as f:
            temp_json_list = json.load(f)
            for temp_json_item in temp_json_list:
                # JSON may store Enabled as 0/1; normalize to a real bool.
                temp_json_item["Enabled"] = bool(temp_json_item["Enabled"])
            global_backup_plan_list.extend(temp_json_list)
    print(f"backup_plan_list count: {len(global_backup_plan_list)}")

    # Step 2: source side. Scan every ScanPathList entry of each enabled plan
    # and build source_vault: filename -> [source_abs_path, backup_abs_path].
    print("--------------------------------------------------------------------")
    print("Step2: 处理SourcePath这一头。依据backup_plan_list中的scan_path_list，扫描所有仓库并构建source_vault。")
    for backup_plan in global_backup_plan_list:
        if not backup_plan.get("Enabled"):
            # BUG FIX: this was 'break', which aborted ALL remaining plans as
            # soon as one disabled plan was met; a disabled plan should simply
            # be skipped.
            continue
        if "ManualMappings" in backup_plan:
            for manual_mapping in backup_plan.get("ManualMappings"):
                # target_name -> [source_name, target_abs_path, allowed size diff]
                global_manual_mappings[manual_mapping.get("TargetName")] = [
                    manual_mapping.get("SourceName"),
                    os.path.join(backup_plan.get("BackupPath"), manual_mapping.get("TargetName")),
                    manual_mapping.get("SizeDiff"),
                ]

        for scan_path in backup_plan.get("ScanPathList"):
            for filename in os.listdir(map_drive_letter(scan_path)):
                # The backup path (slot [1]) is filled in during Step 3.
                global_source_vault[filename] = [os.path.join(scan_path, filename), None]
    print(f"global_source_vault count: {len(global_source_vault)}")

    # Step 3: backup side. List everything already present under each enabled
    # plan's BackupPath and record the backup location in source_vault.
    print("--------------------------------------------------------------------")
    print("Step3: 处理BackupPath这一头。扫描BackupPath中所有文件，自动构建待备份资源名单。")
    backup_filename_list = []
    for backup_plan_item in global_backup_plan_list:
        if not backup_plan_item.get("Enabled"):
            # BUG FIX: this was 'break', which skipped every later plan once a
            # disabled one was encountered; skip only the disabled plan.
            continue
        backup_path = backup_plan_item.get("BackupPath")
        filename_list = os.listdir(map_drive_letter(backup_path))
        # Ignore Windows system folders that live at a drive root.
        filename_list = list(filter(lambda x: x not in ["$RECYCLE.BIN", "System Volume Information"], set(filename_list)))
        backup_filename_list.extend(filename_list)
        for backup_filename in filename_list:
            if backup_filename in global_source_vault:
                global_source_vault[backup_filename][1] = os.path.join(backup_path, backup_filename)
    # Several plans may share one backup_path, so dedupe the combined list.
    backup_filename_list = list(set(backup_filename_list))
    print(f"backup_filename_list count: {len(backup_filename_list)}")

    # Step 4: join the two sides. For each already-backed-up name, find its
    # source directory in source_vault and refresh the backup when the trees
    # differ; manually mapped names are size-compared and only reported.
    print("--------------------------------------------------------------------")
    print("Step4: 关联Source和Backup两头。遍历所有已经备份的文件名，去source_vault中找到其对应的源目录，构建潜在backup计划。")
    for backup_filename in backup_filename_list:
        print(f"processing {backup_filename}...")
        if backup_filename in global_source_vault:
            source_path = global_source_vault[backup_filename][0]
            target_path = global_source_vault[backup_filename][1]
            print(f"源地址: {f_win(source_path)}, 目标地址: {f_win(target_path)}")

            # Check whether a backup is needed by diffing the two trees.
            compare_result = compare_directories(map_drive_letter(source_path), map_drive_letter(target_path))
            if len(compare_result.get("errors")) != 0:
                # Comparison itself failed: record, never copy over the backup.
                print(f"\033[41m[ERRORs]\033[0m found compare_result errors: {compare_result.get('errors')}")
                global_output_error_list.append(f"compare_result errors: {compare_result.get('errors')}")
            else:
                if len(compare_result.get("modified")) != 0 or len(compare_result.get("added")) != 0 or len(compare_result.get("removed")) != 0:
                    # Any difference -> replace the backup copy with the source.
                    replace_directory_with_copy(map_drive_letter(source_path), map_drive_letter(target_path), fake_copy=fake_copy)
                    global_output_replaced_list.append(f"{f_win(source_path)} -> {f_win(target_path)}")
                else:
                    print(f"资源 {backup_filename} \033[32m无变化\033[0m")
        elif backup_filename in global_manual_mappings:
            # Manually mapped target: compare total sizes against a threshold
            # instead of a full tree diff. NOTE(review): this branch only
            # reports the needed copy — no copy is executed; confirm that the
            # user is expected to perform it by hand.
            source_name = global_manual_mappings[backup_filename][0]
            target_path = global_manual_mappings[backup_filename][1]
            # SizeDiff is stored like "50mb"; strip the suffix to get the MB threshold.
            size_diff = float(global_manual_mappings[backup_filename][2].replace("mb", ""))
            target_path_total_size = get_total_size(map_drive_letter(target_path)) / (1024 * 1024)  # MB

            # NOTE(review): raises KeyError if source_name is absent from
            # global_source_vault — no guard for a bad ManualMappings entry.
            source_path = global_source_vault[source_name][0]
            # NOTE(review): every other path here goes through map_drive_letter
            # before filesystem access, but this call does not — confirm
            # whether map_drive_letter(source_path) is needed.
            source_path_total_size = get_total_size(source_path) / (1024 * 1024)  # MB
            if abs(target_path_total_size - source_path_total_size) > size_diff:
                print(f"\033[46m[Manual Copy]\033[0m from: {f_win(source_path)} to: {f_win(target_path)}")
                global_output_replaced_list.append(f"{f_win(source_path)} -> {f_win(target_path)}")
            else:
                print(f"资源 {backup_filename} \033[32m映射后 无变化\033[0m")
        else:
            # Backed-up name with no known source: report it as missing.
            global_output_not_found_list.append(backup_filename)
            print(f"源端资源 {backup_filename} \033[31m 没找到\033[0m")

    # Step 5: persist the run reports (errors, missing sources, replaced items).
    print("--------------------------------------------------------------------")
    print("Step5: 最后处理报告。")
    report_sections = [
        ("output_error_list", global_output_error_list),
        ("output_not_found_list", global_output_not_found_list),
        ("output_replaced_list", global_output_replaced_list),
    ]
    for report_name, report_lines in report_sections:
        util_store.write_list(f"assets/{config_setting.batch_name}/{report_name}.txt", report_lines)
        print(f"{report_name}: {len(report_lines)}")

    resource_index = {}
    # Reverse the manual mappings: SourceName -> backup target path, so the
    # index can render "source -> target" for manually mapped entries.
    # (The original bound v[0] to a variable named source_path, but it holds a
    # source NAME — renamed here for accuracy.)
    global_manual_reverse_mapping = {}
    for mapping in global_manual_mappings.values():
        src_name, tgt_path = mapping[0], mapping[1]
        global_manual_reverse_mapping[src_name] = tgt_path

    # One index file per source parent directory; entries are sorted by name.
    for name in sorted(global_source_vault):
        src_path, backup_path = global_source_vault[name]
        path_parts = f_unix(src_path).split('/')
        category = path_parts[-2]  # parent directory acts as the index bucket
        leaf = path_parts[-1]
        if backup_path:
            entry = "* " + leaf  # "*" marks entries that have a backup
        elif name in global_manual_reverse_mapping:
            mapped_leaf = f_unix(global_manual_reverse_mapping[name]).split("/")[-1]
            entry = "* " + leaf + " -> " + mapped_leaf
        else:
            entry = leaf
        resource_index.setdefault(category, []).append(entry)

    for category, entries in resource_index.items():
        util_store.write_list(f"assets/{config_setting.batch_name}/index/{category}.txt", entries, dedupe=False)

