#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import json
import os.path
import sys

from dws.commons.log_utils import log
from common.util.check_utils import is_valid_id
from common.common import output_result_file, exter_attack
from common.common_models import ActionResult
from common.const import ExecuteResultEnum

from dws.backup.cluster_backup import ClusterBackup
from dws.commons.common import umount_all_gds_data_dir
from dws.commons.const import RestoreSubtaskName, IntrusiveMode, CopyGenerateType, TaskType, PERMISSION_600
from dws.commons.function_tool import log_start, get_register_ip
from dws.commons.job_info import JobInfo
from dws.commons.progress_notify import write_file, get_progress_path

from dws.restore.parse_restore_job import ParseRestoreJobParam
from dws.restore.roach_restore_job import RoachRestoreJob
from dws.restore.gds_restore_job import GDSRestoreJob, GDSRestoreWhiteListJob

from common.file_common import change_path_permission
from common.util.exec_utils import exec_mkdir_cmd

# Mapping from the copy-type string carried in the job parameters to the
# numeric copy-type code written into the task info file for the backup tool.
COPY_TYPE_MAP = {
    "full": 1, "increment": 2, "diff": 3, "log": 4, "nativeSnapshot": 5,
    "foreverIncrement": 6, "replication": 7, "s3Archive": 8, "tapeArchive": 9, "clone": 10
}


@log_start()
def get_task_info(restore_job_param):
    """
    Assemble the copy information dict that is written into the cache repo.

    Returns an empty dict when the repository info is missing, or when an
    s3Archive copy lacks a usable archive server address.
    """
    repositories = restore_job_param.get_repo_info()
    if not repositories:
        log.error(f"Fail to get all repositories info.")
        return {}
    copy_type_code = COPY_TYPE_MAP.get(restore_job_param.get_copy_type())
    task_info = {"repositories": repositories, "taskType": 1, "copyType": copy_type_code}
    if copy_type_code != COPY_TYPE_MAP["s3Archive"]:
        return task_info
    # s3Archive copies additionally need the archive file server endpoints.
    archive_ip, archive_port = restore_job_param.get_archive_ip_port()
    ret_code, ssl_enable = restore_job_param.get_archive_ssl_enable()
    if not (archive_ip and archive_port and ret_code):
        return {}
    task_info["archiveFileServers"] = [
        {"ip": single_ip, "port": archive_port, "sslEnabled": ssl_enable}
        for single_ip in archive_ip
    ]
    return task_info


@log_start()
def save_restore_task_info(job_info, task_info, host_ip):
    """
    Persist the task information of one host under the cache repository.

    The file is written to <cache>/tmp/<copy_id>/taskInfo_<host_ip>.txt and
    its permission is afterwards restricted to the rdadmin user (600).
    """
    task_file = os.path.join(
        job_info.cache_path, 'tmp', job_info.copy_id, f'taskInfo_{host_ip}.txt'
    )
    try:
        write_file(task_file, json.dumps(task_info))
    except Exception as exception:
        log.error(f"Write task info failed err: {exception}, job_info: {job_info.log_format()}")
        return False
    change_path_permission(task_file, user_name="rdadmin", mode=PERMISSION_600)
    return True


def do_gds_white_subtask(restore_job_param):
    """
    Run the GDS white-list sub task and record its progress.

    Writes "Running" into the progress file before executing the sub task
    and "Success"/"Fail" afterwards, so the progress can be tracked.

    :param restore_job_param: parsed restore job parameters
    :return: True when the sub task and all progress writes succeeded
    """

    def _report_progress(info):
        # A failed progress write is treated as a sub task failure.
        try:
            write_file(progress_file, info)
        except Exception as ex:
            log.error(f"Write progress file failed exception: {ex}")
            return False
        return True

    copy_id = restore_job_param.get_source_backup_copy_id()
    subtask_id = restore_job_param.get_subtask_id()
    cache_path = restore_job_param.get_cache_path()
    if not copy_id or not subtask_id or not cache_path:
        log.error(f"Copy id:{copy_id} or subtask id: {subtask_id} or cache path: {cache_path} is none")
        return False
    progress_file = get_progress_path(copy_id, subtask_id, cache_path[0])
    # Make sure the directory holding the progress file exists.
    if not os.path.exists(os.path.dirname(progress_file)):
        exec_mkdir_cmd(os.path.dirname(progress_file))
    if not _report_progress("Running"):
        return False
    ret = do_gds_white_list_subtask(restore_job_param)
    if not _report_progress("Success" if ret else "Fail"):
        return False
    return ret


def do_gds_white_list_subtask(restore_job_param):
    """
    Create the GDS white-list restore job and persist its GDS/filesystem info.

    If persisting fails, all GDS data directories are unmounted before
    returning False.
    """
    try:
        job = GDSRestoreWhiteListJob(restore_job_param)
    except Exception as ex:
        log.error(f"Restore gds white list job error exception: {ex}")
        return False
    if job.save_gds_and_fs_info():
        return True
    log.error("Save gds process param failed")
    umount_all_gds_data_dir()
    return False


@log_start()
def do_white_list_subtask(restore_job_param, pid, main_task_id, sub_task_id):
    """
    Execute the white-list sub task of a restore job.

    GDS-generated copies are delegated to do_gds_white_subtask. For other
    copies the function assembles a JobInfo, saves the task information into
    the cache repository and runs the intrusive white-list sub job.

    :return: True on success, False otherwise
    """
    if restore_job_param.get_copy_generate_type() == CopyGenerateType.GENERATE_BY_GDS:
        return do_gds_white_subtask(restore_job_param)
    param = restore_job_param.get_restore_param()
    copy_id = restore_job_param.get_copy_id()
    metadata_path, cache_path = restore_job_param.get_available_path()
    if not cache_path:
        log.error(f"No cache/metadata path can be read or written. cache_path: "
                  f"{cache_path}, metadata_path:{metadata_path}")
        return False
    if restore_job_param.get_copy_type() == "s3Archive":
        # Archive copies read their metadata from the cache repository.
        metadata_path = cache_path

    user_name, env_path = restore_job_param.get_target_cluster_auth_info()
    metadata_des = restore_job_param.get_metadata_destination()
    _, cluster_agent, host_agent = restore_job_param.get_nodes_info()
    # Hoist the repeated deep lookups once, with safe defaults, so a missing
    # "job"/"copies" key cannot raise half-way through building JobInfo.
    job = param.get("job", {})
    copy_info = (job.get("copies") or [{}])[0]
    copy_extend = copy_info.get("extendInfo", {})
    job_info = JobInfo(pid=pid, job_id=main_task_id, copy_id=copy_id, sub_job_id=sub_task_id,
                       type=TaskType.RESTORE.value, cache_path=cache_path, meta_path=metadata_path,
                       usr=user_name, env_path=env_path, metadata_destination=metadata_des,
                       storage_id=copy_extend.get("storage_id", ""),
                       protect_env=copy_info.get("protectEnv"),
                       backup_tool_type=copy_extend.get("backupToolType", ""),
                       res_name=job.get("targetObject", {}).get("name", ""),
                       nodes=job.get("targetEnv", {}).get("nodes", []),
                       data_reps=restore_job_param.get_all_data_repo(),
                       cluster_agents=cluster_agent,
                       host_agents=host_agent)

    if not save_restore_task_info(job_info, get_task_info(restore_job_param),
                                  get_register_ip(cluster_agent, host_agent)):
        log.error(f"Save task_info failed, {job_info.log_format()}")
        return False

    if not ClusterBackup(pid).white_sub_job(job_info, IntrusiveMode.INTRUSIVE_MODE):
        log.error(f"Execute white list task failed. {job_info.log_format()}")
        return False

    return True


def do_clean_list_subtask(restore_job_param):
    """
    Clean-up sub task of a restore job.

    Only GDS-generated copies need cleanup: all GDS data directories are
    unmounted, with "Running"/"Success" recorded in the progress file around
    the operation. Non-GDS copies succeed immediately.

    :param restore_job_param: parsed restore job parameters
    :return: True on success, False otherwise
    """
    if restore_job_param.get_copy_generate_type() != CopyGenerateType.GENERATE_BY_GDS:
        return True

    copy_id = restore_job_param.get_source_backup_copy_id()
    subtask_id = restore_job_param.get_subtask_id()
    cache_path = restore_job_param.get_cache_path()
    if not copy_id or not subtask_id or not cache_path:
        log.error(f"Get copy id: {copy_id}, subtask id: {subtask_id}, cache path: {cache_path} failed")
        return False
    progress_file = get_progress_path(copy_id, subtask_id, cache_path[0])
    # Make sure the directory holding the progress file exists.
    if not os.path.exists(os.path.dirname(progress_file)):
        exec_mkdir_cmd(os.path.dirname(progress_file))

    def _report_progress(info):
        # A failed progress write is treated as a sub task failure.
        try:
            write_file(progress_file, info)
        except Exception as ex:
            log.error(f"Write progress file failed exception: {ex}")
            return False
        return True

    if not _report_progress("Running"):
        return False
    umount_all_gds_data_dir()
    return _report_progress("Success")


@exter_attack
def do_exec(pid, main_task_id, sub_task_id):
    """
    Entry point for executing one restore sub task.

    Dispatches on the sub task name / copy generate type: white-list sub
    task, clean sub task, GDS restore, or (by default) Roach restore whose
    result is also reported through the result file.

    :return: True when the sub task finished successfully
    """
    restore_job_param = ParseRestoreJobParam(pid)
    log.info(f"Start to exec {restore_job_param.get_subtask_name()}. main task {main_task_id}, subtask {sub_task_id}")
    subtask_name = restore_job_param.get_subtask_name()
    if subtask_name == RestoreSubtaskName.WHILE_LIST_SUBTASK:
        return do_white_list_subtask(restore_job_param, pid, main_task_id, sub_task_id)
    if subtask_name == RestoreSubtaskName.CLEAN_SUBTASK:
        return do_clean_list_subtask(restore_job_param)
    if restore_job_param.get_copy_generate_type() == CopyGenerateType.GENERATE_BY_GDS:
        log.info(f"Start to exec gds sub job")
        try:
            gds_restore_job = GDSRestoreJob(restore_job_param)
        except Exception as e:
            log.error(f"GDS restore job init failed exception {e}")
            return False
        gds_restore_job.run_exec()
        return True
    # Default path: Roach restore; write the outcome into the result file.
    roach_restore_job = RoachRestoreJob(restore_job_param)
    succeeded = roach_restore_job.do_restore_job()
    if succeeded:
        output = ActionResult(code=ExecuteResultEnum.SUCCESS, message="")
    else:
        output = ActionResult(code=ExecuteResultEnum.INTERNAL_ERROR,
                              message="Fail to execute restore business job")
    output_result_file(pid, output.dict(by_alias=True))
    return succeeded


if __name__ == "__main__":
    if len(sys.argv) < 4:
        log.error("Not enough input parameters.")
        sys.exit(1)
    pid_args = sys.argv[1]
    main_task_id_args = sys.argv[2]
    sub_task_id_args = sys.argv[3]

    if not is_valid_id(pid_args):
        log.warn(f"pid is invalid")
        sys.exit(1)

    if not is_valid_id(main_task_id_args):
        log.warn(f"main_task_id_args is invalid")
        sys.exit(1)

    if not is_valid_id(sub_task_id_args):
        log.warn(f"sub_task_id_args is invalid")
        sys.exit(1)

    try:
        if do_exec(pid_args, main_task_id_args, sub_task_id_args):
            log.info(f"{main_task_id_args}, {sub_task_id_args}, task suc")
            sys.exit(0)
    except Exception as err:
        log.error(f"Exception err {err}")
        sys.exit(1)

    sys.exit(0)
