#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import sys

from common import cleaner
from common.common import exter_attack
from common.const import ExecuteResultEnum, SysData
from common.logger import Logger
from common.parse_parafile import ParamFileUtil
from db2.backup.dpf.dpf_db_backup import DpfDbBackup
from db2.backup.single.single_db_backup import SingleBackup
from db2.backup.util.dpf_backup_util import DpfBackupUtil
from db2.backup.ha.ha_db_backup import HaDbBackup
from db2.backup.hadr.hadr_db_backup import HadrDbBackup
from db2.comm.const import Db2JobName, Db2CommonConst, Db2JsonConstant
from db2.comm.util.common_util import Db2CommonUtil
from db2.comm.const import Db2Const

LOGGER = Logger().get_logger(filename="db2.log")


@exter_attack
def distribute_task(task_name, pid, job_id, param_dict, sub_job_id=None):
    """Dispatch a DB2 backup job action to the backup implementation that
    matches the cluster topology described in ``param_dict``.

    :param task_name: job action name delivered by the agent framework
    :param pid: process id string used to locate parameter/result files
    :param job_id: main job id (may be empty)
    :param param_dict: parsed content of the parameter file
    :param sub_job_id: optional sub job id, defaults to None
    :return: None; results are written via the task-result helpers
    """
    # Select the backup implementation by cluster type; single-node is the fallback.
    if DpfBackupUtil.is_dpf_cluster(param_dict):
        db2_db_backup_inst = DpfDbBackup(task_name, pid, job_id, sub_job_id, param_dict)
    elif DpfBackupUtil.is_ha_cluster(param_dict):
        db2_db_backup_inst = HaDbBackup(task_name, pid, job_id, sub_job_id, param_dict)
    elif DpfBackupUtil.is_hadr_cluster(param_dict):
        db2_db_backup_inst = HadrDbBackup(task_name, pid, job_id, sub_job_id, param_dict)
    else:
        db2_db_backup_inst = SingleBackup(task_name, pid, job_id, sub_job_id, param_dict)

    # The framework polls the progress file for progress actions; report and
    # return early, before building the dispatch table we would never use.
    if task_name in Db2CommonConst.BACKUP_PROGRESS_ACTIONS:
        db2_db_backup_inst.report_progress()
        return

    # A normal copy-verification stop does not reach this point; only aborting
    # a copy-verification job does, in which case the agent job process is killed.
    if task_name == Db2JobName.ABORT_JOB and \
            param_dict.get(Db2JsonConstant.SUB_JOB, {}).get(Db2JsonConstant.JOB_NAME, "") == Db2JobName.CHECK_COPY:
        Db2CommonUtil.kill_agent_job_process(pid, job_id)

    job_name_func_mappings = build_job_name_func_mappings(db2_db_backup_inst)

    # Reject job actions this script does not support.
    job_func_dict = job_name_func_mappings.get(task_name)
    if not job_func_dict:
        LOGGER.error(f"Unsupported job action: {task_name}, pid: {pid}, job_id: {job_id}, sub_job_id: {sub_job_id}")
        err_msg = f"Unsupported job action: {task_name}."
        Db2CommonUtil.record_task_result(pid=pid, err_msg=err_msg, code=ExecuteResultEnum.INTERNAL_ERROR.value)
        return

    # Run the handler mapped to this action.
    action_ret = job_func_dict["exec_job_func"]()
    # Persist the step result if this action defines a result writer.
    if job_func_dict.get("output_result_func"):
        job_func_dict["output_result_func"](action_ret)


def build_job_name_func_mappings(db2_db_backup_inst):
    """Build the job-name -> handler dispatch table for a backup instance.

    Each entry always carries "exec_job_func"; entries whose action produces a
    step result additionally carry "output_result_func".

    :param db2_db_backup_inst: backup implementation providing the handlers
    :return: dict mapping job names to handler dicts
    """
    inst = db2_db_backup_inst
    # (job name, execution handler, optional result writer) specification.
    handler_spec = (
        (Db2JobName.ALLOW_BACKUP_IN_LOCAL, inst.allow_backup_in_local_node, inst.output_action_result),
        (Db2JobName.CHECK_BACKUP_JOB_TYPE, inst.check_backup_job_type, inst.output_action_result),
        (Db2JobName.QUERY_PERMISSION, inst.query_job_permission, None),
        (Db2JobName.BACKUP_PRE, inst.exec_backup_pre_job, inst.output_action_result),
        (Db2JobName.BACKUP, inst.exec_backup, inst.output_action_result),
        (Db2JobName.QUERY_SCAN_REPOSITORIES, inst.query_scan_repositories, None),
        (Db2JobName.BACKUP_POST, inst.exec_backup_post_job, None),
        (Db2JobName.QUERY_COPY, inst.query_backup_copy, None),
        (Db2JobName.ABORT_JOB, inst.abort_job, None),
        (Db2JobName.FINALIZE_CLEAR, inst.finalize_clear, None),
    )
    mappings = {}
    for job_name, exec_func, output_func in handler_spec:
        entry = {"exec_job_func": exec_func}
        # Only actions with a result writer expose the "output_result_func" key.
        if output_func is not None:
            entry["output_result_func"] = output_func
        mappings[job_name] = entry
    return mappings


@exter_attack
def main():
    """Script entry point: read stdin data, parse CLI args, dispatch the task.

    Expected arguments: ``task_name pid [job_id] [sub_job_id]``.

    :return: ExecuteResultEnum.SUCCESS on success, INTERNAL_ERROR otherwise
    """
    args = sys.argv[1:]
    # The agent passes sensitive data on the first stdin line; keep only that line.
    for line in sys.stdin:
        SysData.SYS_STDIN = line
        break
    # Accept 2 (task_name, pid), 3 (+ job_id) or 4 (+ sub_job_id) arguments.
    if len(args) not in (2, 3, 4):
        LOGGER.error(f"Param number error! Arguments: {args}")
        return ExecuteResultEnum.INTERNAL_ERROR

    task_name, pid = args[0], args[1]
    job_id = args[2] if len(args) >= 3 else ""
    input_sub_job_id = args[3] if len(args) == 4 else ""
    LOGGER.info(f"Start executing task, task name: {task_name}, pid: {pid}, job_id: {job_id}, "
                f"sub_job_id: {input_sub_job_id}.")
    try:
        param_dict = ParamFileUtil.parse_param_file(pid)
    except Exception:
        LOGGER.exception(f"Parse param file failed, task name: {task_name}, pid: {pid}.")
        err_msg = "Parse parameter file failed."
        # Record via the common helper, consistent with distribute_task's error path.
        Db2CommonUtil.record_task_result(pid=pid, err_msg=err_msg, code=ExecuteResultEnum.INTERNAL_ERROR.value)
        return ExecuteResultEnum.INTERNAL_ERROR

    try:
        distribute_task(task_name, pid, job_id, param_dict, sub_job_id=input_sub_job_id)
    except Exception:
        LOGGER.exception(f"Distribute DB2 database backup task failed. Task Name: {task_name}, Pid: {pid}, "
                         f"Job ID: {job_id}, Sub Job ID: {input_sub_job_id}.")
        return ExecuteResultEnum.INTERNAL_ERROR
    LOGGER.info(f"Execute task success. Task Name: {task_name}, Pid: {pid}, Job ID: {job_id}, "
                f"Sub Job ID: {input_sub_job_id}.")
    return ExecuteResultEnum.SUCCESS


if __name__ == '__main__':
    try:
        # main() returns an ExecuteResultEnum, which becomes the process exit status.
        sys.exit(main())
    finally:
        # Always clear the stdin-provided data captured in SysData.SYS_STDIN,
        # even when main() raises (including the SystemExit raised by sys.exit).
        cleaner.clear(SysData.SYS_STDIN)
        LOGGER.debug('Clear environment variables successfully.')