#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import threading
import time

from common.common import exter_attack, output_result_file, output_execution_result_ex
from common.common_models import ActionResult, Copy, JobPermissionInfo, CopyInfoRepModel, LogDetail
from common.const import BackupTypeEnum, ExecuteResultEnum, SubJobStatusEnum, RepositoryDataTypeEnum, \
    RepoProtocalType, DBLogLevel, CopyDataTypeEnum, ReportDBLabel
from common.logger import Logger
from common.number_const import NumberConst
from common.parse_parafile import get_env_variable, ParamFileUtil
from common.util.check_utils import check_path_in_white_list
from common.util.scanner_utils import scan_dir_size
from db2.backup.db2_backup_base import Db2BackupBase
from db2.backup.single.single_db_backup_service import SingleBackupService
from db2.backup.util.db2_backup_util import Db2BackupUtil
from db2.backup.util.dpf_backup_util import DpfBackupUtil
from db2.backup.util.ha_utils import is_power_rhel_ha
from db2.comm.backup_param_parse import ParameterBackupParse
from db2.comm.const import Db2JsonConstant, Db2JobName, Db2Const, RpcToolApiName, Db2CommonConst
from db2.comm.db2_exception import ErrCodeException
from db2.comm.db2_verification import check_database_exists, check_delta_backup_config, \
    check_database_pending_status, is_trans_full_backup, check_log_arch_config, check_database_offline_backup_status
from db2.comm.error_code import Db2ErrCode
from db2.comm.models.backup_models import CopyReportParam, LogBackupParam
from db2.comm.util.common_util import Db2CommonUtil
from db2.comm.util.dpf_util import DpfUtil
from db2.comm.util.job_decorators import job_exception_decorator
from db2.comm.util.param_util import Db2ParamUtil
from postgresql.common.const import ErrorCode

LOGGER = Logger().get_logger(filename="db2.log")


class SingleBackup(Db2BackupBase):
    """Backup executor for a single-instance (non-DPF) DB2 database.

    Drives the complete backup flow for one database: local-node
    pre-checks, job permission query, backup-type conversion checks
    (incremental/differential -> full), execution of the actual backup
    (full / incremental / differential / log), progress reporting, copy
    reporting to the framework, and post-job / archive-log cleanup.
    """

    def __init__(self, task_name, pid, job_id, sub_job_id, param_dict):
        super().__init__(task_name, pid, job_id, sub_job_id, param_dict)
        self._parameter_backup = ParameterBackupParse(pid, job_id, sub_job_id, param_dict)
        self.backup_service = SingleBackupService(pid, job_id, sub_job_id, self._parameter_backup, param_dict)
        self.backup_type = self._parameter_backup.get_backup_type()
        self._db_name = self._parameter_backup.get_backup_database_name()
        self.resource_type = DpfBackupUtil.get_cluster_type(param_dict)
        self.log_detail = LogDetail(logInfoParam=[self.sub_job_id], logLevel=DBLogLevel.INFO.value)
        self.copy_info = Copy()

    @staticmethod
    def build_data_copy_extend_info(user_name, db_name, bak_img_timestamp):
        """Build the extendInfo dict reported with a data (non-log) copy.

        Combines the backup image timestamp with the current log info of
        every partition of the database.

        :param user_name: DB2 instance OS user.
        :param db_name: database name.
        :param bak_img_timestamp: backup image timestamp (YYYYmmddHHMMSS).
        :return: dict of copy extend info.
        """
        added_copy_ext_dict = {
            Db2JsonConstant.BAK_IMG_TIMESTAMP: bak_img_timestamp
        }
        ptn_nums = Db2ParamUtil.get_partition_nums_for_backup(user_name, db_name)
        copy_log_ext_info = DpfUtil.get_copy_cur_log_info(user_name, db_name, bak_img_timestamp, ptn_nums)
        added_copy_ext_dict.update(copy_log_ext_info)
        return added_copy_ext_dict

    @staticmethod
    def build_log_copy_extend_info(bak_img_timestamp, copy_log_ext_info):
        """Build the extendInfo dict reported with a log copy.

        :param bak_img_timestamp: backup image timestamp.
        :param copy_log_ext_info: extra log info collected by the log
            backup step; may be empty/None, in which case it is skipped.
        :return: dict of copy extend info.
        """
        added_copy_ext_dict = {
            Db2JsonConstant.BAK_IMG_TIMESTAMP: bak_img_timestamp
        }
        if copy_log_ext_info:
            added_copy_ext_dict.update(copy_log_ext_info)
        return added_copy_ext_dict

    def allow_backup_in_local_node(self, user_name=None):
        """Check whether this node can run the backup.

        Validates: OS user exists, DB2 system controller process is up,
        database name is present, database exists, and log archiving is
        configured; then delegates to :meth:`check_resource`.

        :param user_name: DB2 instance OS user; resolved from job params
            when not given.
        :return: ActionResult describing success or the first failed check.
        """
        LOGGER.debug("Allow_backup_in_local_node db2 single database start")
        if not user_name:
            user_name = self._parameter_backup.get_database_user_name(
                Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
        # Check that the OS user could be resolved
        if not user_name:
            LOGGER.error(f"Failed to get db2 os username, job id: {self.job_id}.")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     body_err=Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER,
                                                     message="The OS user does not exists")
        Db2CommonUtil.check_injection(user_name)
        # Check the DB2 system controller process is running
        if not Db2CommonUtil.check_process_service(user_name):
            LOGGER.error(f"DB2 System Controller process is not active, job id: {self.job_id}.")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="Process is not start!",
                                                     body_err=Db2ErrCode.DB_SERVICE_ERROR)
        # Check that a database name was supplied
        if not self._db_name:
            LOGGER.error(f"Failed to get db2 user name: {self._db_name}, job id: {self.job_id}.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER,
                message="The db2 single database name does not exists")
        # Check that the database actually exists
        if not check_database_exists(user_name, self._db_name):
            LOGGER.error(f"The db2 single database: {self._db_name} does not exist, job id: {self.job_id}.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.DATABASE_NOT_EXISTS,
                err_params=[self._db_name], message="The db2 single database does not exists")
        # Check that archive logging is enabled (required for online backup)
        if not check_log_arch_config(user_name, self._db_name):
            LOGGER.error(f"The db2 database: {self._db_name} have not open log arch, can not backup, "
                         f"job id: {self.job_id}.")
            # NOTE(review): deliberately reuses the postgresql ErrorCode here;
            # presumably the error code value is shared across app plugins.
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=ErrorCode.ARCHIVE_MODE_ENABLED.value,
                message="The db2 single database not log arch config")

        return self.check_resource(user_name)

    def check_resource(self, user_name):
        """Check database-level preconditions for the requested backup type.

        :param user_name: DB2 instance OS user.
        :return: ActionResult (SUCCESS or the first failed check).
        """
        LOGGER.info("Check if the current database satisfied the backup conditions.")
        # Incremental backup requires the delta-backup (TRACKMOD) config to be enabled
        if self.backup_type == BackupTypeEnum.INCRE_BACKUP.value and \
                not check_delta_backup_config(user_name=user_name, db_name=self._db_name):
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.INCREMENTAL_BACKUP_NOT_OPEN,
                message="The db2 single database Incremental database backup is not enabled.")
        # Differential backup requires the same delta-backup config
        if self.backup_type == BackupTypeEnum.DIFF_BACKUP.value and \
                not check_delta_backup_config(user_name=user_name, db_name=self._db_name):
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.INCREMENTAL_BACKUP_NOT_OPEN,
                message="The db2 single database Incremental database backup is not enabled.")
        # Check whether the database must be backed up offline
        if not check_database_offline_backup_status(user_name, self._db_name):
            LOGGER.error(f"Indicates whether to perform offline backup: {self._db_name}, job id: {self.job_id}.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.DATABASE_NEED_OFFLINE_BACKUP,
                err_params=[self._db_name], message="There are pending tasks in the db2 single database")
        # Check that the database has no pending states
        if not check_database_pending_status(user_name, self._db_name):
            LOGGER.error(f"Failed to check db2 database status: {self._db_name}, job id: {self.job_id}.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.DATABASE_EXISTS_PENDING,
                err_params=[self._db_name], message="There are pending tasks in the db2 single database")

        LOGGER.debug("Allow_backup_in_local_node db2 single database end")
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @exter_attack
    def query_job_permission(self, user_name=None):
        """Report the OS user/group/mode the framework should use for repo files.

        Writes a JobPermissionInfo result file; on failure records an
        INTERNAL_ERROR task result instead of raising.
        """
        if not user_name:
            user_name = self._parameter_backup.get_database_user_name(
                Db2JsonConstant.APPENV_NODES_0_AUTH_AUTHKEY)
        if not user_name:
            LOGGER.error("Get db2 single database user name failed.")
            Db2CommonUtil.record_task_result(self.pid, code=ExecuteResultEnum.INTERNAL_ERROR.value)
            return
        group_name = Db2CommonUtil.get_group_name_by_os_user(user_name)
        if not group_name:
            LOGGER.error("Get user group of db2 user failed.")
            Db2CommonUtil.record_task_result(self.pid, code=ExecuteResultEnum.INTERNAL_ERROR.value)
            return
        LOGGER.info("Add db2 single database user group permission.")
        output = JobPermissionInfo(user=user_name, group=group_name, fileMode="0750")
        output_result_file(self.pid, output.dict(by_alias=True))

    def check_backup_job_type(self, user_name=None):
        """Decide whether an incremental/differential backup must convert to full.

        Conversion is required when:
        - there is no previous full copy to depend on;
        - a restore was performed since the last backup;
        - the previous data copy type conflicts (diff after incr, or
          incr after diff, with no full in between);
        - the previous copy was taken with trackMod == NO.

        :return: ActionResult SUCCESS, or INTERNAL_ERROR with
            ERR_INC_TO_FULL to request conversion to a full backup.
        """
        if self.backup_type == BackupTypeEnum.FULL_BACKUP.value:
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)
        tmp_path = self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        if not Db2BackupUtil.query_pre_copy_agentid_by_rpc_tool(
                self._parameter_backup.get_applications(), self.job_id, self.sub_job_id, tmp_path
        ):
            LOGGER.info(f"When performing a incremental or differential backup, there is no previous full backup copy, "
                        f"and it will be converted to a full backup, backup type: {self.backup_type}.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value,
                body_err=Db2ErrCode.ERR_INC_TO_FULL
            )
        # Fetch the type of the previous data copy
        last_data_copy = Db2BackupUtil.query_pre_data_copy_by_rpc_tool(self._parameter_backup.get_applications(),
                                                                       self.job_id, self.sub_job_id, tmp_path)
        # "" (not {}) as default: backupType is a scalar, and "" compares
        # cleanly against the enum values below.
        last_data_copy_type = last_data_copy.get("extendInfo", {}).get("backupType", "")
        LOGGER.info(f"Backup type of the last copy is: {last_data_copy_type}.")
        # Previous copy was differential while this one is incremental -> full
        if self.backup_type == BackupTypeEnum.INCRE_BACKUP.value:
            if last_data_copy_type != BackupTypeEnum.FULL_BACKUP.value and last_data_copy_type != self.backup_type:
                LOGGER.info(f'Differential backup copy before, convert to a full backup.')
                return Db2CommonUtil.build_action_result(
                    code=ExecuteResultEnum.INTERNAL_ERROR.value,
                    body_err=Db2ErrCode.ERR_INC_TO_FULL
                )
        # Previous copy was incremental while this one is differential -> full
        if self.backup_type == BackupTypeEnum.DIFF_BACKUP.value:
            if last_data_copy_type != BackupTypeEnum.FULL_BACKUP.value and last_data_copy_type != self.backup_type:
                LOGGER.info(f'Incremental backup copy before, convert to a full backup.')
                return Db2CommonUtil.build_action_result(
                    code=ExecuteResultEnum.INTERNAL_ERROR.value,
                    body_err=Db2ErrCode.ERR_INC_TO_FULL
                )
        # Previous copy had trackMod == NO -> first delta backup after enabling it must be full
        last_track_mod = last_data_copy.get("extendInfo", {}).get("trackMod", "")
        if str(last_track_mod).upper() == "NO":
            LOGGER.info(f'Track mod changed to ON, first incremental or differential backup convert to full.')
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value,
                body_err=Db2ErrCode.ERR_INC_TO_FULL
            )
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator(write_progress=True)
    def exec_backup_pre_job(self, user_name=None):
        """Backup pre-job: single-node has no pre-work, only report 100% progress.

        :return: ActionResult SUCCESS.
        """
        LOGGER.info(f"No operation is performed on the previous task. job id: {self.job_id}")
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator(write_progress=True)
    def exec_backup(self, user_name=None):
        """Run the backup for the configured backup type and report the copy.

        Starts a daemon progress thread, dispatches to the matching
        SingleBackupService method, then reports the resulting copy and
        final progress.

        :param user_name: DB2 instance OS user; resolved from job params
            when not given.
        :raises ErrCodeException: when the OS user cannot be resolved.
        :return: ActionResult SUCCESS, or INTERNAL_ERROR when the backup
            produced no backup timestamp.
        """
        progress_file_name = self.sub_job_id if self.sub_job_id else self.job_id
        LOGGER.info(f"Start db2 single backup. progress_file_name: {progress_file_name}. backup: {self.backup_type}")
        if not user_name:
            user_name = self._parameter_backup.get_database_user_name(
                Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
        if not user_name:
            LOGGER.error("DB2 single database get user name fail")
            Db2CommonUtil.record_task_result(self.pid, code=ExecuteResultEnum.INTERNAL_ERROR.value)
            raise ErrCodeException(Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER)

        backup_path = self.get_single_db_backup_path(user_name)
        cache_path = self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)

        # Initial progress when the backup starts is 5%
        self.sub_job_detail.progress = NumberConst.FIVE
        self.sub_job_detail.task_status = SubJobStatusEnum.RUNNING.value
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)
        progress_thread = threading.Thread(name="progress", target=self.upload_report_progress)
        # Daemon thread so it dies with the main task if the task ends unexpectedly
        progress_thread.daemon = True
        progress_thread.start()
        # Execute the backup itself
        pre_copy_end_timestamp = None
        copy_log_ext_info = dict()
        if self.backup_type == BackupTypeEnum.FULL_BACKUP.value:
            bak_time = self.backup_service.exec_single_database_full_backup_db(self._db_name, user_name, backup_path)
        elif self.backup_type == BackupTypeEnum.INCRE_BACKUP.value:
            bak_time = self.backup_service.exec_single_database_inc_backup_db(self._db_name, user_name, backup_path)
        elif self.backup_type == BackupTypeEnum.DIFF_BACKUP.value:
            bak_time = self.backup_service.exec_single_database_diff_backup_db(self._db_name, user_name, backup_path)
        elif self.backup_type == BackupTypeEnum.LOG_BACKUP.value:
            log_back_result = self.backup_service.exec_single_database_log_backup_db(self._db_name, user_name)
            bak_time = log_back_result.bak_time
            pre_copy_end_timestamp = log_back_result.pre_copy_end_timestamp
            copy_log_ext_info = log_back_result.copy_log_ext_info
            backup_path = log_back_result.backup_path
        else:
            bak_time = ""
            LOGGER.error(f"Error db2 single backup type: {self.backup_type}")

        if not bak_time:
            self.report_fail_progress_when_not_bak_time()

            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value,
                message="Unsupported backup type"
            )
        # Report the copy to the framework
        report_param = CopyReportParam(bak_time=bak_time, cache_path=cache_path, backup_type=self.backup_type,
                                       user_name=user_name, pre_copy_end_timestamp=pre_copy_end_timestamp,
                                       copy_log_ext_info=copy_log_ext_info)
        self.report_copy(report_param)

        self.report_success_progress(backup_path)

        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def report_success_progress(self, backup_path):
        """Report 100% COMPLETED progress, including the scanned copy size.

        :param backup_path: comma-separated backup path(s); only the first
            is scanned (HA has multiple paths with identical content).
        """
        LOGGER.info(f"End db2 single backup. job_id: {self.job_id}")
        ret, size = scan_dir_size(self.job_id, backup_path.split(",")[0])
        if ret:
            LOGGER.info(f"DB2 single backup copy size : {size}")
            self.sub_job_detail.data_size = size
        # Backup finished: final 100% progress with a success label
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        self.log_detail.log_info = ReportDBLabel.SUB_JOB_SUCCESS
        self.sub_job_detail.log_detail = [self.log_detail]
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

    def report_fail_progress_when_not_bak_time(self):
        """Report a FAILED sub-job when the backup returned no timestamp."""
        # logInfo: label; logDetail: error code; logDetailParam: error code params
        self.log_detail.logTimestamp = int(time.time())
        self.log_detail.logDetail = Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER
        # .value for consistency with every other DBLogLevel assignment in this file
        self.log_detail.logLevel = DBLogLevel.ERROR.value
        self.sub_job_detail.task_status = SubJobStatusEnum.FAILED.value
        self.sub_job_detail.log_detail = [self.log_detail]
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

    def get_single_db_backup_path(self, user_name):
        """Resolve the data-repository path(s) the backup writes to.

        For power/RHEL HA (non-sanclient) all data paths are used to adapt
        performance: `to /xxs` becomes `to "/xx3, /xx2, /xxx1"`.

        :raises ErrCodeException: when no repository path is available.
        :return: backup path string prepared by the backup service.
        """
        if is_power_rhel_ha(self.resource_type) and self._parameter_backup.get_sanclient_type() != "true":
            repositories_path_list = self._parameter_backup.get_repositories_all_date_path()
        else:
            repositories_path_list = [
                self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.DATA_REPOSITORY.value)
            ]
        if not repositories_path_list:
            raise ErrCodeException(Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER)
        # Uniformly adjust file permissions on the backup paths
        backup_path = self.backup_service.pre_backup_path(
            user_name=user_name,
            backup_type=self.backup_type,
            data_path_list=repositories_path_list
        )
        return backup_path

    def report_copy(self, report_param: CopyReportParam):
        """Assemble the Copy model for this job and report it via RPC.

        Log copies carry begin/end times and associated copies; data
        copies carry the data backup end time and trackMod state.

        :param report_param: collected backup results and context.
        """
        copy_info = Copy(id=self.job_id)
        bak_timestamp = Db2CommonUtil.convert_backup_time_to_timestamp(report_param.bak_time)
        if self.backup_type == BackupTypeEnum.LOG_BACKUP.value:
            copy_info.extend_info = self.build_log_copy_extend_info(report_param.bak_time,
                                                                    report_param.copy_log_ext_info)
            copy_info.extend_info[Db2JsonConstant.COPY_BEGIN_TIME] = int(report_param.pre_copy_end_timestamp)
            copy_info.extend_info[Db2JsonConstant.COPY_END_TIME] = bak_timestamp
            copy_info.extend_info[Db2JsonConstant.COPY_BAK_TIME] = bak_timestamp
            copy_info.timestamp = bak_timestamp
            copy_info.extend_info[Db2JsonConstant.COPY_ID] = self.job_id
            copy_info.extend_info[Db2JsonConstant.COPY_VERIFY_FILE] = "false"
            assoc_copies = copy_info.extend_info.get(Db2JsonConstant.ASSOCIATED_COPIES, [])
            LOGGER.info(f"The log backup begin time: {report_param.pre_copy_end_timestamp}, "
                        f"end time: {bak_timestamp}, associated "
                        f"copies: {assoc_copies}, database: {self._db_name}.")
        else:
            copy_info.extend_info = self.build_data_copy_extend_info(report_param.user_name,
                                                                     self._db_name, report_param.bak_time)
            # Data backup end time in YYYYmmddHHMMSS; fall back to the image timestamp
            bak_end_time, _, _ = Db2CommonUtil.get_db_data_backup_end_time_and_cur_log(
                report_param.user_name, self._db_name, report_param.bak_time)
            copy_info.extend_info[Db2JsonConstant.DB2_DATA_BAK_END_TIME] = bak_end_time
            data_bak_end_timestamp = Db2CommonUtil.convert_backup_time_to_timestamp(bak_end_time) \
                if bak_end_time else bak_timestamp
            copy_info.timestamp = data_bak_end_timestamp
            copy_info.extend_info[Db2JsonConstant.COPY_ID] = self.job_id
            copy_info.extend_info[Db2JsonConstant.COPY_BAK_TIME] = data_bak_end_timestamp
            copy_info.extend_info[Db2JsonConstant.COPY_VERIFY_FILE] = "true"
            copy_info.extend_info[Db2JsonConstant.TRACK_MOD] = Db2CommonUtil.get_track_mod_val_of_db(
                report_param.user_name, self._db_name)
        copy_info.extend_info["uuid"] = Db2CommonUtil.get_agent_id()
        copy_info.extend_info["localDatabaseDirectory"] = Db2CommonUtil.get_database_local_path(report_param.user_name,
                                                                                                self._db_name)
        copy_info.extend_info["backupType"] = report_param.backup_type
        copy_info.extend_info["partitionNumber"] = Db2CommonUtil.get_database_partitionnums(report_param.user_name,
                                                                                            self._db_name)
        copy_info.extend_info["deleteLog"] = self._parameter_backup.get_delete_log()
        copy_info.extend_info["databaseName"] = self._db_name
        copy_info.extend_info["userName"] = report_param.user_name
        copy_info.extend_info["cachePath"] = report_param.cache_path
        copy_info.extend_info["sinceTimeStamp"] = self.backup_service.get_since_timestamp()
        backup_dir_prefix = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(report_param.backup_type)
        copy_path = f'{backup_dir_prefix}_{self.job_id}'
        if self._parameter_backup.get_sanclient_type() == "true":
            copy_path = self.job_id
        if self.backup_type != BackupTypeEnum.LOG_BACKUP.value:
            self.assembly_parameters_repositories(copy_info, copy_path)

        self.exec_report_copy(copy_info, report_param.cache_path)

    def exec_report_copy(self, copy_info, cache_path):
        """Write the copy info to a temp file, report it via the RPC tool,
        then always remove the temp in/out files.

        :param copy_info: Copy model to report.
        :param cache_path: cache repository path for the temp files.
        :raises Exception: when the RPC tool call fails.
        """
        input_path = os.path.realpath(os.path.join(cache_path, f"copy_info_in_{self.job_id}"))
        output_path = os.path.realpath(os.path.join(cache_path, f"copy_info_out_{self.job_id}"))
        params = dict()
        params["copy"] = copy_info.dict(by_alias=True)
        params["jobId"] = self.job_id

        try:
            output_execution_result_ex(input_path, params)
            if not Db2CommonUtil.exec_rpc_tool_cmd(RpcToolApiName.REPORT_COPY_ADDITIONAL_INFO, input_path, output_path):
                raise Exception("Report backup copy information failed")
        finally:
            # Best-effort cleanup; paths outside the white list are skipped, not deleted
            for tmp_path in (input_path, output_path):
                if not check_path_in_white_list(tmp_path):
                    LOGGER.error("The db2 temp copy info file not check white")
                    continue
                if os.path.exists(tmp_path):
                    LOGGER.info(f"Remove temp copy info file: {tmp_path}.")
                    Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)

    def assembly_parameters_repositories(self, copy_info, copy_path):
        """Attach the data-repository description (with the copy sub-path)
        to the copy being reported.

        :param copy_info: Copy model being assembled.
        :param copy_path: directory name of this copy under the repo root.
        """
        copy_repo_info = ParamFileUtil.get_rep_info(
            self.param_dict.get("job"), RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        remote_path = os.path.realpath(os.path.join(copy_repo_info.get('remotePath'), copy_path))
        copy_info.repositories = [
            CopyInfoRepModel(
                id=copy_repo_info.get('id'),
                repositoryType=copy_repo_info.get("repositoryType"),
                isLocal=copy_repo_info.get("isLocal"), protocol=RepoProtocalType.NFS.value,
                remotePath=remote_path,
                remoteHost=copy_repo_info.get("remoteHost"),
                extendInfo=copy_repo_info.get("extendInfo")
            )
        ]

    @job_exception_decorator(write_progress=True)
    def exec_backup_post_job(self):
        """Backup post-job: nothing to do, report 100% COMPLETED progress."""
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def query_backup_copy(self):
        """No-op for single backup; the copy is reported in exec_backup."""
        pass

    def abort_job(self):
        """Abort the running backup for this database.

        :return: ActionResult SUCCESS.
        """
        os_user = get_env_variable(Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
        self.backup_service.abort_job_service(os_user, self._db_name)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @exter_attack
    def report_progress(self, user_name=None):
        """Query and report the backup task progress to the framework.

        For BACKUP_PROGRESS tasks the live DB2 backup progress is queried;
        otherwise the cached progress file is returned as-is.
        """
        progress_file_name = self.sub_job_id if self.sub_job_id else self.job_id
        cache_path = self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        if self.task_name == Db2JobName.BACKUP_PROGRESS:
            if not user_name:
                user_name = self._parameter_backup.get_database_user_name(
                    Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
            Db2CommonUtil.check_injection(user_name)
            progress = Db2CommonUtil.get_now_backup_process(user_name, self._db_name)
            # Reuse the value just queried instead of querying DB2 a second time
            self.sub_job_detail.progress = progress
            Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)
            return Db2BackupUtil.report_progress_utl(self.pid, progress_file_name, cache_path, progress)

        # Return the cached backup progress to the framework
        return Db2BackupUtil.report_progress_utl(self.pid, progress_file_name, cache_path)

    def upload_report_progress(self):
        """Progress-reporting loop run in a daemon thread during backup.

        Polls the live DB2 backup progress every TEN seconds while the
        sub-job is RUNNING; exits as soon as the status changes.
        """
        while True:
            if self.sub_job_detail.task_status == SubJobStatusEnum.RUNNING.value:
                user_name = self._parameter_backup.get_database_user_name(
                    Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
                progress = Db2CommonUtil.get_now_backup_process(user_name, self._db_name)
                # Only move forward, and only when a progress was already written
                # by an earlier step (guards the not-yet-initialized case).
                if self.sub_job_detail.progress and self.sub_job_detail.progress < progress:
                    self.sub_job_detail.progress = progress
                Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)
            else:
                LOGGER.debug(f"Stop report progress thread. current job status is : {self.sub_job_detail.dict()}")
                break
            LOGGER.debug(f"Report single db2 progress thread. detail: {self.sub_job_detail.dict()}")
            time.sleep(NumberConst.TEN)

    def output_action_result(self, action_ret: ActionResult):
        """Delegate result output to the base implementation."""
        super().output_action_result(action_ret)

    def write_backup_time_to_file(self, back_time):
        """Persist the backup time to a temp file in the cache repository.

        :param back_time: backup time string to persist.
        """
        LOGGER.info(f"Start write backup time to file. backup time: {back_time}")
        ret_json = {"backupTime": back_time}
        cache_path = self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        temp_path = os.path.join(cache_path, "temp_backup_time")
        output_execution_result_ex(temp_path, ret_json)
        LOGGER.info(f"Write backup time to file complete. backup time: {back_time}, file_path: {temp_path}")

    def read_backup_time_from_file(self):
        """Read (and then delete) the backup time written by
        :meth:`write_backup_time_to_file`.

        :return: the persisted backup time, or None if absent.
        """
        LOGGER.info(f"Start read backup time from file.")
        cache_path = self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        tmp_path = os.path.join(cache_path, "temp_backup_time")
        ret_json = Db2CommonUtil.read_json_from_file(tmp_path)
        Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)
        ret_tm = ret_json.get("backupTime")
        LOGGER.info(f"Read backup time from file complete. backup time: {ret_tm}")
        return ret_tm

    def get_previous_copy_ext_info(self, cache_path):
        """Get the extend info of the copy a log backup depends on.

        Prefers the latest log copy; falls back to the latest full copy.

        :param cache_path: cache repository path for the RPC temp files.
        :raises Exception: when neither a log nor a full copy exists.
        :return: extendInfo dict of the dependency copy.
        """
        LOGGER.info("Start to get latest backup copy info ...")
        app_type = self._parameter_backup.get_applications()
        latest_log_copy_info = Db2BackupUtil.query_pre_copy_info_by_rpc_tool(
            app_type, self.job_id, self.sub_job_id, cache_path, [CopyDataTypeEnum.LOG_COPY.value])
        # A dependency log copy exists
        if latest_log_copy_info:
            latest_log_copy_ext_info = latest_log_copy_info.get("extendInfo", {})
            LOGGER.info(f"Get latest log backup copy info success, extend info: {latest_log_copy_info}.")
            return latest_log_copy_ext_info
        # No dependency log copy: look for a dependency full copy
        latest_full_copy_info = Db2BackupUtil.query_pre_copy_info_by_rpc_tool(
            app_type, self.job_id, self.sub_job_id, cache_path, [CopyDataTypeEnum.FULL_COPY.value])
        # No dependency full copy either: fail
        if not latest_full_copy_info:
            LOGGER.error(f"Get latest backup copy info failed, job id: {self.job_id}.")
            raise Exception("Get latest backup copy info failed")
        latest_full_copy_ext_info = latest_full_copy_info.get("extendInfo", {})
        LOGGER.info(f"Get latest full backup info success, extend info: {latest_full_copy_ext_info}.")
        return latest_full_copy_ext_info

    def finalize_clear(self):
        """Delete the archive logs up to the previous copy's end timestamp."""
        LOGGER.info(f"Start to clear archive log.")
        json_param = self._parameter_backup.get_json_param()
        extend_info = json_param.get("extendInfo")
        pre_copy_end_timestamp = int(extend_info.get("beginTime"))
        db_name = extend_info.get("databaseName")
        user_name = extend_info.get("userName")
        cluster_type = extend_info.get("clusterType")
        since_timestamp = extend_info.get("sinceTimeStamp")
        cache_path = extend_info.get("cachePath")

        curr_ptn_nums = Db2CommonUtil.get_partition_no_by_hostname_for_single(user_name)
        log_chain = Db2CommonUtil.get_current_log_chain(user_name, db_name)
        log_param = LogBackupParam(user_name=user_name, db_name=db_name, log_chain=log_chain,
                                   since_timestamp=since_timestamp, cache_path=cache_path,
                                   cur_node_ptn_nums=curr_ptn_nums)
        log_param.deploy_type = cluster_type
        if self.backup_service.delete_archive_log(log_param, log_chain, curr_ptn_nums,
                                                  pre_copy_end_timestamp, user_name):
            LOGGER.info(f'Delete archive log succeeded.')
        else:
            LOGGER.warning(f'Delete archive log failed.')