#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import collections
import os
import time
import re
from datetime import datetime, timedelta

from common.common import execute_cmd, output_execution_result, execute_cmd_list, execute_cmd_oversize_return_value
from common.common_models import CopyInfoRepModel
from common.const import CMDResult, CopyDataTypeEnum, ParamConstant, RepositoryDataTypeEnum
from common.logger import Logger
from common.number_const import NumberConst
from common.parse_parafile import ParamFileUtil
from db2.backup.util.db2_backup_util import Db2BackupUtil
from db2.comm.backup_param_parse import ParameterBackupParse
from db2.comm.const import Db2CommonConst, Db2Const, Db2JsonConstant
from db2.comm.db2_exception import ErrCodeException
from db2.comm.error_code import Db2ErrCode
from db2.comm.models.backup_models import LogBackupParam, LogBackResult
from db2.comm.util.common_util import Db2CommonUtil
from db2.comm.util.param_util import Db2ParamUtil
from db2.comm.db2_cmd import get_lang_value

LOGGER = Logger().get_logger("db2.log")


class SingleBackupService:
    """Backup service for a single-node (non-DPF) DB2 database.

    Wraps the full/incremental/differential/log backup commands and the
    surrounding copy-metadata bookkeeping for one backup job.
    """

    def __init__(self, pid, job_id, sub_job_id, param_util, param_dict):
        """Capture job identity and parameter helpers for this backup job."""
        # Job identity echoed in every log line (see get_log_common()).
        self._pid = pid
        self._job_id = job_id
        self._sub_job_id = sub_job_id
        # Helpers for reading repository paths / application info from the job params.
        self.param_util = param_util
        self.param_dict = param_dict
        self.parameter_backup = ParameterBackupParse(pid, job_id, sub_job_id, param_dict)
        # Archive-log locations selected by the last integrity check (log backup).
        self.log_path = []

    @staticmethod
    def get_backup_complete_image(std_out: str):
        backup_info = std_out.split(":")
        if len(backup_info) != 2:
            backup_info = std_out.split("：")
            if len(backup_info) != 2:
                LOGGER.error(f"The single db2 backup failed std out info: {std_out}.")
                return ""
        return backup_info[1].replace("\n", "").strip()

    def get_log_common(self):
        return f"pid: {self._pid}, jobId: {self._job_id}, sub_job_id: {self._sub_job_id}."

    def exec_single_database_full_backup_db(self, db_name, user_name, backup_path):
        """
        执行单机全量备份命令
        """
        LOGGER.info(f"Full database single backup started. "
                    f"database_name: {db_name}, {self.get_log_common()}")
        Db2CommonUtil.check_injection(user_name, db_name, backup_path)
        encoding = get_lang_value(user_name)
        # 全量备份命令
        backup_cmd = f"su - {user_name} -c \'db2 backup db {db_name} online to {backup_path} include logs\'"
        LOGGER.info(f"Start to execute backup command: {backup_cmd}.")
        ret_code, std_out, std_err = execute_cmd(backup_cmd, encoding=encoding)
        LOGGER.info(f"Execute the backup command completed, return code: {ret_code}, std_out: {std_out}, "
                    f"std_err: {std_err}.")
        # 无法访问表空间
        if "SQL2048N" in std_out and ('Reason code: "6"' in std_out or '原因码：“6”' in std_out):
            raise ErrCodeException(Db2ErrCode.RUNNING_BACKUP_TASK_EXISTS, db_name)
        # 执行状态错误或者error在输出中则备份失败
        if ret_code != CMDResult.SUCCESS.value or "errors" in std_out or "失败" in std_out:
            LOGGER.error(f'Execute the backup command failed, {self.get_log_common()}')
            raise ErrCodeException(Db2ErrCode.DATABASE_CRANKBACK_EXCEPTION, std_out)
        LOGGER.info(f'The full backup of the database: {db_name} completed, {self.get_log_common()}')
        # 获取备份结果的唯一id
        return self.get_backup_complete_image(std_out)

    def exec_single_database_inc_backup_db(self, db_name, user_name, data_path):
        """
        执行单机增量备份命令
        """
        LOGGER.info(f"Start to execute increment database single backup. "
                    f"database_name: {db_name}, {self.get_log_common()}")
        Db2CommonUtil.check_injection(user_name, db_name, data_path)
        encoding = get_lang_value(user_name)
        # 增量备份命令
        backup_cmd = f"su - {user_name} -c " \
                     f"\'db2 backup db {db_name} online incremental delta to {data_path} include logs\'"
        ret_code, std_out, std_err = execute_cmd(backup_cmd, encoding)
        LOGGER.info(f"Execute the backup command completed, return code: {ret_code}, std_out: {std_out}, "
                    f"std_err: {std_err}.")
        # 无法访问表空间
        if "SQL2048N" in std_out and ('Reason code: "6"' in std_out or '原因码：“6”' in std_out):
            raise ErrCodeException(Db2ErrCode.RUNNING_BACKUP_TASK_EXISTS, db_name)
        # 执行状态错误或者error在输出种则备份失败
        if ret_code != CMDResult.SUCCESS.value or "errors" in std_out or "失败" in std_out:
            LOGGER.error(f'Failed to execute the backup command, {self.get_log_common()}')
            raise ErrCodeException(Db2ErrCode.DATABASE_CRANKBACK_EXCEPTION, std_out)
        LOGGER.info(f'The increment backup of the database: {db_name} completed, {self.get_log_common()}')
        # 获取备份结果的唯一id
        return self.get_backup_complete_image(std_out)

    def exec_single_database_diff_backup_db(self, db_name, user_name, data_path):
        """
        执行单机差异备份命令
        """
        LOGGER.info(f"Start to execute difference database single backup. "
                    f"database_name: {db_name}, {self.get_log_common()}")
        Db2CommonUtil.check_injection(user_name, db_name, data_path)
        encoding = get_lang_value(user_name)
        # 差异备份命令
        backup_cmd = f"su - {user_name} -c \'db2 backup db {db_name} online incremental to {data_path} include logs\'"
        ret_code, std_out, std_err = execute_cmd(backup_cmd, encoding)
        LOGGER.info(f"Execute the backup command completed, return code: {ret_code}, std_out: {std_out}, "
                    f"std_err: {std_err}.")
        # 无法访问表空间
        if "SQL2048N" in std_out and ('Reason code: "6"' in std_out or '原因码：“6”' in std_out):
            raise ErrCodeException(Db2ErrCode.RUNNING_BACKUP_TASK_EXISTS, db_name)
        # 执行状态错误或者error在输出种则备份失败
        if ret_code != CMDResult.SUCCESS.value or "errors" in std_out:
            LOGGER.error(f'Failed to execute the backup command, {self.get_log_common()}')
            raise ErrCodeException(Db2ErrCode.DATABASE_CRANKBACK_EXCEPTION, std_out)
        LOGGER.info(f'The difference backup of the database: {db_name} completed, {self.get_log_common()}')
        # 获取备份结果的唯一id
        return self.get_backup_complete_image(std_out)

    def save_backup_time_info(self, bak_img_timestamp, copy_log_ext_info=None, end_time=None):
        backup_info_dict = {
            Db2JsonConstant.BAK_IMG_TIMESTAMP: bak_img_timestamp
        }
        if end_time:
            backup_info_dict[Db2JsonConstant.DB2_DATA_BAK_END_TIME] = end_time
        if copy_log_ext_info:
            backup_info_dict.update(copy_log_ext_info)
        LOGGER.info(f"Start saving backup time info: {backup_info_dict}, job id: {self._job_id}.")
        cache_path = self.param_util.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY)
        output_file_name = "{}{}".format("backupTimeInfo", self._job_id)
        output_file_path = os.path.join(cache_path, output_file_name)
        output_execution_result(output_file_path, backup_info_dict)
        LOGGER.info(f"Save backup time info success, file: {output_file_path}.")

    def get_latest_backup_loc(self, cache_path, user_name):
        LOGGER.info("Start to get data copy time")
        param = dict()
        app_type = self.param_util.get_applications()
        param["application"] = app_type
        param["types"] = [CopyDataTypeEnum.FULL_COPY.value, CopyDataTypeEnum.LOG_COPY.value]
        param["copyid"] = self._job_id
        param["jobId"] = self._job_id
        param_file = os.path.join(cache_path, "copy_info_param")
        out_file = os.path.join(cache_path, "copy_info")
        output_execution_result(param_file, param)
        rpc_tool_path = os.path.join(ParamConstant.BIN_PATH, "rpctool.sh")
        cmd = f"\"{rpc_tool_path}\" QueryPreviousCopy \"{param_file}\" \"{out_file}\""
        encoding = get_lang_value(user_name)
        code, out, err = execute_cmd(cmd, encoding=encoding)
        if code != CMDResult.SUCCESS.value:
            return {}
        out_info = Db2CommonUtil.read_json_from_file(out_file)
        LOGGER.info(f'Get out file:{out_info}')
        return out_info.get("extendInfo", {}).get("extendInfo")

    def handle_archive_log_for_log_backup(self, os_user, db_name):
        # 记录执行归档命令前的时间，用于查询归档日志时指定since参数
        arch_log_since_timestamp = time.strftime("%Y%m%d%H%M%S", time.localtime())
        time.sleep(NumberConst.THREE)
        # 手动归档日志
        Db2BackupUtil.execute_manual_archive_log(os_user, db_name)
        # 查找最新手动归档日志
        archive_log, arch_e_time = Db2BackupUtil.get_latest_archive_log(os_user, db_name,
                                                                        since_timestamp=arch_log_since_timestamp)
        # 休眠5秒，可能存在无法查询的情况
        time.sleep(NumberConst.FIVE)
        # 查询最新手动归档日志的开始时间、结束时间
        log_param = LogBackupParam(user_name=os_user, db_name=db_name)
        _, end_time = Db2BackupUtil.get_archive_log_start_and_end_time(log_param, archive_log,
                                                                       arch_cmd_end_time=arch_e_time)
        LOGGER.info(f"handle_archive_log_for_log_backup success, job id: {self._job_id}.")
        return end_time, archive_log

    def get_previous_copy_ext_info(self, cache_path, cur_log_chain):
        """获取日志备份时依赖的上一个副本的扩展信息"""
        LOGGER.info(f"Start to get previous copy info on log backup, current log chain: {cur_log_chain}.")
        app_type = self.param_util.get_applications()
        # 日志备份：首先查找最新的日志副本
        latest_log_copy_info = Db2BackupUtil.query_pre_copy_info_by_rpc_tool(
            app_type, self._job_id, self._sub_job_id, cache_path, [CopyDataTypeEnum.LOG_COPY.value])
        # 存在最新的日志副本
        if latest_log_copy_info:
            latest_log_copy_ext_info = latest_log_copy_info.get("extendInfo", {})
            copy_log_chain = latest_log_copy_ext_info.get(Db2JsonConstant.COPY_LOG_EXT_KEY, {}) \
                .get(Db2JsonConstant.LOG_CHAIN_KEY)
            LOGGER.info(f"Get previous log backup copy info success, copy log chain: {copy_log_chain}, "
                        f"current log chain: {cur_log_chain}.")
            # 最新日志副本log chain和当前log chain一致
            if cur_log_chain and cur_log_chain == copy_log_chain:
                return latest_log_copy_ext_info, CopyDataTypeEnum.LOG_COPY.value
            LOGGER.warning(f"Current log chain: {cur_log_chain} is inconsistent with log copy "
                           f"log chain: {copy_log_chain}.")
        # 日志备份：不存在最新的日志副本或者log chain不一致，再查找依赖的全量副本
        latest_log_copy_ext_info, copy_type = self.get_pre_full_log_ext_info(cache_path, cur_log_chain)
        return latest_log_copy_ext_info, copy_type

    def get_pre_full_log_ext_info(self, cache_path, cur_log_chain):
        app_type = self.param_util.get_applications()
        latest_full_copy_info = Db2BackupUtil.query_pre_copy_info_by_rpc_tool(
            app_type, self._job_id, self._sub_job_id, cache_path, [CopyDataTypeEnum.FULL_COPY.value])
        # 不存在最新的全量副本
        if not latest_full_copy_info:
            LOGGER.error(f"Get previous full backup copy info failed, job id: {self._job_id}.")
            raise ErrCodeException(Db2ErrCode.NOT_EXIT_WAL_BACKUP_FILE_AND_SNAPSHOT_BACKUP)
        latest_full_copy_ext_info = latest_full_copy_info.get("extendInfo", {})
        copy_log_chain = latest_full_copy_ext_info.get(Db2JsonConstant.COPY_LOG_EXT_KEY, {}) \
            .get(Db2JsonConstant.LOG_CHAIN_KEY)
        LOGGER.info(f"Get previous full backup copy info success, extend info: {latest_full_copy_ext_info}, "
                    f"full copy log chain: {copy_log_chain}, current log chain: {cur_log_chain}.")
        # 最新全量副本log chain和当前log chain不一致
        if cur_log_chain and cur_log_chain != copy_log_chain:
            LOGGER.warning(f"Current log chain: {cur_log_chain} is inconsistent with full copy "
                           f"log chain: {copy_log_chain}.")
            raise ErrCodeException(Db2ErrCode.ERR_RESTORED)
        return latest_full_copy_ext_info, CopyDataTypeEnum.FULL_COPY.value

    def exec_single_database_log_backup_db(self, db_name, user_name):
        """
        Run the single-node log backup: archive the current log, work out which
        archive logs are needed since the previous copy, and copy them per
        partition into the log repository mount directory.

        :param db_name: database whose archive logs are backed up
        :param user_name: OS user owning the DB2 instance
        :return: LogBackResult with the backup time, the previous copy's end
                 timestamp, this copy's log extend info and the log mount path
        :raises ErrCodeException: when the hostname has no partition in the
                 instance node configuration
        """
        LOGGER.info(f"Start executing single log backup ...")
        cache_path = self.param_util.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        app_info = self.param_util.get_applications()
        since_timestamp = Db2BackupUtil.build_list_his_since_timestamp_for_log_bak(
            app_info, self._job_id, self._sub_job_id, cache_path)
        # Activate the database, then manually archive the current log.
        Db2CommonUtil.activate_db_status(user_name, db_name)
        Db2BackupUtil.execute_manual_archive_log(user_name, db_name)
        # Locate the manual-archive record just produced.
        archive_log, arch_cmd_end_time = Db2BackupUtil.get_latest_archive_log(
            user_name, db_name, since_timestamp=since_timestamp)
        log_chain = Db2CommonUtil.get_current_log_chain(user_name, db_name)
        # Query the start/end time of that latest manual archive log.
        log_param = LogBackupParam(user_name=user_name, db_name=db_name,
                                   log_chain=log_chain, since_timestamp=since_timestamp)
        log_time_tuple = Db2BackupUtil.get_arch_log_time(log_param, archive_log, arch_cmd_end_time=arch_cmd_end_time)
        # Record the newest archive log of every partition of the database.
        all_ptn_nums = Db2ParamUtil.get_partition_nums_for_backup(user_name, db_name)
        copy_log_ext_info = {
            Db2JsonConstant.ASSOCIATED_COPIES: [],
            Db2JsonConstant.COPY_LOG_EXT_KEY: {
                Db2JsonConstant.LOG_CHAIN_KEY: log_chain,
                Db2JsonConstant.PTN_NUM_LOG_MAP_KEY: {}
            }
        }
        for ptn_num in all_ptn_nums:
            tmp_archive_log, _ = Db2BackupUtil.get_latest_archive_log(
                user_name, db_name, since_timestamp=since_timestamp, ptn_num=ptn_num)
            copy_log_ext_info[Db2JsonConstant.COPY_LOG_EXT_KEY][Db2JsonConstant.PTN_NUM_LOG_MAP_KEY][ptn_num] = \
                tmp_archive_log
        # Partitions served by this host, from the instance node configuration file.
        curr_ptn_nums = Db2CommonUtil.get_partition_no_by_hostname_for_single(user_name)
        if not curr_ptn_nums:
            LOGGER.error(f"The value of hostname is different from that in the configuration file.")
            raise ErrCodeException(Db2ErrCode.USER_BACKUP_ERROR, db_name)
        log_param = LogBackupParam(user_name=user_name, db_name=db_name,
                                   cache_path=cache_path, cur_node_ptn_nums=curr_ptn_nums,
                                   since_timestamp=since_timestamp)
        ptn_log_locs_map, pre_copy_end_timestamp = self.get_ptn_logs_dict_of_cur_node(log_param, copy_log_ext_info)
        # Create the NODExxxx directories in the log repository mount point.
        log_mount_dir = self.param_util.get_repositories_path(RepositoryDataTypeEnum.LOG_REPOSITORY.value)
        Db2CommonUtil.create_node_dir_for_dpf_log_backup(user_name, log_mount_dir, curr_ptn_nums)
        for ptn_num in curr_ptn_nums:
            cur_node_name = f'NODE{str(ptn_num).rjust(4, "0")}'
            tmp_arch_log_locs = ptn_log_locs_map.get(str(ptn_num), [])
            Db2BackupUtil.backup_archive_logs_for_ptn(user_name, tmp_arch_log_locs, log_mount_dir, cur_node_name)
        Db2CommonUtil.deactivate_db_status(user_name, db_name)
        return LogBackResult(bak_time=log_time_tuple[1], pre_copy_end_timestamp=pre_copy_end_timestamp,
                             copy_log_ext_info=copy_log_ext_info, backup_path=log_mount_dir)

    def delete_archive_log(self, log_param, log_chain, curr_ptn_nums, pre_copy_end_timestamp, user_name):
        """删除归档日志"""
        # 获取上次副本的结束时间，转换时间戳并倒退一小时
        time_format = '%Y%m%d%H%M%S'
        time_str = time.localtime(pre_copy_end_timestamp)
        original_time = time.strftime(time_format, time_str)
        original_time = datetime.strptime(original_time, time_format)
        one_hour_ago = original_time - timedelta(hours=1)
        save_archive_log_scope = one_hour_ago.strftime(time_format)
        encoding = get_lang_value(user_name)
        LOGGER.info(f"Save archive log within this time point: {save_archive_log_scope}.")
        for ptn_num in curr_ptn_nums:
            # 获取截止到上一次全量备份时间戳的归档日志文件
            query_archive_log_save_cmd = self.build_query_cmd(save_archive_log_scope, ptn_num, log_param)
            return_code, std_out, std_err = execute_cmd(query_archive_log_save_cmd, encoding=encoding)
            if return_code != CMDResult.SUCCESS.value or "errors" in std_out or "失败" in std_out:
                LOGGER.error(f"Execute cmd error {str(std_out)}, job id: {self._job_id}.")
                return False
            save_archive_log_dict = self.filter_archive_log_location(std_out, log_chain)
            # 获取所有的归档日志
            save_archive_log_scope = ''
            query_archive_log_all_cmd = self.build_query_cmd(save_archive_log_scope, ptn_num, log_param)
            return_code, std_out, std_err = execute_cmd(query_archive_log_all_cmd, encoding=encoding)
            if return_code != CMDResult.SUCCESS.value or "errors" in std_out or "失败" in std_out:
                LOGGER.error(f"Execute cmd error {str(std_out)}, job id: {self._job_id}.")
                return False
            all_archive_log_dict = self.filter_archive_log_location(std_out, log_chain)
            # 筛选出要删除的归档日志
            archive_log_delete = []
            for archive_log_info in all_archive_log_dict.keys():
                if archive_log_info not in save_archive_log_dict.keys():
                    archive_log_delete.append(all_archive_log_dict[archive_log_info].get("location"))
            LOGGER.info(f"Delete archive log file: {archive_log_delete}.")
            for path in archive_log_delete:
                if os.path.isfile(path):
                    Db2BackupUtil.remove_temp_file_ignore_exception(path)
            return True

    @staticmethod
    def filter_archive_log_location(std_out, log_chain):
        log_loc_dict = dict()
        tmp_log_name = None
        arch_log_info_begin_reg = r"\s*X\s+D\s+[\d]{14,}\s+[1|2]\s+D\s+(S[\d]{7}\.LOG)\s+"
        arch_log_info_reg = f"{arch_log_info_begin_reg}{str(log_chain)}\s*"
        for tmp_line in std_out.split('\n'):
            tmp_match_log_ret = re.match(arch_log_info_reg, tmp_line)
            if re.match(arch_log_info_reg, tmp_line):
                tmp_log_name = tmp_match_log_ret.groups()[0]
                log_loc_dict[tmp_log_name] = dict()
                continue
            if tmp_log_name and re.search(rf".+Location:.+/{tmp_log_name}$", tmp_line):
                log_loc_dict[tmp_log_name]["location"] = tmp_line.split(':')[-1].strip()
                tmp_log_name = None
            elif tmp_log_name and re.search(rf".+位置：.+/{tmp_log_name}$", tmp_line):
                log_loc_dict[tmp_log_name]["location"] = tmp_line.split('：')[-1].strip()
                tmp_log_name = None
        return log_loc_dict

    @staticmethod
    def build_query_cmd(query_time, ptn_num, log_param):
        """Build a ``db2 list history archive log`` query command.

        :param query_time: timestamp string for a "since" query; falsy means "all"
        :param ptn_num: partition number, used only for DPF clusters
        :param log_param: parameters carrying user_name/db_name/deploy_type
        :return: shell command string executed via ``su - <user>``
        """
        log_scope = f"since {query_time}" if query_time else "all"
        # DPF clusters route the query to a single partition through db2_all.
        if log_param.deploy_type == Db2Const.DPF_CLUSTER_TYPE and ptn_num is not None:
            return (f"su - {log_param.user_name} -c 'db2_all "
                    f"\"<<+{ptn_num}<db2 list history archive log {log_scope} for db {log_param.db_name}\"'")
        return (f"su - {log_param.user_name} -c 'db2 list history archive log {log_scope} "
                f"for db {log_param.db_name}'")

    def get_ptn_logs_dict_of_cur_node(self, log_param: LogBackupParam, log_ext_info):
        """
        Determine, for every partition of this node, the archive logs to back
        up, based on the copy (log or full) that this log backup depends on.

        :param log_param: log backup parameters (cache path, partitions, ...)
        :param log_ext_info: this backup's log extend info; its
                ASSOCIATED_COPIES entry is filled in from the dependent copy
        :return: (partition -> archive log location list, end time of the
                dependent copy)
        :raises ErrCodeException: when the logs are incomplete even against the
                full copy, or the dependent copy has no backup image timestamp
        """
        ptn_log_locs_map = dict()
        log_chain = log_ext_info.get(Db2JsonConstant.COPY_LOG_EXT_KEY, {}).get(Db2JsonConstant.LOG_CHAIN_KEY)
        # Latest copy this log backup depends on (log copy preferred over full).
        pre_copy_ext_info, depend_copy_type = self.get_previous_copy_ext_info(log_param.cache_path, log_chain)
        if depend_copy_type == CopyDataTypeEnum.LOG_COPY.value:
            # Depending on a log copy: verify the needed logs still exist on disk.
            is_complete, ptn_log_locs_map = self.check_integrity_of_need_bak_logs(
                log_param, log_ext_info, pre_copy_ext_info,
                is_depend_log=True)
            if not is_complete:
                # Logs incomplete against the log copy: fall back to the full copy.
                pre_copy_ext_info, depend_copy_type = self.get_pre_full_log_ext_info(log_param.cache_path, log_chain)
        if depend_copy_type == CopyDataTypeEnum.FULL_COPY.value:
            # Depending on a full copy: verify log integrity against it.
            is_complete, ptn_log_locs_map = self.check_integrity_of_need_bak_logs(
                log_param, log_ext_info, pre_copy_ext_info,
                is_depend_log=False)
            if not is_complete:
                # Incomplete even against the full copy: the log backup cannot proceed.
                raise ErrCodeException(Db2ErrCode.ERR_INVALID_LOG_COPY)
        pre_copy_bak_img_time = pre_copy_ext_info.get(Db2JsonConstant.BAK_IMG_TIMESTAMP)
        if not pre_copy_bak_img_time:
            err_msg = "The backup image timestamp of previous copy is empty when executing log backup"
            LOGGER.error(err_msg)
            raise ErrCodeException(Db2ErrCode.USER_BACKUP_ERROR, message=err_msg)
        # A log copy records COPY_END_TIME; a full copy records COPY_BAK_TIME.
        depend_copy_end_time = pre_copy_ext_info.get(Db2JsonConstant.COPY_END_TIME) \
            if depend_copy_type == CopyDataTypeEnum.LOG_COPY.value \
            else pre_copy_ext_info.get(Db2JsonConstant.COPY_BAK_TIME)
        assoc_copies = pre_copy_ext_info.get(Db2JsonConstant.ASSOCIATED_COPIES, [])
        # The finally reported associatedCopies is this list plus the full copies
        # between beginTime and endTime (added outside the plugin).
        log_ext_info[Db2JsonConstant.ASSOCIATED_COPIES] = assoc_copies
        LOGGER.info(f"The copy type that log backup depends on is {depend_copy_type}, backup time: "
                    f"{pre_copy_bak_img_time}, end time: {depend_copy_end_time}, associated copies: {assoc_copies}, "
                    f"database: {log_param.db_name}.")
        return ptn_log_locs_map, depend_copy_end_time

    def check_integrity_of_need_bak_logs(self, log_param: LogBackupParam, log_ext_info, pre_copy_ext_info,
                                         is_depend_log=True):
        """
        Check that the archive logs this backup needs all exist on disk.

        :param log_param: log backup parameters; its ``log_chain`` and
                ``deploy_type`` fields are (re)assigned here as a side effect
        :param log_ext_info: extend info of the current log backup
        :param pre_copy_ext_info: extend info of the copy this backup depends on
        :param is_depend_log: True when depending on a log copy, False for full
        :return: (bool, dict) completeness flag; partition -> needed log list
        :raises ErrCodeException: when a partition has no archive log recorded
        """
        LOGGER.info(f"Start checking integrity of need backup logs, "
                    f"database: {log_param.db_name}, depend log: {is_depend_log}.")
        log_param.log_chain = log_ext_info.get(Db2JsonConstant.COPY_LOG_EXT_KEY, {}).get(Db2JsonConstant.LOG_CHAIN_KEY)
        ptn_log_locs_map = dict()
        for ptn_num in log_param.cur_node_ptn_nums:
            # Newest archive log of this partition in the current backup.
            tmp_stop_log = log_ext_info.get(Db2JsonConstant.COPY_LOG_EXT_KEY, {}) \
                .get(Db2JsonConstant.PTN_NUM_LOG_MAP_KEY, {}).get(ptn_num)
            if not tmp_stop_log:
                err_msg = f"The archive log of partition {ptn_num} is empty when executing log backup"
                LOGGER.error(err_msg)
                raise ErrCodeException(Db2ErrCode.USER_BACKUP_ERROR, message=err_msg)
            # Newest archive log of this partition in the dependent copy.
            tmp_pre_copy_stop_log = pre_copy_ext_info.get(Db2JsonConstant.COPY_LOG_EXT_KEY, {}) \
                .get(Db2JsonConstant.PTN_NUM_LOG_MAP_KEY, {}).get(ptn_num)
            # NOTE(review): deploy_type is forced to SINGLE on every iteration —
            # presumably so the lookup below avoids the db2_all path; confirm.
            log_param.deploy_type = Db2Const.SINGLE
            tmp_archive_log_locs = Db2CommonUtil.get_need_backup_logs_for_partition(
                log_param, tmp_pre_copy_stop_log, tmp_stop_log, ptn_num=ptn_num,
                is_depend_log=is_depend_log)
            LOGGER.info(f"Partition {ptn_num} need backup logs: {tmp_archive_log_locs}, database: {log_param.db_name}.")
            # Side effect: remember the selection on the instance for later use.
            self.log_path = tmp_archive_log_locs
            # Every listed log file must exist for the chain to be usable.
            is_completed = Db2CommonUtil.check_file_path_has_existed(tmp_archive_log_locs)
            LOGGER.info(f"The partition {ptn_num} archive logs integrity: {is_completed}.")
            if not is_completed:
                LOGGER.error(f"Archive log files: {tmp_archive_log_locs} are not complete.")
                return False, dict()
            ptn_log_locs_map[str(ptn_num)] = tmp_archive_log_locs
        LOGGER.info(f"Check integrity of need backup logs success, database: {log_param.db_name}.")
        return True, ptn_log_locs_map

    def get_copy_info(self, backup_type, json_param):
        copy_path = f'{Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(backup_type)}_{self._job_id}'
        rep_copy_info = ParamFileUtil.get_rep_info(json_param.get("job"), RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        return [
            CopyInfoRepModel(
                id=rep_copy_info.get('id'),
                repositoryType=rep_copy_info.get("repositoryType"),
                isLocal=rep_copy_info.get("isLocal"),
                protocol=0,
                remotePath=f"{rep_copy_info.get('remotePath')}/{copy_path}",
                remoteHost=rep_copy_info.get("remoteHost"),
                extendInfo=rep_copy_info.get('extendInfo')
            )
        ]

    def execute_abort_sh(self, cmd_sh, encoding):
        if isinstance(cmd_sh, list):
            ret_code, std_out, std_err = Db2CommonUtil.execute_cmd_list(cmd_sh, encoding=encoding)
        else:
            ret_code, std_out, std_err = execute_cmd(cmd_sh, encoding=encoding)
        if ret_code != CMDResult.SUCCESS.value or "errors" in std_out or "失败" in std_out:
            LOGGER.error(f"Execute cmd failed, out info: {str(std_out)}, err info: {str(std_err)}, "
                         f"job id: {self._job_id}.")
            return ""
        return std_out

    def abort_job_service(self, os_name, db_name):
        # 停止备份任务
        encoding = get_lang_value(os_name)
        if str(encoding) == "gbk":
            find_backup_cmd = [f"su - {os_name} -c 'db2 list application show detail'", "grep 执行备份",
                               f"grep {db_name}"]
        else:
            find_backup_cmd = [f"su - {os_name} -c 'db2 list application show detail'", "grep Backup",
                               f"grep {db_name}"]
        std_out = self.execute_abort_sh(find_backup_cmd, encoding)
        if not std_out:
            return True
        backup_info = std_out.split(" ")
        Db2CommonUtil.check_injection(backup_info[2])
        stop_cmd = f"su - {os_name} -c 'db2 \"force application {backup_info[2]}\"'"
        LOGGER.info(f"Start to stop the backup task: {stop_cmd}")
        stop_out = self.execute_abort_sh(stop_cmd, encoding)
        if not std_out:
            return False
        # SQL0104N 该备份任务不存在
        if "successfully" in stop_out or "SQL0104N" in stop_out or "成功" in std_out:
            return True
        return False

    def pre_backup_path(self, user_name, backup_type, data_path_list: list) -> str:
        """
        param user_name: 执行备份用户名
        param backup_type: 备份类型
        param data_path_list: HA备份路径多个[xxx1, xxx2]，其余是单个[xxx]
        return:
        """
        backup_path_list = []
        user_id = Db2CommonUtil.get_os_user_id_by_os_user(user_name)
        group_id = Db2CommonUtil.get_group_id_by_os_user(user_name)
        for path in data_path_list:
            backup_path = os.path.join(path,
                                       f'{Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(backup_type)}_{self._job_id}')
            if not os.path.exists(backup_path):
                os.mkdir(backup_path)
                os.chown(backup_path, user_id, group_id)
            backup_path_list.append(backup_path)
            LOGGER.info(f'DB2 single database copy path:{backup_path}, {self.get_log_common()}')
        Db2CommonUtil.check_path_list(*backup_path_list)
        return ",".join(backup_path_list)

    def get_full_copy_extend_info(self, cache_path, application, user_name):
        LOGGER.info("Start to get full copy time")
        param = dict()
        param["application"] = application
        param["types"] = [CopyDataTypeEnum.FULL_COPY.value]
        param["copyId"] = self._job_id
        param["jobId"] = self._job_id
        param_file = os.path.join(cache_path, "copy_info_param_1")
        out_file = os.path.join(cache_path, "copy_info_1")
        output_execution_result(param_file, param)
        rpc_tool_path = os.path.join(ParamConstant.BIN_PATH, "rpctool.sh")
        cmd = f"\"{rpc_tool_path}\" QueryPreviousCopy \"{param_file}\" \"{out_file}\""
        encoding = get_lang_value(user_name)
        code, out, err = execute_cmd(cmd, encoding=encoding)
        if code != CMDResult.SUCCESS.value:
            return {}
        out_info = Db2CommonUtil.read_json_from_file(out_file)
        LOGGER.info(f'Get out file:{out_info}')
        return out_info.get("extendInfo", {})

    def get_since_timestamp(self):
        cache_path = self.param_util.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        app_info = self.param_util.get_applications()
        try:
            since_timestamp = Db2BackupUtil.build_list_his_since_timestamp_for_log_bak(
                app_info, self._job_id, self._sub_job_id, cache_path)
        except ErrCodeException:
            return None
        return since_timestamp
