#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import json
import re

from common.const import CopyDataTypeEnum, RestoreTypeEnum, RepositoryDataTypeEnum, BackupTypeEnum
from common.logger import Logger
from common.number_const import NumberConst
from common.parse_parafile import get_env_variable
from common.util.cmd_utils import get_livemount_path
from db2.comm.const import Db2JsonConstant, Db2JobName, Db2CommonConst, Db2Regex
from db2.comm.db2_exception import ErrCodeException
from db2.comm.error_code import Db2ErrCode
from db2.comm.util.comm_decorators import adapt_paths
from db2.comm.util.common_util import Db2CommonUtil
from db2.comm.constant import ParamField, ReplicationType

LOGGER = Logger().get_logger(filename="db2.log")


class Db2ParamUtil:
    @staticmethod
    def get_job_id(param_dict):
        """Return the job ID from the task parameters ("" when absent)."""
        job_info = param_dict.get(Db2JsonConstant.JOB, {})
        return job_info.get(Db2JsonConstant.JOB_ID, "")

    @staticmethod
    def get_sub_job_id(param_dict):
        """Return the sub-job ID from the task parameters ("" when absent)."""
        sub_job_info = param_dict.get(Db2JsonConstant.SUB_JOB, {})
        return sub_job_info.get(Db2JsonConstant.SUB_JOB_ID, "")

    @staticmethod
    def get_node_ip(param_dict):
        """Return one local IP that also appears among the target environment's node endpoints ("" if none)."""
        target_env = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.TARGET_ENV, {})
        endpoints = {node.get(Db2JsonConstant.ENDPOINT) for node in target_env.get(Db2JsonConstant.NODES, [])}
        matched_ips = endpoints.intersection(Db2CommonUtil.get_local_ips())
        # Set intersection order is arbitrary, matching the original behavior of
        # taking the first element of the intersection list.
        return next(iter(matched_ips), "")

    @staticmethod
    def is_new_location_restore(param_dict):
        """是否恢复到新位置"""
        tgt_loc = param_dict.get("job", {}).get("extendInfo", {}).get("targetLocation")
        return tgt_loc != "original"

    @staticmethod
    def get_tgt_db_dir(param_dict):
        """Return the target database's local directory from the job's target object ("" when absent)."""
        target_object = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.TARGET_OBJECT, {})
        return target_object.get(Db2JsonConstant.EXTEND_INFO, {}).get(Db2JsonConstant.LOCAL_DB_DIR, "")

    @staticmethod
    def is_restore_to_same_db_path(param_dict):
        """Return True when the source and target local database directories are identical."""
        last_copy = Db2ParamUtil.parse_copies(param_dict)[-1]
        src_db_dir = last_copy.get("protectObject", {}).get("extendInfo", {}).get("localDatabaseDirectory", "")
        tgt_object = param_dict.get("job", {}).get("targetObject", {})
        tgt_db_dir = tgt_object.get("extendInfo", {}).get("localDatabaseDirectory", "")
        LOGGER.info(f"Detect if restore to same path, source database directory is {src_db_dir}, "
                    f"target database directory is {tgt_db_dir}.")
        return src_db_dir == tgt_db_dir

    @staticmethod
    def get_nodes_num(param_dict):
        """获取源数据库和目标数据库节点数目"""
        src_nodes_num = len(Db2ParamUtil.parse_copies(param_dict)[-1].get("protectEnv", {}).get("nodes", []))
        tgt_nodes_num = len(param_dict.get("job", {}).get("targetEnv", {}).get("nodes", []))
        return src_nodes_num, tgt_nodes_num

    @staticmethod
    def get_partition_nums_for_backup(os_user, db_name):
        """Return the database's partition numbers as a list of stripped strings (backup flow)."""
        partition_str = Db2CommonUtil.get_database_partitionnums(os_user, db_name)
        return [num.strip() for num in partition_str.split(",")]

    @staticmethod
    def get_partition_nums_from_copy(param_dict):
        """恢复时获取源数据库的分片信息"""
        src_partition_num_str = Db2ParamUtil.parse_copies(param_dict)[-1].get("extendInfo", {}) \
            .get("partitionNumber", "")
        if not src_partition_num_str:
            src_partition_num_str = Db2ParamUtil.parse_copies(param_dict)[-1].get("extendInfo", {}). \
                get("extendInfo", {}).get("partitionNumber", "")
        return [str(i).strip() for i in src_partition_num_str.split(",")]

    @staticmethod
    def get_partition_nums(param_dict, user_name):
        """Return (source_partition_numbers, target_partition_numbers) for a restore."""
        src_partition_nums = Db2ParamUtil.get_partition_nums_from_copy(param_dict)
        tgt_db_name = Db2ParamUtil.get_db_name_when_restore_db(param_dict)
        tgt_partition_str = Db2CommonUtil.get_database_partitionnums(user_name, tgt_db_name)
        tgt_partition_nums = [num.strip() for num in tgt_partition_str.split(",")]
        return src_partition_nums, tgt_partition_nums

    @staticmethod
    def get_catalog_num(param_dict):
        """获取源数据库和目标数据库编目号"""
        src_obj_ext_info = Db2ParamUtil.parse_copies(param_dict)[-1].get("protectObject", {}).get("extendInfo", {})
        src_catalog_num = src_obj_ext_info.get("catalogNumber", "")
        tgt_obj_ext_info = param_dict.get("job", {}).get("targetObject", {}).get("extendInfo", {})
        tgt_catalog_num = tgt_obj_ext_info.get("catalogNumber", "")
        return src_catalog_num, tgt_catalog_num

    @staticmethod
    def get_sub_job_name(param_dict):
        """Return the backup sub-job's name ("" when absent)."""
        sub_job_info = param_dict.get(Db2JsonConstant.SUB_JOB, {})
        return sub_job_info.get(Db2JsonConstant.JOB_NAME, "")

    @staticmethod
    def get_sub_job_name_for_restore(param_dict):
        """Return the restore sub-job's name ("" when absent)."""
        sub_job_info = param_dict.get(Db2JsonConstant.SUB_JOB, {})
        return sub_job_info.get(Db2JsonConstant.JOB_NAME, "")

    @staticmethod
    def get_os_user(task_name, pid):
        """Resolve the resource's OS user name from the task environment variables."""
        # Permission-check tasks read the auth key from the appEnv section; all
        # other tasks read it from the job's protectEnv section.
        if task_name == Db2JobName.QUERY_PERMISSION:
            key_prefix = Db2JsonConstant.APPENV_NODES_0_AUTH_AUTHKEY
        else:
            key_prefix = Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY
        return get_env_variable(f"{key_prefix}_{pid}")

    @staticmethod
    def get_tgt_os_user_when_restore(pid):
        """Resolve the restore target's OS user name from the task environment variables."""
        return get_env_variable(f"{Db2JsonConstant.JOB_TARGETENV_NODES_0_AUTH_AUTHKEY}_{pid}")

    @staticmethod
    def get_tgt_inst_name_when_restore_db(param_dict):
        """获取恢复目标数据库的实例名"""
        return param_dict.get("job", {}).get("targetObject", {}).get("parentName", "")

    @staticmethod
    def get_tgt_inst_name_when_restore_ts(param_dict):
        """获取恢复目标表空间集的实例名"""
        return param_dict.get("job", {}).get("targetObject", {}).get("extendInfo", {}).get("instance", "")

    @staticmethod
    def get_inst_name_of_db_copy(param_dict):
        """获取数据库副本中的实例名"""
        copies = Db2ParamUtil.parse_copies(param_dict)
        return copies[0].get("protectObject", {}).get("parentName", "")

    @staticmethod
    def get_inst_name_of_ts_copy(param_dict):
        """获取表空间集副本中的实例名"""
        copies = Db2ParamUtil.parse_copies(param_dict)
        return copies[0].get("protectObject", {}).get("extendInfo", {}).get("instance", "")

    @staticmethod
    def get_table_spaces_of_ts_copy(param_dict):
        """获取表空间集副本资源的表空间名称列表"""
        copies = Db2ParamUtil.parse_copies(param_dict)
        table_spaces_str = copies[0].get("protectObject", {}).get("extendInfo", {}).get("table", "")
        return list(ts.strip() for ts in table_spaces_str.split(",") if ts.strip()) if table_spaces_str else []

    @staticmethod
    def get_catalog_ip_of_dpf_db(param_dict):
        """获取DPF集群数据库的编目节点IP"""
        return param_dict.get("job", {}).get("protectObject", {}).get("extendInfo", {}).get("catalogIp", "")

    @staticmethod
    def get_catalog_ip_when_restore(param_dict):
        """恢复时获取DPF集群数据库的编目节点IP"""
        return param_dict.get("job", {}).get("targetObject", {}).get("extendInfo", {}).get("catalogIp", "")

    @staticmethod
    def get_backup_type(param_dict):
        """获取备份类型"""
        return param_dict.get("job", {}).get("jobParam", {}).get("backupType", 0)

    @staticmethod
    def get_db_name_for_ts(param_dict):
        """Return the database name owning the protected tablespace set (injection-checked)."""
        protect_object = param_dict.get("job", {}).get("protectObject", {})
        db_name = protect_object.get("parentName", "")
        Db2CommonUtil.check_injection(db_name)
        return db_name

    @staticmethod
    def get_db_name_for_ts_restore(param_dict):
        """Return the database name owning the restore target tablespace set (injection-checked)."""
        target_object = param_dict.get("job", {}).get("targetObject", {})
        db_name = target_object.get("parentName", "")
        Db2CommonUtil.check_injection(db_name)
        return db_name

    @staticmethod
    def get_tgt_instance_name_when_restore_ts(param_dict):
        """获取恢复表空间集的实例名"""
        return param_dict.get("job", {}).get("targetObject", {}).get("extendInfo", {}).get("instance", "")

    @staticmethod
    def get_db_name_when_restore_db(param_dict):
        """Return the restore target database name (injection-checked)."""
        target_object = param_dict.get("job", {}).get("targetObject", {})
        db_name = target_object.get("name", "")
        Db2CommonUtil.check_injection(db_name)
        return db_name

    @staticmethod
    def get_db_name_of_db_copy(param_dict):
        """Return the database name stored in the first database copy (injection-checked)."""
        first_copy = Db2ParamUtil.parse_copies(param_dict)[0]
        db_name = first_copy.get("protectObject", {}).get("name", "")
        Db2CommonUtil.check_injection(db_name)
        return db_name

    @staticmethod
    def get_db_name_when_restore_ts(param_dict):
        """Return the database name for a tablespace-set restore (injection-checked)."""
        target_object = param_dict.get("job", {}).get("targetObject", {})
        db_name = target_object.get("parentName", "")
        Db2CommonUtil.check_injection(db_name)
        return db_name

    @staticmethod
    def get_db_name_of_ts_copy(param_dict):
        """Return the database name stored in the first tablespace-set copy (injection-checked)."""
        first_copy = Db2ParamUtil.parse_copies(param_dict)[0]
        db_name = first_copy.get("protectObject", {}).get("parentName", "")
        Db2CommonUtil.check_injection(db_name)
        return db_name

    @staticmethod
    def get_backup_table_spaces(param_dict):
        """Return the tablespace names selected for backup (injection-checked, possibly empty list)."""
        protect_object = param_dict.get("job", {}).get("protectObject", {})
        raw_names = protect_object.get("extendInfo", {}).get("table", "")
        Db2CommonUtil.check_injection(raw_names)
        if not raw_names:
            return []
        # The names are stored as a comma-separated string; skip blank entries.
        return [name.strip() for name in raw_names.split(",") if name.strip()]

    @staticmethod
    def get_restore_table_spaces(param_dict):
        """Return the tablespace names selected for restore (injection-checked, possibly empty list)."""
        target_object = param_dict.get("job", {}).get("targetObject", {})
        raw_names = target_object.get("extendInfo", {}).get("table", "")
        Db2CommonUtil.check_injection(raw_names)
        if not raw_names:
            return []
        # The names are stored as a comma-separated string; skip blank entries.
        return [name.strip() for name in raw_names.split(",") if name.strip()]

    @staticmethod
    def get_restore_copy_id(param_dict):
        """Return the restoreCopyId parameter of the restore job ("" when absent)."""
        extend_info = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.EXTEND_INFO, {})
        return extend_info.get(Db2JsonConstant.RESTORE_COPY_ID, "")

    @staticmethod
    def get_restore_timestamp(param_dict):
        """Return the point-in-time restore timestamp ("" when absent)."""
        extend_info = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.EXTEND_INFO, {})
        return extend_info.get(Db2JsonConstant.RESTORE_TIMESTAMP, "")

    @staticmethod
    def parse_copies(param_dict):
        copies = param_dict.get("job", {}).get("copies", [])
        if not copies:
            raise Exception("The copies value in the param file is empty or does not exist")
        return copies

    @staticmethod
    def parse_job_extend_copies(param_dict):
        return param_dict.get('job', {}).get('extendInfo', {}).get('copies', [])

    @staticmethod
    def get_copies_by_copy_type(all_copies, copy_type):
        """Group the job's copies by copy type and return the list matching copy_type.

        Archive copies (S3/tape) are classified as full copies. Incremental,
        differential and log copies may each occur multiple times in the chain.
        Returns [] for an unknown copy_type.
        """
        type_copies_dict = {
            CopyDataTypeEnum.FULL_COPY.value: [],
            CopyDataTypeEnum.INCREMENT_COPY.value: [],
            CopyDataTypeEnum.DIFF_COPY.value: [],
            CopyDataTypeEnum.LOG_COPY.value: []
        }
        for tmp_copy in all_copies:
            tmp_type = tmp_copy.get("type")
            # Fix: compare against the enum VALUES (.value) for the archive types,
            # matching get_restore_type; the raw "type" field from the job
            # parameters would never equal a plain enum member.
            if tmp_type in (CopyDataTypeEnum.FULL_COPY.value, CopyDataTypeEnum.S3_ARCHIVE.value,
                            CopyDataTypeEnum.TAP_ARCHIVE.value):
                type_copies_dict[CopyDataTypeEnum.FULL_COPY.value].append(tmp_copy)
            elif tmp_type in type_copies_dict:
                type_copies_dict[tmp_type].append(tmp_copy)
        return type_copies_dict.get(copy_type, [])

    @staticmethod
    def get_restore_type(param_dict):
        """Derive the restore type from the copy types attached to the restore job."""
        copies = param_dict.get("job", {}).get("copies", [])
        all_copy_types = [tmp_copy.get("type") for tmp_copy in copies]
        LOGGER.info(f"Start obtaining the recovery type, all copy types: {all_copy_types}.")
        copy_type_set = set(all_copy_types)
        # Archive copies always force a full restore; otherwise the most derived
        # copy type present in the chain decides (log > incremental > diff > full).
        if copy_type_set & {CopyDataTypeEnum.S3_ARCHIVE.value, CopyDataTypeEnum.TAP_ARCHIVE.value}:
            restore_type = RestoreTypeEnum.FULL_RESTORE.value
        elif CopyDataTypeEnum.LOG_COPY.value in copy_type_set:
            restore_type = RestoreTypeEnum.LOG_RESTORE.value
        elif CopyDataTypeEnum.INCREMENT_COPY.value in copy_type_set:
            restore_type = RestoreTypeEnum.INCRE_RESTORE.value
        elif CopyDataTypeEnum.DIFF_COPY.value in copy_type_set:
            restore_type = RestoreTypeEnum.DIFF_RESTORE.value
        else:
            restore_type = RestoreTypeEnum.FULL_RESTORE.value
        LOGGER.info(f"Obtain recovery type success, restore type: {restore_type}")
        return restore_type

    @staticmethod
    def get_path_from_repositories(repositories: list, repo_type, need_all: bool = False) -> list:
        """Collect mount paths of the repositories matching repo_type.

        By default returns the first path of each matching repository; with
        need_all=True each matching repository contributes its full path list
        (so the result is a list of lists). Raises when a repository has no
        path, a path fails the trustlist check, or nothing matches.
        """
        mount_paths = []
        for repo in repositories:
            tmp_repo_type = repo.get(Db2JsonConstant.REPOSITORY_TYPE)
            if tmp_repo_type != repo_type:
                continue
            paths = repo.get(Db2JsonConstant.PATH)
            if not paths:
                LOGGER.error(f"The path value in repository is empty, repository type: {tmp_repo_type}.")
                raise Exception("The path value in repository is empty")
            if need_all:
                for path in paths:
                    if not Db2CommonUtil.verify_path_trustlist(path):
                        raise Exception("Invalid repository path")
                mount_paths.append(paths)
            else:
                if not Db2CommonUtil.verify_path_trustlist(paths[0]):
                    raise Exception("Invalid mount path")
                mount_paths.append(paths[0])
        if not mount_paths:
            LOGGER.error(f"The copy mount path list: {mount_paths} is empty.")
            raise Exception("The copy mount path list is empty")
        LOGGER.info(f"Get copy mount path success, paths: {mount_paths}, repository type: {repo_type}.")
        return mount_paths

    @staticmethod
    def get_repository_paths_for_backup(param_dict, repo_type) -> list:
        """Return mount paths of the given repository type from a backup job.

        Repository types: 1 data, 2 cache, 3 log, 4 meta.
        """
        job_info = param_dict.get(Db2JsonConstant.JOB, {})
        return Db2ParamUtil.get_path_from_repositories(
            job_info.get(Db2JsonConstant.REPOSITORIES, []), repo_type)

    @staticmethod
    @adapt_paths
    def get_repository_paths_for_restore(copy_dict: dict, repo_type, job_id="") -> list:
        """Return mount paths of the given repository type from a restore copy.

        Repository types: 1 data, 2 cache, 3 log, 4 meta. The job_id parameter
        must stay in the signature: the adapt_paths decorator reads it.
        """
        return Db2ParamUtil.get_path_from_repositories(
            copy_dict.get(Db2JsonConstant.REPOSITORIES, []), repo_type)

    @staticmethod
    @adapt_paths
    def get_repository_all_paths_for_restore(copy_dict: dict, repo_type, job_id="") -> list:
        """Return ALL mount paths of the given repository type from a restore copy.

        Repository types: 1 data, 2 cache, 3 log, 4 meta. The job_id parameter
        must stay in the signature: the adapt_paths decorator reads it.
        """
        return Db2ParamUtil.get_path_from_repositories(
            copy_dict.get(Db2JsonConstant.REPOSITORIES, []), repo_type, need_all=True)

    @staticmethod
    def get_cache_mount_path_for_restore(param_dict):
        """Return the cache repository mount path to use for a restore job."""
        copies = Db2ParamUtil.parse_copies(param_dict)

        def _cache_path(copy_dict):
            # First cache-repository path attached to the given copy.
            return Db2ParamUtil.get_repository_paths_for_restore(
                copy_dict, RepositoryDataTypeEnum.CACHE_REPOSITORY.value)[0]

        # Non-log restore: the cache repository rides with the last copy in the chain.
        if Db2ParamUtil.get_restore_type(param_dict) != RestoreTypeEnum.LOG_RESTORE.value:
            return _cache_path(copies[-1])
        # Log restore: prefer the newest incremental copy, then the differential
        # copy, and finally fall back to the leading full copy.
        incr_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.INCREMENT_COPY.value)
        if incr_copies:
            return _cache_path(incr_copies[-1])
        diff_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.DIFF_COPY.value)
        if diff_copies:
            return _cache_path(diff_copies[-1])
        return _cache_path(copies[0])

    @staticmethod
    def get_data_mount_path_for_restore(param_dict):
        """Return the data repository mount path to use for a restore job."""
        copies = Db2ParamUtil.parse_copies(param_dict)
        job_id = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.JOB_ID)

        def _data_path(copy_dict):
            # First data-repository path attached to the given copy.
            return Db2ParamUtil.get_repository_paths_for_restore(
                copy_dict, RepositoryDataTypeEnum.DATA_REPOSITORY.value, job_id=job_id)[0]

        # Non-log restore: the data repository rides with the last copy in the chain.
        if Db2ParamUtil.get_restore_type(param_dict) != RestoreTypeEnum.LOG_RESTORE.value:
            return _data_path(copies[-1])
        # Log restore: prefer the newest incremental copy, then the differential
        # copy, and finally fall back to the leading full copy.
        incr_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.INCREMENT_COPY.value)
        if incr_copies:
            return _data_path(incr_copies[-1])
        diff_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.DIFF_COPY.value)
        if diff_copies:
            return _data_path(diff_copies[-1])
        return _data_path(copies[0])

    @staticmethod
    def get_data_mount_all_path_for_restore(param_dict):
        """Return all data repository mount paths of the relevant copy for a restore job."""
        copies = Db2ParamUtil.parse_copies(param_dict)
        job_id = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.JOB_ID)

        def _all_data_paths(copy_dict):
            # Full path list of the first matching data repository of the copy.
            return Db2ParamUtil.get_repository_all_paths_for_restore(
                copy_dict, RepositoryDataTypeEnum.DATA_REPOSITORY.value, job_id=job_id)[0]

        # Non-log restore: the data repository rides with the last copy in the chain.
        if Db2ParamUtil.get_restore_type(param_dict) != RestoreTypeEnum.LOG_RESTORE.value:
            return _all_data_paths(copies[-1])
        # Log restore: prefer the newest incremental copy, then the differential
        # copy, and finally fall back to the leading full copy.
        incr_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.INCREMENT_COPY.value)
        if incr_copies:
            return _all_data_paths(incr_copies[-1])
        diff_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.DIFF_COPY.value)
        if diff_copies:
            return _all_data_paths(diff_copies[-1])
        return _all_data_paths(copies[0])

    @staticmethod
    def get_id_of_copy(copy_dict: dict):
        """Return the copy ID, preferring extendInfo's copy id over the top-level "id"."""
        copy_id = copy_dict.get(Db2JsonConstant.EXTEND_INFO, {}).get(Db2JsonConstant.COPY_ID, "")
        return copy_id or copy_dict.get("id", "")

    @staticmethod
    def get_backup_timestamp_of_copy(copy_dict: dict):
        """获取副本时间戳"""
        return copy_dict.get("timestamp", "")

    @staticmethod
    def get_backup_end_time_of_copy(copy_dict: dict):
        """Return the data backup end time recorded in the copy's extendInfo ("" when absent)."""
        extend_info = copy_dict.get("extendInfo", {})
        return extend_info.get(Db2JsonConstant.DB2_DATA_BAK_END_TIME, "")

    @staticmethod
    def get_backup_image_timestamp_of_copy(copy_dict: dict):
        """Return the copy's backupImageTimestamp (injection-checked, "" when absent).

        Backup copies store the value directly in extendInfo; archive copies nest
        it one extendInfo level deeper.
        """
        extend_info = copy_dict.get("extendInfo", {})
        bak_img_timestamp = extend_info.get("backupImageTimestamp", "")
        if not bak_img_timestamp:
            # Archive copy layout.
            bak_img_timestamp = extend_info.get("extendInfo", {}).get("backupImageTimestamp", "")
        Db2CommonUtil.check_injection(bak_img_timestamp)
        return bak_img_timestamp

    @staticmethod
    def handle_backup_image_timestamp_of_copy(copy):
        """Return the copy's backup-image timestamp, deriving it from the copy timestamp when missing."""
        bak_img_timestamp = Db2ParamUtil.get_backup_image_timestamp_of_copy(copy)
        if bak_img_timestamp:
            return bak_img_timestamp
        LOGGER.warning("The backupImageTimestamp parameter was not found in the extendInfo of the copy.")
        copy_timestamp = Db2ParamUtil.get_backup_timestamp_of_copy(copy)
        return Db2CommonUtil.convert_copy_timestamp_to_restore_time(copy_timestamp)

    @staticmethod
    def get_full_copy_path(copies, job_id="") -> str:
        """Return the directory of the full copy inside its data repository mount.

        For archive (S3/tape) copies this is the first "full_"-prefixed directory
        under the mount path; otherwise "<full prefix>_<copy id>". Raises when
        the resolved path fails the trustlist check.
        """
        full_copy = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.FULL_COPY.value)[0]
        full_mount_path = Db2ParamUtil.get_repository_paths_for_restore(
            full_copy, RepositoryDataTypeEnum.DATA_REPOSITORY.value, job_id=job_id)[0]
        full_copy_dir_pre = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(BackupTypeEnum.FULL_BACKUP.value)
        # Fix: compare against the enum VALUES (.value), consistent with
        # get_restore_type; the raw "type" field from the job parameters would
        # never equal a plain enum member.
        if full_copy.get("type") in (CopyDataTypeEnum.S3_ARCHIVE.value, CopyDataTypeEnum.TAP_ARCHIVE.value):
            # Archive copy: locate the "full_"-prefixed directory under the mount.
            full_copy_dir_name = next(
                (tmp_path for tmp_path in os.listdir(full_mount_path) if tmp_path.startswith("full_")), "")
            full_copy_path = os.path.realpath(os.path.join(full_mount_path, full_copy_dir_name))
        else:
            full_copy_path = os.path.realpath(os.path.join(
                full_mount_path, f"{full_copy_dir_pre}_{Db2ParamUtil.get_id_of_copy(full_copy)}"))
        if not Db2CommonUtil.verify_path_trustlist(full_copy_path):
            raise Exception("Invalid full copy path")
        return full_copy_path

    @staticmethod
    def get_full_copy_path_for_incr_and_diff_restore(copies, job_id="") -> str:
        """Return the full copy's directory when restoring from an incremental or differential copy."""
        full_copy = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.FULL_COPY.value)[0]
        # For incremental/differential restore the mount path rides with the last copy.
        mount_path = Db2ParamUtil.get_repository_paths_for_restore(
            copies[-1], RepositoryDataTypeEnum.DATA_REPOSITORY.value, job_id=job_id)[0]
        dir_prefix = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(BackupTypeEnum.FULL_BACKUP.value)
        path = os.path.realpath(
            os.path.join(mount_path, f"{dir_prefix}_{Db2ParamUtil.get_id_of_copy(full_copy)}"))
        Db2CommonUtil.check_path_list(path)
        return path

    @staticmethod
    def get_all_full_copy_path_for_incr_and_diff_restore(copies, use_san_client=False, job_id=""):
        """Return all existing full-copy directories when restoring from an incremental/differential copy."""
        full_copy = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.FULL_COPY.value)[0]
        # For incremental/differential restore the mount paths ride with the last copy.
        mount_paths = Db2ParamUtil.get_repository_all_paths_for_restore(
            copies[-1], RepositoryDataTypeEnum.DATA_REPOSITORY.value, job_id=job_id)[0]
        dir_prefix = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(BackupTypeEnum.FULL_BACKUP.value)
        # SAN Client mounts insert the copy id between mount path and copy directory.
        san_splice_path = full_copy.get('id', '') if use_san_client else ''
        full_copy_dir = f"{dir_prefix}_{Db2ParamUtil.get_id_of_copy(full_copy)}"
        candidates = (
            os.path.realpath(os.path.join(mount_path, san_splice_path, full_copy_dir))
            for mount_path in mount_paths
        )
        copy_path_list = [candidate for candidate in candidates if os.path.exists(candidate)]
        Db2CommonUtil.check_path_list(*copy_path_list)
        return copy_path_list

    @staticmethod
    def get_incr_copy_paths(copies, use_san_client=False, job_id="") -> list:
        """Return the directories of every incremental copy inside the data repository mount."""
        incr_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.INCREMENT_COPY.value)
        dir_prefix = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(BackupTypeEnum.INCRE_BACKUP.value)
        # For incremental restore the mount path rides with the last copy.
        copy_mount_path = Db2ParamUtil.get_repository_paths_for_restore(
            copies[-1], RepositoryDataTypeEnum.DATA_REPOSITORY.value, job_id=job_id)[0]
        incr_copy_paths = [
            os.path.realpath(os.path.join(
                copy_mount_path,
                # SAN Client mounts insert the copy id between mount path and copy directory.
                incr_copy.get('id', '') if use_san_client else '',
                f"{dir_prefix}_{Db2ParamUtil.get_id_of_copy(incr_copy)}"))
            for incr_copy in incr_copies
        ]
        Db2CommonUtil.check_path_list(*incr_copy_paths)
        return incr_copy_paths

    @staticmethod
    def get_diff_copy_path(copies, use_san_client=False, job_id="") -> str:
        """Return the differential copy's directory inside the data repository mount."""
        diff_copy = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.DIFF_COPY.value)[0]
        # For differential restore the mount path rides with the last copy.
        copy_mount_path = Db2ParamUtil.get_repository_paths_for_restore(
            copies[-1], RepositoryDataTypeEnum.DATA_REPOSITORY.value, job_id=job_id)[0]
        dir_prefix = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(BackupTypeEnum.DIFF_BACKUP.value)
        # SAN Client mounts insert the copy id between mount path and copy directory.
        san_splice_path = diff_copy.get('id', '') if use_san_client else ''
        path = os.path.realpath(os.path.join(
            copy_mount_path, san_splice_path, f"{dir_prefix}_{Db2ParamUtil.get_id_of_copy(diff_copy)}"))
        Db2CommonUtil.check_path_list(path)
        return path

    @staticmethod
    def get_log_copy_paths(copies, use_san_client=False, job_id=""):
        """Return the log-repository mount paths of every log copy in the chain."""
        log_copy_paths = list()
        for log_copy in Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.LOG_COPY.value):
            mount_paths = Db2ParamUtil.get_repository_paths_for_restore(
                log_copy, RepositoryDataTypeEnum.LOG_REPOSITORY.value, job_id=job_id)
            if use_san_client:
                # SAN Client mounts require the copy id appended to each path.
                copy_id = log_copy.get('id', '')
                mount_paths = [os.path.join(path, copy_id) for path in mount_paths]
            log_copy_paths.extend(mount_paths)
        Db2CommonUtil.check_path_list(*log_copy_paths)
        return log_copy_paths

    @staticmethod
    def get_log_copy_paths_for_hadr(param_dict, job_id="") -> list:
        """Return the log-copy mount paths for an HADR database restore.

        Collects the log-repository paths of every log copy in the job. When the
        restore timestamp equals the last log copy's end time and a restoreCopyId
        is present, paths of log copies starting at or after the restore point are
        filtered out; the copy meta info comes either from
        job.extendInfo.associated_log_copies or from the "<restoreCopyId>.meta"
        file located beside the first log copy mount.

        :param param_dict: parsed job parameter dictionary
        :param job_id: forwarded to path adaptation; do not remove
        :raises ErrCodeException: when no log copy path can be collected
        """
        copies = Db2ParamUtil.parse_copies(param_dict)
        restore_timestamp = Db2ParamUtil.get_restore_timestamp(param_dict)
        restore_copy_id = Db2ParamUtil.get_restore_copy_id(param_dict)
        LOGGER.info(f"Start getting hadr log copy paths, restore copy id: {restore_copy_id}, "
                    f"restore timestamp: {restore_timestamp} ...")
        log_copies = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.LOG_COPY.value)
        log_copy_paths = list()
        for tmp_log_copy in log_copies:
            tmp_log_mount_paths = Db2ParamUtil.get_repository_paths_for_restore(
                tmp_log_copy, RepositoryDataTypeEnum.LOG_REPOSITORY.value, job_id=job_id)
            log_copy_paths.extend(tmp_log_mount_paths)
        if not log_copy_paths:
            err_msg = "The hadr log copy paths is empty."
            LOGGER.error(err_msg)
            raise ErrCodeException(Db2ErrCode.USER_BACKUP_ERROR, message=err_msg)
        last_log_copy = log_copies[-1]
        last_copy_end_time = last_log_copy.get(Db2JsonConstant.EXTEND_INFO, {}).get(Db2JsonConstant.COPY_END_TIME)
        LOGGER.info(f"The log copy id: {last_log_copy.get(Db2JsonConstant.ID)}, end time: {last_copy_end_time}.")
        # "full -> log1 -> log2" copy chain: restoring to log1's end time also
        # delivers log2's copy path, which must be filtered out below.
        # NOTE(review): int(restore_timestamp) assumes the timestamp string is
        # numeric and non-empty whenever last_copy_end_time is set -- confirm.
        if last_copy_end_time and int(last_copy_end_time) == int(restore_timestamp) and restore_copy_id:
            tmp_log_copy_path = log_copy_paths[0]
            # The regex extracts the mount root where the "<copy id>.meta" files live.
            match_ret = re.match(Db2Regex.CUT_COPY_MOUNT_PATH_REG, str(tmp_log_copy_path))
            if not match_ret:
                LOGGER.warning(f"The log copy path: {tmp_log_copy_path} is invalid.")
                Db2CommonUtil.check_path_list(*log_copy_paths)
                return log_copy_paths
            copy_meta_infos_dict = param_dict.get("job").get("extendInfo").get("associated_log_copies",
                                                                               {})
            LOGGER.info(f"copy_meta_infos_dict:{copy_meta_infos_dict}")
            if not copy_meta_infos_dict:
                # Fall back to reading the meta file next to the first log copy mount.
                restore_copy_meta_path = os.path.realpath(
                    os.path.join(match_ret.groups()[0], f"{restore_copy_id}.meta"))
                if not os.path.isfile(restore_copy_meta_path):
                    LOGGER.warning(f"The restore copy meta file: {restore_copy_meta_path} does not exists.")
                    Db2CommonUtil.check_path_list(*log_copy_paths)
                    return log_copy_paths

                copy_meta_infos_dict = Db2ParamUtil.build_copy_meta_infos(copy_meta_infos_dict, restore_copy_meta_path)
            log_copy_paths = Db2ParamUtil.filter_log_copy_paths(copy_meta_infos_dict, log_copy_paths, restore_timestamp)
        LOGGER.info(f"Get hadr log copy paths success, restore copy id: {restore_copy_id}, "
                    f"restore timestamp: {restore_timestamp}, .")
        Db2CommonUtil.check_path_list(*log_copy_paths)
        return log_copy_paths

    @staticmethod
    def filter_log_copy_paths(copy_meta_infos_dict, log_copy_paths, restore_timestamp):
        """Drop paths of log copies whose start time is at or after restore_timestamp."""
        del_log_copy_ids = set()
        for copy_id, copy_info in copy_meta_infos_dict.items():
            # copy_info holds a "<start>~<end>" timestamp pair.
            time_range = copy_info.split("~")
            # Only well-formed entries that begin at or after the restore point count.
            if len(time_range) != 2 or int(time_range[0]) < int(restore_timestamp):
                continue
            del_log_copy_ids.add(copy_id)
            # Keep the historical regex-based path filtering.
            del_log_path_reg = f".*/{copy_id}{Db2Regex.LINUX_PATH_END_REG}"
            log_copy_paths = [i for i in log_copy_paths if not re.match(del_log_path_reg, str(i))]
        LOGGER.info(f"Need deleted log copy ids are {del_log_copy_ids} when getting hadr log paths;")
        return log_copy_paths

    @staticmethod
    def build_copy_meta_infos(copy_meta_infos_dict, restore_copy_meta_path):
        """Rebuild the copy meta-info dict from a ".meta" file.

        The incoming copy_meta_infos_dict is discarded and rebuilt from scratch;
        the parameter is kept only for caller compatibility. Each valid line
        yields an entry copy_id -> "<start>~<end>" timestamps.
        """
        with open(restore_copy_meta_path, mode='r') as meta_file:
            copy_meta_infos = meta_file.readlines()
        LOGGER.debug(f"The meta file: {restore_copy_meta_path} content is {copy_meta_infos}.")
        copy_meta_infos_dict = {}
        for meta_line in copy_meta_infos:
            parts = re.split(Db2Regex.COPY_META_INFO_SPLIT_RET, str(meta_line))
            # A valid line splits into at least copy id, start time and end time.
            if len(parts) >= 3:
                copy_meta_infos_dict[parts[0].strip()] = f"{parts[1]}~{parts[2]}"
        return copy_meta_infos_dict

    @staticmethod
    def get_applications(param_dict):
        return param_dict.get("job", {}).get("protectObject")

    @staticmethod
    def get_full_copy_info(param_dict):
        """Collect id, repository paths and backup time of the first copy of the job."""
        first_copy = param_dict.get("job", {}).get("copies", [])[0]
        result = {
            "data_path": None,
            "meta_path": None,
            "cache_path": None,
            "backup_time": first_copy.get("extendInfo", {}).get("backupTime"),
            "id": first_copy.get("id"),
        }
        # Map repository type to the result key it fills
        repo_key_map = {
            RepositoryDataTypeEnum.DATA_REPOSITORY.value: "data_path",
            RepositoryDataTypeEnum.META_REPOSITORY.value: "meta_path",
            RepositoryDataTypeEnum.CACHE_REPOSITORY.value: "cache_path",
        }
        for repository in first_copy.get("repositories", []):
            target_key = repo_key_map.get(repository.get("repositoryType"))
            if target_key:
                result[target_key] = repository.get("path", [""])[0]
        return result

    @staticmethod
    def get_diff_copy_info(param_dict):
        """Collect the data path and backup time of the diff copies in the job (last one wins)."""
        result = {"data_path": None, "meta_path": None, "cache_path": None, "backup_time": None}
        for copy in param_dict.get("job", {}).get("copies", []):
            if copy.get("type") != CopyDataTypeEnum.DIFF_COPY.value:
                continue
            result["backup_time"] = copy.get("extendInfo", {}).get("backupTime")
            for repo in copy.get("repositories", []):
                if repo.get("repositoryType") == RepositoryDataTypeEnum.DATA_REPOSITORY.value:
                    result["data_path"] = repo.get("path", [""])[0]
        return result

    @staticmethod
    def get_incr_copy_info(param_dict):
        """Collect data paths and backup times of every increment copy in the job."""
        result = {"data_path": [], "meta_path": None, "cache_path": None, "backup_time": []}
        for copy in param_dict.get("job", {}).get("copies", []):
            if copy.get("type") != CopyDataTypeEnum.INCREMENT_COPY.value:
                continue
            result["backup_time"].append(copy.get("extendInfo", {}).get("backupTime"))
            result["data_path"].extend(
                repo.get("path", [""])[0]
                for repo in copy.get("repositories", [])
                if repo.get("repositoryType") == RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        return result

    @staticmethod
    def get_copy_path_info(param_dict):
        """Build, for every copy in the job, a dict of repository paths and time info for restore.

        Log restores are delegated to get_log_copy_path. Otherwise the data/meta/cache
        repository roots are resolved from the LAST copy's repositories (live-mount aware
        for data/meta), validated, then combined per copy with the type-specific data
        directory name ("full_"/"diff_"/"incr_" + copy id).
        """
        ret = []
        restore_type = Db2ParamUtil.get_restore_type(param_dict)
        if restore_type == RestoreTypeEnum.LOG_RESTORE.value:
            return Db2ParamUtil.get_log_copy_path(param_dict)
        copies = param_dict.get("job", {}).get("copies", [])
        job_id = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.JOB_ID)
        # NOTE(review): if a data/meta/cache repository type is missing below, the
        # corresponding tmp_* name stays unbound and the second loop raises NameError —
        # confirm callers always provide all three repository types.
        for repository in copies[-1].get("repositories", []):
            if repository.get("repositoryType") == RepositoryDataTypeEnum.DATA_REPOSITORY.value:
                tmp_data_path = get_livemount_path(job_id, repository.get("path", [""])[0])
                Db2CommonUtil.check_path_list(tmp_data_path)
            elif repository.get("repositoryType") == RepositoryDataTypeEnum.META_REPOSITORY.value:
                tmp_meta_path = get_livemount_path(job_id, repository.get("path", [""])[0])
                Db2CommonUtil.check_path_list(tmp_meta_path)
            elif repository.get("repositoryType") == RepositoryDataTypeEnum.CACHE_REPOSITORY.value:
                tmp_cache_path = repository.get("path", [""])[0]
                Db2CommonUtil.check_path_list(tmp_cache_path)
        for copy in copies:
            tmp_ret_json = {
                "data_path": None, "meta_path": None, "cache_path": None, "backup_time": None,
                "restore_time": "", "id": copy.get("id"), "type": ""
            }
            id_tmp = Db2ParamUtil.get_id_of_copy(copy)
            tmp_ret_json["type"] = copy.get("type")
            tmp_ret_json["meta_path"] = tmp_meta_path
            tmp_ret_json["cache_path"] = tmp_cache_path
            restore_time = copy.get("extendInfo", {}).get("backupImageTimestamp")
            table_create_at = copy.get("extendInfo", {}).get(Db2JsonConstant.TABLE_CREATE_AT, "")
            backup_table = copy.get("extendInfo", {}).get(Db2JsonConstant.BACKUP_TABLE, "")
            # Fall back through the alternative restore-time locations some copies use
            if not restore_time:
                restore_time = copy.get("extendInfo", {}).get("restoreTime")
                if not restore_time:
                    restore_time = copy.get("extendInfo", {}).get("extendInfo", {}).get("restoreTime")
            tmp_ret_json["restore_time"] = restore_time
            tmp_ret_json[Db2JsonConstant.TABLE_CREATE_AT] = table_create_at
            tmp_ret_json[Db2JsonConstant.BACKUP_TABLE] = backup_table
            tmp_ret_json["backup_time"] = copy.get("extendInfo", {}).get("backupTime")
            # NOTE(review): S3_ARCHIVE/TAP_ARCHIVE are compared without .value while
            # FULL_COPY uses .value — verify these two are plain constants, not enum members.
            if copy.get("type") in [CopyDataTypeEnum.FULL_COPY.value, CopyDataTypeEnum.S3_ARCHIVE,
                                    CopyDataTypeEnum.TAP_ARCHIVE]:
                tmp_ret_json["data_path"] = os.path.join(tmp_data_path, f"full_{id_tmp}")
            elif copy.get("type") == CopyDataTypeEnum.DIFF_COPY.value:
                tmp_ret_json["data_path"] = os.path.join(tmp_data_path, f"diff_{id_tmp}")
            elif copy.get("type") == CopyDataTypeEnum.INCREMENT_COPY.value:
                tmp_ret_json["data_path"] = os.path.join(tmp_data_path, f"incr_{id_tmp}")
            ret.append(tmp_ret_json)
        return ret

    @staticmethod
    def get_copy_all_path_infos(param_dict):
        """Build path/time info for every copy, supporting multiple data mount paths.

        Like get_copy_path_info but resolves ALL data repository paths of the last copy
        (not just the first) and honors SAN Client mounts via fill_ret_info.
        Log restores are delegated to get_log_copy_path.
        """
        ret = []
        use_san_client = Db2ParamUtil.use_san_client_for_restore(param_dict)
        restore_type = Db2ParamUtil.get_restore_type(param_dict)
        if restore_type == RestoreTypeEnum.LOG_RESTORE.value:
            return Db2ParamUtil.get_log_copy_path(param_dict)
        copies = param_dict.get("job", {}).get("copies", [])
        job_id = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.JOB_ID)
        # NOTE(review): if a data/meta/cache repository type is missing below, the
        # corresponding tmp_* name stays unbound and fill_ret_info's call raises
        # NameError — confirm callers always provide all three repository types.
        for repository in copies[-1].get("repositories", []):
            if repository.get("repositoryType") == RepositoryDataTypeEnum.DATA_REPOSITORY.value:
                tmp_data_path_list = []
                # Each data path may be live-mounted; empty entries stay empty
                for path in repository.get("path", [""]):
                    tmp_data_path_list.append(get_livemount_path(job_id, path) if path else "")
                Db2CommonUtil.check_path_list(*tmp_data_path_list)
            elif repository.get("repositoryType") == RepositoryDataTypeEnum.META_REPOSITORY.value:
                tmp_meta_path = get_livemount_path(job_id, repository.get("path", [""])[0])
                Db2CommonUtil.check_path_list(tmp_meta_path)
            elif repository.get("repositoryType") == RepositoryDataTypeEnum.CACHE_REPOSITORY.value:
                tmp_cache_path = repository.get("path", [""])[0]
                Db2CommonUtil.check_path_list(tmp_cache_path)
        for copy in copies:
            tmp_ret_json = Db2ParamUtil.fill_ret_info(copy, tmp_cache_path, tmp_data_path_list, tmp_meta_path,
                                                      use_san_client)
            ret.append(tmp_ret_json)
        return ret

    @staticmethod
    def fill_ret_info(copy, tmp_cache_path, tmp_data_path_list, tmp_meta_path, use_san_client):
        """Build the path/time info dict for one copy during restore.

        data_path becomes the list of existing type-specific copy directories under
        each data mount path; it stays None for unrecognized copy types.
        """
        extend_info = copy.get("extendInfo", {})
        restore_time = extend_info.get("backupImageTimestamp")
        # NOTE(review): unlike get_copy_path_info, there is no second-level
        # extendInfo.extendInfo.restoreTime fallback here — confirm intended.
        if not restore_time:
            restore_time = extend_info.get("restoreTime")
        info = {
            "data_path": None,
            "meta_path": tmp_meta_path,
            "cache_path": tmp_cache_path,
            "backup_time": extend_info.get("backupTime"),
            "restore_time": restore_time,
            "id": copy.get("id"),
            "type": copy.get("type"),
            Db2JsonConstant.TABLE_CREATE_AT: extend_info.get(Db2JsonConstant.TABLE_CREATE_AT, ""),
            Db2JsonConstant.BACKUP_TABLE: extend_info.get(Db2JsonConstant.BACKUP_TABLE, ""),
        }
        # SAN Client mounts append the copy id to the mount path
        san_splice_path = copy.get('id', '') if use_san_client else ''
        copy_dir_id = Db2ParamUtil.get_id_of_copy(copy)
        copy_type = copy.get("type")
        if copy_type in [CopyDataTypeEnum.FULL_COPY.value, CopyDataTypeEnum.S3_ARCHIVE,
                         CopyDataTypeEnum.TAP_ARCHIVE]:
            dir_prefix = "full"
        elif copy_type == CopyDataTypeEnum.DIFF_COPY.value:
            dir_prefix = "diff"
        elif copy_type == CopyDataTypeEnum.INCREMENT_COPY.value:
            dir_prefix = "incr"
        else:
            return info
        # Keep only the candidate copy directories that actually exist on disk
        info["data_path"] = [
            candidate for candidate in (
                os.path.join(base_path, san_splice_path, f"{dir_prefix}_{copy_dir_id}")
                for base_path in tmp_data_path_list)
            if os.path.exists(candidate)
        ]
        return info

    @staticmethod
    def get_log_copy_path(param_dict):
        """Collect restore path info entries for a log restore across all copies."""
        path_infos = []
        copies = param_dict.get("job", {}).get("copies", [])
        cache_path = ""
        for repository in copies[0].get("repositories", []):
            if repository.get("repositoryType") != RepositoryDataTypeEnum.CACHE_REPOSITORY.value:
                continue
            cache_path = repository.get("path", [""])[0]
            Db2CommonUtil.check_path_list(cache_path)
        for copy in copies:
            copy_type = copy.get("type")
            if copy_type == CopyDataTypeEnum.FULL_COPY.value:
                path_infos.append(Db2ParamUtil.get_log_copy_path_when_full(copy, cache_path))
            elif copy_type == CopyDataTypeEnum.LOG_COPY.value:
                # get_log_copy_path_when_log appends its entries into path_infos in place
                Db2ParamUtil.get_log_copy_path_when_log(copy, path_infos, cache_path)
        return path_infos

    @staticmethod
    def get_log_copy_path_when_log(copy, ret, tmp_cache_path):
        """Append one path-info entry to ``ret`` for each log repository of ``copy``."""
        log_repos = (r for r in copy.get("repositories", [])
                     if r.get("repositoryType") == RepositoryDataTypeEnum.LOG_REPOSITORY.value)
        for repository in log_repos:
            entry = {
                "data_path": repository.get("path", [""])[0],
                "meta_path": None,
                "cache_path": tmp_cache_path,
                "backup_time": None,
                "restore_time": copy.get("timestamp"),
                "id": "",
                "type": "",
            }
            ret.append(entry)

    @staticmethod
    def get_log_copy_path_when_full(copy, tmp_cache_path):
        """Build the path-info entry for the full copy during a log restore."""
        copy_id = copy.get("id")
        extend_info = copy.get("extendInfo", {})
        for repository in copy.get("repositories", []):
            repo_type = repository.get("repositoryType")
            if repo_type == RepositoryDataTypeEnum.DATA_REPOSITORY.value:
                data_path = repository.get("path", [""])[0]
                Db2CommonUtil.check_path_list(data_path)
            elif repo_type == RepositoryDataTypeEnum.META_REPOSITORY.value:
                # NOTE(review): if no data/meta repository is present, these names stay
                # unbound and the dict build below raises NameError — same as original.
                meta_path = repository.get("path", [""])[0]
                Db2CommonUtil.check_path_list(meta_path)
        return {
            "data_path": os.path.join(data_path, f"full_{copy_id}"),
            "meta_path": meta_path,
            "cache_path": tmp_cache_path,
            "backup_time": extend_info.get("backupTime"),
            "restore_time": extend_info.get("backupImageTimestamp"),
            "id": copy_id,
            "type": "",
        }

    @staticmethod
    def get_ts_name_for_restore(param_dict):
        """Return the list of tablespace names selected for restore (may be empty)."""
        raw = param_dict.get("job", {}).get("targetObject", {}).get("extendInfo", {}).get("table", "")
        # Reject strings containing injection characters before parsing
        Db2CommonUtil.check_injection(raw)
        if not raw:
            return []
        return [name.strip() for name in raw.split(",") if name.strip()]

    @staticmethod
    def get_instance_name_for_restore(param_dict):
        """Return the instance name of the database copy resource (first copy).

        The original had two consecutive string statements; the second was a dead
        bare-string expression, not a docstring — merged into one docstring.
        """
        copies = Db2ParamUtil.parse_copies(param_dict)
        return copies[0].get("protectObject", {}).get("extendInfo", {}).get("instance", "")

    @staticmethod
    def get_full_copy_path_when_restore(param_dict):
        """Return the mounted filesystem path of the full copy for a restore job.

        For archive (S3/tape) copies the directory under the mount point whose name
        starts with "full_" is used; otherwise the path is
        <data mount>/<optional SAN Client copy id>/<full prefix>_<copy id>.

        Fixes vs. original: removed a dead statement whose result
        (``param_dict.get(JOB).get(JOB_ID)``) was discarded, and a duplicated comment.
        """
        copies = Db2ParamUtil.parse_copies(param_dict)
        full_copy = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.FULL_COPY.value)[0]
        date_mount_path = Db2ParamUtil.get_data_mount_path_for_restore(param_dict)
        full_copy_dir_pre = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(BackupTypeEnum.FULL_BACKUP.value)
        use_san_client = Db2ParamUtil.use_san_client_for_restore(param_dict)
        # Archive (S3/tape) copies are not local copies
        not_local = full_copy.get("type") in (CopyDataTypeEnum.S3_ARCHIVE, CopyDataTypeEnum.TAP_ARCHIVE)
        # When SAN Client is used, the copy id is appended to the copy mount path
        copy_path = full_copy.get("id", "") if not not_local else full_copy.get("extendInfo", {}).get("extendInfo",
                                                                                                      {}).get(
            "copyid", "")
        san_splice_path = copy_path if use_san_client else ''
        # SAN Client replication copies append the replicated copy id instead
        if use_san_client:
            ret, tmp_copy_id = Db2ParamUtil.is_replication_type(param_dict, full_copy)
            if ret:
                san_splice_path = tmp_copy_id
        # For archive copies, return the directory starting with "full_" under the mount path
        if not_local:
            full_copy_dir_name = ""
            date_mount_path = os.path.join(date_mount_path, san_splice_path)
            if not os.path.exists(date_mount_path):
                LOGGER.info(f"Date_mount_path: {date_mount_path} not exist.")
                full_copy_path = os.path.realpath(os.path.join(date_mount_path, full_copy_dir_name))
                return full_copy_path
            for tmp_path in os.listdir(date_mount_path):
                if tmp_path.startswith("full_"):
                    full_copy_dir_name = tmp_path
                    break
            full_copy_path = os.path.realpath(os.path.join(date_mount_path, full_copy_dir_name))
        else:
            full_copy_path = os.path.realpath(os.path.join(
                date_mount_path, san_splice_path, f"{full_copy_dir_pre}_{Db2ParamUtil.get_id_of_copy(full_copy)}"))
        Db2CommonUtil.check_path_list(full_copy_path)
        return full_copy_path

    @staticmethod
    def use_san_client_for_restore(param_dict):
        """Return True when the restore job is configured to use a SAN Client agent."""
        job_info = param_dict.get(Db2JsonConstant.JOB, {})
        san_client_flag = job_info.get(Db2JsonConstant.EXTEND_INFO, {}).get(Db2JsonConstant.SANCLIENT_TYPE, "false")
        use_san = san_client_flag == "true"
        LOGGER.info(f"Check use san client: {use_san} when restore.")
        return use_san

    @staticmethod
    def get_all_full_copy_path_when_restore(param_dict):
        """Return every existing mounted path of the full copy across all data mount paths.

        Mirrors get_full_copy_path_when_restore but iterates all data mount paths and
        returns a validated list instead of a single path.
        """
        copies = Db2ParamUtil.parse_copies(param_dict)
        full_copy = Db2ParamUtil.get_copies_by_copy_type(copies, CopyDataTypeEnum.FULL_COPY.value)[0]
        data_mount_path_list = Db2ParamUtil.get_data_mount_all_path_for_restore(param_dict)
        use_san_client = Db2ParamUtil.use_san_client_for_restore(param_dict)
        # Archive (S3/tape) copies are not local copies.
        # NOTE(review): compared without .value — verify these are plain constants.
        not_local = full_copy.get("type") in (CopyDataTypeEnum.S3_ARCHIVE, CopyDataTypeEnum.TAP_ARCHIVE)
        # When SAN Client is used, the copy id is appended to the copy mount path
        copy_path = full_copy.get("id", "") if not not_local else full_copy.get("extendInfo", {}).get("extendInfo",
                                                                                                      {}).get("copyid",
                                                                                                              "")
        san_splice_path = copy_path if use_san_client else ''
        # SAN Client replication copies append the replicated copy id instead
        if use_san_client:
            ret, tmp_copy_id = Db2ParamUtil.is_replication_type(param_dict, full_copy)
            if ret:
                san_splice_path = tmp_copy_id
        # For archive copies, collect the directories starting with "full_" under each mount path
        full_copy_path_list = list()
        if not_local:
            for data_mount_path in data_mount_path_list:
                data_mount_path = os.path.join(data_mount_path, san_splice_path)
                full_copy_dir_name = Db2ParamUtil.check_data_mount_path_exist_full_path(data_mount_path, "")
                if full_copy_dir_name:
                    full_copy_path_list.append(os.path.realpath(os.path.join(
                        data_mount_path, full_copy_dir_name)))
        else:
            full_copy_dir_pre = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(BackupTypeEnum.FULL_BACKUP.value)
            for data_mount_path in data_mount_path_list:
                full_copy_path = os.path.realpath(os.path.join(
                    data_mount_path, san_splice_path, f"{full_copy_dir_pre}_{Db2ParamUtil.get_id_of_copy(full_copy)}"))
                if os.path.exists(full_copy_path):
                    full_copy_path_list.append(full_copy_path)
        Db2CommonUtil.check_path_list(*full_copy_path_list)
        return full_copy_path_list

    @staticmethod
    def is_replication_type(param_dict, full_copy):
        """Check whether the latest extend-info copy was produced by replication.

        :return: (True, replicated copy id) for replication copies, otherwise (False, '').
        """
        extend_copies = Db2ParamUtil.parse_job_extend_copies(param_dict)
        if not extend_copies:
            return False, ''
        generated_by = extend_copies[-1].get('generatedBy', '')
        replication_kinds = (ReplicationType.REVERSE_REPLICATION, ReplicationType.NORMAL_REPLICATION)
        if generated_by in replication_kinds:
            return True, full_copy.get('extendInfo', {}).get('copyid', '')
        return False, ''

    @staticmethod
    def check_data_mount_path_exist_full_path(date_mount_path, full_copy_dir_name):
        """Return the first directory named "full_*" under the mount path.

        Falls back to the passed-in ``full_copy_dir_name`` (logging an error if it
        is empty) when the mount path is missing or has no matching directory.
        """
        if not os.path.exists(date_mount_path):
            LOGGER.info(f"Date_mount_path: {date_mount_path} not exist.")
            return full_copy_dir_name
        matched = next((entry for entry in os.listdir(date_mount_path) if entry.startswith("full_")), None)
        if matched:
            return matched
        if not full_copy_dir_name:
            LOGGER.error(f"Get full_copy_dir_name fail.")
        return full_copy_dir_name

    @staticmethod
    def check_has_copy_verify_file(param_dict):
        # 获取sla里面是否副本校验
        return param_dict.get("job", {}).get("extendInfo", {}).get("copy_verify", "false") == "true"

    @staticmethod
    def get_copy_cluster_nodes(param_dict):
        """Return the node list of the protected environment recorded in the latest copy."""
        copies = Db2ParamUtil.parse_copies(param_dict)
        if not copies:
            return []
        latest_copy = copies[-1]
        return latest_copy.get(ParamField.PROTECT_ENV, {}).get(ParamField.NODES, [])

    @staticmethod
    def get_target_cluster_nodes(param_dict):
        """Return the node list of the restore target environment."""
        target_env = param_dict.get(ParamField.JOB, {}).get(ParamField.TARGET_ENV, {})
        return target_env.get(ParamField.NODES, [])

    @staticmethod
    def is_restore_end_of_log(param_dict):
        """Whether the restore should roll forward to the end of logs (failOnPit == "1")."""
        extend_info = param_dict.get(Db2JsonConstant.JOB, {}).get(Db2JsonConstant.EXTEND_INFO, {})
        return extend_info.get(Db2JsonConstant.FAIL_ON_PIT) == "1"

    @staticmethod
    def get_log_end_time_of_copy(copy_dict: dict):
        """Return the copy's end-time value from its extend info ("" when absent)."""
        extend_info = copy_dict.get(Db2JsonConstant.EXTEND_INFO, {})
        return extend_info.get(Db2JsonConstant.COPY_END_TIME, "")

    @staticmethod
    def get_ts_info_for_restore(param_dict):
        """Return the set of (tablespace, info) pairs recorded in the latest copy."""
        copies = Db2ParamUtil.parse_copies(param_dict)
        if not copies:
            return set()
        latest_extend = copies[-1].get(ParamField.EXTEND_INFO, {})
        info_str = latest_extend.get(ParamField.TABLESPACE_INFO)
        if not info_str:
            # Some copies nest the field one extendInfo level deeper
            info_str = latest_extend.get(ParamField.EXTEND_INFO, {}).get(ParamField.TABLESPACE_INFO)
        pairs = json.loads(info_str if info_str else "{}")
        # Build a dict first so duplicate keys keep only the last value
        deduped = {key: value for key, value in pairs}
        return set(deduped.items())

    @staticmethod
    def get_table_info_for_restore(param_dict):
        """Return the set of (table, info) pairs recorded in the latest copy."""
        copies = Db2ParamUtil.parse_copies(param_dict)
        if not copies:
            return set()
        latest_extend = copies[-1].get(ParamField.EXTEND_INFO, {})
        info_str = latest_extend.get(ParamField.TABLE_INFO)
        if not info_str:
            # Some copies nest the field one extendInfo level deeper
            info_str = latest_extend.get(ParamField.EXTEND_INFO, {}).get(ParamField.TABLE_INFO)
        pairs = json.loads(info_str if info_str else "{}")
        # Build a dict first so duplicate keys keep only the last value
        deduped = {key: value for key, value in pairs}
        return set(deduped.items())

    @staticmethod
    def get_backup_copy_type_prefix(param_dict):
        copy_type = param_dict.get("job", {}).get("copies", [])[0].get("type", '')
        if copy_type == 'increment':
            copy_type = 'incr'
        LOGGER.info(f'copy_type: {copy_type}')
        return copy_type
