#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import json
import os
import uuid
import stat
import pexpect

from common.common import output_result_file, read_tmp_json_file, output_execution_result_ex, get_host_sn, \
    str_is_none_or_empty, report_job_details
from common.common_models import ActionResult, SubJobDetails, LogDetail
from common.const import ExecuteResultEnum, SubJobStatusEnum, RepositoryName, CopyDataTypeEnum, ParamConstant, \
    ReportDBLabel, DBLogLevel
from common.schemas.thrift_base_data_type import SubJob
from common.file_common import check_file_exist, delete_file, change_owner_by_name, create_dir_recursive
from common.parse_parafile import ParamFileUtil, get_env_variable

from oracle import logger
from oracle.common.common import send_params_to_pexpect, write_tmp_json_file, get_log_path_by_meta_type
from oracle.common.common import parse_backup_path, clear_repository_dir, convert_error_code, \
    change_mount_nas_file_owner, get_instance_name_by_id, modify_instance_name_in_p_file
from oracle.common.constants import RecoverTargetType, RootCommand, PluginPathConstant, ErrorCode, OracleProgress, \
    ScriptExitCode, CacheRepoFilename, RestoreResultKey, ORACLEJsonConstant, StartDataBase, RestoreByType, \
    LogMetaFileName, OracleDataBaseType, ORACLEExecPower, OracleReportLabel
from oracle.common.user_env_common import update_oracle_env, get_user_shell_type


class OracleInstRestore:
    def __init__(self, pid, job_id, sub_job_id):
        """Initialize the instant-restore context.

        :param pid: request id, used to locate the framework param/result files
        :param job_id: main job id
        :param sub_job_id: sub job id (may be empty for job-level steps)
        """
        self._pid = pid
        self._job_id = job_id
        self._sub_job_id = sub_job_id
        self.db_name = ""
        self.db_instance = ""
        self.db_user = ""
        self.db_user_pwd = ""
        self.oracle_home = ""
        self.asm_user = ""
        self.asm_user_pwd = ""
        # Fix: meta_data_path was assigned twice in the original; keep one assignment
        self.meta_data_path = ""
        self.recover_target = "2"
        self.recover_path = ""
        self.recover_order = 1
        self.recover_num = 1
        self.channels = ""
        self.pit_time = 0
        self.pit_scn = ''
        self.asm_instance_name = ""
        self.db_type = 0
        self.enc_algo = ""
        self.enc_key = ""
        self.p_file_pid = ""
        self.backup_repo = ""
        self.cache_repo_path = ""
        self.log_repo_path = ""
        self.start_db = 1
        self.restore_target_host_str = ""
        self.oracle_user = ""
        self.oracle_group = ""
        self.access_oracle_base = ""
        self.access_oracle_home = ""
        self.copy_json = []
        # Used to catalog multiple log directories during log restore
        self.log_job_id_list = ''
        self.restore_by = 1
        self.resetlogs_id = ''
        # SCN value handed to the shell script
        self.scn_to_script = ''
        self.target_object_sub_type = ""
        self.job_name = ""
        self.asm_install_user = ""
        self.copy_sub_type = ""
        self.bct_status = ""
        self.json_param = {}
        self.first_copy_protect_object_json = {}
        self.first_copy_protect_object_extend_info_json = {}
        self.response = ActionResult(code=ExecuteResultEnum.INTERNAL_ERROR.value)

    def parse_params(self):
        """Parse the pid parameter file and populate the restore context.

        Reads database/instance identity, credentials (from env variables),
        restore target/location, channels, PIT time/SCN, encryption info and
        the installation user/group from the framework-provided job JSON.
        """
        self.json_param = ParamFileUtil.parse_param_file(self._pid)
        job_json = self.json_param.get(ORACLEJsonConstant.JOB, {})
        self.copy_json = job_json.get(ORACLEJsonConstant.COPIES, [{}])
        first_copy_json = self.copy_json[0]
        self.first_copy_protect_object_json = first_copy_json.get(ORACLEJsonConstant.PROTECT_OBJECT, {})
        self.db_name = self.first_copy_protect_object_json.get(ORACLEJsonConstant.NAME, "")
        self.first_copy_protect_object_extend_info_json = self.first_copy_protect_object_json.get(
            ORACLEJsonConstant.EXTEND_INFO, {})
        self.copy_sub_type = self.first_copy_protect_object_json.get(ORACLEJsonConstant.SUB_TYPE, "")
        self.db_instance = self.first_copy_protect_object_extend_info_json.get(ORACLEJsonConstant.INST_NAME, "")
        # A user-selected instance name overrides the one recorded in the copy
        instance_name = get_instance_name_by_id(
            self.json_param.get('job', {}).get('extendInfo', {}).get('instances', ''))
        if instance_name:
            self.db_instance = instance_name
        # Credentials are passed through pid-keyed environment variables
        self.db_user = get_env_variable(f'job_copies_0_protectObject_auth_authKey_{self._pid}')
        self.db_user_pwd = get_env_variable(f'job_copies_0_protectObject_auth_authPwd_{self._pid}')
        job_extend_info_json = job_json.get(ORACLEJsonConstant.EXTEND_INFO, {})
        if job_extend_info_json:
            self.channels = job_extend_info_json.get(ORACLEJsonConstant.CHANNELS_UPPERCASE, "")
            self.restore_target_host_str = job_extend_info_json.get(ORACLEJsonConstant.RESTORE_TARGET_HOST, "")
            # "NEW" target location means restoring to a different host/path
            target_location = job_extend_info_json.get(ORACLEJsonConstant.TARGET_LOCATION, ORACLEJsonConstant.NEW)
            if target_location == ORACLEJsonConstant.NEW:
                self.recover_target = RecoverTargetType.OTHER.value
            else:
                self.recover_target = RecoverTargetType.SAME.value
            self.recover_path = job_extend_info_json.get(ORACLEJsonConstant.RESTORE_PATH, "")
        self.recover_order = 1
        job_target_env_json = job_json.get(ORACLEJsonConstant.TARGET_ENV, {})
        # One recover step per target-environment node
        self.recover_num = len(job_target_env_json.get(ORACLEJsonConstant.NODES, [{}]))
        self.pit_time = job_extend_info_json.get(ORACLEJsonConstant.RESTORE_TIME_STAMP, "")
        self.pit_scn = job_extend_info_json.get(ORACLEJsonConstant.RESTORE_SCN, '')
        self.db_type = 0
        first_copy_extend_info = first_copy_json.get(ORACLEJsonConstant.EXTEND_INFO, {})
        self.enc_algo = first_copy_extend_info.get(ORACLEJsonConstant.BACKUP_ALGO, "")
        self.enc_key = first_copy_extend_info.get(ORACLEJsonConstant.BACKUP_ALGO_VALUE, "")
        self.start_db = StartDataBase.YES
        target_object_json = job_json.get(ORACLEJsonConstant.TARGET_OBJECT, {})
        self.target_object_sub_type = target_object_json.get(ORACLEJsonConstant.SUB_TYPE, "")
        self.oracle_user = target_object_json.get(ORACLEJsonConstant.EXTEND_INFO, {}). \
            get(ORACLEJsonConstant.INSTALL_USER_NAME, ORACLEExecPower.ORACLE_USER)
        self.oracle_group = target_object_json.get(ORACLEJsonConstant.EXTEND_INFO, {}). \
            get("oracle_group", ORACLEExecPower.ORACLE_GROUP)
        get_user_shell_type(self.oracle_user)
        # For scenarios where Oracle environment variables are not configured
        self.access_oracle_base = target_object_json.get(ORACLEJsonConstant.EXTEND_INFO, {}). \
            get("accessOracleBase", "")
        self.access_oracle_home = target_object_json.get(ORACLEJsonConstant.EXTEND_INFO, {}). \
            get("accessOracleHome", "")
        update_oracle_env(oracle_base=self.access_oracle_base, oracle_home=self.access_oracle_home)
        self.bct_status = job_extend_info_json.get("bctStatus", "false")

    def get_instance_restore_param(self):
        """Assemble the key/value parameters fed to the native instant-restore
        script (consumed via send_params_to_pexpect)."""
        return dict(
            AppName=self.db_name,
            InstanceName=self.db_instance,
            UserName=self.db_user,
            Password=self.db_user_pwd,
            OracleHome=self.oracle_home,
            AccessOracleHome=self.access_oracle_home,
            AccessOracleBase=self.access_oracle_base,
            ASMUserName=self.asm_user,
            ASMPassword=self.asm_user_pwd,
            DataPath=self.backup_repo,
            LogPath=self.log_repo_path,
            MetaDataPath=self.meta_data_path,
            recoverTarget=self.recover_target,
            recoverPath=self.recover_path,
            recoverOrder=self.recover_order,
            recoverNum=self.recover_num,
            Channel=self.channels,
            pitTime=self.pit_time,
            pitScn=self.scn_to_script,
            ASMInstanceName=self.asm_instance_name,
            dbType=self.db_type,
            EncAlgo=self.enc_algo,
            EncKey=self.enc_key,
            pfilePID=self.p_file_pid,
            RestoreBy=self.restore_by,
            JobIdList=self.log_job_id_list,
            OracleInstallUser=self.oracle_user,
            GridInstallUser=self.asm_install_user,
            bctStatus=self.bct_status,
        )

    def set_cluster_to_single_db_info(self):
        """When restoring a RAC (cluster) copy to a single-instance target,
        take instance name, oracle home and ASM instance name from the first
        protected node recorded in the copy.
        """
        if self.copy_sub_type != OracleDataBaseType.ORACLE_CLUSTER or \
                self.target_object_sub_type != OracleDataBaseType.ORACLE:
            return
        # Fix: json.loads needs a string; the original fallback was a list ([]),
        # which raises TypeError when 'instances' is missing.
        node_info_list_protect = json.loads(
            self.first_copy_protect_object_extend_info_json.get('instances', '[]'))
        if not node_info_list_protect:
            logger.warning(f'No protected instances recorded in copy, job_id:{self._job_id}.')
            return
        first_node = node_info_list_protect[0]
        self.db_instance = first_node.get('inst_name', '')
        self.oracle_home = first_node.get('oracle_home', '')
        asm_info = json.loads(first_node.get('asm_info', '{}'))
        self.asm_instance_name = asm_info.get('instName', '')

    def instant_pre_requisite(self):
        """Pre-requisite step of instant restore: nothing to prepare here,
        simply report success to the framework result file."""
        result = self.response
        result.code = ExecuteResultEnum.SUCCESS.value
        output_result_file(self._pid, result.dict(by_alias=True))

    def instant_pre_requisite_progress(self):
        """Report the pre-requisite step as completed (100%)."""
        detail = SubJobDetails(
            taskId=self._job_id,
            subTaskId=self._sub_job_id,
            progress=OracleProgress.PROGRESS_ONE_HUNDRED,
            taskStatus=SubJobStatusEnum.COMPLETED)
        output_result_file(self._pid, detail.dict(by_alias=True))

    def instant_post_requisite(self):
        """Post step of instant restore: clear this job's cache repository
        directory, then report success."""
        self.parse_params()
        self.get_repo_info()
        clear_repository_dir(self.cache_repo_path, self._job_id)
        result = self.response
        result.code = ExecuteResultEnum.SUCCESS.value
        output_result_file(self._pid, result.dict(by_alias=True))

    def instant_post_requisite_progress(self):
        """Report the post-requisite step as completed (100%)."""
        detail = SubJobDetails(
            taskId=self._job_id,
            subTaskId=self._sub_job_id,
            progress=OracleProgress.PROGRESS_ONE_HUNDRED,
            taskStatus=SubJobStatusEnum.COMPLETED)
        output_result_file(self._pid, detail.dict(by_alias=True))

    def handle_resetlogs_id(self):
        """Return a comma-joined list of log-copy job ids for the current
        resetlogs id, pruning ids whose log directory no longer exists.

        The pruned mapping is written back to the resetlogs map file so later
        jobs do not see stale entries. Returns '' when no log repo is mounted.
        """
        if not self.log_repo_path:
            return ''
        file_path = os.path.join(self.log_repo_path, 'meta', LogMetaFileName.RESETLOGS_ID_MAP)
        reset_info = read_tmp_json_file(file_path)
        origin_job_id_list = reset_info.get(self.resetlogs_id, [])
        new_job_id_list = []
        logger.info(f'Handle resetlogs id, job_id:{self._job_id}, sub_job_id:{self._sub_job_id}.')
        # Drop job ids whose log directory no longer exists
        for job_id in origin_job_id_list:
            path = os.path.join(self.log_repo_path, job_id)
            if check_file_exist(path):
                new_job_id_list.append(job_id)
            else:
                logger.warning(f'Path:{path} not exist, will not record it to {LogMetaFileName.RESETLOGS_ID_MAP}, '
                               f'job_id:{self._job_id}, sub_job_id:{self._sub_job_id}.')
        reset_info[self.resetlogs_id] = new_job_id_list
        # Write the pruned mapping back to the map file
        if check_file_exist(file_path):
            write_tmp_json_file(file_path, self._job_id, reset_info)
        return ",".join(new_job_id_list)

    def get_inst_copy_info_by_type_list(self, type_list):
        """Return the first copy whose type is in type_list, else an empty dict."""
        matched = (copy_info for copy_info in self.copy_json
                   if copy_info.get(ORACLEJsonConstant.TYPE, "") in type_list)
        return next(matched, {})

    def parse_resetlogs_id_params(self):
        """Resolve the log-copy job id list for the current resetlogs id."""
        job_ids = self.handle_resetlogs_id()
        self.log_job_id_list = job_ids
        logger.info(f'Restore log_job_id_list:{self.log_job_id_list}, {self._job_id}.')

    def get_repo_info(self):
        """
        Collect data/meta/cache/log repository paths from the job copies.
        """
        for copy_info in self.copy_json:
            repositories = copy_info.get("repositories", [])
            if copy_info.get('type') == CopyDataTypeEnum.LOG_COPY.value:
                # Log copies carry the log repository path
                self.log_repo_path = get_log_path_by_meta_type(repositories)
            else:
                # Data copies carry the data/meta/cache repository paths
                repo_paths = parse_backup_path(repositories)
                self.backup_repo = repo_paths.get(RepositoryName.DATA_REPOSITORY)[0]
                if not repo_paths.get(RepositoryName.CACHE_REPOSITORY):
                    # No cache repo mounted: create a job-scoped cache dir under the data repo
                    self.cache_repo_path = os.path.join(self.backup_repo, f'cache_{self._job_id}')
                    create_dir_recursive(self.cache_repo_path)
                else:
                    self.cache_repo_path = repo_paths.get(RepositoryName.CACHE_REPOSITORY)[0]

    def parse_sub_job_params(self):
        """Read the sub-job descriptor and derive the recover order from the
        sub-job name ("open_db" and "move_dbf" are later restore phases)."""
        sub_job = SubJob(**self.json_param.get('subJob'))
        job_priority = sub_job.job_priority
        self.job_name = sub_job.job_name
        logger.info(f'Instant Restore jobPriority:{job_priority}, job_name:{self.job_name}, {self._job_id}.')
        order_by_name = {"open_db": 2, "move_dbf": 3}
        self.recover_order = order_by_name.get(self.job_name, 1)

    def instant_restore_move_data(self):
        """Run the datafile migration step and report its outcome."""
        result, err_code = self.move_date_base_file()
        if not result:
            logger.info(f"Instant_restore fail for mdf fail. job_id:{self._job_id}")
            self.output_result_info_to_file(ExecuteResultEnum.INTERNAL_ERROR.value, ErrorCode.INTERNAL_ERROR.value,
                                            SubJobStatusEnum.FAILED.value, err_code)
            return
        logger.info(f"Instant_restore move date success. job_id:{self._job_id}")
        self.output_result_info_to_file(ExecuteResultEnum.SUCCESS.value, ErrorCode.SUCCESS.value,
                                        SubJobStatusEnum.COMPLETED.value, err_code)

    def output_result_info_to_file(self, code, body_err, execution_status, exit_status):
        """Record the sub-job outcome for both the progress poller and the agent.

        :param code: ExecuteResultEnum value returned to the agent
        :param body_err: body error code for the agent response
        :param execution_status: SubJobStatusEnum value written to the progress file
        :param exit_status: restore script exit code, converted to a framework error code
        """
        self.response.code = code
        self.response.body_err = body_err
        # Recreates the progress flag file that instant_restore_progress polls
        result_file = self.instant_restore_get_result_file()
        error_code, contents = convert_error_code(self._job_id, self._sub_job_id, exit_status)
        output_execution_result_ex(result_file, {RestoreResultKey.TASK_STATUS: execution_status,
                                                 RestoreResultKey.ERROR_CODE: error_code,
                                                 RestoreResultKey.ERROR_PARAM: contents})
        logger.info(f"Output_result_info_to_file. code:{code}. body_err:{body_err}. "
                    f"execution_status:{execution_status}. exit_status:{exit_status}. job_id:{self._job_id}")
        output_result_file(self._pid, self.response.dict(by_alias=True))

    def instant_restore(self):
        """
        Run the instant-restore sub job: parse params, create a pfile when
        restoring to another host, invoke the native restore script and, on
        success, migrate datafiles (single instance) or report completion (RAC).
        """
        self.parse_params()
        self.get_repo_info()
        self._parse_param_for_different_copy_type()
        if not self._parse_param_for_different_deploy_type() or not self._parse_asm_info_param():
            self.output_result_info_to_file(ExecuteResultEnum.INTERNAL_ERROR.value, ErrorCode.INTERNAL_ERROR.value,
                                            SubJobStatusEnum.FAILED.value, ScriptExitCode.ERROR_PARAM_INVALID)
            return
        self._parse_restore_by()
        self.parse_resetlogs_id_params()
        self.parse_sub_job_params()
        # RAC: the dedicated "move_dbf" sub job only migrates datafiles
        if self.target_object_sub_type == OracleDataBaseType.ORACLE_CLUSTER and self.job_name == "move_dbf":
            self.instant_restore_move_data()
            return
        if self.recover_target == RecoverTargetType.OTHER.value:
            logger.info(f'Recover to other node, job_id:{self._job_id}, sub_job_id:{self._sub_job_id}.')
            self.create_inst_restore_p_file()
        logger.info(f'Restore_by:{self.restore_by}, {self._job_id}.')
        change_mount_nas_file_owner(
            **{"data_repo_path": self.backup_repo, "oracle_user": self.oracle_user,
               "oracle_group": self.oracle_group,
               "log_repo_path": self.log_repo_path, "job_id": self._job_id})

        # Invoke the restore script, streaming the parameters through pexpect
        script_param = self.get_instance_restore_param()
        child = pexpect.spawn(
            f"sh {PluginPathConstant.SCRIPTS_PATH}/{RootCommand.NATIVE_INST_RESTORE.value} "
            f"{PluginPathConstant.SCRIPTS_PATH.value} {self._sub_job_id} {len(script_param)}",
            encoding='utf-8')

        send_params_to_pexpect(script_param, child)
        # No timeout: the restore can legitimately run for a long time
        child.timeout = None
        child.expect(pexpect.EOF)
        child.close()
        exit_status = child.exitstatus
        logger.info(f'Instant_restore finally child.exitstatus:{child.exitstatus}. job_id:{self._job_id}')
        if exit_status != ScriptExitCode.SUCCESS:
            logger.error(f"Instant_restore fail. job_id:{self._job_id}")
            self.output_result_info_to_file(ExecuteResultEnum.INTERNAL_ERROR.value, ErrorCode.INTERNAL_ERROR.value,
                                            SubJobStatusEnum.FAILED.value, exit_status)
        else:
            log_detail = LogDetail(logInfo=OracleReportLabel.MOUNT_SUCCESS_DATABASE_START_DATA_MIGRATION_START_LABEL,
                                   logInfoParam=[], logLevel=DBLogLevel.INFO)
            report_job_details(self._pid,
                               SubJobDetails(taskId=self._job_id, subTaskId=self._sub_job_id, progress=50,
                                             logDetail=[log_detail], taskStatus=SubJobStatusEnum.RUNNING.value))
            logger.info(f'Mount succeed, target database start, data migration start.sub_job_id:{self._sub_job_id}')
            if self.target_object_sub_type != OracleDataBaseType.ORACLE_CLUSTER:
                # Single instance: continue with datafile migration in this sub job
                self.instant_restore_move_data()
            else:
                self.output_result_info_to_file(ExecuteResultEnum.SUCCESS.value, ErrorCode.SUCCESS.value,
                                                SubJobStatusEnum.COMPLETED.value, exit_status)

    def instant_restore_get_result_file(self):
        """Return this sub job's progress-flag file path, first removing any
        leftover flag file from a previous run."""
        flag_file = os.path.join(
            ParamConstant.RESULT_PATH,
            f'{CacheRepoFilename.INSTANT_PROGRESS}_{self._sub_job_id}')
        if check_file_exist(flag_file):
            logger.info(
                f'Remove inst_restore progress flag:{flag_file}, '
                f'job_id:{self._job_id}, sub_job_id:{self._sub_job_id}.')
            delete_file(flag_file)
        return flag_file

    def instant_restore_progress(self):
        """Report instant-restore sub-job progress from the progress flag file.

        The restore flow writes its status into the flag file; RUNNING maps to
        50%, any terminal status to 100% (and the flag file is deleted). On
        failure, error details from the flag file are attached to the report.
        """
        result_file = os.path.join(ParamConstant.RESULT_PATH,
                                   f'{CacheRepoFilename.INSTANT_PROGRESS}_{self._sub_job_id}')
        logger.info(f"Instant_restore_progress get_result_file result_file:{result_file}. job_id:{self._job_id}")
        result_data = read_tmp_json_file(result_file)
        task_status = result_data.get(RestoreResultKey.TASK_STATUS, SubJobStatusEnum.RUNNING.value)
        if task_status == SubJobStatusEnum.RUNNING.value:
            progress = OracleProgress.PROGRESS_FIFTY
        else:
            logger.info(f'Restore job end, task_status:{task_status}, job_id:{self._job_id}, '
                        f'sub_job_id:{self._sub_job_id}.')
            progress = OracleProgress.PROGRESS_ONE_HUNDRED
            delete_file(result_file)

        sub_job_detail = SubJobDetails(taskId=self._job_id, subTaskId=self._sub_job_id, progress=progress,
                                       taskStatus=task_status)

        # Fix: task_status holds the enum *value* (see the RUNNING check above).
        # Comparing to the bare member only works for an IntEnum; with a plain
        # Enum it never matched and failure details were silently dropped.
        if task_status == SubJobStatusEnum.FAILED.value:
            error_code = result_data.get(RestoreResultKey.ERROR_CODE, "")
            contents = result_data.get(RestoreResultKey.ERROR_PARAM, "")
            log_detail = LogDetail(logInfo=ReportDBLabel.SUB_JOB_FALIED, logInfoParam=[self._sub_job_id],
                                   logLevel=DBLogLevel.ERROR.value, logDetail=error_code, logDetailInfo=[contents])
            sub_job_detail.extend_info = {'nodeId': get_host_sn()}
            sub_job_detail.log_detail = [log_detail]

        logger.info(f"Instant_restore_progress task_status::{task_status}. job_id:{self._job_id}")
        output_result_file(self._pid, sub_job_detail.dict(by_alias=True))

    def get_move_date_base_file_param(self):
        """Assemble the key/value parameters fed to the native move-dbf
        (datafile migration) script."""
        return dict(
            AppName=self.db_name,
            InstanceName=self.db_instance,
            UserName=self.db_user,
            Password=self.db_user_pwd,
            OracleHome=self.oracle_home,
            AccessOracleHome=self.access_oracle_home,
            AccessOracleBase=self.access_oracle_base,
            ASMUserName=self.asm_user,
            ASMPassword=self.asm_user_pwd,
            ASMInstanceName=self.asm_instance_name,
            dbType=self.db_type,
            startDb=self.start_db,
            recoverTarget=self.recover_target,
            recoverPath=self.recover_path,
            recoverOrder=self.recover_order,
            recoverNum=self.recover_num,
            DataPath=self.backup_repo,
            LogPath=self.log_repo_path,
            MetaDataPath=self.meta_data_path,
            OracleInstallUser=self.oracle_user,
        )

    def move_date_base_file(self):
        """
        Migrate database files via the native move-dbf script.

        :return: tuple (success flag, script exit status)
        """

        script_param = self.get_move_date_base_file_param()
        child = pexpect.spawn(
            f"sh {PluginPathConstant.SCRIPTS_PATH}/{RootCommand.NATIVE_MOVE_DBF.value} "
            f"{PluginPathConstant.SCRIPTS_PATH.value} {self._sub_job_id} {len(script_param)}",
            encoding='utf-8')

        send_params_to_pexpect(script_param, child)
        # No timeout: datafile migration can take a long time
        child.timeout = None
        child.expect(pexpect.EOF)
        child.close()
        exit_status = child.exitstatus
        logger.info(f'Move_date_base_file child.exitstatus:{child.exitstatus}. job_id:{self._job_id}')
        return exit_status == ScriptExitCode.SUCCESS, exit_status

    def create_inst_restore_p_file(self):
        """Write a temporary Oracle pfile for restoring to another host.

        The parameters come from the user-supplied target-host config, with
        the instance name adjusted when one was explicitly selected. The file
        is chowned to the Oracle install user so the restore script can read it.
        """
        logger.info(f'Start creating p_file, job_id:{self._job_id}, sub_job_id:{self._sub_job_id}.')
        params = json.loads(self.restore_target_host_str)
        res_params = modify_instance_name_in_p_file(
            self.json_param.get('job', {}).get('extendInfo', {}).get('instances', ''), params)
        # The pfile name carries a fresh uuid; it is passed to the script as pfilePID
        self.p_file_pid = str(uuid.uuid4())
        p_file_uuid = self.p_file_pid
        p_file_name = f'pfile{p_file_uuid}'
        p_file_path = f'{PluginPathConstant.NONE_ROOT_TMP_PATH.value}/{p_file_name}'
        # O_EXCL: fail rather than overwrite an existing file; owner-only permissions
        flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
        modes = stat.S_IWUSR | stat.S_IRUSR | stat.S_IXUSR
        with os.fdopen(os.open(p_file_path, flags, modes), 'w+') as out_file:
            for key, value in res_params.items():
                out_file.write(f'{key}={value}\n')
        try:
            change_owner_by_name(p_file_path, self.oracle_user, self.oracle_group)
        except Exception as e_change_owner:
            # Best effort: restore may still work if ownership is already usable
            msg = f'Change p_file owner to {self.oracle_user}:{self.oracle_group} failed.'
            logger.error(f'{msg}, exception:{e_change_owner}, job_id:{self._job_id}, sub_job_id:{self._sub_job_id}.')

        logger.info(f'Create tmp p_file successfully, job_id:{self._job_id}, sub_job_id:{self._sub_job_id}.')

    def _parse_restore_by(self):
        """Decide the restore mode: plain copy restore, point-in-time (by
        timestamp) or by SCN.

        Sets self.restore_by and self.scn_to_script — the SCN handed to the
        shell script, which must be '0' when restoring by time.
        """
        if self.pit_time:
            self.restore_by = RestoreByType.TIME
            self.scn_to_script = '0'
            return
        if self.pit_scn:
            self.restore_by = RestoreByType.SCN
            self.scn_to_script = self.pit_scn
            return
        self.restore_by = RestoreByType.COPY

    def _parse_asm_info_param(self):
        """Extract ASM credentials from the protect object's auth extend info.

        :return: False when the recorded asm_info is not valid JSON, else True
        """
        auth_json = self.first_copy_protect_object_json.get(ORACLEJsonConstant.AUTH, {})
        asm_info = auth_json.get(ORACLEJsonConstant.EXTEND_INFO, {}).get(ORACLEJsonConstant.ASM_INFO_, "")
        asm_info_dic = {}
        if not str_is_none_or_empty(asm_info):
            try:
                asm_info_dic = json.loads(asm_info)
            except json.JSONDecodeError:
                logger.error(f'json decode asm_info exception , job_id:{self._job_id}.')
                return False
        self.asm_user = asm_info_dic.get('authKey', "")
        self.asm_user_pwd = asm_info_dic.get('authPwd', "")
        self.asm_install_user = asm_info_dic.get('installUsername', "")
        return True

    def _parse_param_for_different_deploy_type(self):
        """
        Parse deployment-specific parameters: single instance vs RAC cluster.

        :return: True on success, False when recorded asm_info is not valid JSON
        """
        if self.target_object_sub_type == OracleDataBaseType.ORACLE:
            # Single instance
            self.oracle_home = self.first_copy_protect_object_extend_info_json.get('oracle_home', "")
            asm_info_str = self.first_copy_protect_object_extend_info_json.get(ORACLEJsonConstant.ASM_INFO, "")
            asm_info_dic = {}
            if not str_is_none_or_empty(asm_info_str):
                try:
                    asm_info_dic = json.loads(asm_info_str)
                except json.JSONDecodeError:
                    logger.error(f'json decode asm_info exception , job_id:{self._job_id}.')
                    return False
            self.asm_instance_name = asm_info_dic.get(ORACLEJsonConstant.INST_NAME_, "")
        elif self.target_object_sub_type == OracleDataBaseType.ORACLE_CLUSTER:
            # RAC cluster
            if not self._parse_rac_deploy_type():
                return False
        self.set_cluster_to_single_db_info()
        # An explicitly selected instance name takes precedence
        instance_name = get_instance_name_by_id(
            self.json_param.get('job', {}).get('extendInfo', {}).get('instances', ''))
        if instance_name:
            self.db_instance = instance_name
        logger.info(f'Got params, db_instance:{self.db_instance}, oracle_home:{self.oracle_home}, '
                    f'asm_sid_name:{self.asm_instance_name}, job_id:{self._job_id}.')

        return True

    def _parse_rac_deploy_type(self):
        """Fill instance/home/ASM info for the RAC node matching this host.

        :return: False when the node's asm_info is not valid JSON, else True
        :raise Exception: when the copy records fewer nodes than the target env
        """
        host_sn = get_host_sn()
        node_info_list = self.json_param.get("job", {}).get("targetEnv", {}).get("nodes", [])
        # Fix: json.loads requires a string; the fallback must be '[]', not []
        node_info_list_protect = json.loads(
            self.first_copy_protect_object_extend_info_json.get('instances', '[]'))
        if len(node_info_list_protect) < len(node_info_list):
            raise Exception(f'Length of nodesInfo smaller than target_env nodes, job_id:{self._job_id}.')
        for index_node, node_info in enumerate(node_info_list):
            if node_info.get('id') == host_sn:
                # Protected-node records are index-aligned with target-env nodes
                tmp_node_info = node_info_list_protect[index_node]
                self.db_instance = tmp_node_info.get('inst_name', "")
                self.oracle_group = tmp_node_info.get('oracle_group', 'oinstall')
                self.oracle_home = tmp_node_info.get('oracle_home', "")
                try:
                    self.asm_instance_name = json.loads(tmp_node_info.get('asm_info', '{}')).get('instName', "")
                except json.JSONDecodeError:
                    self.asm_instance_name = ""
                    logger.error(f'json decode asm_info exception , job_id:{self._job_id}.')
                    return False

        return True

    def _parse_param_for_different_copy_type(self):
        """
        Parse encryption info and resetlogs_id per copy type. Archive copies
        (S3/tape) nest their extend info one level deeper than other copies
        (full/increment/diff).
        :return: None
        """
        first_copy_type = self.copy_json[0].get(ORACLEJsonConstant.TYPE, "")
        # Fix: TAP_ARCHIVE was compared as a bare enum member while its sibling
        # used .value — use .value for both, matching the convention elsewhere
        # in this file (e.g. CopyDataTypeEnum.LOG_COPY.value in get_repo_info).
        if first_copy_type in [CopyDataTypeEnum.S3_ARCHIVE.value, CopyDataTypeEnum.TAP_ARCHIVE.value]:
            # Archive copies nest extendInfo one level deeper.
            # Fix: dict defaults were "" which raises AttributeError on .get()
            # when a level is missing; default to {} instead.
            archive_extend = self.copy_json[0].get(ORACLEJsonConstant.EXTEND_INFO, {}). \
                get(ORACLEJsonConstant.EXTEND_INFO, {})
            self.enc_algo = archive_extend.get('backup_algo', "")
            self.enc_key = archive_extend.get('backup_algo_value', "")
            data_copy_info = self.get_inst_copy_info_by_type_list(
                [CopyDataTypeEnum.S3_ARCHIVE, CopyDataTypeEnum.TAP_ARCHIVE])
            self.resetlogs_id = data_copy_info.get(ORACLEJsonConstant.EXTEND_INFO, {}) \
                .get('extendInfo', {}).get('resetlogs_id', "")
        else:
            copy_extend = self.copy_json[0].get(ORACLEJsonConstant.EXTEND_INFO, {})
            self.enc_algo = copy_extend.get('backup_algo', "")
            self.enc_key = copy_extend.get('backup_algo_value', "")
            # resetlogs_id comes from the data copy (full/increment/diff/clone)
            data_copy_info = self.get_inst_copy_info_by_type_list([CopyDataTypeEnum.FULL_COPY,
                                                                   CopyDataTypeEnum.INCREMENT_COPY,
                                                                   CopyDataTypeEnum.DIFF_COPY,
                                                                   CopyDataTypeEnum.CLONE])
            self.resetlogs_id = data_copy_info.get(ORACLEJsonConstant.EXTEND_INFO, {}).get('resetlogs_id', "")
