#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import datetime
import os
import json
import platform
import re
import time

from common.common import output_execution_result_ex, execute_cmd, execute_cmd_oversize_return_value
from common.common_models import Copy, SubJobDetails
from common.const import ParamConstant, CMDResult, CopyDataTypeEnum, SubJobStatusEnum, Encoding
from common.logger import Logger
from common.number_const import NumberConst
from db2.comm.const import RpcToolApiName, Db2Const, Db2Regex, Db2JsonConstant
from db2.comm.db2_cmd import get_lang_value
from db2.comm.db2_exception import ErrCodeException
from db2.comm.error_code import Db2ErrCode
from db2.comm.models.backup_models import LogBackupParam
from db2.comm.util.common_util import Db2CommonUtil

# Module-level logger shared by all helpers in this file; writes to db2.log.
LOGGER = Logger().get_logger(filename="db2.log")


class Db2BackupUtil:
    """
    Utility class for DB2 backup jobs.

    Stateless helpers used by the backup flow: manual log archiving, parsing
    "db2 list history" output, copying archive logs for DPF partitions,
    reporting copy/progress information through rpctool, and small file
    handling utilities.
    """

    @staticmethod
    def parse_backup_image_timestamp(std_out):
        """Extract the backup image timestamp from ``db2 backup`` output.

        :param std_out: stdout text of the backup command.
        :return: the timestamp string, or "" when no timestamp line is found.
        """
        bak_img_timestamp = ""
        for line in std_out.split("\n"):
            if "The timestamp for this backup image is" in line:
                bak_img_timestamp = line.split(":")[1].strip()
                break
        return bak_img_timestamp

    @staticmethod
    def execute_manual_archive_log(os_user, db_name):
        """Run ``db2 archive log`` for the database as the given OS user.

        :param os_user: OS user that owns the DB2 instance.
        :param db_name: database name (validated against command injection).
        :raises Exception: when the archive command returns a non-zero code.
        """
        Db2CommonUtil.check_os_user_with_ex(os_user)
        Db2CommonUtil.check_injection(db_name)
        archive_log_cmd = f'su - {os_user} -c "db2 archive log for db {db_name}"'
        LOGGER.info(f"Start to execute archive log command: {archive_log_cmd}.")
        encoding = get_lang_value(os_user)
        return_code, out, err = execute_cmd(archive_log_cmd, encoding=encoding)
        LOGGER.info(f"Execute archive log command completed, return code: {return_code}, out info: {out}, "
                    f"err info: {err}.")
        # Sample success output: return code: 0, out info: DB20000I The ARCHIVE LOG command completed successfully.
        if return_code != CMDResult.SUCCESS.value:
            LOGGER.error("Execute archive log command failed.")
            raise Exception("Execute archive log command failed.")
        LOGGER.info("Execute archive log command succeeded.")

    @staticmethod
    def list_his_arch_log_for_db(os_user, db_name, since_timestamp=None, ptn_num=None, deploy_type=None):
        """Query the archive-log history records of a database.

        :param os_user: OS user that owns the DB2 instance.
        :param db_name: database name.
        :param since_timestamp: optional timestamp; when set, only history
            entries since that time are listed, otherwise all entries.
        :param ptn_num: DPF partition number; only used for DPF clusters.
        :param deploy_type: deployment type; for DPF clusters the command is
            routed through ``db2_all`` to the given partition.
        :return: stdout of the ``db2 list history archive log`` command.
        :raises Exception: when the command keeps returning a non-zero code.
        """
        log_scope = f"since {since_timestamp}" if since_timestamp else "all"
        Db2CommonUtil.check_os_user_with_ex(os_user)
        Db2CommonUtil.check_injection(db_name, str(since_timestamp))
        if ptn_num is not None and deploy_type == Db2Const.DPF_CLUSTER_TYPE:
            Db2CommonUtil.check_injection(str(ptn_num))
            list_arch_log_cmd = f"su - {os_user} -c 'db2_all " \
                                f"\"<<+{ptn_num}< db2 list history archive log {log_scope} for db {db_name}\"'"
        else:
            list_arch_log_cmd = f'su - {os_user} -c "db2 list history archive log {log_scope} for db {db_name}"'
        LOGGER.info(f"Start to get archive log history, command: {list_arch_log_cmd}.")
        encoding = get_lang_value(os_user)
        return_code, out, err = execute_cmd(list_arch_log_cmd, encoding=encoding)
        # Retry up to twice when DB2 reports SQL2155W — presumably the history
        # file changed while being read (TODO confirm against DB2 docs).
        retry_nums = 0
        while retry_nums < 2:
            retry_nums += 1
            if "SQL2155W" not in out:
                break
            time.sleep(NumberConst.TEN)
            return_code, out, err = execute_cmd(list_arch_log_cmd, encoding=encoding)
        if return_code != CMDResult.SUCCESS.value:
            LOGGER.error(f"Get latest archive log failed, return code: {return_code}, "
                         f"out: {out}, err: {err}.")
            raise Exception("Execute list history archive log command failed")
        return out

    @staticmethod
    def get_latest_archive_log(os_user, db_name, since_timestamp=None, ptn_num=None, deploy_type=None):
        """Get the name and time window of the most recent manual archive log.

        :return: tuple ``(archive_log, end_time)``; ``end_time`` falls back to
            the record's start time when the record has no end time.
        :raises Exception: when no manual-archive record or log name is found.
        """
        LOGGER.info("Start getting latest archive log.")
        archive_log, start_time, end_time = "", "", ""
        # List every archive-log history record.
        list_his_out = Db2BackupUtil.list_his_arch_log_for_db(
            os_user, db_name, since_timestamp=since_timestamp, ptn_num=ptn_num, deploy_type=deploy_type)
        out_list = list_his_out.split("\n")
        # Search backwards for the "Comment: ARCHIVE LOG" line of the newest
        # manual-archive record; this text is identical in the Chinese and
        # English locales.
        reversed_last_arch_log_idx = None
        for idx, tmp_line in enumerate(out_list[::-1]):
            if re.match(r"\s*Comment:\s*ARCHIVE LOG\s*", tmp_line):
                reversed_last_arch_log_idx = idx
                break
        if reversed_last_arch_log_idx is None:
            LOGGER.error("No latest manually archive log found.")
            raise Exception("No latest manually archive log found")
        # One history record spans 12 lines with "Comment: ARCHIVE LOG" on its
        # 7th line, so keep the 6 lines before it and the 5 lines after it.
        # Clamped forward indices keep the slice valid even when the match
        # sits near the very end of the output (a negative-only slice breaks
        # there).
        match_pos = len(out_list) - 1 - reversed_last_arch_log_idx
        useful_out = out_list[max(match_pos - 6, 0): match_pos + 6]
        for line in useful_out:
            log_match_ret = re.match(r"\s*X\s+D\s+[\d]{14}\s+N\s+(S\d{7}\.LOG)\s*", str(line))
            if log_match_ret:
                archive_log = log_match_ret.groups()[0]
                continue
            # Start/End time lines differ between the English and Chinese
            # locales (full-width colon in the Chinese output).
            if re.match(r"\s*Start Time\s*:", str(line)):
                start_time = str(line).split(":")[1].strip()
                continue
            elif re.match(r"\s*开始时间\s*：", str(line)):
                start_time = str(line).split("：")[1].strip()
                continue
            if re.match(r"\s*End Time\s*:", str(line)):
                end_time = str(line).split(":")[1].strip()
                break
            elif re.match(r"\s*结束时间\s*：", str(line)):
                end_time = str(line).split("：")[1].strip()
                break
        LOGGER.info(f"Get latest archive log: {archive_log} success, start time: {start_time}, end time: {end_time}.")
        if not archive_log:
            raise Exception("The latest archived log obtained is empty")
        if not end_time:
            end_time = start_time
        return archive_log, end_time

    @staticmethod
    def get_archive_log_start_and_end_time(log_param: "LogBackupParam", archive_log, arch_cmd_end_time=None):
        """Look up one archive log's start and end time in the history records.

        :param log_param: log backup parameters (user name, database name,
            log chain, since timestamp).
        :param archive_log: log file name, e.g. "S0000051.LOG".
        :param arch_cmd_end_time: fallback end time when the record has none.
        :return: tuple ``(found, (start_time, end_time))``; when the log is not
            in the history yet the result is ``(False, ("", ""))``.
        """
        LOGGER.info(f"Start getting start time and end time of archive log, "
                    f"os user: {log_param.user_name}, database: {log_param.db_name}, "
                    f"log: {archive_log}, archive command end time: {arch_cmd_end_time}, "
                    f"log chain: {log_param.log_chain}.")
        start_time, end_time = "", ""
        cur_log_chain = log_param.log_chain
        if not cur_log_chain:
            cur_log_chain = Db2CommonUtil.get_current_log_chain(log_param.user_name, log_param.db_name)
        list_his_out = Db2BackupUtil.list_his_arch_log_for_db(log_param.user_name, log_param.db_name,
                                                              since_timestamp=log_param.since_timestamp)
        out_list = list_his_out.split("\n")
        # Search backwards for the record header line, e.g.
        # "  X  D  20230211145840      1    D  S0000051.LOG C0000028".
        # The log name and chain are escaped so "." only matches a literal dot.
        reversed_log_info_idx = None
        arch_log_info_reg = (f"{Db2Regex.ARCH_LOG_INFO_BEGIN_REG}"
                             rf"{re.escape(str(archive_log))}\s+{re.escape(str(cur_log_chain))}\s*")
        for idx, tmp_line in enumerate(out_list[::-1]):
            if re.match(arch_log_info_reg, tmp_line):
                reversed_log_info_idx = idx
                break
        if reversed_log_info_idx is None:
            LOGGER.error(f"No archive log found, log: {archive_log}.")
            return False, ("", "")
        # One history record spans 12 lines; keep the 2 lines before the
        # header and the 9 lines after it. Clamped forward indices keep the
        # slice valid even when the header sits at the very end of the output.
        match_pos = len(out_list) - 1 - reversed_log_info_idx
        useful_out = out_list[max(match_pos - 2, 0): match_pos + 10]
        for line in useful_out:
            # Start time (English or Chinese locale).
            if re.match(r"\s*Start Time\s*:", str(line)):
                start_time = str(line).split(":")[1].strip()
                continue
            elif re.match(r"\s*开始时间\s*：", str(line)):
                start_time = str(line).split("：")[1].strip()
                continue
            # End time (English or Chinese locale).
            if re.match(r"\s*End Time\s*:", str(line)):
                end_time = str(line).split(":")[1].strip()
            elif re.match(r"\s*结束时间\s*：", str(line)):
                end_time = str(line).split("：")[1].strip()
        LOGGER.info(f"Get start time and end time of archive log success, archive log: {archive_log}, "
                    f"log start time: {start_time}, log end time: {end_time}.")
        if not end_time:
            # An in-progress record may not have an end time yet.
            end_time = arch_cmd_end_time if arch_cmd_end_time else start_time
        return True, (start_time, end_time)

    @staticmethod
    def get_arch_log_time(log_param: "LogBackupParam", archive_log, arch_cmd_end_time=None):
        """Wait until the log shows up in the archive history, return its times.

        Polls the history every ten seconds until the database's archive
        timeout elapses.

        :return: tuple ``(start_time, end_time)``.
        :raises ErrCodeException: LOG_ARCHIVE_TIMEOUT when the log never
            appears within the timeout.
        """
        arch_log_timeout = Db2CommonUtil.get_archive_timeout_of_db(log_param.user_name, log_param.db_name)
        LOGGER.info(f"Checking if the manual archive log: {archive_log} is archived, timeout: {arch_log_timeout}.")
        check_arch_time = time.time() + arch_log_timeout
        is_archived = False
        time_tuple = ("", "")
        # Poll until the log is found in the history or the deadline passes.
        while time.time() <= check_arch_time:
            ret, time_tuple = Db2BackupUtil.get_archive_log_start_and_end_time(
                log_param, archive_log, arch_cmd_end_time=arch_cmd_end_time)
            if ret:
                is_archived = True
                break
            time.sleep(NumberConst.TEN)
        if not is_archived:
            LOGGER.error(f"The task to check if the manual archive log: {archive_log} is archived timed out")
            raise ErrCodeException(Db2ErrCode.LOG_ARCHIVE_TIMEOUT, message="Archive log archive timed out")
        return time_tuple

    @staticmethod
    def backup_archive_logs_for_ptn(user_name, archive_log_locs, log_mount_path, cur_node_name):
        """Copy a partition's archive logs into its node dir on the log mount.

        :param user_name: OS user performing the copy.
        :param archive_log_locs: absolute archive-log file paths; each must
            contain "/<cur_node_name>/" so the node-relative part can be kept.
        :param log_mount_path: backup log repository mount point.
        :param cur_node_name: current node directory name.
        :raises ErrCodeException: when a log location contains illegal
            characters.
        """
        Db2CommonUtil.check_os_user_with_ex(user_name)
        tgt_path = os.path.realpath(os.path.join(log_mount_path, cur_node_name))
        node_name_start_reg = r"(.+/"
        node_name_end_reg = r"/)"
        # Captures everything up to and including "/<cur_node_name>/".
        node_name_reg = f"{node_name_start_reg}{cur_node_name}{node_name_end_reg}"
        is_aix = platform.system() == 'AIX'
        for tmp_log_loc in archive_log_locs:
            if not Db2CommonUtil.check_path_characters_and_black_list(tmp_log_loc):
                LOGGER.error(f"The archive log location: {tmp_log_loc} is invalid.")
                raise ErrCodeException(Db2ErrCode.USER_BACKUP_ERROR, message="The archive log location is invalid")
            match_ret = re.match(node_name_reg, tmp_log_loc)
            if not match_ret:
                LOGGER.error(f"The archive log path: {tmp_log_loc} is invalid.")
                continue
            tmp_node_path = match_ret.groups()[0]
            # Path of the log relative to the node directory.
            tmp_no_node_path = tmp_log_loc.split(tmp_node_path)[1]
            if is_aix:
                # On AIX use the helper that recreates the directory tree and
                # permission bits explicitly.
                Db2BackupUtil.copy_file_for_aix(user_name, tmp_node_path, tgt_path, tmp_no_node_path)
                continue
            # NOTE(review): the closing double quote of the su -c "..." string
            # is missing here and in the other copy helpers; presumably
            # execute_echo_result_command completes the command — confirm.
            cp_cmd = f'su - {user_name} -c "cd {tmp_node_path} && cp --parents -f {tmp_no_node_path} {tgt_path} ' \
                     f'&& echo result=$?'
            Db2CommonUtil.check_injection(tmp_node_path, tmp_no_node_path, tgt_path)
            Db2CommonUtil.execute_echo_result_command(cp_cmd, encoding=get_lang_value(user_name))

    @staticmethod
    def copy_file_for_aix(user_name, source_dir, target_dir, copy_item):
        """Copy one file on AIX, recreating its directory and permission bits.

        :param user_name: OS user performing the copy.
        :param source_dir: directory the item is copied from.
        :param target_dir: directory the item is copied to.
        :param copy_item: item path relative to ``source_dir``.
        """
        Db2CommonUtil.check_os_user_with_ex(user_name)
        source_path = os.path.join(source_dir, copy_item)
        if not os.path.exists(source_path):
            LOGGER.error(f"File does not exist: {source_path}")
            return
        # Keep the source's permission bits (last three octal digits of st_mode).
        permission = oct(os.stat(source_path).st_mode)[-3:]
        target_path = os.path.join(target_dir, copy_item)
        file_dir = os.path.dirname(target_path)
        # Create the directory -> copy -> restore permissions.
        # NOTE(review): the closing double quote of the su -c "..." string is
        # missing; presumably execute_echo_result_command completes it — confirm.
        cp_cmd = f'su - {user_name} -c "mkdir -p {file_dir} && cp -f -r {source_path} {target_path} ' \
                 f'&& chmod {permission} {target_path} && echo result=$?'
        Db2CommonUtil.check_injection(file_dir, source_path, target_path, str(permission))
        LOGGER.info(f"Execute file copy:{cp_cmd}")
        Db2CommonUtil.execute_echo_result_command(cp_cmd, encoding=get_lang_value(user_name))

    @staticmethod
    def copy_dir_for_aix(user_name, source_dir, target_dir, dir_name):
        """Copy one directory on AIX, recreating target dir and permission bits.

        :param user_name: OS user performing the copy.
        :param source_dir: directory the item is copied from.
        :param target_dir: directory the item is copied into.
        :param dir_name: directory name relative to ``source_dir``.
        """
        Db2CommonUtil.check_os_user_with_ex(user_name)
        source_path = os.path.join(source_dir, dir_name)
        if not os.path.exists(source_path):
            LOGGER.error(f"Directory does not exist: {source_path}")
            return
        # Keep the source's permission bits (last three octal digits of st_mode).
        permission = oct(os.stat(source_path).st_mode)[-3:]
        target_path = os.path.join(target_dir, dir_name)
        # Create the directory -> copy -> restore permissions.
        # NOTE(review): the closing double quote of the su -c "..." string is
        # missing; presumably execute_echo_result_command completes it — confirm.
        cp_cmd = f'su - {user_name} -c "mkdir -p {target_dir} && cp -f -r {source_path} {target_dir} ' \
                 f'&& chmod {permission} {target_path} && echo result=$?'
        Db2CommonUtil.check_injection(source_path, target_path, str(permission))
        LOGGER.info(f"Execute directory copy: {cp_cmd}")
        Db2CommonUtil.execute_echo_result_command(cp_cmd, encoding=get_lang_value(user_name))

    @staticmethod
    def report_backup_copy_info_by_rpc_tool(job_id, copy_info: "Copy", cache_path):
        """Report backup copy information through the rpctool.

        :param job_id: backup job id.
        :param copy_info: copy model to report.
        :param cache_path: directory for the temporary in/out parameter files.
        :raises Exception: when the rpctool call reports failure.
        """
        input_path = os.path.realpath(os.path.join(cache_path, f"copy_info_in_{job_id}"))
        output_path = os.path.realpath(os.path.join(cache_path, f"copy_info_out_{job_id}"))
        params = dict()
        params["copy"] = copy_info.dict(by_alias=True)
        params["jobId"] = job_id
        try:
            output_execution_result_ex(input_path, params)
            if not Db2CommonUtil.exec_rpc_tool_cmd(RpcToolApiName.REPORT_COPY_ADDITIONAL_INFO, input_path, output_path):
                raise Exception("Report backup copy information failed")
        finally:
            # Always clean up the temporary parameter files.
            for tmp_path in (input_path, output_path):
                if os.path.exists(tmp_path):
                    LOGGER.info(f"Remove temp copy info file: {tmp_path}.")
                    Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)

    @staticmethod
    def build_list_his_since_timestamp_for_log_bak(app_info, job_id, sub_job_id, cache_path):
        """Build the "since timestamp" of ``db2 list history`` for log backup.

        The previous log copy's backup-image timestamp is preferred; when no
        log copy exists, the previous full copy's data-backup end (or start)
        timestamp is used instead.

        :return: timestamp string in "%Y%m%d%H%M%S" shifted one hour back,
            or "" when no usable timestamp is recorded.
        :raises ErrCodeException: when neither a log nor a full copy exists.
        """
        LOGGER.info(f"Start building list history since timestamp for log backup, "
                    f"job id: {job_id}, sub job id: {sub_job_id}.")
        pre_log_copy_info = Db2BackupUtil.query_pre_copy_info_by_rpc_tool(
            app_info, job_id, sub_job_id, cache_path,
            [CopyDataTypeEnum.LOG_COPY.value]
        )
        if pre_log_copy_info:
            # A previous log copy exists: use its log-backup end timestamp.
            pre_copy_type = pre_log_copy_info.get('type')
            pre_copy_id = pre_log_copy_info.get('id')
            pre_copy_end_timestamp = pre_log_copy_info.get("extendInfo", {}).get(Db2JsonConstant.BAK_IMG_TIMESTAMP)
        else:
            pre_full_copy_info = Db2BackupUtil.query_pre_copy_info_by_rpc_tool(
                app_info, job_id, sub_job_id, cache_path,
                [CopyDataTypeEnum.FULL_COPY.value]
            )
            # No previous full copy either: log backup cannot proceed.
            if not pre_full_copy_info:
                LOGGER.error(f"Get previous full copy info failed, job id: {job_id}, sub job id: {sub_job_id}.")
                raise ErrCodeException(Db2ErrCode.NOT_EXIT_WAL_BACKUP_FILE_AND_SNAPSHOT_BACKUP)
            pre_full_copy_ext_info = pre_full_copy_info.get("extendInfo", {})
            pre_copy_type = pre_full_copy_info.get('type')
            # The id must come from the full copy record; pre_log_copy_info is
            # empty (or None) in this branch.
            pre_copy_id = pre_full_copy_info.get('id')
            # Prefer the data-backup end timestamp ...
            pre_copy_end_timestamp = pre_full_copy_ext_info.get(Db2JsonConstant.DB2_DATA_BAK_END_TIME)
            if not pre_copy_end_timestamp:
                # ... and fall back to the data-backup start timestamp.
                pre_copy_end_timestamp = pre_full_copy_ext_info.get(Db2JsonConstant.BAK_IMG_TIMESTAMP)
        if not pre_copy_end_timestamp:
            LOGGER.warning(f"Build list history since timestamp for log backup is empty, "
                           f"previous copy id: {pre_copy_id}, copy type: {pre_copy_type}.")
            return ""
        # Subtract one hour so the "since" query cannot miss boundary records.
        pre_copy_end_time = datetime.datetime.strptime(pre_copy_end_timestamp, "%Y%m%d%H%M%S")
        since_timestamp = (pre_copy_end_time + datetime.timedelta(hours=-1)).strftime("%Y%m%d%H%M%S")
        LOGGER.info(f"Build list history since timestamp for log backup success, previous copy type: {pre_copy_type}, "
                    f"end timestamp: {pre_copy_end_timestamp}, since timestamp: {since_timestamp}.")
        return since_timestamp

    @staticmethod
    def query_pre_copy_info_by_rpc_tool(app_info, job_id, sub_jod_id, cache_path, copy_types=None):
        """Query the previous copy's information through the rpctool.

        :param app_info: application information passed to the rpctool.
        :param job_id: job id (also used as copy id in the query).
        :param sub_jod_id: sub-job id (spelling kept for caller compatibility).
        :param cache_path: directory for the temporary in/out parameter files.
        :param copy_types: copy types to query; defaults to log copies.
        :return: previous copy info dict (may be empty/None when not found).
        """
        LOGGER.info(f"Start querying previous copy info by rpctool, "
                    f"job id: {job_id}, sub job id: {sub_jod_id}, copy types: {copy_types}.")
        input_path = os.path.realpath(os.path.join(cache_path, f"pre_copy_in_{sub_jod_id}"))
        output_path = os.path.realpath(os.path.join(cache_path, f"pre_copy_out_{sub_jod_id}"))
        Db2CommonUtil.check_path_list(input_path, output_path)
        # Remove stale temp files from a previous run.
        for tmp_path in (input_path, output_path):
            if os.path.exists(tmp_path):
                LOGGER.info(f"Remove temp pre copy info file: {tmp_path} before querying previous copy.")
                Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)
        param_dict = dict()
        param_dict["application"] = app_info
        param_dict["types"] = [CopyDataTypeEnum.LOG_COPY.value] if not copy_types else copy_types
        param_dict["copyId"] = job_id
        param_dict["jobId"] = job_id
        try:
            output_execution_result_ex(input_path, param_dict)
            ret, copy_dict = Db2CommonUtil.exec_rpc_tool_cmd_with_output(
                RpcToolApiName.QUERY_PREVIOUS_COPY, input_path, output_path)
            copy_msg = f"copy type: {copy_dict.get('type')}, id: {copy_dict.get('id')}, " \
                       f"timestamp: {copy_dict.get('timestamp')}, " if copy_dict else ""
            LOGGER.info(f"Query previous copy info by rpctool complete, result: {ret}, {copy_msg}"
                        f"job id: {job_id}, sub job id: {sub_jod_id}.")
            return copy_dict
        finally:
            # Clean up the temp files on every path, including failures.
            for tmp_path in (input_path, output_path):
                if os.path.exists(tmp_path):
                    LOGGER.info(f"Remove temp copy info file: {tmp_path} after querying previous copy.")
                    Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)

    @staticmethod
    def query_pre_copy_agentid_by_rpc_tool(app_type, job_id, sub_jod_id, cache_path, copy_types=None):
        """Check whether the previous copy was produced by the same agent.

        :param copy_types: only logged; the query always covers full, log,
            increment and diff copies.
        :return: True when the previous copy's agent id matches, otherwise
            False (including when the rpctool query fails).
        """
        LOGGER.info(f"Start querying previous copy agent id by rpctool, application: {app_type}, job id: {job_id}, "
                    f"sub job id: {sub_jod_id}, copy types: {copy_types}.")
        input_path = os.path.realpath(os.path.join(cache_path, f"pre_copy_in_{sub_jod_id}"))
        output_path = os.path.realpath(os.path.join(cache_path, f"pre_copy_out_{sub_jod_id}"))
        Db2CommonUtil.check_path_list(input_path, output_path)
        # Remove stale temp files from a previous run.
        for tmp_path in (input_path, output_path):
            if os.path.exists(tmp_path):
                LOGGER.info(f"Remove temp pre copy info file: {tmp_path} before querying previous copy.")
                Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)
        param_dict = dict()
        param_dict["application"] = app_type
        param_dict["types"] = [
            CopyDataTypeEnum.FULL_COPY.value, CopyDataTypeEnum.LOG_COPY.value,
            CopyDataTypeEnum.INCREMENT_COPY.value, CopyDataTypeEnum.DIFF_COPY.value
        ]
        param_dict["copyId"] = job_id
        param_dict["jobId"] = job_id
        try:
            output_execution_result_ex(input_path, param_dict)
            ret, copy_info_dict = Db2CommonUtil.exec_rpc_tool_cmd_with_output(
                RpcToolApiName.QUERY_PREVIOUS_COPY, input_path, output_path)
        finally:
            # Clean up the temp files on every path, including failures.
            for tmp_path in (input_path, output_path):
                if os.path.exists(tmp_path):
                    LOGGER.info(f"Remove temp copy info file: {tmp_path} after querying previous copy.")
                    Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)
        if not ret:
            LOGGER.error(f"Query previous copy agentid failed by rpctool, application: {app_type}, job id: {job_id}, "
                         f"sub job id: {sub_jod_id}, result: {ret}, copy types: {copy_types}.")
            return False
        return Db2CommonUtil.check_agent_id_is_same(copy_info_dict)

    @staticmethod
    def query_pre_data_copy_by_rpc_tool(app_type, job_id, sub_jod_id, cache_path, copy_types=None):
        """Query the previous data copy (full/increment/diff) via the rpctool.

        :param copy_types: only logged; the query always covers full,
            increment and diff copies.
        :return: previous data copy info dict (may be empty when not found).
        """
        LOGGER.info(f"Start querying previous data copy info by rpctool, application: {app_type}, job id: {job_id}, "
                    f"sub job id: {sub_jod_id}, copy types: {copy_types}.")
        input_path = os.path.realpath(os.path.join(cache_path, f"pre_copy_in_{sub_jod_id}"))
        output_path = os.path.realpath(os.path.join(cache_path, f"pre_copy_out_{sub_jod_id}"))
        Db2CommonUtil.check_path_list(input_path, output_path)
        # Remove stale temp files from a previous run.
        for tmp_path in (input_path, output_path):
            if os.path.exists(tmp_path):
                LOGGER.info(f"Remove temp pre copy info file: {tmp_path} before querying previous copy.")
                Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)
        param_dict = dict()
        param_dict["application"] = app_type
        param_dict["types"] = [
            CopyDataTypeEnum.FULL_COPY.value, CopyDataTypeEnum.INCREMENT_COPY.value, CopyDataTypeEnum.DIFF_COPY.value]
        param_dict["copyId"] = job_id
        param_dict["jobId"] = job_id
        try:
            output_execution_result_ex(input_path, param_dict)
            ret, copy_info_dict = Db2CommonUtil.exec_rpc_tool_cmd_with_output(
                RpcToolApiName.QUERY_PREVIOUS_COPY, input_path, output_path)
        finally:
            # Clean up the temp files on every path, including failures.
            for tmp_path in (input_path, output_path):
                if os.path.exists(tmp_path):
                    LOGGER.info(f"Remove temp copy info file: {tmp_path} after querying previous copy.")
                    Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)
        return copy_info_dict

    @staticmethod
    def report_progress_utl(pid, file_name, cache_path, progress=0):
        """Report sub-job progress by writing the cached detail to the result file.

        :param pid: request id used to name the result file.
        :param file_name: cached sub-job detail file name under ``cache_path``.
        :param cache_path: cache directory.
        :param progress: lower bound for the reported progress of a running job.
        :return: True on success, False when the cache file is missing or
            cannot be parsed.
        """
        LOGGER.info("Start reporting progress.")
        file_path = os.path.join(cache_path, file_name)
        if os.access(file_path, os.F_OK):
            try:
                json_str = Db2BackupUtil.get_json_from_file(file_path)
            except Exception as e_info:
                LOGGER.error(f"Reporting progress failed, err msg is:{str(e_info)}")
                return False
            rest_path = os.path.join(ParamConstant.RESULT_PATH, f"result{pid}")
            sub_job_detail = SubJobDetails(**json_str)
            if sub_job_detail.task_status in [SubJobStatusEnum.COMPLETED.value, SubJobStatusEnum.FAILED.value]:
                # Finished jobs always report 100%.
                sub_job_detail.progress = NumberConst.HUNDRED
            elif sub_job_detail.task_status == SubJobStatusEnum.RUNNING.value:
                # Raise the progress in case an earlier step did not record one.
                if sub_job_detail.progress and sub_job_detail.progress < progress:
                    sub_job_detail.progress = progress
            output_execution_result_ex(rest_path, sub_job_detail.dict(by_alias=True))
        else:
            LOGGER.error(f'Reporting progress failed, The permission is insufficient or the file does not exist. '
                         f'path:{file_path}')
            return False
        LOGGER.info(f"Reporting progress is complete, job status: {json_str.get('taskStatus')}, "
                    f"progress: {json_str.get('progress')}.")
        return True

    @staticmethod
    def get_json_from_file(file_path):
        """Parse a JSON temp file into a dict.

        :param file_path: path of the JSON file.
        :return: parsed dict.
        :raises Exception: when the file does not exist or cannot be parsed.
        """
        if not os.path.isfile(file_path):
            raise Exception(f"File: {file_path} does not exist.")
        try:
            with open(file_path, "r", encoding=Encoding.INTERNAL_ENCODING) as tmp_file:
                json_dict = json.loads(tmp_file.read())
        except Exception as ex:
            raise Exception("parse param file failed") from ex
        return json_dict

    @staticmethod
    def remove_temp_file_ignore_exception(tmp_path):
        """Delete a temp file, logging (not raising) any removal error."""
        try:
            os.remove(tmp_path)
        except Exception as err:
            LOGGER.error(f"remove tmp_path:{tmp_path}: {err}!")