#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import time

from common import common
from common.logger import Logger
from common.const import BackupTypeEnum, ExecuteResultEnum, CopyDataTypeEnum, DBLogLevel, ReportDBLabel
from common.common_models import SubJobDetails, Copy, ActionResult, LogDetail
from common.number_const import NumberConst
from common.util.check_utils import check_path_in_white_list
from common.util.scanner_utils import scan_dir_size
from db2.comm.db2_exception import ErrCodeException
from db2.comm.db2_verification import check_database_pending_status, check_log_arch_config, \
    check_delta_backup_config, check_database_exists, is_trans_full_backup, check_database_offline_backup_status
from db2.backup.db2_backup_base import Db2BackupBase
from db2.backup.dpf.dpf_db_backup_service import DpfDbBackupService
from db2.comm.backup_param_parse import ParameterBackupParse
from db2.backup.util.db2_backup_util import Db2BackupUtil
from common.common import output_execution_result_ex
from common.const import SubJobStatusEnum, RepositoryDataTypeEnum, ParamConstant
from db2.comm.error_code import Db2ErrCode
from db2.comm.const import Db2JsonConstant, Db2CommonConst, RpcToolApiName, Db2JobName, Db2Const
from db2.comm.util.common_util import Db2CommonUtil
from db2.comm.util.dpf_util import DpfUtil
from db2.comm.util.job_decorators import job_exception_decorator
from db2.comm.util.param_util import Db2ParamUtil

# Module-level logger; all DB2 plugin components write to the shared db2.log file.
LOGGER = Logger().get_logger(filename="db2.log")


class DpfDbBackup(Db2BackupBase):
    """Backup task executor for a DB2 DPF (Database Partitioning Feature) database.

    Implements the plugin hooks invoked by the backup framework for a
    partitioned DB2 database: local-node pre-checks, backup-type validation,
    sub-job execution (mount / backup / query-copy), copy reporting, abort
    and progress reporting.
    """

    def __init__(self, task_name, pid, job_id, sub_job_id, param_dict):
        super().__init__(task_name, pid, job_id, sub_job_id, param_dict)
        # Parses the backup parameters delivered by the framework.
        self.param_util = ParameterBackupParse(pid, job_id, sub_job_id, param_dict)
        # Runs the concrete DB2 backup operations for the DPF database.
        self.backup_service = DpfDbBackupService(pid, job_id, sub_job_id, self.param_util)

    @job_exception_decorator()
    def allow_backup_in_local_node(self):
        """Pre-check whether this node is allowed to run the backup.

        Verifies, in order: instance service connectivity, database existence,
        log-archive configuration, offline-backup requirement, backup-pending
        state, and the delta-backup (track modified pages) setting.

        :return: ActionResult with SUCCESS, or INTERNAL_ERROR plus a specific
            body error code identifying the failed check.
        """
        LOGGER.info("Allow_backup_in_local_node start")
        user_name = self.param_util.get_database_user_name(Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
        db_name = self.param_util.get_backup_database_name()
        # Check connectivity of the DB2 instance service.
        connect_status = Db2CommonUtil.check_process_service(user_name)
        if not connect_status:
            LOGGER.error(f"Check dpf instance connection result: {connect_status}.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, message="The DPF cluster status is abnormal",
                body_err=Db2ErrCode.DB_SERVICE_ERROR)
        check_database = check_database_exists(user_name, db_name)
        if not check_database:
            LOGGER.error(f"User {user_name} database {db_name} not exists.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, message="Database not exists.",
                body_err=Db2ErrCode.DATABASE_NOT_EXISTS, err_params=[db_name])
        check_log_arch = check_log_arch_config(user_name, db_name)
        if not check_log_arch:
            LOGGER.error(f"User {user_name} database {db_name} does not enable log archive method.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, message="Database does not enable log archive method.",
                body_err=Db2ErrCode.ARCHIVE_MODE_DISABLE)

        if not check_database_offline_backup_status(user_name, db_name):
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, message="Database need offline backup.",
                body_err=Db2ErrCode.DATABASE_NEED_OFFLINE_BACKUP, err_params=[db_name])

        if not check_database_pending_status(user_name, db_name):
            LOGGER.error(f"User {user_name} database {db_name} cfg status is pending.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, message="Database status is pending.",
                body_err=Db2ErrCode.DATABASE_EXISTS_PENDING, err_params=[db_name])
        check_backup_config = check_delta_backup_config(user_name, db_name)
        if not check_backup_config:
            LOGGER.error(f"User {user_name} database {db_name} does not enable track modified pages.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, message="Database does not enable track modified pages.",
                body_err=Db2ErrCode.INCREMENTAL_BACKUP_NOT_OPEN)
        LOGGER.info("Allow_backup_in_local_node end")
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator()
    def check_backup_job_type(self):
        """Validate the requested backup type against existing copies.

        A non-full backup with no prior full-backup copy must be converted to
        a full backup; in that case ERR_INC_TO_FULL is reported so the
        framework can re-issue the job.

        :return: ActionResult (SUCCESS, or INTERNAL_ERROR with ERR_INC_TO_FULL).
        """
        backup_type = self.param_util.get_backup_type()
        LOGGER.info(f"Check backup job type: {backup_type}.")
        if backup_type == BackupTypeEnum.FULL_BACKUP.value:
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)
        # NOTE(review): other call sites pass RepositoryDataTypeEnum.<X>.value;
        # confirm get_repositories_path also accepts the bare enum member here.
        if is_trans_full_backup(
                self.param_util.get_database_user_name(Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY),
                self.param_util.get_repositories_path(RepositoryDataTypeEnum.DATA_REPOSITORY)):
            LOGGER.info(f"When performing an incremental or differential backup, there is no full backup copy, "
                        f"and it will be converted to a full backup, backup type: {backup_type}.")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     body_err=Db2ErrCode.ERR_INC_TO_FULL)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator()
    def query_job_permission(self):
        """Report the OS user/group/file-mode the framework must use for repositories."""
        LOGGER.info("Query job permission start.")
        user_name = self.param_util.get_database_user_name(Db2JsonConstant.APPENV_NODES_0_AUTH_AUTHKEY)
        group = Db2CommonUtil.get_group_name_by_os_user(user_name)
        result_info = {
            "user": user_name,
            "group": group,
            "fileMode": "0755"
        }
        LOGGER.info(f"Add user {user_name} group {group} permission.")
        common.output_result_file(self.pid, result_info)

    @job_exception_decorator(write_progress=True)
    def exec_backup_pre_job(self):
        """Backup pre-job: nothing to prepare, report completion immediately."""
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, NumberConst.HUNDRED, "")
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator(write_progress=True)
    def exec_backup(self):
        """Dispatch and execute the current backup sub-job.

        Sub-jobs:
          * CHECK_AND_QUERY_COPY  -> report the produced copy to the framework.
          * mount_path_subtask    -> ensure repositories are mounted on every node.
          * backup_db_subtask     -> run the actual data/log backup; data backups
            (full/incremental/differential) run only on the catalog node, while
            log backups run on every node.

        :return: ActionResult describing the sub-job outcome.
        :raise ErrCodeException: if the backup type is not recognized.
        """
        sub_job_name = self.param_util.get_sub_job_name()
        if sub_job_name == Db2JobName.CHECK_AND_QUERY_COPY:
            # After the backup finishes, report the copy information.
            return self.report_backup_copy()

        tmp_job_log = f"pid: {self.pid}, job id: {self.job_id}, sub job id: {self.sub_job_id}"
        LOGGER.info(f'Start executing backup subtask: {sub_job_name}, {tmp_job_log}.')
        backup_type = self.param_util.get_backup_type()
        # Mount sub-job: ensures every node has the repositories mounted before
        # the subsequent sub-jobs run.
        if sub_job_name == "mount_path_subtask":
            return self.exec_mount_path_subtask(backup_type)

        # Database backup sub-job: backup_db_subtask
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, NumberConst.FIFTEEN, "")
        # Resolve the catalog node IP.
        catalog_ip = Db2ParamUtil.get_catalog_ip_of_dpf_db(self.param_dict)
        if not catalog_ip:
            LOGGER.error("The catalog IP param is empty.")
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, NumberConst.FIFTEEN, "")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="The catalog IP param is empty")
        db_name = self.param_util.get_backup_database_name()
        is_catalog_node = DpfUtil.is_catalog_node(catalog_ip)
        # Log backups must run on every node.
        if backup_type == BackupTypeEnum.LOG_BACKUP.value:
            return self.exec_backup_log_subtask(db_name, is_catalog_node)
        # Data backups (full/incremental/differential) run only on the catalog node.
        if not is_catalog_node:
            LOGGER.info("Current node is no catalog node, no need backup.")
            self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, NumberConst.SEVENTY, "")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)
        # Full / incremental / differential backup.
        if backup_type == BackupTypeEnum.FULL_BACKUP.value:
            bak_log_ret, bak_img_timestamp = self.backup_service.exec_full_backup_db(db_name)
        elif backup_type == BackupTypeEnum.INCRE_BACKUP.value:
            bak_log_ret, bak_img_timestamp = self.backup_service.exec_increment_backup_db(db_name)
        elif backup_type == BackupTypeEnum.DIFF_BACKUP.value:
            bak_log_ret, bak_img_timestamp = self.backup_service.exec_diff_backup_db(db_name)
        else:
            LOGGER.error(f'Wrong backup type: {backup_type}, {tmp_job_log}.')
            raise ErrCodeException(Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER)
        LOGGER.info(f"Record backup result: {bak_log_ret}, backup type: {backup_type}, backup timestamp: "
                    f"{bak_img_timestamp}, {tmp_job_log}.")
        if not bak_log_ret or not bak_img_timestamp:
            LOGGER.error(f'Execute subtask failed, {tmp_job_log}.')
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, NumberConst.HUNDRED, "")
            return Db2CommonUtil.build_action_result(body_err=Db2ErrCode.DATABASE_CRANKBACK_EXCEPTION,
                                                     code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="Backup error.")
        if backup_type in (BackupTypeEnum.FULL_BACKUP.value, BackupTypeEnum.INCRE_BACKUP.value,
                           BackupTypeEnum.DIFF_BACKUP.value):
            self.save_data_backup_info(db_name, bak_img_timestamp)
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, NumberConst.SEVENTY, "")
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def save_data_backup_info(self, db_name, bak_img_timestamp):
        """Persist the data-backup end time and per-partition log info for later copy reporting.

        :param db_name: name of the backed-up database.
        :param bak_img_timestamp: timestamp of the produced backup image.
        """
        os_user = Db2ParamUtil.get_os_user(self.task_name, self.pid)
        # Query the end time of the data backup.
        bak_end_time, _, _ = Db2CommonUtil.get_db_data_backup_end_time_and_cur_log(
            os_user, db_name, bak_img_timestamp)
        ptn_nums = Db2ParamUtil.get_partition_nums_for_backup(os_user, db_name)
        copy_log_ext_info = DpfUtil.get_copy_cur_log_info(os_user, db_name, bak_img_timestamp, ptn_nums,
                                                          deploy_type=Db2Const.DPF_CLUSTER_TYPE)
        self.backup_service.save_backup_time_info(bak_img_timestamp, copy_log_ext_info=copy_log_ext_info,
                                                  end_time=bak_end_time)

    def exec_mount_path_subtask(self, backup_type):
        """Run the mount sub-job; for log backups, also archive logs on the catalog node.

        :param backup_type: BackupTypeEnum value of the current job.
        :return: ActionResult (SUCCESS, or INTERNAL_ERROR when the catalog IP is missing).
        """
        catalog_ip = Db2ParamUtil.get_catalog_ip_of_dpf_db(self.param_dict)
        if not catalog_ip:
            LOGGER.error("The catalog IP param is empty.")
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, NumberConst.FIFTEEN, "")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="The catalog IP param is empty")
        # For a log backup, manually archive the current log on the catalog node.
        if backup_type == BackupTypeEnum.LOG_BACKUP.value and DpfUtil.is_catalog_node(catalog_ip):
            os_user = self.param_util.get_database_user_name(Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
            db_name = self.param_util.get_backup_database_name()
            self.backup_service.handle_archive_log_for_log_backup(os_user, db_name)
        LOGGER.info(f'Execute subtask: mount_path_subtask success, pid: {self.pid}, job id: {self.job_id}, '
                    f'sub job id: {self.sub_job_id}.')
        self.sub_job_detail.progress = NumberConst.TEN
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        Db2CommonUtil.proactively_report_progress(self.pid, self.sub_job_detail)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def exec_backup_log_subtask(self, db_name, is_catalog_node):
        """Run the log-backup sub-job on this node.

        :param db_name: name of the database whose logs are backed up.
        :param is_catalog_node: True when the current node is the catalog node.
        :return: ActionResult describing the log backup outcome.
        """
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, NumberConst.SEVENTY_FIVE, "")
        bak_log_ret = self.backup_service.exec_log_backup_db(db_name, is_catalog_node)
        if not bak_log_ret:
            LOGGER.error(f'Execute backup log subtask failed, pid: {self.pid} task id: {self.job_id} '
                         f'sub_job_id: {self.sub_job_id}.')
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, NumberConst.HUNDRED, "")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="Backup log error.",
                                                     body_err=Db2ErrCode.DATABASE_CRANKBACK_EXCEPTION)
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, NumberConst.HUNDRED, "")
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator(write_progress=True)
    def exec_backup_post_job(self):
        """Backup post-job: nothing to clean up, report completion immediately."""
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, NumberConst.HUNDRED, "")
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def query_backup_copy(self):
        """No-op: copy reporting is handled by report_backup_copy()."""
        pass

    def report_backup_copy(self):
        """Assemble the copy metadata of the finished backup and report it.

        Reads the backup time info saved during the backup sub-job, builds a
        Copy object (repositories, timestamps, partition count, log-chain
        extend info for log backups), measures the copy size and reports
        everything via the RPC tool.

        :return: ActionResult with SUCCESS on completion.
        :raise ErrCodeException: when the saved backup image timestamp is missing.
        """
        LOGGER.info(f'Start query copy info. job_id: {self.job_id}, sub_job_id: {self.sub_job_id}')
        bak_info_dict = self.backup_service.query_backup_time_info()
        bak_img_timestamp = bak_info_dict.get(Db2JsonConstant.BAK_IMG_TIMESTAMP, "")
        if not bak_img_timestamp:
            err_msg = "The backup image timestamp is empty when executing queryCopy subtask."
            LOGGER.error(err_msg)
            raise ErrCodeException(Db2ErrCode.USER_BACKUP_ERROR, message=err_msg)
        copy_info = Copy(id=self.job_id)
        backup_type = self.param_util.get_backup_type()
        user_name = Db2ParamUtil.get_os_user(self.task_name, self.pid)
        db_name = self.param_util.get_backup_database_name()
        if backup_type != BackupTypeEnum.LOG_BACKUP.value:
            copy_info.repositories = self.backup_service.query_copy_info_handle()
        copy_info.extend_info = bak_info_dict
        copy_info.extend_info[Db2JsonConstant.COPY_ID] = self.job_id
        copy_timestamp = Db2CommonUtil.convert_backup_time_to_timestamp(bak_img_timestamp)
        cache_path = self.param_util.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        if backup_type == BackupTypeEnum.LOG_BACKUP.value:
            self.build_log_copy_extend(cache_path, copy_info, copy_timestamp, db_name)
            copy_path = self.param_util.get_repositories_path(RepositoryDataTypeEnum.LOG_REPOSITORY.value)
        else:
            backup_dir_prefix = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(backup_type)
            copy_path = self.backup_service.pre_backup_path(user_name, db_name, backup_dir_prefix)

            data_bak_end_time = bak_info_dict.get(Db2JsonConstant.DB2_DATA_BAK_END_TIME)
            # Prefer the recorded data-backup end time; fall back to the image timestamp.
            data_bak_end_timestamp = Db2CommonUtil.convert_backup_time_to_timestamp(data_bak_end_time)\
                if data_bak_end_time else copy_timestamp
            copy_info.timestamp = data_bak_end_timestamp
            copy_info.extend_info[Db2JsonConstant.COPY_BAK_TIME] = data_bak_end_timestamp
            # Whether to verify the copy afterwards.
            copy_info.extend_info[Db2JsonConstant.COPY_VERIFY_FILE] = "true"
            LOGGER.info(f"The data backup time: {data_bak_end_timestamp}, backup image time: {bak_img_timestamp}, "
                        f"data backup end time: {data_bak_end_time}, db: {db_name}, backup type: {backup_type}.")
        copy_info.extend_info[Db2JsonConstant.PTN_NUM] = Db2CommonUtil.get_database_partitionnums(user_name, db_name)
        try:
            self.sub_job_detail.data_size = self.get_copy_size(copy_path)
        except Exception as ex:
            # Size measurement is best-effort; never fail the report over it.
            LOGGER.exception(f"Can not get copy size, error:{ex}")

        self.report_copy(cache_path, copy_info)
        LOGGER.info(f'Query copy info complete. job_id: {self.job_id}, sub_job_id: {self.sub_job_id}')
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        Db2CommonUtil.proactively_report_progress(self.pid, self.sub_job_detail)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def get_copy_size(self, copy_path):
        """Return the size of the copy directory, or NumberConst.ZERO when scanning fails.

        :param copy_path: directory of the produced copy.
        """
        ret, size = scan_dir_size(self.job_id, copy_path)
        if ret:
            LOGGER.info(f"DB2 single backup copy size : {size}")
            return size
        LOGGER.error(f"Failed to get DB2 single backup copy size, job_id:{self.job_id}")
        return NumberConst.ZERO

    def build_log_copy_extend(self, cache_path, copy_info, copy_timestamp, db_name):
        """Fill the extend info of a log-backup copy from the previous copy's info.

        Sets the log chain [begin, end] interval, the associated data copies
        and disables copy verification for log copies.

        :param cache_path: cache repository path holding the dependent copy info.
        :param copy_info: Copy object being built (mutated in place).
        :param copy_timestamp: timestamp of the current log backup.
        :param db_name: database name (for logging only).
        :raise ErrCodeException: when the previous copy's end time is missing.
        """
        depend_copy_info_dict = self.backup_service.read_depend_copy_info_for_log_bak(cache_path)
        pre_copy_bak_time = depend_copy_info_dict.get(Db2JsonConstant.COPY_END_TIME)
        if not pre_copy_bak_time:
            err_msg = "The backup end time of previous copy is empty when executing queryCopy subtask."
            LOGGER.error(err_msg)
            raise ErrCodeException(Db2ErrCode.USER_BACKUP_ERROR, message=err_msg)
        assoc_copies = depend_copy_info_dict.get(Db2JsonConstant.ASSOCIATED_COPIES, [])
        copy_info.extend_info[Db2JsonConstant.ASSOCIATED_COPIES] = assoc_copies
        copy_info.extend_info[Db2JsonConstant.COPY_BEGIN_TIME] = pre_copy_bak_time
        copy_info.extend_info[Db2JsonConstant.COPY_END_TIME] = copy_timestamp
        copy_info.extend_info[Db2JsonConstant.COPY_BAK_TIME] = copy_timestamp
        copy_info.extend_info[Db2JsonConstant.COPY_VERIFY_FILE] = "false"
        copy_info.timestamp = copy_timestamp
        LOGGER.info(f"The log backup begin time: {pre_copy_bak_time}, end time: {copy_timestamp}, associated copies: "
                    f"{assoc_copies}, database: {db_name}.")

    def report_copy(self, cache_path, copy_info):
        """Report the copy info to the framework via the RPC tool, cleaning up temp files.

        :param cache_path: cache repository path used for the temp in/out files.
        :param copy_info: fully-built Copy object to report.
        :raise Exception: when the RPC tool call fails.
        """
        input_path = os.path.realpath(os.path.join(cache_path, f"copy_info_in_{self.job_id}"))
        output_path = os.path.realpath(os.path.join(cache_path, f"copy_info_out_{self.job_id}"))
        params = dict()
        params["copy"] = copy_info.dict(by_alias=True)
        params["jobId"] = self.job_id
        try:
            output_execution_result_ex(input_path, params)
            if not Db2CommonUtil.exec_rpc_tool_cmd(RpcToolApiName.REPORT_COPY_ADDITIONAL_INFO, input_path, output_path):
                raise Exception("Report backup copy information failed")
        finally:
            # Best-effort cleanup of the temp exchange files, restricted to
            # whitelisted paths.
            for tmp_path in (input_path, output_path):
                if not check_path_in_white_list(tmp_path):
                    LOGGER.error("The db2 temp copy info file not check white")
                    continue
                if os.path.exists(tmp_path):
                    LOGGER.info(f"Remove temp copy info file: {tmp_path}.")
                    Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)

    @job_exception_decorator()
    def abort_job(self):
        """Abort the running database backup job on this node."""
        user_name = self.param_util.get_database_user_name(
            Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
        db_name = self.param_util.get_backup_database_name()
        self.backup_service.abort_database_backup_job(user_name, db_name)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def report_progress(self):
        """Read and return the progress recorded in the cache repository for this (sub-)job."""
        key = self.sub_job_id if self.sub_job_id else self.job_id
        return Db2BackupUtil.report_progress_utl(
            self.pid, key, self.param_util.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value))

    def write_progress_to_file(self, task_status, progress, log_detail):
        """Write the sub-job progress to the progress file read by the framework.

        :param task_status: SubJobStatusEnum value of the sub-job.
        :param progress: progress percentage (0-100).
        :param log_detail: optional LogDetail appended when truthy.
        """
        output = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=int(progress),
                               logDetail=list(), taskStatus=task_status)
        if log_detail:
            output.log_detail.append(log_detail)
        Db2CommonUtil.proactively_report_progress(self.pid, output)
        LOGGER.info(f"Report progress: {progress}, taskStatus: {task_status}, pid: {self.pid}")

    def check_backup_type(self):
        """No-op: backup-type validation is done in check_backup_job_type()."""
        pass

    def output_action_result(self, action_ret: ActionResult):
        """Delegate action-result output to the base class."""
        super().output_action_result(action_ret)

    def finalize_clear(self):
        """No-op: nothing to clean up for DPF database backup."""
        pass
