#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import threading
import time
import json

from common.common import output_result_file, execute_cmd, exter_attack, \
    output_execution_result_ex, execute_cmd_list
from common.common_models import CopyInfoRepModel, SubJobDetails, LogDetail, ActionResult, JobPermissionInfo, Copy
from common.const import BackupTypeEnum, RepositoryDataTypeEnum, ExecuteResultEnum, CMDResult, \
    SubJobStatusEnum, DBLogLevel, ReportDBLabel
from common.logger import Logger
from common.number_const import NumberConst
from common.parse_parafile import get_env_variable
from common.util.check_utils import check_path_in_white_list
from common.util.scanner_utils import scan_dir_size
from db2.backup.db2_backup_base import Db2BackupBase
from db2.backup.util.db2_backup_util import Db2BackupUtil
from db2.backup.util.dpf_backup_util import DpfBackupUtil
from db2.backup.util.ha_utils import is_power_rhel_ha
from db2.comm.db2_cmd import get_lang_value
from db2.comm.backup_param_parse import ParameterBackupParse
from db2.comm.const import Db2JsonConstant, Db2JobName, Db2CommonConst, RpcToolApiName
from db2.comm.constant import ParamField
from db2.comm.db2_exception import ErrCodeException
from db2.comm.error_code import Db2ErrCode
from db2.comm.util.common_util import Db2CommonUtil
from db2.comm.util.job_decorators import job_exception_decorator
from db2.comm.util.param_util import Db2ParamUtil
from db2.comm.db2_verification import check_log_arch_config, check_database_exists, \
    check_delta_backup_config, check_database_pending_status, check_database_offline_backup_status
from postgresql.common.const import ErrorCode

# Module-level logger; all messages from this backup task are routed to db2.log.
LOGGER = Logger().get_logger(filename="db2.log")


class SingleTsBackup(Db2BackupBase):
    """Executor for a DB2 tablespace-set backup job on a single database.

    Implements the framework callbacks for one protection job: node eligibility
    checks, pre-job validation, the backup itself (assembling and running the
    ``db2 backup`` command), copy reporting, progress reporting, abort and
    post-job cleanup. Progress is pushed proactively via
    ``Db2CommonUtil.proactively_report_progress``.
    """

    def __init__(self, task_name, pid, job_id, sub_job_id, param_dict):
        """Parse the job parameter dict and cache the values used by every step.

        :param task_name: framework task name (selects the callback behavior)
        :param pid: request id used for env-variable lookup and result output
        :param job_id: main job id
        :param sub_job_id: sub job id (may be empty for main-job callbacks)
        :param param_dict: raw job parameter dictionary from the framework
        """
        super().__init__(task_name, pid, job_id, sub_job_id, param_dict)
        self._parameter_backup = ParameterBackupParse(pid, job_id, sub_job_id, self.param_dict)
        self._database_name = self._parameter_backup.get_ts_backup_database_name()
        self._backup_type = self._parameter_backup.get_backup_type()
        self._sub_job_name = param_dict.get("subJob", {}).get("jobName", None)
        # List of tablespaces selected for this backup.
        self._tablespace_list = Db2ParamUtil.get_backup_table_spaces(self.param_dict)
        self._cache_path = self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        self._backup_path = self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        self.log_detail = LogDetail(logInfoParam=[self.sub_job_id], logLevel=DBLogLevel.INFO.value)
        self.resource_type = DpfBackupUtil.get_cluster_type(param_dict)

    def allow_backup_in_local_node(self, user_name=None):
        """Check whether this node can run the backup; returns an ActionResult.

        Validates, in order: the DB2 instance process is running, the database
        exists, log archiving is configured, incremental backup is enabled
        (relevant for non-full types), no offline-backup requirement is
        pending, and the database is not in a pending state. The first failed
        check is returned as an INTERNAL_ERROR result with a specific body_err.

        :param user_name: OS user of the DB2 instance; falls back to the
            job-scoped auth-key environment variable when not supplied.
        """
        if not user_name:
            user_name = get_env_variable(f"{Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY}_{self.pid}")
        # Check that the database instance process is running.
        if not Db2CommonUtil.check_process_service(user_name):
            LOGGER.error("Process is not start!")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="Process is not start!",
                                                     body_err=Db2ErrCode.DB_SERVICE_ERROR)
        # Check that the target database exists.
        if not check_database_exists(user_name, self._database_name):
            LOGGER.error(f"The db2 database: {self._database_name} not exit can not backup")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value,
                body_err=Db2ErrCode.DATABASE_NOT_EXISTS,
                err_params=[self._database_name],
                message="The db2 single database does not exists")

        # Check that log archiving is enabled (required for online backup).
        if not check_log_arch_config(user_name, self._database_name):
            LOGGER.error(f"The db2 database: {self._database_name} have not open log arch, can not backup")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value,
                # NOTE(review): this reuses postgresql's ErrorCode enum for the
                # archive-mode error — presumably a shared error code; confirm.
                body_err=ErrorCode.ARCHIVE_MODE_ENABLED.value,
                message="The db2 single database not log arch config")

        # For incremental/differential backups, check that incremental
        # (TRACKMOD) backup is enabled on the database.
        if not check_delta_backup_config(user_name=user_name, db_name=self._database_name):
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value,
                body_err=Db2ErrCode.INCREMENTAL_BACKUP_NOT_OPEN,
                message="The db2 single database Incremental database backup is not enabled.")

        # Check whether the database currently requires an offline backup.
        if not check_database_offline_backup_status(user_name, self._database_name):
            LOGGER.error(f"Failed to check db2 database status: {self._database_name}, job id: {self.job_id}.")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.DATABASE_NEED_OFFLINE_BACKUP,
                err_params=[self._database_name], message="There are pending tasks in the db2 single database")

        # Check that the database configuration is not in a pending state.
        if not check_database_pending_status(user_name, self._database_name):
            LOGGER.error(f"The db2 database: {self._database_name} cfg status is pending, can not backup")
            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=Db2ErrCode.DATABASE_EXISTS_PENDING,
                err_params=[self._database_name], message="There are pending tasks in the db2 single database")

        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def query_job_permission(self, user_name=None):
        """Report the OS user/group/file-mode the framework should use for
        repository access, by writing a JobPermissionInfo result file for pid.
        """
        if not user_name:
            user_name = self._parameter_backup.get_database_user_name(
                Db2JsonConstant.APPENV_NODES_0_AUTH_AUTHKEY)

        group = Db2CommonUtil.get_group_name_by_os_user(user_name)
        LOGGER.info(f"Add user {user_name} group {group} permission.")
        output = JobPermissionInfo(user=user_name, group=group, fileMode="0750")
        output_result_file(self.pid, output.dict(by_alias=True))

    def check_backup_job_type(self, user_name=None):
        """Validate that a non-full backup has a prior copy to base itself on.

        Full backups always pass. For incremental/differential, the RPC tool
        is queried for a previous copy; when none is found the result carries
        ERR_INC_TO_FULL so the framework converts the job to a full backup.
        """
        if self._backup_type == BackupTypeEnum.FULL_BACKUP.value:
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)
        tmp_path = self._parameter_backup.get_repositories_path(
            RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        if not Db2BackupUtil.query_pre_copy_agentid_by_rpc_tool(self._parameter_backup.get_applications(),
                                                                self.job_id, self.sub_job_id, tmp_path):
            LOGGER.info(f"When performing a incremental or differential backup, there is no full backup copy, "
                        f"and it will be converted to a full backup, backup type: {self._backup_type}.")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     body_err=Db2ErrCode.ERR_INC_TO_FULL)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator(write_progress=True, is_pre=True)
    def exec_backup_pre_job(self, user_name=None):
        """Pre-job check: every requested tablespace must exist in the database.

        Raises ErrCodeException(SPACE_NOT_EXISTS) listing the missing
        tablespaces; on success reports 100% / COMPLETED progress.
        """
        # Pre-job validation.
        LOGGER.info("exec_backup_pre_job start")
        if not user_name:
            user_name = self._parameter_backup.get_database_user_name(
                Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
        try:
            all_ts_dict = Db2CommonUtil.get_table_space_info_dict_of_db(user_name, self._database_name)
        except Exception as ex:
            # Best-effort: a lookup failure falls through to the "missing
            # tablespaces" error below rather than crashing the pre-job.
            LOGGER.error(f"Get Database table spaces failed exception: {ex}")
            all_ts_dict = dict()
        all_tablespace_list = all_ts_dict.keys()
        if all_tablespace_list and set(self._tablespace_list).issubset(set(all_tablespace_list)):
            LOGGER.info(f'Start executing backup pre job, job_id: {self.job_id}, sub_job_id: {self.sub_job_id}.')
            self.sub_job_detail.progress = NumberConst.HUNDRED
            self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
            result_code = ExecuteResultEnum.SUCCESS.value
        else:
            not_exited_tablespace = list(set(self._tablespace_list) - set(all_tablespace_list))
            raise ErrCodeException(Db2ErrCode.SPACE_NOT_EXISTS, ",".join(not_exited_tablespace))

        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

        return Db2CommonUtil.build_action_result(code=result_code)

    def pre_backup_path(self, user_name, data_path_list: list) -> str:
        """Create the per-job backup directories and return them as a string.

        For each repository data path a subdirectory named
        ``<backup-type-prefix>_<job_id>`` is created (if absent) and chowned
        to the DB2 instance user.

        :param user_name: OS user that will own the created directories
        :param data_path_list: HA backups pass multiple paths [xxx1, xxx2];
            other deployments pass a single path [xxx]
        :return: the created paths joined with "," (order preserved)
        """
        backup_path_list = []
        user_id = Db2CommonUtil.get_os_user_id_by_os_user(user_name)
        group_id = Db2CommonUtil.get_group_id_by_os_user(user_name)
        for path in data_path_list:
            backup_path = os.path.join(
                path,
                f'{Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(self._backup_type)}_{self.job_id}'
            )

            if not os.path.exists(backup_path):
                os.mkdir(backup_path)
                os.chown(backup_path, user_id, group_id)
            backup_path_list.append(backup_path)
            LOGGER.info(f'DB2 single database copy path:{backup_path}, pid: {self.pid}, job_id: {self.job_id}, '
                        f'sub_job_id: {self.sub_job_id}')
        return ",".join(backup_path_list)

    @job_exception_decorator(write_progress=True)
    def exec_backup(self, user_name=None):
        """Run the tablespace backup and report progress, size and the copy.

        Flow: resolve the backup paths, assemble the ``db2 backup`` command,
        start a daemon thread that streams progress, execute the command,
        then on success report the copy and the scanned copy size.
        """
        if not user_name:
            user_name = self._parameter_backup.get_database_user_name(
                Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY
            )
        encoding = Db2CommonUtil.get_lang_value(user_name)

        # HA performance adaptation: instead of backing up to a single path
        # ("to /xxx"), back up to all data paths ("to /xx3, /xx2, /xxx1").
        if is_power_rhel_ha(self.resource_type) and self._parameter_backup.get_sanclient_type() != "true":
            repositories_path_list = self._parameter_backup.get_repositories_all_date_path()
        else:
            repositories_path_list = [
                self._parameter_backup.get_repositories_path(RepositoryDataTypeEnum.DATA_REPOSITORY.value)
            ]
        if not repositories_path_list:
            raise ErrCodeException(Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER)

        backup_path = self.pre_backup_path(user_name=user_name, data_path_list=repositories_path_list)
        backup_cmd = self.assembly_backup_cmd(backup_path, user_name)
        LOGGER.info(f"Backup_cmd is {backup_cmd}")

        # Initial progress at backup start is 5%.
        self.sub_job_detail.progress = NumberConst.FIVE
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

        progress_thread = threading.Thread(name="progress", target=self.upload_report_progress)
        # Daemon thread: if the job ends unexpectedly the reporter dies with it.
        progress_thread.daemon = True
        progress_thread.start()
        bak_time = Db2CommonUtil.exec_protect_cmd(backup_cmd, self._database_name, encoding)
        LOGGER.info(f"backup time: {bak_time}")

        if not bak_time:
            # Empty bak_time means the backup command failed (or the backup
            # type was unsupported and backup_cmd was empty).
            self.sub_job_detail.task_status = SubJobStatusEnum.FAILED.value
            # NOTE(review): logLevel is set to the enum member here, while
            # __init__ uses DBLogLevel.INFO.value — confirm LogDetail accepts both.
            self.log_detail.logLevel = DBLogLevel.ERROR
            self.log_detail.log_info = ReportDBLabel.SUB_JOB_FALIED
            self.sub_job_detail.log_detail = [self.log_detail]
            Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

            return Db2CommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value,
                message="Unsupported backup type"
            )

        self.report_copy(bak_time)
        # HA has multiple paths, but the file content in each path is the
        # same — scanning the first one is enough for the size.
        ret, size = scan_dir_size(self.job_id, backup_path.split(",")[0])
        if ret:
            LOGGER.info(f"DB2 single backup copy size : {size}")
            self.sub_job_detail.data_size = size

        # Backup finished: report 100% / COMPLETED with a success label.
        LOGGER.info(f"End db2 single ts backup. sub_job_id: {self.sub_job_id}")
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        self.log_detail.log_info = ReportDBLabel.SUB_JOB_SUCCESS
        self.sub_job_detail.log_detail = [self.log_detail]
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def report_copy(self, bak_time):
        """Build the copy metadata and report it through the RPC tool.

        The copy's repository remotePath points at the per-job subdirectory;
        extend_info carries the backup timestamp plus serialized tablespace
        and table info. Temp in/out files in the cache repo are removed in
        the ``finally`` block (only if they pass the path white-list check).

        :param bak_time: DB2 backup timestamp string returned by the backup
            command; also stored as restoreTime.
        """
        # Report the generated copy to the framework.
        user_name = get_env_variable(f"{Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY}_{self.pid}")
        copy_info = Copy(id=self.job_id)
        bak_timestamp = Db2CommonUtil.convert_backup_time_to_timestamp(bak_time)
        backup_dir_prefix = Db2CommonConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(self._backup_type)
        tablespace_info = Db2CommonUtil.get_tablespace_info(user_name, self._database_name, self._tablespace_list)
        LOGGER.info(f"report_copy tablespace_info:{tablespace_info}")
        table_info = Db2CommonUtil.get_table_info(user_name, self._database_name, self._tablespace_list)
        copy_path = f'{backup_dir_prefix}_{self.job_id}'
        if self._parameter_backup.get_sanclient_type() == "true":
            # SanClient backups are laid out directly under the job id.
            copy_path = self.job_id
        rep_copy_info = self._parameter_backup.get_copy_rep_info(RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        copy_info.repositories = [
            CopyInfoRepModel(
                id=rep_copy_info.get('id'),
                repositoryType=rep_copy_info.get("repositoryType"),
                isLocal=rep_copy_info.get("isLocal"), protocol=0,
                remotePath=f"{rep_copy_info.get('remotePath')}/{copy_path}",
                remoteHost=rep_copy_info.get("remoteHost"),
                extendInfo=rep_copy_info.get("extendInfo")
            )
        ]
        copy_info.extend_info = {
            "backupTime": bak_timestamp,
            "beginTime": bak_timestamp,
            "endTime": bak_timestamp,
            "restoreTime": f"{bak_time}",
            "uuid": Db2CommonUtil.get_agent_id(),
            Db2JsonConstant.COPY_VERIFY_FILE: "true",
            ParamField.TABLESPACE_INFO: json.dumps(tablespace_info),
            ParamField.TABLE_INFO: json.dumps(table_info),
            Db2JsonConstant.COPY_ID: self.job_id
        }

        input_path = os.path.realpath(os.path.join(self._cache_path, f"copy_info_in_{self.job_id}"))
        output_path = os.path.realpath(os.path.join(self._cache_path, f"copy_info_out_{self.job_id}"))
        params = dict()
        params["copy"] = copy_info.dict(by_alias=True)
        params["jobId"] = self.job_id
        try:
            output_execution_result_ex(input_path, params)
            if not Db2CommonUtil.exec_rpc_tool_cmd(RpcToolApiName.REPORT_COPY_ADDITIONAL_INFO, input_path, output_path):
                raise Exception("Report backup copy information failed")
        finally:
            # Clean up the temp files regardless of success; skip any path
            # that fails the white-list check instead of deleting it.
            for tmp_path in (input_path, output_path):
                if not check_path_in_white_list(tmp_path):
                    LOGGER.error("The db2 temp copy info file not check white")
                    continue
                if os.path.exists(tmp_path):
                    LOGGER.info(f"Remove temp copy info file: {tmp_path}.")
                    Db2BackupUtil.remove_temp_file_ignore_exception(tmp_path)

    def query_backup_copy(self):
        # Intentional no-op: the copy is reported proactively in report_copy(),
        # so there is nothing to return when the framework polls.
        pass

    def exec_backup_post_job(self):
        """Post-job step: nothing to clean up; report COMPLETED at 100%."""
        # Execute the post-job step.
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def assembly_backup_cmd(self, backup_path, user_name):
        """Assemble the online ``db2 backup`` shell command for the backup type.

        Returns "" when the backup type has no mapping. The database name,
        tablespace list and backup path are injection-checked before being
        embedded in the command string.

        NOTE(review): ``\\(`` / ``\\)`` below are unrecognized Python escapes,
        so the literal backslashes are kept in the string and passed through
        the single-quoted shell command to the db2 CLP; on Python 3.12+ this
        emits a SyntaxWarning (``\\\\(`` would be the explicit spelling) —
        confirm the backslashes are required by the CLP before changing.
        NOTE(review): the map keys are enum members while self._backup_type is
        compared with ``.value`` elsewhere (check_backup_job_type); this lookup
        only matches if BackupTypeEnum is an IntEnum — TODO confirm.
        """
        # Assemble the backup command according to the backup type.
        backup_cmd_map = {
            BackupTypeEnum.DIFF_BACKUP: f"incremental to {backup_path} include logs",
            BackupTypeEnum.INCRE_BACKUP: f"incremental delta to {backup_path} include logs",
            BackupTypeEnum.FULL_BACKUP: f"to {backup_path} include logs"
        }
        if self._backup_type not in backup_cmd_map.keys():
            LOGGER.error("Can not apply backup type")
            return ""
        backup_single_path = backup_cmd_map.get(self._backup_type)
        Db2CommonUtil.check_os_user_with_ex(user_name)
        tablespace_str = ','.join(self._tablespace_list)
        Db2CommonUtil.check_injection(self._database_name, tablespace_str, backup_path)
        return f"su - {user_name} -c " \
               f"'db2 backup db {self._database_name} on all nodes tablespace \({tablespace_str} \)" \
               f" online {backup_single_path}\'"

    def abort_job(self, user_name=None):
        """Abort a running backup by forcing its DB2 application off.

        Finds the backup application via ``db2 list application`` piped
        through greps, then issues ``db2 force application <handle>``.
        Writes a post-job progress file with COMPLETED/FAILED status and
        returns True on success, False otherwise.
        """
        if not user_name:
            user_name = self._parameter_backup.get_database_user_name(
                Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
        find_backup_cmd = \
            [f"su - {user_name} -c 'db2 list application show detail'", "grep Backup", f"grep {self._database_name}"]
        Db2CommonUtil.check_os_user_with_ex(user_name)
        Db2CommonUtil.check_injection(self._database_name)
        err_dict = LogDetail(logInfo='', logInfoParam=[], logTimestamp=0, logDetail=0,
                             logDetailParam=[], logDetailInfo=[], logLevel=DBLogLevel.INFO)
        job_status = SubJobStatusEnum.FAILED.value
        ret_code, std_out, std_err = execute_cmd_list(find_backup_cmd, locale_encoding=True)
        if ret_code != CMDResult.SUCCESS.value or "errors" in std_out:
            self.write_progress_to_file(job_status, err_dict, f"post_job_progress_{self.sub_job_id}", progress=100)
            return False
        if not std_out:
            # No matching application: the backup already finished — abort succeeds.
            job_status = SubJobStatusEnum.COMPLETED.value
            self.write_progress_to_file(job_status, err_dict, f"post_job_progress_{self.sub_job_id}", progress=100)
            return True
        # NOTE(review): assumes the application handle is the third
        # space-separated token of the listing line; raises IndexError if the
        # output format differs — confirm against db2 list application output.
        backup_info = std_out.split(" ")
        Db2CommonUtil.check_injection(backup_info[2])
        stop_cmd = f"su - {user_name} -c 'db2 \"force application {backup_info[2]}\"'"
        encoding = get_lang_value(user_name)
        ret_code, std_out, std_err = execute_cmd(stop_cmd, encoding=encoding)
        if ret_code != CMDResult.SUCCESS.value:
            self.write_progress_to_file(job_status, err_dict, f"post_job_progress_{self.sub_job_id}", progress=100)
            return False
        if "successfully" in std_out:
            job_status = SubJobStatusEnum.COMPLETED.value
            self.write_progress_to_file(job_status, err_dict, f"post_job_progress_{self.sub_job_id}", progress=100)
            return True
        self.write_progress_to_file(job_status, err_dict, f"post_job_progress_{self.sub_job_id}", progress=100)
        return False

    def write_progress_to_file(self, task_status, log_detail, progress_type, progress):
        """Report progress for the framework to read.

        :param task_status: SubJobStatusEnum value for the sub job
        :param log_detail: optional LogDetail appended to the report
        :param progress_type: progress-file name tag (currently unused here;
            the report goes through the proactive RPC path)
        :param progress: integer percentage to report
        :return: None
        """
        output = SubJobDetails(
            taskId=self.job_id, subTaskId=self.sub_job_id,
            progress=int(progress), logDetail=list(), taskStatus=task_status
        )
        if log_detail:
            output.log_detail.append(log_detail)
        LOGGER.debug(f"DB2 single ts progress: {progress}, taskStatus: {task_status}, pid: {self.pid}")
        Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=output)

    @exter_attack
    def report_progress(self, user_name=None):
        """Framework progress callback.

        For the BACKUP_PROGRESS task, query the live DB2 backup progress and
        write it to the cache-repository temp file; otherwise read the progress
        back from the temp file and return it to the framework.
        """
        progress_file_name = self.sub_job_id if self.sub_job_id else self.job_id
        # Query backup task progress.
        if self.task_name == Db2JobName.BACKUP_PROGRESS:
            # Write the backup progress to the temporary progress file.
            if not user_name:
                user_name = self._parameter_backup.get_database_user_name(
                    Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
            cache_path = Db2ParamUtil.get_repository_paths_for_backup(
                self.param_dict, RepositoryDataTypeEnum.CACHE_REPOSITORY.value)[0]
            # Fetch the current backup progress from DB2.
            progress = Db2CommonUtil.get_now_backup_process(user_name, self._database_name)
            return Db2BackupUtil.report_progress_utl(self.pid, progress_file_name, cache_path, progress)

        # Return the progress from the temporary file back to the framework.
        return Db2BackupUtil.report_progress_utl(
            self.pid, progress_file_name, self._parameter_backup.get_repositories_path(
                RepositoryDataTypeEnum.CACHE_REPOSITORY.value
            )
        )

    def output_action_result(self, action_ret: ActionResult):
        """Delegate result output to the base class (kept for interface parity)."""
        super(SingleTsBackup, self).output_action_result(action_ret)

    def upload_report_progress(self):
        """Background loop (run in a daemon thread) that reports live progress.

        Polls DB2 every TEN seconds while the sub job is RUNNING; exits as
        soon as the status changes.
        """
        # Only handle jobs that are still running.
        while True:
            if self.sub_job_detail.task_status == SubJobStatusEnum.RUNNING.value:
                user_name = self._parameter_backup.get_database_user_name(
                    Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY)
                progress = Db2CommonUtil.get_now_backup_process(user_name, self._database_name)
                # Guard against an earlier step not having written progress
                # yet: only move forward, never backward, and skip when the
                # stored progress is falsy (0/None).
                if self.sub_job_detail.progress and self.sub_job_detail.progress < progress:
                    self.sub_job_detail.progress = progress
                Db2CommonUtil.proactively_report_progress(pid=self.pid, job_detail=self.sub_job_detail)

            else:
                LOGGER.debug(f"Stop report progress thread. current job status is : {self.sub_job_detail.dict()}")
                break
            LOGGER.debug(f"report single db2 progress detail: {self.sub_job_detail.dict()}")
            time.sleep(NumberConst.TEN)
