#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import time

from common.common import output_result_file, clean_dir
from common.common_models import JobPermissionInfo, ActionResult, LogDetail, Copy
from common.const import ExecuteResultEnum, SubJobStatusEnum, DBLogLevel, BackupTypeEnum, RepositoryDataTypeEnum
from common.exception.common_exception import ErrCodeException
from common.job_const import ParamKeyConst
from common.number_const import NumberConst
from common.util.check_utils import check_repo_path
from common.util.common_utils import proactively_report_progress, get_group_name_by_os_user
from common.util.scanner_utils import scan_dir_size
from gaussdbt.single import LOGGER
from gaussdbt.single.backup.gst_backup_base import GstBackupBase
from gaussdbt.single.backup.gst_single_backup_service import GstSingleBackupService
from gaussdbt.single.comm.gst_single_const import GstSingleConst, GstSingleKeyConst
from gaussdbt.single.comm.gst_single_error_code import ErrorCode
from gaussdbt.single.comm.utils.gst_job_wraps import job_exception_decorator
from gaussdbt.single.comm.utils.gst_single_backup_utils import GstSingleBackupUtil
from gaussdbt.single.comm.utils.gst_single_comm_utils import GstSingleCommonUtil
from gaussdbt.single.comm.utils.gst_single_param_utils import GstSingleParamUtil
from gaussdbt.single.comm.utils.monitor_wraps import monitor_report_progress_decorator


class GstSingleBackup(GstBackupBase):
    """Backup task executor for GaussDB T single-node (standalone) deployments.

    Each public method implements one stage of the framework-driven backup
    workflow: local-node precheck, permission query, backup-type check,
    pre-job checks, sub-job generation, the backup itself, post-job cleanup
    and abort handling.
    """

    def __init__(self, task_name, pid, job_id, sub_job_id, param_dict):
        super(GstSingleBackup, self).__init__(task_name, pid, job_id, sub_job_id, param_dict)
        # OS user that runs the database; falsy when the lookup fails.
        self._os_user = GstSingleParamUtil.get_os_user(self.task_name, self.pid)
        # One of BackupTypeEnum values, parsed from the job parameters.
        self._backup_type = GstSingleParamUtil.get_backup_type(self.param_dict)
        self.log_detail = LogDetail(logLevel=DBLogLevel.INFO.value)
        self.bak_service = GstSingleBackupService(pid, job_id, sub_job_id, param_dict, self._os_user)

    @job_exception_decorator()
    def allow_backup_in_local_node(self):
        """Check whether this node may run the backup (the OS user must exist).

        :return: ActionResult with SUCCESS, or INTERNAL_ERROR +
            OS_USER_NOT_EXISTS when the OS user could not be resolved.
        """
        if not self._os_user:
            # NOTE: lazy %-style logging args; the stray f-prefix was removed.
            LOGGER.error("Failed to get os username: %s, job id: %s.", self._os_user, self.job_id)
            return GstSingleCommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=ErrorCode.OS_USER_NOT_EXISTS,
                message="The OS user does not exist")
        return GstSingleCommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def query_job_permission(self):
        """Write the OS user/group and file mode the job must run with.

        Outputs a JobPermissionInfo result file on success, or an
        INTERNAL_ERROR action result when the OS user is unknown.
        """
        if not self._os_user:
            # "job id" now matches the value actually logged (was mislabeled "pid").
            LOGGER.error("Failed to get os username: %s, job id: %s.", self._os_user, self.job_id)
            GstSingleCommonUtil.output_action_result(
                self.pid, ExecuteResultEnum.INTERNAL_ERROR.value, body_err=ErrorCode.OS_USER_NOT_EXISTS)
            return
        group_name = get_group_name_by_os_user(self._os_user)
        output = JobPermissionInfo(user=self._os_user, group=group_name, fileMode="0750")
        output_result_file(self.pid, output.dict(by_alias=True))

    def check_backup_job_type(self):
        """Log the backup type; every type is accepted at this stage."""
        LOGGER.info("Checking backup job type: %s.", self._backup_type)
        return GstSingleCommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator(write_progress=True)
    def exec_backup_pre_job(self):
        """Run pre-backup checks: instance status and database log mode.

        Progress is reported before and after the checks; any check failure
        raises and is handled by the decorator.
        """
        self.sub_job_detail.progress = NumberConst.ONE
        self.sub_job_detail.task_status = SubJobStatusEnum.RUNNING.value
        proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
        check_funcs = [
            self.bak_service.check_instance_status_for_backup,
            self.bak_service.check_log_mode_for_backup,
        ]
        for chk_func in check_funcs:
            chk_func()
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
        return GstSingleCommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def gen_sub_job(self):
        """Generate the single "backup" sub job and write it to the result file.

        :raises ErrCodeException: when protectEnv does not contain exactly
            one node (only standalone deployments are supported).
        """
        LOGGER.info("Start generating backup sub jobs ...")
        tmp_priority = 1
        nodes = self.param_dict.get(ParamKeyConst.JOB, {}).get(ParamKeyConst.PROTECT_ENV, {})\
            .get(ParamKeyConst.NODES, [])
        # Only a single-node (standalone) deployment is supported.
        if len(nodes) != 1:
            LOGGER.error("The nodes of protectEnv is invalid when executing generate sub job, length: %s.", len(nodes))
            raise ErrCodeException(ErrorCode.PARAMS_IS_INVALID, message="The nodes of protectEnv is invalid.")
        tmp_node_id = nodes[0].get(ParamKeyConst.ID, "")
        backup_sub_job = GstSingleCommonUtil.build_sub_job(self.job_id, "backup", tmp_priority, tmp_node_id)
        sub_jobs = [backup_sub_job]
        LOGGER.info("Generate backup sub jobs success, sub jobs: %s.", sub_jobs)
        output_result_file(self.pid, sub_jobs)

    def _resolve_backup_dir(self):
        """Return (bak_dir_name, backup_dir) for the current backup type.

        Backup types with a directory prefix (e.g. full backup) get a
        timestamped sub-directory under the data repository; others (log
        backup) write directly into the log repository root.
        """
        backup_dir_prefix = GstSingleConst.BACKUP_TYPE_DIR_PREFIX_MAP.get(self._backup_type)
        if backup_dir_prefix:
            bak_dir_name = f'{backup_dir_prefix}{int(time.time())}'
            data_path = GstSingleParamUtil.get_repository_paths_for_backup(
                self.param_dict, RepositoryDataTypeEnum.DATA_REPOSITORY.value)[0]
            # Max absolute length of a backup-set path: 167 bytes.
            backup_dir = os.path.realpath(os.path.join(data_path, bak_dir_name))
        else:
            bak_dir_name = ""
            log_path = GstSingleParamUtil.get_repository_paths_for_backup(
                self.param_dict, RepositoryDataTypeEnum.LOG_REPOSITORY.value)[0]
            backup_dir = os.path.realpath(os.path.join(log_path, bak_dir_name))
        return bak_dir_name, backup_dir

    @job_exception_decorator(write_progress=True)
    def exec_backup(self):
        """Execute the backup sub job: prepare the target directory, run the
        full/log backup under progress monitoring, then report copy info.

        :return: ActionResult with SUCCESS, or INTERNAL_ERROR for an
            unsupported backup type.
        """
        s_job_name = GstSingleParamUtil.get_sub_job_name(self.param_dict)
        LOGGER.info("Start executing backup subtask: %s, pid: %s, job id: %s, sub job id: %s.",
                    s_job_name, self.pid, self.job_id, self.sub_job_id)
        if self._backup_type not in (BackupTypeEnum.FULL_BACKUP.value, BackupTypeEnum.LOG_BACKUP.value):
            LOGGER.error("Unsupported backup type: %s.", self._backup_type)
            self.sub_job_detail.task_status = SubJobStatusEnum.FAILED.value
            proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
            return GstSingleCommonUtil.build_action_result(
                code=ExecuteResultEnum.INTERNAL_ERROR.value, body_err=ErrorCode.PARAMS_IS_INVALID,
                message="Unsupported backup type")
        cache_path = GstSingleParamUtil.get_repository_paths_for_backup(
            self.param_dict, RepositoryDataTypeEnum.CACHE_REPOSITORY.value)[0]
        self.sub_job_detail.progress = NumberConst.TEN
        proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
        bak_dir_name, backup_dir = self._resolve_backup_dir()
        self.bak_service.create_backup_dir(self._os_user, backup_dir)
        # Wrap the long-running backup call so progress keeps being reported.
        monitor_decorator = monitor_report_progress_decorator(self.pid, self.sub_job_detail, LOGGER)
        bak_time_info = dict()
        if self._backup_type == BackupTypeEnum.FULL_BACKUP.value:
            bak_time_info = monitor_decorator(self.bak_service.handle_full_backup)(backup_dir)
        elif self._backup_type == BackupTypeEnum.LOG_BACKUP.value:
            bak_time_info = monitor_decorator(self.bak_service.handle_log_backup)(backup_dir)

        # Report the copy (backup-set) information back to the framework.
        copy_info = self.build_copy_info(bak_time_info, bak_dir_name)
        GstSingleBackupUtil.report_copy_info_by_rpc_tool(self.job_id, copy_info, cache_path)
        try:
            self.sub_job_detail.data_size = self.get_copy_size(backup_dir)
        except Exception:
            # Best-effort: a failed size scan must not fail the backup itself.
            LOGGER.exception("Get copy size failed.")
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
        return GstSingleCommonUtil.build_action_result(ExecuteResultEnum.SUCCESS.value)

    def get_copy_size(self, copy_path):
        """Return the size of the backup copy directory, or ZERO when the
        scan fails (failure is logged, not raised)."""
        ret, size = scan_dir_size(self.job_id, copy_path)
        if not ret:
            LOGGER.error("Get gaussdbt single backup copy size failed, job id: %s.", self.job_id)
            return NumberConst.ZERO
        LOGGER.info("Get gaussdbt single backup copy size success, size: %s.", size)
        return size

    def build_copy_info(self, bak_time_info, bak_dir_name):
        """Build the Copy model to report for this backup.

        :param bak_time_info: dict of timing keys returned by the backup call.
        :param bak_dir_name: backup-set directory name ("" for log backup).
        :return: populated Copy instance.
        """
        copy_info = Copy(id=self.job_id, extendInfo=dict())
        if self._backup_type == BackupTypeEnum.LOG_BACKUP.value:
            copy_info.timestamp = bak_time_info.get(ParamKeyConst.COPY_END_TIME)
        else:
            copy_info.timestamp = bak_time_info.get(ParamKeyConst.COPY_BAK_TIME)
            copy_info.repositories = self.bak_service.build_copy_repositories(
                self.param_dict, self._backup_type, bak_dir_name)
        copy_info.extend_info.update(bak_time_info)
        copy_info.extend_info[GstSingleKeyConst.BAK_DIR_NAME] = bak_dir_name
        # Copy verification is not supported for this database.
        copy_info.extend_info[ParamKeyConst.COPY_VERIFY_FILE] = "false"
        gsdb_data = GstSingleCommonUtil.get_val_of_gsdb_env_var(self._os_user, "GSDB_DATA")
        copy_info.extend_info[GstSingleKeyConst.GSDB_DATA_DIR] = gsdb_data
        return copy_info

    @job_exception_decorator(write_progress=True)
    def exec_backup_post_job(self):
        """Clean the cache repository after the backup finishes.

        :raises ErrCodeException: when the cache path fails validation.
        """
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        cache_path = GstSingleParamUtil.get_repository_paths_for_backup(
            self.param_dict, RepositoryDataTypeEnum.CACHE_REPOSITORY.value)[0]
        if not check_repo_path(cache_path):
            LOGGER.error("The cache directory(%s) is invalid.", cache_path)
            raise ErrCodeException(ErrorCode.PARAMS_IS_INVALID, message="The cache directory is invalid.")
        if os.path.isdir(cache_path):
            clean_dir(cache_path)
            LOGGER.info("Clean cache path(%s) success.", cache_path)
        proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
        return GstSingleCommonUtil.build_action_result(ExecuteResultEnum.SUCCESS.value)

    @job_exception_decorator(write_progress=True)
    def abort_job(self):
        """Abort a running backup job and report completion."""
        self.sub_job_detail.progress = NumberConst.FIVE
        proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
        self.bak_service.handle_abort_backup_job()
        self.sub_job_detail.progress = NumberConst.HUNDRED
        self.sub_job_detail.task_status = SubJobStatusEnum.COMPLETED.value
        proactively_report_progress(self.pid, self.sub_job_detail, LOGGER)
        return GstSingleCommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def output_action_result(self, action_ret: ActionResult):
        """Delegate result output to the base class implementation."""
        super(GstSingleBackup, self).output_action_result(action_ret)