#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os

from common.logger import Logger
from common.common_models import SubJobDetails
from common.number_const import NumberConst
from common.const import SubJobStatusEnum, ExecuteResultEnum
from common.common import output_result_file
from common.parse_parafile import get_env_variable
from db2.backup.util.dpf_backup_util import DpfBackupUtil
from db2.backup.util.ha_utils import is_power_rhel_ha
from db2.comm.const import Db2JsonConstant, Db2Const, Db2CommonConst
from db2.comm.db2_cmd import check_cluster_node
from db2.comm.error_code import Db2ErrCode
from db2.comm.util.common_util import Db2CommonUtil
from db2.comm.util.param_util import Db2ParamUtil
from db2.backup.util.db2_backup_util import Db2BackupUtil

# Module-level logger; all DB2 plugin components write to the shared db2.log.
LOGGER = Logger().get_logger(filename="db2.log")


class CheckCopy:
    """Verify the integrity of DB2 backup copies before a restore.

    Drives the copy-verification job: reports progress to the framework via
    a cache-directory progress file, and implements the "is this node allowed
    to run the job" pre-checks for PowerHA/RHEL-HA clusters.
    """

    def __init__(self, task_name, pid, job_id, param_dict, sub_job_id=None):
        """
        :param task_name: display name of the task (logging only)
        :param pid: request id used for env-variable lookup and result output
        :param job_id: main job id
        :param param_dict: parsed job parameter dictionary
        :param sub_job_id: optional sub-job id; progress files fall back to
            the main job id when it is absent
        """
        self._task_name = task_name
        self._pid = pid
        self._job_id = job_id
        self._sub_job_id = sub_job_id
        self._param_dict = param_dict
        # Progress detail reported to the framework; starts at 1% RUNNING.
        self.sub_job_detail = SubJobDetails(
            taskId=job_id, subTaskId=sub_job_id, progress=NumberConst.ONE,
            logDetail=list(), taskStatus=SubJobStatusEnum.RUNNING.value)
        self.resource_type = DpfBackupUtil.get_cluster_type(param_dict)

    def _progress_file_name(self):
        """Progress files are keyed by sub-job id when present, else job id."""
        return self._sub_job_id if self._sub_job_id else self._job_id

    def _report_progress(self, progress_file_name, cache_path, status, progress,
                         extend_info=None):
        """Update sub_job_detail and persist it to the progress file."""
        self.sub_job_detail.task_status = status
        self.sub_job_detail.progress = progress
        if extend_info is not None:
            self.sub_job_detail.extend_info = extend_info
        Db2CommonUtil.write_progress_to_file(self.sub_job_detail, progress_file_name, cache_path)

    def check_copies(self):
        """Verify every copy path of the job; report progress along the way.

        :return: action result with SUCCESS when all copies verify, otherwise
            INTERNAL_ERROR (and the progress file marks the copy as damaged).
        """
        LOGGER.info("Start check copies.")
        user_name = get_env_variable(f"{Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY}_{self._pid}")
        cache_path = Db2ParamUtil.get_cache_mount_path_for_restore(self._param_dict)
        progress_file_name = self._progress_file_name()
        # On an HA standby node, return success directly: the current
        # framework cannot switch execution to the primary node automatically.
        if is_power_rhel_ha(self.resource_type) and not self.check_has_primary_node(user_name):
            self._report_progress(progress_file_name, cache_path,
                                  SubJobStatusEnum.COMPLETED.value, NumberConst.HUNDRED)
            LOGGER.info("Check copies complete.")
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

        self._report_progress(progress_file_name, cache_path,
                              SubJobStatusEnum.RUNNING.value, NumberConst.FIVE)
        copy_path = Db2ParamUtil.get_copy_path_info(self._param_dict)
        # Loop-invariants hoisted: the san-client flag and the copy id come
        # from the job parameters and do not change per copy path.
        san_client = self.get_san_client_type()
        LOGGER.info(f'san client: {san_client}')
        for item in copy_path:
            data_path = os.path.dirname(item.get("data_path", ""))
            LOGGER.info(f'Start verify copy path: {data_path}')
            cache_first_path = item.get("cache_path", "")
            check_filename = f"check_copy_{self._sub_job_id}"
            file_name = os.path.join(cache_first_path, check_filename)
            LOGGER.info(f"Check_file is :{file_name}")
            if san_client == 'true':
                # San-client copies live one level deeper: <prefix>_<copy id>.
                backup_dir_prefix = Db2ParamUtil.get_backup_copy_type_prefix(self._param_dict)
                copies = self._param_dict.get('job', {}).get('copies', [])
                # Guard against an empty copies list (original indexed [0] blindly).
                copy_id = copies[0].get('id', '') if copies else ''
                data_path = f'{data_path}/{backup_dir_prefix}_{copy_id}'
            if Db2CommonUtil.check_all_copy_verify(user_name, data_path,
                                                   item.get("restore_time", ""), self.resource_type,
                                                   file_name) != "true":
                LOGGER.error(f'Verify copy false,path: {data_path}')
                self._report_progress(progress_file_name, cache_path,
                                      SubJobStatusEnum.FAILED.value, NumberConst.HUNDRED,
                                      extend_info={Db2Const.IS_DAMAGED: "true"})
                return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value)
        self._report_progress(progress_file_name, cache_path,
                              SubJobStatusEnum.COMPLETED.value, NumberConst.HUNDRED,
                              extend_info={Db2Const.IS_DAMAGED: "false"})
        LOGGER.info("Check copies complete.")
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def output_action_result(self, action_ret):
        """Write the action result to the framework's result file for this pid."""
        output_result_file(self._pid, action_ret.dict(by_alias=True))

    def check_copies_progress(self):
        """Report copy-verification progress from the cached progress file."""
        progress_file_name = self._progress_file_name()
        LOGGER.info(f"Start check copies progress, file name: {progress_file_name}.")
        cache_path = Db2ParamUtil.get_cache_mount_path_for_restore(self._param_dict)
        Db2BackupUtil.report_progress_utl(self._pid, progress_file_name, cache_path)
        LOGGER.info(f"Check copies progress complete, file name: {progress_file_name}.")
        return True

    def check_has_primary_node(self, user_name):
        """Return True if this node is the cluster primary node.

        :param user_name: DB2 instance user used to query the cluster state
        """
        ret, _ = check_cluster_node(user_name)
        if not ret:
            LOGGER.debug(f"This node is not the primary node.sub_job_id:{self._sub_job_id},job_id: {self._job_id}")
            return False
        return True

    def allow_check_copy_sub_job_in_local_node(self):
        """Allow the check-copy sub job only on the HA primary node.

        :return: SUCCESS when this node may run the sub job, otherwise
            INTERNAL_ERROR with an invalid-parameter body error.
        """
        LOGGER.info(f'sub job id: {self._sub_job_id}, start to allow check copy sub job in local_node')
        user_name = get_env_variable(f"{Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY}_{self._pid}")
        if is_power_rhel_ha(self.resource_type) and not self.check_has_primary_node(user_name):
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="Process is not start!",
                                                     body_err=Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER)
        LOGGER.info(f'sub job id: {self._sub_job_id}, end to allow check copy sub job in local_node')
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def allow_check_copy_in_local_node(self):
        """Allow the check-copy main job on this node.

        Non-HA clusters and non-sanclient jobs may run anywhere; in the
        sanclient case both the main job and sub jobs must run on the HA
        primary node, because the copy cannot be mounted on the primary and
        the standby at the same time.
        """
        LOGGER.info(f'job id: {self._job_id}, start to allow check copy sub job in local_node')
        if not is_power_rhel_ha(self.resource_type):
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)
        if self.get_san_client_type() != 'true':
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)
        user_name = get_env_variable(f"{Db2JsonConstant.JOB_PROTECTENV_NODES_0_AUTH_AUTHKEY}_{self._pid}")
        # is_power_rhel_ha already known True here (early return above), so
        # only the primary-node check remains.
        if not self.check_has_primary_node(user_name):
            return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.INTERNAL_ERROR.value,
                                                     message="Process is not start!",
                                                     body_err=Db2ErrCode.ERROR_COMMON_INVALID_PARAMETER)
        return Db2CommonUtil.build_action_result(code=ExecuteResultEnum.SUCCESS.value)

    def get_san_client_type(self):
        """Return "true"/"false": whether this is a sanclient backup job."""
        san_client_type = self._param_dict.get(Db2JsonConstant.JOB, {})\
            .get(Db2JsonConstant.EXTEND_INFO, {}).get(Db2JsonConstant.SANCLIENT_TYPE, "false")
        return san_client_type

