#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import time
import os
import json
import pathlib
from common.const import ExecuteResultEnum, SubJobStatusEnum
from common.common import output_result_file
from goldendb.handle.common.const import LogLevel
from tpops.gaussdb.common.const import EnvName, GaussDBCode, JobInfo, ErrorCode
from tpops.gaussdb.common.gaussdb_common import report_job_details
from tpops.gaussdb.common.parse_param import tpops_parse_param_with_jsonschema
from tpops.gaussdb.common.safe_get_information import ResourceParam
from tpops.gaussdb.handle.backup.exec_backup import BackUp
from tpops.gaussdb.common.log import log
from dws.commons.dws_param_parse import DwsParamParse
from goldendb.schemas.glodendb_schemas import ActionResponse
from common.common_models import Copy, CopyInfoRepModel, SubJobDetails, LogDetail
from tpops.gaussdb.handle.resource.resource_info import ResourceInfo


class GaussDBBackupService(object):
    """
    Backup-task related interfaces.

    Every public method is a static entry point invoked by the plugin
    framework.  Outcomes are reported either through output_result_file /
    report_job_details, or by touching marker files in the task cache
    directory which the progress-query interfaces later inspect.
    """

    @staticmethod
    def _touch_flag_file(cache_area, file_name):
        """Create (or refresh) an empty progress-marker file in the cache area."""
        pathlib.Path(os.path.join(cache_area, file_name)).touch()

    @staticmethod
    def allow_backup_in_local_node(req_id, job_id, sub_job_id):
        """
        Whether backup may run on the local node.  The business currently
        needs no real check, so this always reports success (main job).
        :param req_id: request ID
        :param job_id: main job ID
        :param sub_job_id: sub-job ID
        :return: True always
        """
        log.info(
            f'step 1: execute allow_backup_in_local_node, req_id: {req_id}, job_id: {job_id}, sub_job_id: {sub_job_id}')
        param_inst = ResourceParam(req_id)
        # The parameter file is read for its side effects only; the content
        # itself is not needed here.
        _ = param_inst.get_param()
        # Point the IAM credential env-var names at the job's auth fields.
        EnvName.IAM_USERNAME = "job_protectEnv_auth_authKey"
        EnvName.IAM_PASSWORD = "job_protectEnv_auth_authPwd"
        response = ActionResponse(code=ExecuteResultEnum.SUCCESS)
        output_result_file(req_id, response.dict(by_alias=True))
        log.info("step 1: execute AllowBackupInLocalNode interface success")
        return True

    @staticmethod
    def check_backup_job_type(req_id, job_id, sub_id, json_param):
        """
        Check whether an incremental backup must be converted to a full one.
        :param req_id: request ID
        :param job_id: main job ID
        :param sub_id: sub-job ID
        :param json_param: raw job parameters
        """
        # Fixed copy-paste bug: this interface previously logged itself as
        # "backup_gen_sub_job".
        log.info(
            f'step 2-1: start execute check_backup_job_type, req_id: {req_id}, job_id: {job_id},  sub_id: {sub_id}')
        param_inst = ResourceParam(req_id)
        param = param_inst.get_param()
        backup_inst = BackUp(req_id, job_id, sub_id, json_param, param)
        backup_inst.check_backup_job_type()
        log.info('end to check_backup_job_type')

    @staticmethod
    def backup_pre_job(req_id, job_id, sub_id, json_param):
        """
        Backup prerequisite task (main job): validate the parameters and set
        up the roach mapping.  The asynchronous outcome is persisted as a
        marker file which backup_prerequisite_progress reports on later.
        :param req_id: request ID
        :param job_id: main job ID
        :param sub_id: sub-job ID
        :param json_param: raw job parameters
        :return: False when parameter validation or BackUp construction
                 fails; True otherwise (pre-job failures themselves are
                 signalled via the marker file, not the return value)
        """
        log.info(f'step 2-2: execute backup_pre_job,req_id:{req_id} job_id:{job_id}, json_param:{json_param}')

        response = ActionResponse()

        # 1. Validate the parameter file against the JSON schema.
        try:
            file_content = tpops_parse_param_with_jsonschema(req_id)
        except Exception as exception:
            GaussDBBackupService.set_error_response(response)
            log.error(exception, exc_info=True)
            output_result_file(req_id, response.dict(by_alias=True))
            return False
        # 2. Report success of the synchronous part, then configure the
        #    roach mapping.
        output_result_file(req_id, response.dict(by_alias=True))

        try:
            backup_inst = BackUp(req_id, job_id, sub_id, json_param, file_content)
        except Exception as ex:
            log.info('An exception occurred while executing the pre-task.')
            # Guard against exceptions raised without args (ex.args may be empty).
            error_code = ResourceInfo.deal_with_error_message(ex.args[0] if ex.args else str(ex))
            log_detail = LogDetail(logInfo="plugin_execute_prerequisit_task_fail_label", log_info_param=[sub_id],
                                   logLevel=LogLevel.ERROR, logDetail=error_code)
            sub_dict = SubJobDetails(taskId=job_id, subTaskId=sub_id, progress=100, logDetail=[log_detail],
                                     taskStatus=SubJobStatusEnum.FAILED.value)
            report_job_details(req_id, sub_dict.dict(by_alias=True))
            return False
        cache_area = backup_inst.get_cache_path()
        try:
            result = backup_inst.backup_pre_job()
        except Exception as err:
            log.error(f"backup pre job failed, err: {err}")
            result = False
        if result:
            log.info("backup pre job currently in progress.")
            GaussDBBackupService._touch_flag_file(cache_area, "BackupPrerequisiteProgress")
        else:
            log.error("backup pre job failed")
            GaussDBBackupService._touch_flag_file(cache_area, "BackupPrerequisiteFailProgress")

        log.info(f'step 2-2: finish to execute backup_pre_job,req_id:{req_id} job_id:{job_id}')
        return True

    @staticmethod
    def backup_prerequisite_progress(req_id, job_id, sub_id, json_param):
        """
        Report the progress of the backup prerequisite task.
        :param req_id: request ID
        :param job_id: main job ID
        :param sub_id: sub-job ID
        :param json_param: raw job parameters
        :return: True always
        """
        log.info(
            f'start execute backup_prerequisite_job_progress, req_id: {req_id}, job_id: {job_id},  sub_id: {sub_id}')
        param_inst = ResourceParam(req_id)
        param = param_inst.get_param()
        backup_inst = BackUp(req_id, job_id, sub_id, json_param, param)
        backup_inst.backup_prerequisite_progress()
        return True

    @staticmethod
    def backup_gen_sub_job(req_id, job_id, sub_id, json_param):
        """
        Split the backup job into sub-jobs.
        :param req_id: request ID
        :param job_id: main job ID
        :param sub_id: sub-job ID
        :param json_param: raw job parameters
        :return: True always
        """
        log.info(
            f'start execute backup_gen_sub_job, req_id: {req_id}, job_id: {job_id},  sub_id: {sub_id}, '
            f'json_param: {json_param}')
        param_inst = ResourceParam(req_id)
        param = param_inst.get_param()
        backup_inst = BackUp(req_id, job_id, sub_id, json_param, param)
        backup_inst.gen_sub_job()
        return True

    @staticmethod
    def query_back_copy_jobinfo(req_id, param):
        """
        Build a JobInfo from the raw parameter dict for copy queries.
        :param req_id: request ID, used as the JobInfo pid
        :param param: parsed job parameter dict
        :return: populated JobInfo instance
        """
        job_info = JobInfo(pid=req_id,
                           job_id=DwsParamParse.get_job_id(param),
                           sub_job_id=DwsParamParse.get_sub_job_id(param),
                           copy_id=DwsParamParse.get_copy_id(param),
                           backup_type=DwsParamParse.get_type(param),
                           meta_path=DwsParamParse.get_meta_path(param),
                           meta_rep=DwsParamParse.get_meta_rep(param),
                           cache_path=DwsParamParse.get_cache_path(param),
                           cache_rep=DwsParamParse.get_cache_rep(param),
                           data_reps=DwsParamParse.get_data_reps(param))
        return job_info

    @staticmethod
    def query_backup_copy(req_id, job_id, sub_job_id):
        """
        Query the copy information of the backup and report it.
        :param req_id: request ID
        :param job_id: main job ID
        :param sub_job_id: sub-job ID
        :return: True always; the copy info is written via output_result_file
        """
        # Fixed: this text used to sit after the first statement and was
        # therefore not a docstring.
        log.info(f'execute to query_backup_copy, pid: {req_id}, job_id: {job_id}, sub_job_id {sub_job_id}')
        param = tpops_parse_param_with_jsonschema(req_id)
        job_info = GaussDBBackupService.query_back_copy_jobinfo(req_id, param)
        copy_id = ""
        nodes = []
        # Both lookups are best-effort: a missing key only reduces the
        # information placed into the copy's extendInfo.
        try:
            nodes = param['job']['protectEnv']['nodes']
        except Exception as ex:
            log.error(ex, exc_info=True)
        try:
            copy_id = param['job']['copy'][0]['id']
        except Exception as ex:
            log.error(ex, exc_info=True)

        data_rep_rsp = []
        # Data repositories: one entry per data repo, each pointing at this
        # copy's data directory on the remote share.
        for item in job_info.data_reps:
            data_rep_rsp.append(
                CopyInfoRepModel(id=item.get('id'),
                                 repositoryType=item.get("repositoryType"),
                                 isLocal=item.get("isLocal"),
                                 protocol="NFS",
                                 remotePath=f"{item.get('remotePath')}/data/{job_info.copy_id}",
                                 remoteHost=item.get("remoteHost"),
                                 extendInfo={
                                     "fsId": item.get("extendInfo", {}).get("fsId")
                                 }).dict(by_alias=True))

        log.debug(f"Construct meta: {job_info.log_format()}")
        # Metadata repository entry.
        data_rep_rsp.append(
            CopyInfoRepModel(id=job_info.meta_rep.get('id'),
                             repositoryType=job_info.meta_rep.get("repositoryType"),
                             isLocal=job_info.meta_rep.get("isLocal"),
                             protocol="NFS",
                             remotePath=f"{job_info.meta_rep.get('remotePath')}/meta/{job_info.copy_id}",
                             remoteHost=job_info.meta_rep.get("remoteHost"),
                             extendInfo={
                                 "fsId": job_info.meta_rep.get("extendInfo", {}).get("fsId")
                             }).dict(by_alias=True))
        copy_info = Copy(repositories=data_rep_rsp, extendInfo={'nodes': nodes, 'copyId': copy_id,
                                                                'backupTime': int(time.time())}).dict(by_alias=True)
        log.info(f"copy_info: {copy_info}")
        output_result_file(req_id, copy_info)
        log_detail = LogDetail(logInfo="plugin_task_subjob_success_label", logInfoParam=[sub_job_id], logLevel=1)
        report_job_details(req_id, SubJobDetails(taskId=job_id, subTaskId=sub_job_id, progress=100,
                                                 logDetail=[log_detail],
                                                 taskStatus=SubJobStatusEnum.COMPLETED.value).dict(by_alias=True))
        log.info('end to query_backup_copy')
        return True

    @staticmethod
    def do_post_job(req_id, job_id, sub_id, json_param):
        """
        Backup post task: run the post-processing and record the outcome as
        a marker file in the cache area.
        :param req_id: request ID
        :param job_id: main job ID
        :param sub_id: sub-job ID
        :param json_param: raw job parameters
        :return: True always; failure is signalled via the marker file
        """
        log.info(
            f'start execute do_post_job, req_id: {req_id}, job_id: {job_id},  sub_id: {sub_id}')
        param_inst = ResourceParam(req_id)
        param = param_inst.get_param()
        backup_inst = BackUp(req_id, job_id, sub_id, json_param, param)
        result = True
        cache_area = backup_inst.get_cache_path()
        try:
            backup_inst.do_post_job()
        except Exception as err:
            # Fixed copy-paste bug: previously logged "backup pre job failed".
            log.error(f"backup post job failed, err: {err}")
            result = False
        if result:
            GaussDBBackupService._touch_flag_file(cache_area, "BackupPostProgress")
        else:
            GaussDBBackupService._touch_flag_file(cache_area, "BackupPostProgressFailProgress")
        # Fixed: previously logged the literal text "req_id" with no value.
        log.info(f'end to do_post_job, req_id: {req_id}')
        return True

    @staticmethod
    def get_repository_path(file_content, repository_type):
        """
        Return the first path of the repository matching repository_type,
        or an empty string when no repository matches.
        :param file_content: parsed job parameter dict
        :param repository_type: repository type value to look up
        :return: repository path string, "" when not found
        """
        repositories = file_content.get("job", {}).get("repositories", [])
        repositories_path = ""
        for repository in repositories:
            if repository['repositoryType'] == repository_type:
                repositories_path = repository["path"][0]
                log.info(f"repositories_path {repositories_path}")
                break
        return repositories_path

    @staticmethod
    def set_error_response(response):
        """Mark an ActionResponse as failed with the generic GaussDB error code."""
        response.code = GaussDBCode.FAILED.value
        response.body_err = GaussDBCode.FAILED.value