#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import json
import os
import time
from threading import Thread

from common.const import BackupTypeEnum, SubJobStatusEnum, ReportDBLabel, EnvInfoPrefix
from common.err_code import CommErrCode
from common.parse_parafile import get_env_variable
from sqlserver import log
from sqlserver.commons.common import check_is_sub_job_database, output_action_result, get_channel_number, \
    is_need_adjust_channel, minimum_vdi_virtual_device
from sqlserver.commons.const import SQLServerCode, BodyErr, \
    DatabaseRecoveryModel, SQLTmpFileName, SQLServerJsonConstant, JobType, SQLServerProgressFileType, ProgressConst, \
    SQLServerStrConstant, ExecCmdResult, RecoveryMode
from sqlserver.commons.models import CopyMetaExtendInfo
from sqlserver.commons.sqlserver_utils import get_offset_to_utc
from sqlserver.sqlserver_base import SQLServerBase


class SQLServerBackupDatabase(SQLServerBase):
    """Backup executor for a single SQL Server database.

    Implements the plugin job hooks: backup-type pre-checks, local
    executability checks, pre-job validation, the backup sub job itself
    (driven through the VDI tool with a background progress-report thread)
    and copy-info querying.
    """

    def __init__(self, p_id, job_id, sub_job_id, json_param):
        super().__init__(p_id, job_id, sub_job_id, json_param)
        self._job_id = job_id
        # Instance / authentication details; populated by prepare_user_infos().
        self.instance_name = ""
        self.auth_mode = ""
        self.user_name = ""
        self.pass_prefix = ""
        # Cleared to stop the progress-report thread started in do_sub_job().
        self._report_progress_thread_start = False
        self.channel_number = SQLServerStrConstant.DEFAULT_CHANNEL

    def parse_database_name(self):
        """Return the protected database name from the job parameters.

        :return: non-empty database name.
        :raises Exception: if the name is missing or empty.
        """
        database = self._json_param.get(SQLServerJsonConstant.JOB, {}). \
            get(SQLServerJsonConstant.PROTECT_OBJECT, {}).get(SQLServerJsonConstant.NAME, "")
        if not database:
            log.error("Failed to get database")
            raise Exception("Failed to get database")
        return database

    def prepare_user_infos(self):
        """Read channel count, instance name and credentials from the job env."""
        self.channel_number = get_channel_number(self._json_param)
        self.instance_name = self._json_param.get(SQLServerJsonConstant.JOB, {}). \
            get(SQLServerJsonConstant.PROTECT_OBJECT, {}).get(SQLServerJsonConstant.INSTANCE_NAME, "")
        self.auth_mode = get_env_variable(f"{EnvInfoPrefix.OBJ_AUTH_TYPE}_{self._p_id}")
        self.user_name = get_env_variable(f"{EnvInfoPrefix.OBJ_USERNAME_PREFIX}_{self._p_id}")
        self.pass_prefix = f"{EnvInfoPrefix.OBJ_PASS_PREFIX}_{self._p_id}"
        self.check_and_prepare_user_info(self.instance_name)

    def check_allow_in_local(self):
        """Check that the protected SQL Server instance service is running.

        Writes an action result (instance-state-abnormal) on failure.

        :return: True if the service is running, otherwise False.
        """
        self.prepare_user_infos()
        ret = self.sql_utils.check_service_is_running(self.instance_name)
        if not ret:
            node_ip = self._json_param.get("job", {}).get("protectEnv", {}).get("nodes", [{}])[0].get("endpoint", "")
            output_action_result(self._p_id, SQLServerCode.FAILED.value,
                                 BodyErr.INSTANCE_STATE_ABNORMAL.value, "", [node_ip, self.instance_name])
            log.error(f"SQLServer not running. pid:{self._p_id} jobId: {self._job_id}")
            return False
        return True

    def check_allow_backup_in_local(self):
        """
        Check whether this node is allowed to execute the backup task.

        :return: True if the node may run the job, otherwise False.
        """
        log.info(f"Start to check allow backup in local. pid: {self._p_id} jobId: {self._job_id}")
        if check_is_sub_job_database(self._json_param):
            log.info(f"Allow sub job. pid: {self._p_id} jobId: {self._job_id}")
            return True
        try:
            return self.check_allow_in_local()
        except Exception:
            log.error("Exception when check allow in local")
            output_action_result(self._p_id, SQLServerCode.FAILED.value, BodyErr.ERROR_INTERNAL.value, "")
            return False

    def check_diff_backup(self, database):
        """Decide whether a differential backup is allowed for *database*.

        A differential backup requires a previous full copy on disk that also
        matches the latest full backup set known to the database; otherwise
        the job is reported to be converted to a full backup.

        :param database: name of the database being backed up.
        :return: (code, body_err_code, msg) tuple for the action result.
        """
        # The master database is always converted to a full backup.
        if database == "master":
            return SQLServerCode.FAILED.value, BodyErr.ERR_INC_TO_FULL.value, ""
        # Checking whether to convert the diff backup into a full backup.
        self.get_data_path()
        # Diff must be converted to full unless a full copy exists on disk.
        full_copy_missing = not any(
            os.path.exists(os.path.join(self._data_path, sub_dir, f"{database}.bak"))
            for sub_dir in os.listdir(self._data_path)
            if os.path.isdir(os.path.join(self._data_path, sub_dir))
        )
        if full_copy_missing:
            log.error(f"Full backup copy not exist. pid: {self._p_id} jobId: {self._job_id}")
            return SQLServerCode.FAILED.value, BodyErr.ERR_INC_TO_FULL.value, ""
        self.prepare_user_infos()
        result = self.check_full_copy([])
        if not result:
            log.error(f"full backup copy is not the same as the latest full backup copy on the database backupset.  \
            pid: {self._p_id} jobId: {self._job_id}")
            return SQLServerCode.FAILED.value, BodyErr.ERR_INC_TO_FULL.value, ""
        log.info(f"Succeed to check backup type. pid: {self._p_id} jobId: {self._job_id}")
        return SQLServerCode.SUCCESS.value, 0, ""

    def check_backup_job_type(self):
        """Validate the requested backup type for the protected database.

        Full backups always pass; diff backups are delegated to
        check_diff_backup(); anything else is rejected.

        :return: (code, body_err_code, msg) tuple for the action result.
        """
        log.info(f"Start to exec check backup type, pid: {self._p_id} jobId: {self._job_id}")
        try:
            database_name = self.parse_database_name()
        except Exception as err:
            log.error(f"Exception when get database as err: {err}")
            return SQLServerCode.FAILED.value, 0, "Failed to parse database name"
        # The system database tempdb cannot be backed up.
        if database_name == "tempdb":
            log.error(f"The database cannot be backed up! pid: {self._p_id} jobId: {self._job_id}")
            return SQLServerCode.FAILED.value, 0, "Database not support backup"
        if self.backup_type == BackupTypeEnum.FULL_BACKUP.value:
            log.info(f"Succeed to check backup type. pid: {self._p_id} jobId: {self._job_id}")
            return SQLServerCode.SUCCESS.value, 0, ""
        elif self.backup_type == BackupTypeEnum.DIFF_BACKUP.value:
            code, body_err_code, msg = self.check_diff_backup(database_name)
            return code, body_err_code, msg
        log.error(f"Not support. pid: {self._p_id} jobId: {self._job_id}")
        return SQLServerCode.FAILED.value, 0, "Not support"

    def pre_job(self):
        """Run the pre-job checks: user permission, database existence/state,
        and (for log backup) recovery-model and system-database restrictions.

        :return: True when every check passes.
        :raises Exception: on any failed check; self.error_logtail is set
            with the matching error detail before raising.
        """
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, ProgressConst.MIN_PROGRESS.value,
                                    "", SQLServerProgressFileType.COMMON)
        self.prepare_user_infos()
        if not self.sql_utils.check_user_permission(self.auth_mode, self.user_name):
            self.error_logtail.log_detail = BodyErr.SQLSERVER_PERMISSIONS_ERROR.value
            log.error("Check user permission failed")
            raise Exception("Check user permission failed")
        database = self.parse_database_name()
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, ProgressConst.MIN_PROGRESS.value,
                                    "", SQLServerProgressFileType.COMMON)
        # Check whether the database exists and is in a normal state.
        db_state_info_list = self.sql_utils.get_database_info(database)
        log.info(f"db_state_info_list:{db_state_info_list}")
        if not db_state_info_list:
            self.error_logtail.log_detail = BodyErr.SQLSERVER_DATABASE_NOT_EXISTS.value
            self.error_logtail.log_detail_param = [database]
            log.error(f"Database: {database} not exists")
            raise Exception(f'Databases {database} not exists')
        db_state = db_state_info_list[0].get("state")
        if db_state != ExecCmdResult.SUCCESS:
            self.error_logtail.log_detail = BodyErr.BACKUP_FAIL_FOR_DATABASE_STATE_ABNORMAL.value
            self.error_logtail.log_detail_param = [database]
            log.error(f"Database: {database} in abnormal state")
            raise Exception('Databases with abnormal state')
        if self.backup_type == BackupTypeEnum.LOG_BACKUP.value:
            # System databases master and tempdb do not support log backup.
            if database == "tempdb" or database == "master":
                log.error("Database tempdb or master not support log backup")
                raise Exception("The database cannot be log backup")
            db_recovery_model = db_state_info_list[0].get("recovery_model_desc")
            if RecoveryMode.SIMPLE in db_recovery_model:
                self.error_logtail.log_detail = BodyErr.SQLSERVER_REVERTIVE_MODE_ERROR.value
                self.error_logtail.log_detail_param = [database]
                log.error(f"Failed for recovery_model_desc: {DatabaseRecoveryModel.SIMPLE.value} exists.")
                raise Exception("Database with simple recovery mode")
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, ProgressConst.PROGRESS_ONE_HUNDRED,
                                    "", SQLServerProgressFileType.COMMON)
        return True

    def exec_pre_job(self):
        """Entry point for the pre-job phase; reports failure progress on error.

        :return: True on success, False on any exception during pre_job().
        """
        log.info(f"Start to do pre job, pid: {self._p_id} jobId: {self._job_id}")
        try:
            ret = self.pre_job()
        except Exception as err:
            self.error_logtail.log_info = ReportDBLabel.PRE_REQUISIT_FAILED
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, ProgressConst.PROGRESS_ONE_HUNDRED,
                                        self.error_logtail, SQLServerProgressFileType.COMMON)
            log.error(f"Exception when pre job. pid: {self._p_id} jobId: {self._job_id},err: {err}")
            return False
        log.info(f"Pre job result is ret: {ret}. pid: {self._p_id} jobId: {self._job_id}")
        return ret

    def do_sub_job(self):
        """Execute the backup sub job for the protected database.

        Saves pre-backup metadata, runs the VDI backup with a background
        progress-report thread, records backup timestamps and persists the
        copy info to a temp file on success.

        :return: True on success, False on failure (progress file updated
            with the matching error log tail).
        :raises Exception: if the repository path parameters cannot be prepared.
        """
        database = self.parse_database_name()
        if not self.prepare_path_param():
            log.error("Failed to get repositories info")
            raise Exception("Failed to get repositories info")
        self.prepare_user_infos()
        self.convert_vdi_type()
        # Save the database file information before the backup starts.
        self.save_meta_info([database], "meta.info", True)
        copy_file = os.path.join(self._data_path, f"{database}.bak")
        extend_info = CopyMetaExtendInfo(databaseName=database, backupPath=copy_file, metaPath=self._meta_path)
        extend_info.version = self.sql_utils.get_version()
        log.info("Succeed to prepare param, start to backup")
        # Log params: "1" means pending backup, "0" means already backed up.
        self.running_tail.log_info_param = [self._sub_job_id, "1", "0"]
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, ProgressConst.MIN_PROGRESS.value, self.running_tail,
                                    SQLServerProgressFileType.COMMON)
        progress_thread = Thread(target=self.report_backup_progress_thread, args=(database, JobType.BACKUP,
                                                                                  SQLServerProgressFileType.COMMON))
        progress_thread.daemon = True
        progress_thread.start()
        if self.backup_type != BackupTypeEnum.LOG_BACKUP.value:
            extend_info.begin_time = int(time.time())
        virtual_device_count = minimum_vdi_virtual_device(self.channel_number)
        ret, std_out = self.call_vdi_tool(self.vdi_type, database, copy_file, virtual_device_count)
        database_meta_info = self.sql_utils.get_latest_backup_meta_info(database, self.vdi_type)
        extend_info.meta_info.update({
            f"{database}": database_meta_info
        })
        self._report_progress_thread_start = False
        progress_thread.join()
        if not ret:
            log.error(f"Failed to backup database: {database}, std_out:{std_out}")
            if "there is no current database backup" in std_out:
                self.error_logtail.log_detail = BodyErr.BACKUP_FAIL_FOR_FULL_BACKUP_NEED.value
            elif is_need_adjust_channel(std_out):
                self.error_logtail.log_detail = BodyErr.ADJUST_CHANNELS.value
            else:
                self.error_logtail.log_detail = CommErrCode.FAILED_EXECUTE_COMMAND
                # NOTE(review): the label is always "BACKUP LOG" even for
                # full/diff backups — confirm this is intended for all types.
                self.error_logtail.log_detail_param = ["BACKUP LOG", std_out]
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, ProgressConst.PROGRESS_ONE_HUNDRED,
                                        self.error_logtail, SQLServerProgressFileType.COMMON)
            return False
        self.set_extend_info_time(extend_info)
        if self.backup_type == BackupTypeEnum.LOG_BACKUP.value:
            extend_info.begin_time = self.get_data_start_time(extend_info)
        # Save temp data for the later query-copy-data step.
        self.write_tmp_file(json.dumps(extend_info.dict(by_alias=True)), SQLTmpFileName.COPY_INFO_FILE)
        log.info(f"Succeed to backup database: {database}")
        # Backup succeeded; "1" is the number of successfully backed-up databases.
        self.success_logtail.log_info_param = [self._sub_job_id, "1"]
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, ProgressConst.PROGRESS_ONE_HUNDRED,
                                    self.success_logtail, SQLServerProgressFileType.COMMON)
        return True

    def set_extend_info_time(self, extend_info):
        """Stamp end/backup time (epoch seconds) and UTC offset on *extend_info*."""
        extend_info.end_time = int(time.time())
        extend_info.backup_time = extend_info.end_time
        extend_info.offset_to_utc = get_offset_to_utc()

    def exec_sub_job(self):
        """
        Entry point for executing the backup sub job.

        :return: True on success, False on failure or exception.
        """
        log.info(f"Start to backup. pid: {self._p_id} jobId: {self._job_id}")
        try:
            ret = self.do_sub_job()
        except Exception as err:
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, ProgressConst.PROGRESS_ONE_HUNDRED,
                                        self.error_logtail, SQLServerProgressFileType.COMMON)
            log.error(f"Exception when backup database as err: {err}. pid: {self._p_id} jobId: {self._job_id}")
            return False
        log.info(f"Sub job result: {ret}. pid: {self._p_id} jobId: {self._job_id}")
        return ret

    def query_copy_info(self):
        """
        Query the copy (backup set) information for this backup type.

        :return: result of query_copy_infos(), or False on exception.
        """
        try:
            ret = self.query_copy_infos(self.backup_type)
        except Exception as err:
            log.exception(f"Exception when query copy info, err: {err}")
            return False
        return ret
