#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import glob
import json
import os
import time
from pathlib import Path
from threading import Thread

from common.common import execute_cmd, invoke_rpc_tool_interface, get_host_sn
from common.common_models import SubJobDetails, LogDetail
from common.const import DBLogLevel, RpcParamKey, ReportDBLabel
from common.parse_parafile import ParamFileUtil
from exchange import log
from exchange.backup.exchange_backup_sqlite import SqliteInputParam, ExchangeSqliteService
from exchange.commons.command import Commands
from exchange.commons.common import output_execution_result_ex, get_key_value, \
    init_cache_and_data_directory, init_hub_path, get_databases, get_target_path, get_snap_id, copy_backup_data, \
    init_directory, get_command_result, get_last_copy_info, get_passive_databases, write_file, get_database_path, \
    get_local_mailbox_database_copy_status, get_vss_check_command_result, report_job_details_by_rpc, \
    get_exchange_version, read_file, find_db_list_files, read_json_file
from exchange.commons.const import ExchangeJobInfo, ParamConstant, SubJobStatusEnum, ExchangeReportDBLabel, \
    ExecCmdResult, ExchangeWindowsPath, JobBackupType, ExchangeType, DataBackupType, SYSTEM_MAILBOX_LIST, \
    AgentConstant, BodyErr, ExchangeCopyType, ExchangeBackupSubJobName, CopyType
from exchange.commons.exchange_exception import ExchangeErrCodeException, ExchangeInternalException
from exchange.commons.exchange_function_tool import log_start_end, out_result_with_job_info, \
    progress_notify_with_job_info
from exchange.commons.exchange_param_parse import ExchangeParamParse
from exchange.commons.fs_backup_win import BackupTask, BackupConfig
from exchange.commons.powershell import execute


def write_mailbox_to_sqlite_file(condition, job_info):
    """Query one page of mailboxes of a database and persist them to the sqlite metadata file.

    :param condition: dict with keys "database", "meta_hub_path", "page_size", "page_num".
    :param job_info: current job context; its pid keys the stored credentials.
    :return: number of mailbox records returned by the query (before system-mailbox filtering).
    :raises ExchangeInternalException: when the powershell query reports a non-zero return code.
    """
    database = condition["database"]
    meta_hub_path = condition["meta_hub_path"]
    page_size = int(condition.get("page_size", 0))
    need_skip_num = page_size * int(condition.get("page_num", 0))
    script = Commands.GET_MAILBOX_LIST_BY_DATABASE.format(database_name=database, first=page_size,
                                                          skip=need_skip_num)
    # Only the credential values are needed; the status flag returned first is unused.
    _, m_username = get_key_value(f'job_protectEnv_auth_authKey_{job_info.pid}')
    _, m_password = get_key_value(f'job_protectEnv_auth_authPwd_{job_info.pid}')
    result = execute(m_username, m_password, script)
    return_code = result.get("ReturnCode", "")
    if return_code != "0":
        log.error(f"Get Mailbox ReturnCode: {return_code}, ErrorMessage: {result.get('Output', '')}")
        raise ExchangeInternalException("get mailbox list failed!")
    result_obj = []
    if result.get("Output", []):
        result_obj = json.loads(result.get("Output"))
        # When only one record is returned, wrap it in a list for uniform handling.
        if not isinstance(result_obj, list):
            result_obj = [result_obj]
        # str.startswith accepts a tuple of prefixes; build it once outside the loop.
        system_prefixes = tuple(SYSTEM_MAILBOX_LIST)
        for mailbox in result_obj:
            # Skip built-in system mailboxes.
            if mailbox["Name"].startswith(system_prefixes):
                continue
            temp_info = SqliteInputParam(extend_info=mailbox["ExchangeGuid"],
                                         data_name=mailbox["PrimarySmtpAddress"]["Address"],
                                         data_type="Exchange-mailbox", data_parent_path=f"/{database}")
            ExchangeSqliteService.write_metadata_to_sqlite_file(meta_hub_path, temp_info)
    log.info(f"write mailbox size: {len(result_obj)} to sqlite, skip: {need_skip_num}")
    return len(result_obj)


def write_sqlite_file(meta_hub_path, database_name, job_info):
    """Page through all mailboxes of the given database and record them in the sqlite file."""
    log.info(f"start write sqlite file, database_name: {database_name}")
    page_size = 1000
    condition = {
        "database": database_name,
        "meta_hub_path": meta_hub_path,
        "page_num": 0,
        "page_size": page_size
    }
    fetched = write_mailbox_to_sqlite_file(condition, job_info)
    # A short page (fewer than page_size records) means the last page was reached.
    while fetched >= page_size:
        condition["page_num"] += 1
        fetched = write_mailbox_to_sqlite_file(condition, job_info)
    log.info("backup write sqlite file finished.")


# Common helper functions shared by Exchange backup jobs
def check_vss_snaphot(db_list, snap_info):
    """Run the VSS integrity check over each database's edb/log files on the snapshot disks.

    Returns (code, err, out) from the VSS tool invocation.
    """
    log.info(f"start check snapshot is consistent")
    log_file_prefix_list = []
    edb_path_list = []
    log_path_list = []
    for database_info in db_list:
        # Collect the log file prefix of each database.
        log_file_prefix_list.append(database_info["logFilePrefix"])
        extend_info = database_info["extendInfo"]
        edb_path = extend_info["edb_path"]
        log_path = extend_info["log_path"]
        # Map the original drive paths onto the snapshot's virtual disk paths.
        edb_snap_id = get_snap_id(snap_info, edb_path[0])
        log_snap_id = get_snap_id(snap_info, log_path[0])
        edb_path_list.append(ParamConstant.VIRTUAL_PATH + f"{edb_snap_id}" + edb_path.split(':')[1])
        log_path_list.append(ParamConstant.VIRTUAL_PATH + f"{log_snap_id}" + log_path.split(':')[1])
    # Assemble the CheckInterity command line for the VSS tool.
    prefix_arg = "\"" + ','.join(log_file_prefix_list) + "\""
    log_arg = "\"" + ','.join(log_path_list) + "\""
    edb_arg = "\"" + ','.join(edb_path_list) + "\""
    vss_exec_param = f"{ParamConstant.VSS_TOOL_PATH} CheckInterity {prefix_arg} {log_arg} {edb_arg}"
    code, out, err = get_vss_check_command_result(vss_exec_param)
    return code, err, out


def report_backup_table_failed(job_info: ExchangeJobInfo, node_ip, table_info, data_size):
    """Report a failed database backup (with its active/passive copy flag) via RPC."""
    copy_flag = ExchangeCopyType.ACTIVE if table_info["activeCopy"] else ExchangeCopyType.PASSIVE
    # Label: database backup failed on this node.
    log_detail = LogDetail(logInfo=ExchangeReportDBLabel.DATABASE_BACKUP_FAILED,
                           logInfoParam=[node_ip, table_info["name"], copy_flag],
                           logLevel=DBLogLevel.ERROR.value)
    detail = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id, progress=100,
                           logDetail=[log_detail], taskStatus=SubJobStatusEnum.FAILED.value,
                           dataSize=data_size)
    report_job_details_by_rpc(job_info.job_id, job_info.pid, job_info.sub_job_id,
                              detail.dict(by_alias=True))


class ExchangeBackupSubJob:
    def __init__(self, pid):
        """Initialize the sub job from the parameter file keyed by pid."""
        # pid: identifier of the current task request; also keys its parameter file.
        self._pid = pid
        # Parsed content of the task parameter file delivered by the agent framework.
        self._param = ParamFileUtil.parse_param_file(pid)
        # Progress percentage reported to the framework; starts at 5%.
        self._progress = 5

    @staticmethod
    def get_source_path(db):
        """Return the quoted edb path and log path of a database record."""
        quote = '"{}"'.format
        return quote(db["edb_path"][0]), quote(db["log_path"][0])

    @staticmethod
    def get_target_path(data_hub, guid, meta_hub):
        """Build the per-database target directories inside the data and meta repositories."""
        sub_dir = f"MailboxDatabase_{guid}"

        def _win_join(base):
            # Normalize to Windows-style separators expected by downstream tooling.
            return os.path.join(base, sub_dir).replace("/", "\\")

        return _win_join(data_hub), _win_join(meta_hub)

    @staticmethod
    def get_databases_under_server(m_username, m_password, job_info: ExchangeJobInfo):
        """List all mounted mailbox databases on this server via a powershell script.

        Returns (resource_list, drive_letter): database resource descriptors and
        the de-duplicated drive letters that need a snapshot.
        """
        # Build and run the powershell helper through cmd.
        ps1_path = ParamConstant.PS1_SCRIPTS_PATH + "GetAllMountedMailboxDatabases.ps1"
        cmds = f"{ParamConstant.PS_LOC} {ps1_path} \\\"{m_username}\\\" \\\"{m_password}\\\""
        ret, std_out, std_err = execute_cmd(cmds)
        # Only the last stdout line carries the JSON payload.
        result = json.loads('\n'.join(std_out.splitlines()[-1:]))
        if result.get("ReturnCode", "") != "0":
            raise ExchangeErrCodeException(log_detail=LogDetail(
                logDetail="",
                logDetailParam=[""],
                logLevel=DBLogLevel.ERROR))

        # Parse the result into resource descriptors for every database on the server.
        resource_list = []
        # Drive letters that must be snapshotted.
        drive_letter = []
        for db_info in result.get("Output") or []:
            resource_list.append({
                "type": "Database",
                "subType": "Exchange-database",
                "uuid": db_info["db_uuid"],
                "extendInfo": db_info,
                "log_path": db_info["log_path"],
                "edb_path": db_info["edb_path"],
                "db_name": db_info["db_name"],
            })
            drive_letter.append(db_info["edb_path"][0])
            drive_letter.append(db_info["log_path"][0])
        return resource_list, list(set(drive_letter))

    @staticmethod
    def get_data_backup_type(backup_type, input_meta_hub, input_data_hub, target_file_name) -> DataBackupType:
        """Decide whether the edb file can be backed up incrementally.

        Incremental backup applies only when the job is not a full backup and
        both the previously backed-up data file and its volumecopy meta file
        already exist; otherwise fall back to a full backup.
        """
        can_increment = (
            backup_type != JobBackupType.EXCHANGE_BACKUP_FULL
            and input_meta_hub is not None
            and input_data_hub is not None
            and os.path.exists(os.path.join(input_data_hub, target_file_name))
            and os.path.exists(os.path.join(input_meta_hub, target_file_name + ".volumecopy.meta.json"))
        )
        return DataBackupType.INCREMENT_BACKUP if can_increment else DataBackupType.FULL_BACKUP

    def upload_copy_infos(self, job_info: ExchangeJobInfo):
        """Aggregate per-node copy info, upload the copy, and save its time range.

        Reads all copy info files under <cache>/copyInfo, computes this backup's
        overall min/max log time, uploads the copy, records the copy-to-time
        mapping and reports completion.
        """
        sub_job_progress = f'exchange_{job_info.job_id}_' + job_info.sub_job_id
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, 5, "", sub_job_progress, job_info)
        self.create_report_backup_progress_thread(job_info)

        # DAG group backup records sqlite in this second sub task.
        protect_object_sub_type = self._param.get('job').get('protectObject').get('subType')
        if protect_object_sub_type == ExchangeType.EXCHANGE_GROUP:
            self.dag_write_sqlite(job_info)

        # Report the backup copy.
        # NOTE(review): data_hub/meta_hub look unused below — get_rep_hub_path may
        # have needed side effects; confirm before removing.
        data_hub, meta_hub = self.get_rep_hub_path()
        # For a log backup the meta repository path differs.
        backup_type = self._param.get("job").get("jobParam").get("backupType")

        cache_path = ExchangeParamParse.get_cache_path(self._param).replace("/", "\\")
        log.info(f"Get cache path {cache_path}")
        log.info(f"Get backup type {backup_type}.")
        copy_info_dir = os.path.join(cache_path, "copyInfo").replace(
            "/", "\\")
        log.info(f"Upload copy infos get copy info dir path {copy_info_dir}")
        copy_info_list = os.listdir(copy_info_dir)
        min_log_time = ParamConstant.EMPTY_DATA
        max_log_time = ParamConstant.EMPTY_DATA

        # Walk each node's backup info to find the restorable time axis of this backup's log files.
        for i in copy_info_list:
            copy_info_path = os.path.join(copy_info_dir, i).replace("/", "\\")
            if os.path.isfile(copy_info_path):
                # Read the per-node copy info file.
                copy_info_json = read_file(copy_info_path)
                log.info(f"Upload copy infos get copy info{copy_info_json}")
                # NOTE(review): copy_info mirrors copy_info_json and is only logged below.
                copy_info = copy_info_json
                log.info(f"Upload copy infos get copy info object{copy_info}")
                log_min_time = copy_info_json.get("log_min_time", 0)
                snapshot_time = copy_info_json.get("snapshot_time", 0)
                # Fold this node's range into the job-wide min/max.
                if min_log_time == ParamConstant.EMPTY_DATA:
                    min_log_time = log_min_time
                else:
                    min_log_time = log_min_time if log_min_time < min_log_time else min_log_time

                if max_log_time == ParamConstant.EMPTY_DATA:
                    max_log_time = snapshot_time
                else:
                    max_log_time = snapshot_time if snapshot_time > max_log_time else max_log_time
                log.info(f"Upload copy info compared min log time {min_log_time}, max log time {max_log_time}")
        job_info.min_log_time = min_log_time
        job_info.max_log_time = max_log_time
        job_info.snapshot_time = max_log_time
        self.upload_copy_info(backup_type, job_info)
        # Record the restorable time range of each copy.
        self.save_copy_to_time(job_info)
        self.report_backup_complete_progress(job_info, None)

    @log_start_end()
    def dag_write_sqlite(self, job_info):
        """Write the DAG databases recorded in the db_list files into sqlite."""
        log.info("start write exchange sqlite")
        meta_hub = ExchangeParamParse.get_meta_path(self._param)
        # Walk every matched db_list file under the meta repository.
        for file_path in find_db_list_files(meta_hub):
            try:
                # Read and parse the JSON file content.
                db_list_data = read_json_file(file_path)
                log.info(f"dag_write_sqlite db_list :{db_list_data}")
                self.write_dag_database_list_to_sqlite(db_list_data, job_info)
            except Exception as e:
                log.error(f"write exchange sqlite failed. {e}", exc_info=True)
                detail = LogDetail(logInfo=ExchangeReportDBLabel.SUB_JOB_FALIED,
                                   logInfoParam=[f"{job_info.sub_job_id}"],
                                   logDetail=BodyErr.SYSTEM_ERROR.value,
                                   logTimestamp=int(time.time()),
                                   logLevel=DBLogLevel.ERROR)
                raise ExchangeErrCodeException(log_detail=detail)

    def write_dag_database_list_to_sqlite(self, db_list, job_info):
        """Record every database of the DAG list into the sqlite metadata."""
        for entry in db_list:
            name = entry["name"]
            log.info(f"write dag database list to sqlite database name :{name}")
            self.write_sqlite(job_info, name)

    def backup(self, job_info: ExchangeJobInfo):
        """Run one backup sub job: take a snapshot if needed, then copy edb/log data.

        :param job_info: context (job id, sub job id, pid) of the current sub job.
        :return: True on success; False when copying a database's edb file failed
            (already reported via report_backup_table_failed).
        :raises ExchangeErrCodeException: when the VSS writer is unstable, no
            database exists (non-DAG protection), or snapshot creation fails.
        """
        sub_job_progress = f'exchange_{job_info.job_id}_' + job_info.sub_job_id
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, 5, "", sub_job_progress, job_info)
        self.create_report_backup_progress_thread(job_info)
        node_ip = self.get_node_ip()
        # Initialize the file system paths, /rep/{agent_id}
        data_hub, meta_hub = self.get_rep_hub_path()
        log.info(f"Get data hub {data_hub}. get meta hub {meta_hub}")
        # For a log backup, switch to the log repository's meta path.
        backup_type = self._param.get("job").get("jobParam").get("backupType")
        meta_hub = self.get_log_rep_meta_hub_path(backup_type, meta_hub)
        log.info(f"Get meta hub {meta_hub}.")
        # Default snapshot time keeps the log-backup sub job re-entrant; it is
        # refreshed below whenever a fresh snapshot is actually taken.
        snapshot_time = int(time.time())
        db_list_path = meta_hub + "\\db_list.json"
        # The presence of <job_id>.json marks an already-created snapshot.
        snap_info_path = meta_hub + f"\\{job_info.job_id}.json"
        if not os.path.isfile(snap_info_path):
            # No snapshot yet — create one.
            log.info("start create snapshot !!")
            # If the VSS writer is not stable, fail immediately.
            if not self.retry_check_vss_snap_shot(job_info):
                raise ExchangeErrCodeException(log_detail=LogDetail(logInfo=ExchangeReportDBLabel.SUB_JOB_FALIED,
                                                                    logInfoParam=[f"{job_info.sub_job_id}"],
                                                                    logDetail=BodyErr.
                                                                    EXCHANGE_WRITER_IS_NOT_STABLE.value,
                                                                    logTimestamp=int(time.time()),
                                                                    logLevel=DBLogLevel.ERROR))

            # Databases and drive letters on the current server.
            db_list, drive_letter, guids = self.get_databases_info(job_info)
            self.report_backup_table_progress(job_info, node_ip, str(len(db_list)))
            if len(db_list) < 1:
                self._progress = 100
                protect_object_sub_type = self._param.get('job').get('protectObject').get('subType')
                if protect_object_sub_type != ExchangeType.EXCHANGE_GROUP:
                    raise ExchangeErrCodeException(log_detail=LogDetail(logInfo=ReportDBLabel.BACKUP_SUB_FAILED,
                                                                        logInfoParam=[
                                                                            f"{job_info.sub_job_id}"],
                                                                        logDetail=BodyErr.ALL_DATABASE_NOT_EXIST.value,
                                                                        logTimestamp=int(time.time()),
                                                                        logLevel=DBLogLevel.ERROR))
                self.report_backup_complete_progress(job_info, 0)
                return True
            # Persist db_list so an interrupted job can resume.
            output_execution_result_ex(db_list_path, db_list)

            # Take the snapshot.
            code, err, out = self.do_snapshot(guids, drive_letter, meta_hub, job_info.job_id)
            if code != ExecCmdResult.SUCCESS:
                log.error(f"failed!! vss result :{out} ,err :{err}")
                raise ExchangeErrCodeException(log_detail=LogDetail(logInfo=ExchangeReportDBLabel.SUB_JOB_FALIED,
                                                                    logInfoParam=[
                                                                        f"{job_info.sub_job_id}"],
                                                                    logDetail=BodyErr.CREATE_SNAPSHOT_FILED.value,
                                                                    logTimestamp=int(time.time()),
                                                                    logLevel=DBLogLevel.ERROR))
            self.report_backup_create_snapshot_success(job_info, node_ip)
            log.info("Do snapshot success")
            snapshot_time = int(time.time())

        # Copy the data: parse snapInfo.json and the saved db_list.
        with open(snap_info_path, 'r') as f_read:
            snap_info = json.load(f_read)
        with open(db_list_path, 'r') as f_read:
            db_list = json.load(f_read)
        self.check_consistent(db_list, snap_info)
        database_name_dict = {"name_list": []}
        backup_data_total_size = 0
        min_log_time = ParamConstant.EMPTY_DATA
        max_log_time = ParamConstant.EMPTY_DATA
        for database_info in db_list:
            edb_path = database_info["extendInfo"]["edb_path"]
            log_path = database_info["extendInfo"]["log_path"]
            log.info(f"Get edb path {edb_path}")
            log.info(f"Get log path {log_path}")
            database_guid = database_info["uuid"]
            if backup_type == JobBackupType.EXCHANGE_BACKUP_LOG:
                database_data_total_size, min_create_time, max_create_time = self.do_log_backup(database_guid, log_path,
                                                                                                snap_info, database_info
                                                                                                )
                # Fixed log message: previously printed min_create_time for both bounds.
                log.info(f"Copy log completed size:{database_data_total_size}, "
                         f"min create time: {min_create_time}, max create time: {max_create_time}")
            else:
                # Full backup: initialize the data repository for this database.
                input_data_hub, input_meta_hub = get_target_path(data_hub, database_guid, meta_hub)
                init_cache_and_data_directory(input_data_hub, input_meta_hub)
                log.info(f"Get input_data_hub path {input_data_hub}")
                log.info(f"Get input_meta_hub path {input_meta_hub}")
                # Copy the edb file: resolve its path on the snapshot's virtual disk.
                edb_snap_id = get_snap_id(snap_info, edb_path[0])
                virtual_edb_path = ExchangeWindowsPath.UNC + ParamConstant.VIRTUAL_PATH + f"{edb_snap_id}" + \
                                   edb_path.split(':')[1]
                if not os.path.exists(virtual_edb_path):
                    log.error(f"virtual_edb_path not exist,virtual_edb_path is {virtual_edb_path}")
                    report_backup_table_failed(job_info, node_ip, database_info, 0)
                    return False
                input_data_hub, task = self.copy_exchange_edb_file(backup_type, input_data_hub, input_meta_hub,
                                                                   virtual_edb_path)
                if not task.do_backup():
                    log.error("edb file backup failed.")
                    report_backup_table_failed(job_info, node_ip, database_info, 0)
                    return False
                # Copy the xx.log / xx.chk files.
                log_copy_size, min_create_time, max_create_time, _ = \
                    copy_backup_data(input_data_hub, log_path, snap_info, True)
                log.info(f"log size:{log_copy_size}")
                edb_copy_size = task.statistics().bytes_written
                log.info(f"edb size:{edb_copy_size}")
                database_data_total_size = edb_copy_size + log_copy_size
                log.info(f"database total size:{database_data_total_size}")

            max_log_time, min_log_time = self.get_log_time(max_create_time, max_log_time, min_create_time, min_log_time)
            log.info(f"Compared min log time {min_log_time}, max log time {max_log_time}")
            self.write_exchange_sqlite(database_info, database_name_dict, job_info)
            self.report_backup_table_success(job_info, node_ip, database_info, int(database_data_total_size / 1024))
            backup_data_total_size = backup_data_total_size + database_data_total_size
            log.info(f"backup total size:{backup_data_total_size}")

        job_info.min_log_time = min_log_time
        job_info.max_log_time = max_log_time
        job_info.snapshot_time = snapshot_time

        cache_path = ExchangeParamParse.get_cache_path(self._param).replace("/", "\\")
        log.info(f"Get cache path {cache_path}")
        cur_copy_to_time_path = os.path.join(cache_path,
                                             "copyInfo",
                                             f"copy_to_time_{job_info.job_id}_{job_info.sub_job_id}").replace(
            "/", "\\")
        cur_copy_to_dir = os.path.join(cache_path,
                                       "copyInfo").replace(
            "/", "\\")
        if not os.path.exists(cur_copy_to_dir):
            log.info(f"Start to create dir {cur_copy_to_dir}")
            # Fixed: os.makedirs' second positional argument is `mode`, so the
            # previous call set mode=True (0o1) instead of exist_ok=True.
            os.makedirs(cur_copy_to_dir, exist_ok=True)
        sub_job_copy_info = {
            "job_id": job_info.job_id,
            "sub_job_id": job_info.sub_job_id,
            "log_min_time": min_log_time,
            "log_max_time": max_log_time,
            "snapshot_time": snapshot_time
        }
        output_execution_result_ex(cur_copy_to_time_path, sub_job_copy_info)
        log.info(f"Save job copy info {sub_job_copy_info} to {cur_copy_to_time_path} success")
        self.report_backup_complete_progress(job_info, int(backup_data_total_size / 1024))
        return True

    def get_log_time(self, max_create_time, max_log_time, min_create_time, min_log_time):
        """Fold one database's log time range into the job-wide min/max.

        EMPTY_DATA marks an uninitialized boundary, which is replaced by the
        first real value; afterwards the running min/max is kept.
        Returns (max_log_time, min_log_time).
        """
        if min_log_time == ParamConstant.EMPTY_DATA:
            log.info(f"Init min create time {min_create_time}")
            min_log_time = min_create_time
        else:
            log.info(f"Compare min create time {min_create_time}")
            min_log_time = min_create_time if min_create_time < min_log_time else min_log_time
        if max_log_time == ParamConstant.EMPTY_DATA:
            # Fixed copy-paste log message: this branch initializes the MAX time.
            log.info(f"Init max create time {max_create_time}")
            max_log_time = max_create_time
        else:
            log.info(f"Compare max create time {max_create_time}")
            max_log_time = max_create_time if max_create_time > max_log_time else max_log_time
        log.info(f"Get max log time {max_log_time}, min log time {min_log_time}, pid {self._pid}")
        return max_log_time, min_log_time

    def save_copy_to_time(self, job_info: ExchangeJobInfo):
        """Persist the restorable time range of each backup's .log files.

        The file (structure: [CopyToTimeModel1, CopyToTimeModel2, ...]) is
        created by a data backup and appended by later log backups and the next
        data backup; a failed task triggers a rollback.
        Use 1: a log backup looks up the time point the previous backup reached.
        Use 2: point-in-time restore finds the list of copies covering the
        target time point.
        """
        cache_parent, last_backup_exist_flag, last_full_copy_info = self.get_repo_path(job_info)
        last_copy_to_time_path = ""
        if not last_full_copy_info:
            log.warn(f"Last full copy info is not exist")
            last_backup_exist_flag = False
        else:
            last_full_copy_id = last_full_copy_info.get("id", "")
            log.info(f"Query copy to time last full copy id exist: {last_full_copy_id}")
            last_copy_to_time_path = os.path.join(cache_parent, f"copy_to_time_{last_full_copy_id}").replace("/", "\\")
            if not os.path.exists(last_copy_to_time_path):
                log.warn(f"Last full copy info is not exist")
                last_backup_exist_flag = False

        if self._param.get("job").get("jobParam").get("backupType") == JobBackupType.EXCHANGE_BACKUP_LOG:
            # Log backup: append this copy to the full-backup record;
            # start time = previous copy's end time, end time = this snapshot time.
            if last_backup_exist_flag:
                self.save_copy_to_time_in_log_task(job_info, last_copy_to_time_path)
            return

        cur_copy_to_time_infos = []
        if last_backup_exist_flag:
            log.info("Save copy info, have last full copy")
            copy_to_time_info = read_file(last_copy_to_time_path)
            last_copy_to_time_info = copy_to_time_info[len(copy_to_time_info) - 1]
            cur_copy_to_time_object = {
                "job_id": job_info.job_id,
                "log_start_time": last_copy_to_time_info.get("log_end_time", job_info.min_log_time),
                "log_end_time": job_info.snapshot_time,
                "job_type": self._param.get("job").get("jobParam").get("backupType")
            }
            if last_copy_to_time_info.get("job_type", "") == JobBackupType.EXCHANGE_BACKUP_LOG:
                # Previous backup was a log backup: start time = its end time,
                # end time = this snapshot time; no need to append the old records.
                log.info("Save copy info, last back is log no need to append last copy")
            else:
                # Previous backup was a data backup: keep its records and append.
                log.info("Save copy info, last back is data, need to append last copy")
                cur_copy_to_time_infos.extend(copy_to_time_info)

            cur_copy_to_time_infos.append(cur_copy_to_time_object)
        else:
            # No previous full backup: start time = earliest .log file of this
            # backup, end time = this backup's snapshot time.
            log.info("Save copy info, not have last full copy")
            cur_copy_to_time_object = {
                "job_id": job_info.job_id,
                "log_start_time": job_info.min_log_time,
                "log_end_time": job_info.snapshot_time,
                "job_type": self._param.get("job").get("jobParam").get("backupType")
            }
            cur_copy_to_time_infos.append(cur_copy_to_time_object)

        cur_copy_to_time_path = os.path.join(cache_parent,
                                             f"copy_to_time_{job_info.job_id}").replace("/", "\\")
        output_execution_result_ex(cur_copy_to_time_path, cur_copy_to_time_infos)
        log.info(f"Write copy to time info {cur_copy_to_time_infos} to {cur_copy_to_time_path} success")

    def get_repo_path(self, job_info):
        """Return the cache parent dir, a last-backup-exists flag, and the last full copy info."""
        cache_path = ExchangeParamParse.get_cache_path(self._param).replace("/", "\\")
        cache_parent = Path(cache_path).parent
        log.info(f"Get cache path {cache_path}, cache parent {cache_parent}")
        last_full_copy_info = get_last_copy_info(job_info, self._param, CopyType.DATA_COPY)
        log.info(f"Get last full copy info {last_full_copy_info}")
        # The flag starts optimistic; callers flip it when the copy info turns out missing.
        return cache_parent, True, last_full_copy_info

    def save_copy_to_time_in_log_task(self, job_info, last_copy_to_time_path):
        """Append this log backup's recoverable time range to the existing copy-to-time file."""
        log.warn(f"Cur backup type is log, no need to create copy to time")
        copy_to_time_info = read_file(last_copy_to_time_path)
        log.info(f"Read copy from time info {copy_to_time_info} from {last_copy_to_time_path} success")
        new_copy_to_time_info = list(copy_to_time_info)
        last_copy_to_time_info = copy_to_time_info[-1]
        log.info(f"Read last copy from time info {last_copy_to_time_info}")
        # start = previous copy's end time, end = this backup's snapshot time.
        new_copy_to_time_info.append({
            "job_id": job_info.job_id,
            "log_start_time": last_copy_to_time_info.get("log_end_time", job_info.min_log_time),
            "log_end_time": job_info.snapshot_time,
            "job_type": self._param.get("job").get("jobParam").get("backupType")
        })
        output_execution_result_ex(last_copy_to_time_path, new_copy_to_time_info)
        log.info(f"Write copy to time info {new_copy_to_time_info} to {last_copy_to_time_path} success")

    def check_consistent(self, db_list, snap_info):
        """When configured, verify snapshot consistency with the VSS tool; raise on failure."""
        is_consistent = self._param.get("job").get("protectObject").get("extendInfo").get("m_isConsistent")
        log.info(f"need to check snapshot is consistent: {is_consistent}")
        if is_consistent != "true":
            return
        code, err, out = check_vss_snaphot(db_list, snap_info)
        if code != ExecCmdResult.SUCCESS:
            log.error(f"check snapshot failed!! vss result :{out} ,err :{err}")
            raise ExchangeErrCodeException(log_detail=LogDetail(logInfo=ExchangeReportDBLabel.SUB_JOB_FALIED,
                                                                logDetail=BodyErr.CHECK_SNAPSHOT_FILED.value,
                                                                logTimestamp=int(time.time()),
                                                                logLevel=DBLogLevel.ERROR))
        log.info("check snapshot success!")

    def do_log_backup(self, database_guid, log_path, snap_info, database_info):
        """Copy this database's log/chk files into the log repository and record log times.

        Returns (copied size, earliest log create time, latest log create time).
        """
        log_hub = ExchangeParamParse.get_log_path(self._param)
        target_dir = os.path.join(log_hub, f"MailboxDatabase_{database_guid}").replace("/", "\\")
        init_directory(target_dir)
        unc_target = ExchangeWindowsPath.UNC + target_dir
        # Copy the xx.log / xx.chk files from the snapshot.
        log_copy_size, min_create_time, max_create_time, log_to_time = \
            copy_backup_data(unc_target, log_path, snap_info, True)
        # Persist the sorted log timestamps so restore can rebuild a timeline.
        log_time_info = SqliteInputParam(data_name=database_info.get("name"),
                                         data_type="LogTime",
                                         data_parent_path="/logtime",
                                         extend_info=json.dumps({"logtimes": sorted(log_to_time.values())}))
        ExchangeSqliteService.write_metadata_to_sqlite_file(log_hub, log_time_info)
        log.info(f"Write log times to sqlite file successfully.")
        return log_copy_size, min_create_time, max_create_time

    def copy_exchange_edb_file(self, backup_type, input_data_hub, input_meta_hub, virtual_edb_path):
        """Prepare the BackupTask that copies the edb file from the snapshot virtual disk."""
        unc_data_hub = ExchangeWindowsPath.UNC + input_data_hub
        unc_meta_hub = ExchangeWindowsPath.UNC + input_meta_hub
        edb_file_name = os.path.basename(virtual_edb_path)
        # Full or incremental, depending on what the previous copy left behind.
        data_backup_type = self.get_data_backup_type(backup_type, unc_meta_hub, unc_data_hub, edb_file_name)
        log.info(f"data backup type: {data_backup_type.name}")
        config = BackupConfig(
            backup_type=data_backup_type,
            source_file_full_path=virtual_edb_path.encode(),
            target_file_name=edb_file_name.encode(),
            prev_copy_meta_dir_path=unc_meta_hub.encode(),
            output_copy_meta_dir_path=unc_meta_hub.encode(),
            output_copy_data_dir_path=unc_data_hub.encode())
        return unc_data_hub, BackupTask(config)

    def write_exchange_sqlite(self, database_info, database_name_dict, job_info):
        """Write the per-database <name>.meta file and record metadata to sqlite."""
        backup_type = self._param.get("job").get("jobParam").get("backupType")
        # Log backups keep the meta file in the log repository, others in the meta repository.
        raw_path = (ExchangeParamParse.get_log_path(self._param)
                    if backup_type == JobBackupType.EXCHANGE_BACKUP_LOG
                    else ExchangeParamParse.get_meta_path(self._param))
        des_path = raw_path.replace("/", "\\")
        db_name = database_info["name"]
        database_name_dict["name_list"].append(db_name)
        write_file(f'{des_path}\\{db_name}.meta', json.dumps(database_info))
        # Full backups of non-DAG objects record the sqlite entry within the sub job.
        protect_object_sub_type = self._param.get('job').get('protectObject').get('subType')
        if protect_object_sub_type != ExchangeType.EXCHANGE_GROUP:
            self.write_sqlite(job_info, db_name)
        self.write_mailbox_to_sqlite(job_info, protect_object_sub_type)

    def upload_copy_info(self, backup_type, job_info):
        """Dispatch copy-info reporting according to the backup type."""
        if backup_type != JobBackupType.EXCHANGE_BACKUP_LOG:
            # Report the data copy information.
            self.upload_copy(job_info)
            return
        # Report the log copy information.
        self.upload_log_copy(job_info, ExchangeParamParse.get_log_path(self._param))

    def write_mailbox_to_sqlite(self, job_info, protect_object_sub_type):
        """Record mailbox data to sqlite for database-level backups (best effort)."""
        if protect_object_sub_type != ExchangeType.EXCHANGE_DATABASE:
            return
        try:
            sqlite_meta_path = ExchangeParamParse.get_sqlite_meta_path(self._param)
            protected_db_name = self._param.get("job").get("protectObject").get("name")
            write_sqlite_file(sqlite_meta_path, protected_db_name, job_info)
        except Exception as e:
            # Deliberately swallowed: a failed mailbox record must not fail the backup.
            log.error(f"write to sqlite failed, err: {e}", exc_info=True)

    def do_snapshot(self, guids, drive_letter, meta_hub, job_id):
        """Create a VSS snapshot for the given databases, retrying on failure.

        :param guids: list of database GUIDs to include in the snapshot.
        :param drive_letter: list of drive letters holding the databases.
        :param meta_hub: meta repository path passed to the VSS tool.
        :param job_id: current backup job id, passed to the VSS tool.
        :return: tuple (code, out, err) from the last VSS tool invocation.
        """
        # VSS writer responsible for Exchange databases.
        input_writer_name = "\"" + "Microsoft Exchange Writer" + "\""
        # Meta path inside the snapshot repository.
        snap_meta_hub = ExchangeWindowsPath.VSS_PATH_Prefix + ExchangeWindowsPath.UNC + meta_hub + "\""

        # Assemble the snapshot command line for the VSS tool.
        vss_exec_param = "{} {} {} {} {} {} {}".format(ParamConstant.VSS_TOOL_PATH,
                                                       ExchangeParamParse.get_job_type(self._param),
                                                       input_writer_name,
                                                       snap_meta_hub, "\"" + ','.join(drive_letter) + "\"",
                                                       "\"" + ','.join(guids) + "\"", job_id)

        # Pre-initialise so the return below is defined even if RETRY_TIMES is 0.
        code, out, err = "", "", ""
        for _ in range(AgentConstant.RETRY_TIMES):
            code, out, err = get_command_result(vss_exec_param)
            if code == ExecCmdResult.SUCCESS:
                return code, out, err
            log.warn(f"Failed create Microsoft Exchange snapshot,\
                 wait for {AgentConstant.RETRY_WAIT_SECONDS} seconds")
            time.sleep(AgentConstant.RETRY_WAIT_SECONDS)
        # Bug fix: the failure path previously returned (code, err, out), swapping
        # out/err relative to the success path's (code, out, err).
        return code, out, err

    def get_log_rep_meta_hub_path(self, backup_type, meta_hub):
        """Return the meta hub path, redirected into the log repository for log backups."""
        if backup_type != JobBackupType.EXCHANGE_BACKUP_LOG:
            return meta_hub
        # Log backup: each agent writes under its own sub-directory of the log repository.
        log_hub = ExchangeParamParse.get_log_path(self._param)
        agent_id = get_host_sn().split("-")[0]
        log_meta_hub = log_hub.replace("/", "\\") + f"\\{agent_id}"
        init_directory(log_meta_hub)
        return log_meta_hub

    def get_rep_hub_path(self):
        """Resolve and initialise the data and meta repository paths.

        :return: tuple (data_hub, meta_hub) as returned by init_hub_path.
        """
        raw_data_hub = ExchangeParamParse.get_data_path(self._param)
        raw_meta_hub = ExchangeParamParse.get_meta_path(self._param)
        # init_hub_path prepares both directories and returns the usable paths.
        return init_hub_path(raw_data_hub, raw_meta_hub)

    def get_node_ip(self):
        """Return the endpoint of the protect-env node matching this host's SN, or ""."""
        local_node_id = get_host_sn()
        env_nodes = self._param.get("job").get("protectEnv").get("nodes")
        # First node whose id matches this host wins; empty string when none matches.
        return next((node['endpoint'] for node in env_nodes if node['id'] == local_node_id), "")

    def get_databases_info(self, job_info):
        """Collect the databases (paths, drive letters, guids) this node must back up.

        :param job_info: current job context; pid selects the credential keys.
        :return: tuple (db_list, drive_letter, guids).
        """
        flags, m_username = get_key_value(f'job_protectEnv_auth_authKey_{job_info.pid}')
        flags, m_password = get_key_value(f'job_protectEnv_auth_authPwd_{job_info.pid}')
        db_list, drive_letter, guids = get_databases(m_username, m_password)
        # Resolve this host's server name from the protect-env node list.
        node_id = get_host_sn()
        nodes = self._param.get("job").get("protectEnv").get("nodes")
        server_name = next((node["name"] for node in nodes if node['id'] == node_id), "")
        protect_object_sub_type = self._param.get('job').get('protectObject').get('subType')
        log.info(f"backup subtype :{protect_object_sub_type}")
        # Single-database backup: rebuild the list from the delivered parameters.
        if protect_object_sub_type == ExchangeType.EXCHANGE_DATABASE:
            db_name = self._param.get("job").get("protectObject").get("name")
            copy_list = get_local_mailbox_database_copy_status(m_username, m_password, db_name, server_name)
            db_list, drive_letter, guids = get_database_path(m_username, m_password, copy_list)
        # DAG backup configured for passive copies: query the passive databases instead.
        dag_backup = self._param.get("job").get("protectObject").get("extendInfo").get("dag_backup")
        log.info(f"backup active or passive :{dag_backup}")
        if protect_object_sub_type == ExchangeType.EXCHANGE_GROUP and dag_backup == "passive":
            log.info(f"get passive copy info start!")
            db_list, drive_letter, guids = get_passive_databases(m_username, m_password, server_name)
        return db_list, drive_letter, guids

    @log_start_end()
    @out_result_with_job_info()
    @progress_notify_with_job_info(ExchangeReportDBLabel.SUB_JOB_SUCCESS, ExchangeReportDBLabel.SUB_JOB_FALIED)
    def do_sub_job_task(self, job_info: ExchangeJobInfo):
        """Dispatch the current sub job by name, reporting failures via RPC before re-raising."""
        sub_job_name = self._param.get("subJob", {}).get("jobName", "")
        log.info(f"Get sub job name {sub_job_name}")

        # Map each supported sub job name to its handler; unknown names raise below.
        handlers = {
            ExchangeBackupSubJobName.SUB_EXEC: self.backup,
            ExchangeBackupSubJobName.SUB_UPLOAD: self.upload_copy_infos,
        }
        try:
            handler = handlers.get(sub_job_name)
            if handler is None:
                raise ExchangeErrCodeException(log_detail=LogDetail(logInfo=ExchangeReportDBLabel.SUB_JOB_FALIED,
                                                                    logInfoParam=[f"{job_info.sub_job_id}"],
                                                                    logDetail=BodyErr.EXCHANGE_AUTH_INFO_ERROR.value,
                                                                    logTimestamp=int(time.time()),
                                                                    logLevel=DBLogLevel.ERROR))
            handler(job_info)
        except ExchangeErrCodeException as error:
            # Report the failure detail through RPC, then re-raise for the decorators.
            log.error(error, exc_info=True)
            failure_detail = error.get_log_detail()
            failed_report = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id,
                                          progress=self._progress, logDetail=[failure_detail],
                                          taskStatus=SubJobStatusEnum.FAILED.value,
                                          dataSize=0).dict(by_alias=True)
            report_job_details_by_rpc(job_info.job_id, job_info.pid, job_info.sub_job_id, failed_report)
            raise error
        return True

    @log_start_end()
    def upload_copy(self, job_info: ExchangeJobInfo):
        """Report the data copy information to the framework via RPC.

        :param job_info: current job context (supplies job id and copy id).
        :return: True when the RPC report succeeds, False otherwise.
        """
        version = get_exchange_version()
        log.info(f"this exchange copy version is :{version}")
        copy_info = {
            "extendInfo": {
                "backupTime": int(time.time()),
                "beginTime": None,
                "endTime": None,
                "beginSCN": None,
                "endSCN": None,
                "copyId": job_info.copy_id,
                "version": version
            }
        }

        copy_info["repositories"] = ExchangeParamParse.get_data_reps(self._param)
        copy_info = {"copy": copy_info, "jobId": job_info.job_id}

        log.info(f"Get copy info {copy_info}")
        try:
            invoke_rpc_tool_interface(job_info.job_id, RpcParamKey.REPORT_COPY_INFO, copy_info)
        except Exception as err_info:
            # Bug fix: the message previously contained an unfilled "%s" placeholder
            # and dropped err_info; now formatted like the sibling in upload_log_copy.
            log.error(f"An exception occurred when invoking the tool for reporting copy information: {err_info}")
            return False
        return True

    def build_log_backup_copy_info(self, job_info, input_log_hub):
        """
        Assemble the reporting information for a log copy.

        :param job_info: current job context; supplies snapshot_time as the copy end time.
        :param input_log_hub: log repository path (not read here; kept for the caller's signature).
        :return: tuple (begin_time, end_time, last_copy_id, version).
        """
        log.info("Start to build_log_backup_copy_info")
        last_copy_info = get_last_copy_info(job_info, self._param, CopyType.DATA_COPY)
        log.info(f"Get last copy info {last_copy_info}")
        cache_path = ExchangeParamParse.get_cache_path(self._param).replace("/", "\\")
        cache_parent = Path(cache_path).parent
        log.info(f"Get cache path {cache_path}, cache parent {cache_parent}")

        last_copy_id = last_copy_info.get("id", "")
        log.info(f"Get last copy id {last_copy_id}")
        copy_to_time_path = os.path.join(cache_parent, f"copy_to_time_{last_copy_id}").replace("/", "\\")
        # NOTE(review): read_file apparently returns a list of dicts here — confirm its contract.
        copy_to_time_infos = read_file(copy_to_time_path)
        log.info(f"Read copy to time info {copy_to_time_infos}")
        # Reverse so the loop below encounters the most recent entries first.
        copy_to_time_infos.reverse()
        begin_time = ParamConstant.EMPTY_DATA

        # Sequence full1-log1-restore-full2-log2: the logs between log1 and full2 are not
        # continuous. is_recovered records whether a restore has been executed in between.
        protect_object_sub_type = self._param.get('job').get('protectObject').get('subType')
        file_name = "is_recovered"
        if protect_object_sub_type == ExchangeType.EXCHANGE_DATABASE:
            file_name = "database_is_recovered"
        file_path = os.path.join(ParamConstant.PARAM_FILE_PATH, file_name)
        if os.path.exists(file_path):
            is_recovered = read_file(file_path).get(file_name, False)
            # Reset the flag so the next log backup is not treated as post-restore again.
            output_execution_result_ex(file_path, {file_name: False})
            if is_recovered:
                # After the reversal, [-1] is the file's first record — presumably the
                # oldest one; TODO confirm the on-disk ordering of copy_to_time entries.
                copy_to_time_info = copy_to_time_infos[-1]
                begin_time = copy_to_time_info.get("log_end_time", 0)
                log.info(f"Get last data copy info {copy_to_time_info}, begin_time {begin_time}")

        # Begin time: end time of the most recent previous log backup, when one exists.
        for copy_to_time_info in copy_to_time_infos:
            copy_to_time = copy_to_time_info
            job_type = copy_to_time.get("job_type", "")
            if job_type == JobBackupType.EXCHANGE_BACKUP_LOG:
                begin_time = copy_to_time.get("log_end_time", 0)
                log.info(f"Build log copy, Get last log copy info {copy_to_time_info}, begin_time {begin_time}")
                break

        # No previous log backup found: fall back to the first record's log start time.
        if begin_time == ParamConstant.EMPTY_DATA:
            copy_to_time_info = copy_to_time_infos[len(copy_to_time_infos) - 1]
            begin_time = copy_to_time_info.get("log_start_time", 0)
            log.info(f"Build log copy, Get last data copy info {copy_to_time_info}, begin_time {begin_time}")

        # End time: the snapshot creation time of this job.
        end_time = job_info.snapshot_time

        # Get the Exchange version to embed in the copy record.
        version = get_exchange_version()
        log.info(f"this exchange copy version is :{version}")
        # Begin time is the first file's start time; when that is earlier than the previous
        # log backup's reported end time, the previous end time is used. End time is snapshot time.
        return begin_time, end_time, last_copy_id, version

    @log_start_end()
    def upload_log_copy(self, job_info: ExchangeJobInfo, input_log_hub):
        """Assemble and report the log copy information via RPC.

        :param job_info: current job context (job id, snapshot time).
        :param input_log_hub: log repository directory recorded as logDirName.
        :return: True when the RPC report succeeds, False otherwise.
        """
        begin_time, end_time, last_copy_id, version = self.build_log_backup_copy_info(job_info, input_log_hub)
        extend_info = {
            "backupTime": begin_time,
            "beginTime": begin_time,
            "endTime": end_time,
            "beginSCN": None,
            "endSCN": None,
            "backupset_dir": '',
            "backupSetName": "",
            "backupType": "",
            "baseBackupSetName": "",
            "dbName": "",
            "groupId": '',
            "tabal_space_info": [],
            "copyId": job_info.job_id,
            "associatedCopies": [last_copy_id],
            "logDirName": input_log_hub,
            "version": version
        }
        copy_info = {"copy": {"extendInfo": extend_info}, "jobId": job_info.job_id}
        log.info(f"Get copy info {copy_info}")
        try:
            invoke_rpc_tool_interface(job_info.job_id, RpcParamKey.REPORT_COPY_INFO, copy_info)
        except Exception as err_info:
            log.error(f"An exception occurred when invoking the tool for reporting copy information: {err_info}")
            return False
        return True

    @log_start_end()
    def write_sqlite(self, job_info: ExchangeJobInfo, db_name):
        """Record a mailbox database entry in the sqlite metadata file."""
        sqlite_dir = ExchangeParamParse.get_sqlite_meta_path(self._param)
        db_record = SqliteInputParam(data_name=db_name, data_type="MailBoxDatabase", data_parent_path="/")
        ExchangeSqliteService.write_metadata_to_sqlite_file(sqlite_dir, db_record)
        return True

    @log_start_end()
    def check_vss_snap_shot(self, job_info: ExchangeJobInfo):
        """Return True when the Exchange VSS writer reports a stable ("True") state."""
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}IsVSSWriterStable.ps1"
        ret, std_out, std_err = execute_cmd(cmds)
        # The PowerShell script prints its verdict on the last output line.
        output_lines = std_out.splitlines()
        last_line = output_lines[-1] if output_lines else ""
        if last_line != "True":
            log.info("Vss snap shot closed")
            return False
        log.info("Vss snap shot open")
        return True

    def retry_check_vss_snap_shot(self, job_info: ExchangeJobInfo):
        """Poll the Exchange VSS writer state, retrying transient failures.

        Querying the Microsoft Exchange Writer often hits retryable errors; retry a
        fixed number of times and report the writer as unstable if it never recovers.
        """
        for _ in range(AgentConstant.RETRY_TIMES):
            if self.check_vss_snap_shot(job_info):
                return True
            log.warn(f"Failed getting Microsoft Exchange Writer status,\
                 wait for {AgentConstant.RETRY_WAIT_SECONDS} seconds")
            time.sleep(AgentConstant.RETRY_WAIT_SECONDS)
        return False

    def write_progress_to_file(self, status, progress, log_lab, progress_type, job_info: ExchangeJobInfo):
        """Persist the current sub-job progress into a cache file named after progress_type."""
        details = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id, progress=int(progress),
                                logDetail=list(), taskStatus=status)
        cache_dir = ExchangeParamParse.get_cache_path(self._param)
        output_execution_result_ex(os.path.join(cache_dir, progress_type), details.dict(by_alias=True))

    def get_job_info(self):
        """Build an ExchangeJobInfo from the parsed job parameters."""
        parse = ExchangeParamParse
        return ExchangeJobInfo(pid=self._pid,
                               job_id=parse.get_job_id(self._param),
                               sub_job_id=parse.get_sub_job_id(self._param),
                               copy_id=parse.get_copy_id(self._param),
                               backup_type=parse.get_type(self._param),
                               meta_path=parse.get_meta_path(self._param),
                               data_reps=parse.get_data_reps(self._param),
                               cache_path=parse.get_cache_path(self._param),
                               backup_result=parse.get_backup_result(self._param))

    def create_report_backup_progress_thread(self, job_info: ExchangeJobInfo):
        """
        Start a daemon background thread that periodically reports backup progress.

        :param job_info: job context forwarded to report_backup_progress.
        """
        progress_thread = Thread(target=self.report_backup_progress, args=(job_info,))
        # Fix: Thread.setDaemon() is deprecated since Python 3.10; set the attribute instead.
        progress_thread.daemon = True
        progress_thread.start()

    def report_backup_progress(self, job_info: ExchangeJobInfo):
        """Report RUNNING progress via RPC every 30 seconds until progress reaches 100."""
        while self._progress < 100:
            # Consistency fix: pass the enum's value like every other report in this class.
            output = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id,
                                   taskStatus=SubJobStatusEnum.RUNNING.value,
                                   progress=self._progress).dict(by_alias=True)
            report_job_details_by_rpc(job_info.job_id, job_info.pid, job_info.sub_job_id, output)
            time.sleep(30)

    def report_backup_create_snapshot_success(self, job_info: ExchangeJobInfo, node_ip):
        """Report via RPC that the snapshot was created successfully on node_ip."""
        snapshot_detail = LogDetail(logInfo=ExchangeReportDBLabel.DATABASE_BACKUP_SNAPSHOT_SUCCESS,
                                    logInfoParam=[node_ip],
                                    logLevel=DBLogLevel.INFO.value)
        report_body = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id,
                                    progress=self._progress, logDetail=[snapshot_detail],
                                    taskStatus=SubJobStatusEnum.RUNNING.value,
                                    dataSize=0).dict(by_alias=True)
        report_job_details_by_rpc(job_info.job_id, job_info.pid, job_info.sub_job_id, report_body)

    def report_backup_table_success(self, job_info: ExchangeJobInfo, node_ip, table_info, data_size):
        """Report via RPC that one database copy was backed up successfully."""
        # Label the copy as active or passive for the report message.
        copy_flag = ExchangeCopyType.ACTIVE if table_info["activeCopy"] else ExchangeCopyType.PASSIVE
        success_detail = LogDetail(logInfo=ExchangeReportDBLabel.DATABASE_BACKUP_SUCCESS,
                                   logInfoParam=[node_ip, table_info["name"], copy_flag],
                                   logLevel=DBLogLevel.INFO.value)
        report_body = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id,
                                    progress=self._progress, logDetail=[success_detail],
                                    taskStatus=SubJobStatusEnum.RUNNING.value,
                                    dataSize=data_size).dict(by_alias=True)
        report_job_details_by_rpc(job_info.job_id, job_info.pid, job_info.sub_job_id, report_body)

    def report_backup_table_progress(self, job_info: ExchangeJobInfo, node_ip, length):
        """Report via RPC the number of databases pending backup on node_ip."""
        pending_detail = LogDetail(logInfo=ExchangeReportDBLabel.DATABASE_BACKUP_NUMBER,
                                   logInfoParam=[node_ip, length],
                                   logLevel=DBLogLevel.INFO.value)
        report_body = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id,
                                    progress=self._progress, logDetail=[pending_detail],
                                    taskStatus=SubJobStatusEnum.RUNNING.value,
                                    dataSize=0).dict(by_alias=True)
        report_job_details_by_rpc(job_info.job_id, job_info.pid, job_info.sub_job_id, report_body)

    def report_backup_complete_progress(self, job_info: ExchangeJobInfo, data_size):
        """Report via RPC that the backup sub job completed successfully."""
        complete_detail = LogDetail(logInfo=ExchangeReportDBLabel.SUB_JOB_SUCCESS,
                                    logInfoParam=[job_info.sub_job_id],
                                    logLevel=DBLogLevel.INFO.value)
        report_body = SubJobDetails(taskId=job_info.job_id, subTaskId=job_info.sub_job_id,
                                    progress=self._progress, logDetail=[complete_detail],
                                    taskStatus=SubJobStatusEnum.COMPLETED.value,
                                    dataSize=data_size).dict(by_alias=True)
        report_job_details_by_rpc(job_info.job_id, job_info.pid, job_info.sub_job_id, report_body)