#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import sys
import shutil
import json
import locale
import subprocess
import shlex
import stat
import time

from exchange import log
from exchange.commons.common import read_temp_file, check_del_dir_or_file, output_action_result, \
    output_result_file, get_key_value
from exchange.commons.const import ExchangeCode, ParamConstant, BodyErr, ExecCmdResult, JobBackupType, LastCopyType
from common.common import execute_cmd, exter_attack, invoke_rpc_tool_interface
from common.const import SysData, SubJobStatusEnum, BackupTypeEnum, RpcParamKey, RepositoryDataTypeEnum
from common.cleaner import clear
from common.common_models import SubJobModel
from exchange.backup.exchange_backup_sqlite import ExchangeSqliteService, SqliteInputParam
from exchange.backup.exchange_backup_base import ExchangeBackupBase

# Registry location of the Exchange Information Store service parameters,
# used when checking whether VSS snapshots are enabled.
# NOTE(review): the VSS_REG_* / VSS_ON constants are not referenced in this
# module — presumably consumed elsewhere; confirm before removing.
VSS_REG_PARENT = "HKEY_LOCAL_MACHINE"
VSS_REG_PATH = "SYSTEM\\CurrentControlSet\\Services\\MSExchangeIS\\"
VSS_REG_PARAM = "ParametersSystem"
VSS_ON = 1
# Deployment topology strings compared against CheckNodeType.ps1 output.
DAG_NODE = "DAG"
SINGLE_NODE = "Single"
# Backup locality flags used by backup_type_is_active().
ACTIVE_BACKUP = 0
PASSIVE_BACKUP = 1
# Database copy status string reported by Exchange for a mounted database.
MOUNTED_STATUS = "Mounted"


def get_command_result(command):
    """Run *command* without a shell and return (returncode_str, stdout, stderr).

    The command string is tokenized with shlex and executed shell-less; a
    pipe character is rejected outright because it is meaningless without a
    shell and hints at command injection.

    :param command: full command line to execute
    :return: tuple of (str return code, decoded stdout, decoded stderr)
    :raises Exception: when the command contains a ``|`` character
    """
    # getdefaultlocale() may yield (None, None); fall back to UTF-8 so the
    # process always runs in text mode.
    encoding = locale.getdefaultlocale()[1] or "utf-8"
    if "|" in command:
        raise Exception("Special characters exist.")

    process = subprocess.Popen(shlex.split(command), stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, encoding=encoding, errors="ignore")
    # communicate() drains stdout/stderr while waiting. The original
    # wait()-then-read() sequence can deadlock once the child fills a pipe
    # buffer, because nothing consumes the pipes until the child exits.
    out, err = process.communicate()
    return str(process.returncode), out, err


class ExchangeBackupComm(ExchangeBackupBase):
    """Driver for the Exchange database backup job lifecycle.

    Each public method implements one agent-framework command (permission
    check, prerequisite, sub-job generation, backup, post-job, progress).
    """

    # Class-level defaults shared by all instances.
    m_func_type = ""
    # Template action-result payload; individual methods report their own
    # results via output_action_result.
    m_check_output = {'code': ExchangeCode.FAILED.value, 'bodyErr': 0, 'message': ''}

    def __init__(self, pid, job_id, sub_job_id):
        """Load the job parameter file for *pid* and initialise job state.

        The param file at PARAM_FILE_PATH/param{pid} is read once and then
        deleted; its JSON content drives all later methods.
        """
        log.info("init start!")
        self._exchange_object = None
        self._database_type = ""
        self.pid = pid
        self._job_id = job_id
        self._sub_job_id = sub_job_id
        # Credentials are filled in by do_work() after construction.
        self.m_username = ""
        self.m_password = ""
        self.writer_name = "Microsoft Exchange Writer"
        self.guid = ""
        # Repository roots, resolved later by init_data_meta_log_path().
        self.data_hub = ""
        self.log_hub = ""
        self.meta_hub = ""
        self.m_progress = 50
        self.m_progress_status = SubJobStatusEnum.RUNNING.value
        self.job_type = ""
        self._copy_id = ""

        file_path = os.path.join(ParamConstant.PARAM_FILE_PATH, f"param{self.pid}")
        json_param = read_temp_file(file_path)
        # The param file is single-use: remove it after reading.
        check_del_dir_or_file(file_path)
        self._json_param = json_param

        super().__init__(self.pid, self._job_id, self._sub_job_id, self._json_param)

    # Backup authorization check.
    @exter_attack
    def allow_backup_in_local_node(self):
        """Verify the protected database exists locally and is mounted/healthy.

        Reports the action result for the framework and returns True when
        both checks pass.
        """
        log.info("allow_backup_in_local_node start!")
        db_name = self._json_param.get("job").get("protectObject").get("name")

        # Both checks fail with the same error report, so run them in order.
        for check in (self.check_exchange_db_exist, self.check_db_load_status):
            if not check(db_name):
                log.error("allow_backup_in_local_node FAILED!")
                output_action_result(self.pid, ExchangeCode.FAILED.value,
                                     BodyErr.ERR_PLUGIN_CANNOT_BACKUP.value, "")
                return False

        log.info("allow_backup_in_local_node SUCCESS!")
        output_action_result(self.pid, ExchangeCode.SUCCESS.value, 0, "")
        return True

    @exter_attack
    def check_backup_job_type(self):
        """Check whether the requested backup type can run for this database.

        Resolves repository paths and the database GUID, then verifies a
        previous copy exists (otherwise a log backup must become full).
        """
        log.info("check_backup_job_type!")
        db_name = self._json_param.get("job").get("protectObject").get("name")

        self.init_data_meta_log_path()

        if not self.get_db_guid(db_name):
            log.error("backup_prerequisite FAILED! get_db_guid!")
            output_action_result(self.pid, ExchangeCode.FAILED.value,
                                 BodyErr.ERR_PLUGIN_CANNOT_BACKUP.value, "")
            return False

        if not self.check_can_backup_log():
            log.error("backup_prerequisite FAILED! check_can_backup_inc!")
            # ERR_INC_TO_FULL tells the framework to convert to a full backup.
            output_action_result(self.pid, ExchangeCode.FAILED.value,
                                 BodyErr.ERR_INC_TO_FULL.value, "")
            return False

        output_action_result(self.pid, ExchangeCode.SUCCESS.value, 0, "")
        log.info("check_backup_job_type SUCCESSED")
        return True

    def init_data_meta_log_path(self):
        """Resolve data/meta/log repository roots from the job parameters.

        Repository index 1 is the data hub, index 0 the meta hub; the log
        hub is found by repository type.
        """
        repositories = self._json_param.get("job").get("repositories")
        log.info("Reading Data Hub")
        data_repo = repositories[1]
        log.info("Reading Meta Hub")
        meta_repo = repositories[0]
        log.info("Reading Log Hub")
        log_volume, log_remote = self.get_log_volume_and_path(self._json_param)
        self.data_hub = data_repo.get("path")[0] + data_repo.get("remotePath")
        self.meta_hub = meta_repo.get("path")[0] + meta_repo.get("remotePath")
        self.log_hub = log_volume + log_remote

    def get_log_volume_and_path(self, param: dict):
        """Return (volume, remote path) of the log repository in *param*.

        Scans every repository entry; the last LOG_REPOSITORY match wins.
        Empty strings are returned when no log repository is present.
        """
        volume, remote = '', ''
        for repo in param.get("job", {}).get("repositories", [{}]):
            if repo.get("repositoryType", -1) == RepositoryDataTypeEnum.LOG_REPOSITORY:
                volume = repo.get("path", [""])[0]
                remote = repo.get("remotePath", "")
        return volume, remote

    # Pre-requisite task.
    @exter_attack
    def backup_prerequisite(self):
        """Run every pre-backup check and create the target directories.

        Checks: database GUID lookup, database mount status, VSS writer
        stability, target directory creation. All failures report
        ERR_PLUGIN_CANNOT_BACKUP; success also writes 100% progress.
        """
        log.info("backup_prerequisite start!")
        self.m_progress = 50
        self.m_progress_status = SubJobStatusEnum.RUNNING.value
        db_name = self._json_param.get("job").get("protectObject").get("name")

        self.init_data_meta_log_path()

        # Each entry: (check callable, failure log message). Every failure
        # reports the same body error, so the four identical fail-and-report
        # branches collapse into one loop.
        checks = (
            (lambda: self.get_db_guid(db_name),
             "backup_prerequisite FAILED! get_db_guid!"),
            (lambda: self.check_db_load_status(db_name),
             "backup_prerequisite FAILED! check_db_load_status!"),
            (self.check_vss_snap_shot,
             "backup_prerequisite FAILED! check_vss_snap_shot"),
            (lambda: self.init_cache_and_data_directory(self.data_hub, self.meta_hub),
             "backup_prerequisite FAILED! init_cache_and_data_directory"),
        )
        for check, fail_msg in checks:
            if not check():
                log.error(fail_msg)
                output_action_result(self.pid, ExchangeCode.FAILED.value,
                                     BodyErr.ERR_PLUGIN_CANNOT_BACKUP.value, "")
                return False

        output_action_result(self.pid, ExchangeCode.SUCCESS.value, 0, "")
        log.info("FINISH output_action_result!!!")
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, 100, "", "common")
        log.info("backup_prerequisite SUCCESS!")
        return True

    # Generate sub jobs.
    @exter_attack
    def backup_gen_sub_job(self):
        """Generate the backup sub job bound to the node hosting the database.

        Both Single and DAG deployments currently schedule one
        "ExchangeFullBackup" sub job on the database's server node.
        """
        log.info("backup_gen_sub_job start!")
        node_name = self._json_param.get("job").get("protectObject").get("extendInfo").get("server_name")
        # NOTE(review): a dead read of extendInfo["next_backup_type"] was
        # removed here; the field was pending confirmation with PM and its
        # value was never used.
        response = []

        node_type = self.check_node_type(node_name)
        # Temporary: DAG-specific scheduling is to be added later.
        if node_type in (SINGLE_NODE, DAG_NODE):
            node_id = self.get_server_id(node_name)
            response.append(
                SubJobModel(jobId=self._job_id, subJobId=self._sub_job_id, jobType=2, jobName="ExchangeFullBackup",
                            execNodeId=node_id).dict(by_alias=True))
        output_result_file(self.pid, response)
        log.info("backup_gen_sub_job SUCCESS!")
        return True

    # Execute the backup.
    @exter_attack
    def backup(self):
        """Run the VSS-based backup of the protected database.

        Resolves repositories, determines the job type (full "Backup" vs
        "Log"), invokes the VSS tool, then reports copy info. Progress is
        written at start, on failure (10%) and on completion (100%).
        """
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, 5, "", "common")
        try:
            log.info("Backup Start!")
            self.m_progress = 50
            self.m_progress_status = SubJobStatusEnum.RUNNING.value
            path_list = self.get_database_and_log_volume()

            # Refresh the database GUID. TODO(review): the result is
            # currently discarded (self.guid keeps its earlier value) —
            # marked in the original as a pending optimization.
            log.info("init_cache_and_data_directory start!")
            username = self.m_username.replace("\\", "\\\\")
            db_name = self._json_param.get("job").get("protectObject").get("name")
            cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}GetDatabaseGuid.ps1 \
                {username} {self.m_password} \\\"{db_name}\\\""
            ret, std_out, std_err = execute_cmd(cmds)
            result = '\n'.join(std_out.splitlines()[-1:])

            self.init_data_meta_log_path()
            # Determine the job type exactly once. The original called
            # set_job_type() twice, re-running the circular-logging
            # PowerShell query for no benefit.
            if not self.set_job_type():
                log.error("Backup Type Error!")
                output_action_result(self.pid, ExchangeCode.FAILED.value, BodyErr.ERR_PLUGIN_CANNOT_BACKUP.value, "")
                self.write_progress_to_file(SubJobStatusEnum.FAILED.value, 10, "", "common")
                return False
            log.info(self.job_type)

            # Full backups land in the data repository, log backups in the
            # log repository; metadata always goes to the meta repository.
            if self.job_type == "Backup":
                final_target_path = self.data_hub + "\\MailboxDatabase_" + self.guid
            else:
                final_target_path = self.log_hub + "\\MailboxDatabase_" + self.guid
            final_target_path = final_target_path.replace("/", "\\")
            final_meta_path = self.meta_hub + "\\MailboxDatabase_" + self.guid
            final_meta_path = final_meta_path.replace("/", "\\")

            input_path1 = "\"" + path_list[0] + "\""
            input_path2 = "\"" + path_list[1] + "\""
            input_writer_name = "\"" + self.writer_name + "\""
            input_guid = "\"" + self.guid + "\""
            input_data_hub = "\"" + final_target_path + "\""
            input_meta_hub = "\"" + final_meta_path + "\""
            vss_exec_param = "{} {} {} {} {} {} {} {}".format(ParamConstant.VSS_TOOL_PATH, self.job_type,
                                                              input_path1, input_path2,
                                                              input_writer_name, input_guid, input_data_hub,
                                                              input_meta_hub)

            log.info("Start VSS Operation")
            code, out, err = get_command_result(vss_exec_param)
            if code != ExecCmdResult.SUCCESS:
                output_action_result(self.pid, ExchangeCode.FAILED.value, BodyErr.ERR_PLUGIN_CANNOT_BACKUP.value, "")
                self.write_progress_to_file(SubJobStatusEnum.FAILED.value, 10, "", "common")
                return False
            # TODO (from original): write meta.info and the sqlite index here.
            if self.job_type == "Log":
                self.report_copy_info_binlog()
            elif self.job_type == "Backup":
                self.report_copy_info()
            output_action_result(self.pid, ExchangeCode.SUCCESS.value, 0, "")
            self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, 100, "", "common")
            log.info("Backup SUCCESS!")

        except Exception as error:
            log.error(error, exc_info=True)
            output_action_result(self.pid, ExchangeCode.FAILED.value, BodyErr.ERR_PLUGIN_CANNOT_BACKUP.value, "")
            self.write_progress_to_file(SubJobStatusEnum.FAILED.value, 10, "", "common")
            return False
        return True

    def build_log_backup_copy_info(self):
        """Assemble the copy-report payload for a log backup.

        The reported window runs from the previous copy's timestamp to now,
        and the new copy is associated with the copy id of this job.
        """
        log.info("Start to build_log_backup_copy_info")
        last_copy_info = self.get_last_copy_info()
        self._copy_id = self._json_param.get("job", {}).get("copy", [])[0].get("id", "")

        last_timestamp = last_copy_info.get("timestamp", "")
        extend_info = {
            "backupTime": last_timestamp,
            "beginTime": last_timestamp,
            "endTime": int(time.time()),
            "beginSCN": None,
            "endSCN": None,
            "backupset_dir": '',
            "backupSetName": "",
            "backupType": "",
            "baseBackupSetName": "",
            "dbName": "",
            "groupId": '',
            "tabal_space_info": [],
            "copyId": self._job_id,
            "associatedCopies": [self._copy_id],
            "logDirName": self.log_hub,
        }
        return {"extendInfo": extend_info}

    def report_copy_info(self):
        """Report this job's copy (id + backup time) to the framework via RPC.

        :return: True when the RPC succeeds, False otherwise.
        """
        copy_info = {
            "copy": {
                "extendInfo": {"copy_id": self._copy_id, "backup_time": int(time.time())},
            },
            "jobId": self._job_id,
        }
        try:
            invoke_rpc_tool_interface(self._job_id, RpcParamKey.REPORT_COPY_INFO, copy_info)
        except Exception as err_info:
            log.error(f"Report copy info fail.err: {err_info},{self.get_log_comm()}")
            return False
        return True

    def get_log_comm(self):
        """Return the pid/job/sub-job identifier suffix used in log messages."""
        return f"pid:{self.pid} jobId:{self._job_id} subjobId:{self._sub_job_id}"

    def report_copy_info_binlog(self):
        """Report a log-backup copy, with assembled extend info, via RPC."""
        log.info(f"Start to report_copy_info_binlog.")
        payload = {"copy": self.build_log_backup_copy_info(), "jobId": self._job_id}
        try:
            invoke_rpc_tool_interface(self._job_id, RpcParamKey.REPORT_COPY_INFO, payload)
        except Exception as err_info:
            log.error(f"Report copy info fail.err: {err_info},{self.get_log_comm()}")
            return False
        return True

    def get_last_copy_info(self):
        """Query the framework for the previous data-backup copy.

        :return: copy info dict from the RPC tool, or {} on failure.
        """
        log.info("start get_last_copy_info")
        input_param = {
            RpcParamKey.APPLICATION: self._json_param.get("job", {}).get("protectObject"),
            RpcParamKey.TYPES: LastCopyType.last_copy_type_dict.get(4),
            RpcParamKey.COPY_ID: "",
            RpcParamKey.JOB_ID: self._job_id,
        }
        try:
            return invoke_rpc_tool_interface(self._job_id, RpcParamKey.QUERY_PREVIOUS_CPOY, input_param)
        except Exception as err_info:
            log.error(f"Get last copy info fail.{err_info}")
            return {}

    # Post-job cleanup.
    @exter_attack
    def backup_post_job(self):
        """Remove the temporary backup directory and report completion."""
        log.info("backup_post_job Start!")

        temp_dir = "C:\\BackupTempFile"
        if os.path.exists(temp_dir):
            shutil.rmtree(temp_dir)

        output_action_result(self.pid, ExchangeCode.SUCCESS.value, 0, "")
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, 100, "", "common")
        log.info("backup_post_job SUCCESS!")
        return True

    def set_job_type(self):
        """Set self.job_type from the job's backupType.

        "Backup" for a full backup, "Log" for a log backup (only when the
        circular-logging check passes). Returns False for any other type.
        """
        backup_type = self._json_param.get("job").get("jobParam").get("backupType")
        if backup_type == JobBackupType.EXCHANGE_BACKUP_FULL:
            self.job_type = "Backup"
            return True
        if backup_type == JobBackupType.EXCHANGE_BACKUP_LOG:
            if not self.check_circular_logging():
                log.error("Set job type Failed! check_circular_logging Failed!")
                return False
            self.job_type = "Log"
            return True
        log.error("Set job type Failed! JobBackupType Error!")
        return False

    # Check whether the log backup must be converted to a full backup.
    def check_can_backup_log(self):
        """Return True when a previous copy exists so a log backup may run."""
        log.info("check_can_backup_inc start!")
        try:
            if self.get_last_copy_info():
                log.info("can find last copy")
                return True
            return False
        except Exception as error:
            # Include the exception detail (it was silently discarded before)
            # so log-to-full conversion decisions are traceable.
            log.error(f"check_can_backup_inc FAILED! err: {error}")
            return False

    def get_code(self, input_ret):
        """Translate a boolean step outcome into the plugin result code."""
        return ExchangeCode.SUCCESS.value if input_ret else ExchangeCode.FAILED.value

    def format_body_err(self, input_ret):
        """Return the body error for a failed step, or 0 on success."""
        return 0 if input_ret else BodyErr.ERR_PLUGIN_CANNOT_BACKUP.value

    def check_exchange_db_exist(self, db_name):
        """Ask Exchange (via PowerShell) whether database *db_name* exists."""
        log.info("check_exchange_db_exist start!")
        username = self.m_username.replace("\\", "\\\\")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}IsDatabaseExisted.ps1 \
            {username} {self.m_password} \\\"{db_name}\\\""
        ret, std_out, std_err = execute_cmd(cmds)
        # The script prints its verdict on the final output line.
        last_line = '\n'.join(std_out.splitlines()[-1:])
        return last_line == "True"

    def check_db_load_status(self, db_name):
        """Return True when *db_name*'s copy status is Healthy or Mounted."""
        log.info("check_db_load_status start!")
        username = self.m_username.replace("\\", "\\\\")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}GetDatabaseStatus.ps1 \
            {username} {self.m_password} \\\"{db_name}\\\""
        ret, std_out, std_err = execute_cmd(cmds)
        status = '\n'.join(std_out.splitlines()[-1:])
        return status in ("Healthy", "Mounted")

    def check_vss_snap_shot(self):
        """Return True when the Exchange VSS writer reports a stable state."""
        log.info("check_VSS_snap_shot start!")
        # The stability probe takes no arguments, so no credentials are
        # passed (an unused local `username` was removed).
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}IsVSSWriterStable.ps1"
        ret, std_out, std_err = execute_cmd(cmds)
        result = '\n'.join(std_out.splitlines()[-1:])
        return result == "True"

    def get_db_guid(self, db_name):
        """Look up the GUID of *db_name* and store it in self.guid.

        :return: True on success, False when the script reports "False".
        """
        log.info("get_db_guid start!")
        username = self.m_username.replace("\\", "\\\\")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}GetDatabaseGuid.ps1 \
            {username} {self.m_password} \\\"{db_name}\\\""
        ret, std_out, std_err = execute_cmd(cmds)
        guid = '\n'.join(std_out.splitlines()[-1:])
        if guid == "False":
            log.error("get_db_guid Failed!")
            return False
        self.guid = guid
        log.info("get_db_guid SUCCESS!")
        return True

    def init_cache_and_data_directory(self, data_hub_path, meta_hub_path):
        """Create the per-database target directories under both hubs.

        :return: True when both directories exist or were created.
        """
        log.info("init_cache_and_data_directory start!")
        folder_name = "/MailboxDatabase_" + self.guid + self._job_id
        # The \\?\ prefix lets Windows create paths longer than MAX_PATH.
        long_path_prefix = "\\\\?\\"
        for hub in (data_hub_path, meta_hub_path):
            target = (hub + folder_name).replace("/", "\\")
            try:
                if not os.path.exists(target):
                    os.makedirs(long_path_prefix + target)
            except Exception as error:
                log.error(error, exc_info=True)
                log.info("init_cache_and_data_directory FAILED!")
                return False
        log.info("init cache SUCCESS!")
        return True

    @exter_attack
    def query_job_permission(self):
        """Permission-query hook: always reports success to the framework."""
        output_action_result(self.pid, ExchangeCode.SUCCESS.value, 0, "")
        return True

    def check_node_type(self, node_name):
        """Return the node-type string printed by CheckNodeType.ps1.

        Callers compare the result against DAG_NODE / SINGLE_NODE.
        """
        log.info("check_node_type start!")
        username = self.m_username.replace("\\", "\\\\")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}CheckNodeType.ps1 \
            {username} {self.m_password} {node_name}"
        ret, std_out, std_err = execute_cmd(cmds)
        return '\n'.join(std_out.splitlines()[-1:])

    def backup_type_is_active(self, backup_type):
        """Normalize *backup_type*: anything other than ACTIVE_BACKUP is passive."""
        log.info("backup_type_is_active start!")
        return ACTIVE_BACKUP if backup_type == ACTIVE_BACKUP else PASSIVE_BACKUP

    def get_server_id(self, server_name):
        """Return the GUID of the Exchange server *server_name*."""
        log.info("get_server_id start!")
        username = self.m_username.replace("\\", "\\\\")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}GetServerGuid.ps1 \
            {username} {self.m_password} {server_name}"
        ret, std_out, std_err = execute_cmd(cmds)
        return '\n'.join(std_out.splitlines()[-1:])

    def get_database_and_log_volume(self):
        """Return [edb file path, log folder path] from the protect object."""
        log.info("get_database_and_log_volume start!")
        extend_info = self._json_param.get("job").get("protectObject").get("extendInfo")
        return [extend_info.get("edb_path")[0], extend_info.get("log_path")[0]]

    def write_db_file_and_log_directories_to_metainfo(self, db_name, parent_dir, meta_info_dir, backup_type):
        """Persist database layout and backup metadata for *db_name*.

        Writes meta.info (database layout details) into *meta_info_dir* and
        backup-copy-meta.json (backup type + last backup time) into
        *parent_dir*. Returns False when the detail query fails.
        """
        log.info("write_db_file_and_log_directories_to_metainfo start!")
        username = self.m_username.replace("\\", "\\\\")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}GetDatabaseDetail.ps1 \
            {username} {self.m_password} \\\"{db_name}\\\""
        ret, std_out, std_err = execute_cmd(cmds)
        db_detail = '\n'.join(std_out.splitlines()[-1:])
        if db_detail == "False":
            return False

        # Bug fix: the parsed JSON was previously discarded, leaving
        # db_detail a plain string, so every .get() below raised
        # AttributeError at runtime.
        db_detail = json.loads(db_detail)
        target_meta_info = {
            "HostName": db_detail.get("Server"),
            "Guid": db_detail.get("Guid"),
            "EdbFilePath": db_detail.get("EdbFilePath"),
            "LogFolderPath": db_detail.get("LogFolderPath"),
            "LogFilePrefix": db_detail.get("LogFilePrefix"),
            "Name": db_detail.get("AdminDisplayName"),
            "ExchangeVersion": db_detail.get("ExchangeVersion")
        }

        # O_EXCL: refuse to overwrite an existing metadata file.
        flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
        modes = stat.S_IWUSR | stat.S_IRUSR
        with os.fdopen(os.open(os.path.join(meta_info_dir, "meta.info"), \
                               flags, modes), 'w') as meta_info:
            meta_info.write(json.dumps(target_meta_info))

        last_backup_map = {
            BackupTypeEnum.FULL_BACKUP: "LastFullBackup",
            BackupTypeEnum.INCRE_BACKUP: "LastIncrementalBackup",
            BackupTypeEnum.DIFF_BACKUP: "LastDifferentialBackup",
        }

        target_backup_copy_meta = {
            "BackupType": backup_type,
            "lastBackupTime": db_detail.get(last_backup_map.get(backup_type))
        }
        # Bug fix: file.write() needs a string — the dict must be serialized
        # (the original passed the dict directly, a TypeError).
        with os.fdopen(os.open(os.path.join(parent_dir, "backup-copy-meta.json"), \
                               flags, modes), 'w') as backup_copy_meta:
            backup_copy_meta.write(json.dumps(target_backup_copy_meta))
        return True

    def write_sqlite(self, parent_path, db_name):
        """Record the server/database/mailbox hierarchy into the sqlite index.

        Queries the mailbox list for *db_name* via PowerShell and writes one
        entry per server, database and mailbox under *parent_path*.
        """
        log.info("write_sqlite start!")
        username = self.m_username.replace("\\", "\\\\")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}GetMailboxDetailByDatabase.ps1 \
            {username} {self.m_password} \\\"{db_name}\\\""
        ret, std_out, std_err = execute_cmd(cmds)
        mailboxes = json.loads('\n'.join(std_out.splitlines()[-1:]))
        server_name = mailboxes[0].get("ServerName")
        # Server node at the hierarchy root.
        ExchangeSqliteService.write_metadata_to_sqlite_file(
            parent_path,
            SqliteInputParam(data_name=server_name, data_type="Server", data_parent_path="/"))
        # Database node beneath the server.
        ExchangeSqliteService.write_metadata_to_sqlite_file(
            parent_path,
            SqliteInputParam(data_name=mailboxes[0].get("Database"), data_type="MailBoxDatabase",
                             data_parent_path="/" + server_name))
        # One entry per mailbox beneath its server/database.
        for mailbox in mailboxes:
            ExchangeSqliteService.write_metadata_to_sqlite_file(
                parent_path,
                SqliteInputParam(data_name=mailbox.get("Name"), data_type=mailbox.get("RecipientType"),
                                 data_parent_path="/" + mailbox.get("ServerName") + "/" + mailbox.get("Database")))
        return True

    @exter_attack
    def progress_comm(self):
        """Report the common job progress to the framework and return True."""
        log.info("progress_com Start!!!")
        self.report_progress_comm()
        log.info("progress_com Finished!!!")
        return True

    def check_circular_logging(self):
        """Return True when GetCircularLoggingEnabled.ps1 reports "True"."""
        log.info("check circular logging enabled!")
        username = self.m_username.replace("\\", "\\\\")
        db_name = self._json_param.get("job").get("protectObject").get("name")
        cmds = f"{ParamConstant.PS_LOC} {ParamConstant.PS1_SCRIPTS_PATH}GetCircularLoggingEnabled.ps1 \
            {username} {self.m_password} \\\"{db_name}\\\""
        # Security fix: the full command line contains the account password
        # in plaintext and must never be logged; log only the script name.
        log.info("exec GetCircularLoggingEnabled.ps1")
        ret, std_out, std_err = execute_cmd(cmds)
        result = '\n'.join(std_out.splitlines()[-1:])
        return result == "True"


def do_work(args_list):
    """Dispatch one plugin command to its ExchangeBackupComm handler.

    args_list is [cmd, pid, job_id?, sub_job_id?]. Credentials are fetched
    from the key store before dispatch; the stdin-held secret is always
    cleared afterwards.
    """
    log.info("do_work start!")
    cmd, pid = args_list[0], args_list[1]
    job_id = args_list[2] if len(args_list) >= 3 else ""
    sub_job_id = args_list[3] if len(args_list) == 4 else ""
    backup_comm_inst = ExchangeBackupComm(pid, job_id, sub_job_id)

    flags, backup_comm_inst.m_username = get_key_value(f'job_protectEnv_auth_authKey_{pid}')
    if not flags:
        log.error("get username FAILED")
        return False
    flags, backup_comm_inst.m_password = get_key_value(f'job_protectEnv_auth_authPwd_{pid}')
    if not flags:
        log.error("get user password FAILED")
        return False

    handlers = {
        "AllowBackupInLocalNode": backup_comm_inst.allow_backup_in_local_node,
        "BackupPrerequisite": backup_comm_inst.backup_prerequisite,
        "Backup": backup_comm_inst.backup,
        "BackupPostJob": backup_comm_inst.backup_post_job,
        "QueryJobPermission": backup_comm_inst.query_job_permission,
        "progress_comm": backup_comm_inst.progress_comm,
        "BackupGenSubJob": backup_comm_inst.backup_gen_sub_job,
        "CheckBackupJobType": backup_comm_inst.check_backup_job_type
    }
    handler = handlers.get(cmd)
    if not handler:
        output_action_result(pid, ExchangeCode.FAILED.value, BodyErr.ERROR_INTERNAL.value,
                             f"Cmd Param error.")
        return False
    try:
        ret = handler()
    except Exception as err:
        log.error(f"Exec cmd failed as err: {str(err)} pid: {pid} jobId: {job_id}")
        return False
    finally:
        # Always scrub the credential line captured from stdin.
        clear(SysData.SYS_STDIN)
    return ret


if __name__ == "__main__":
    log.info("Running main....")
    # The agent framework passes sensitive data on the first stdin line;
    # capture only that line (it is cleared by do_work's finally block).
    for line in sys.stdin:
        SysData.SYS_STDIN = line
        break

    args = sys.argv[1:]
    if len(args) < 2:
        log.error("Not enough parameters, param cnt: %s.", len(args))
        sys.exit(1)
    # NOTE(review): exiting 0 when do_work() reports failure looks inverted
    # (the parameter error above exits 1) — confirm the exit-code contract
    # expected by the agent before changing it.
    if not do_work(args):
        sys.exit(0)
