#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

# coding=utf-8
import json
import os
import re
import shutil
import time
import uuid

from common.common import execute_cmd, check_command_injection
from common.common_models import SubJobDetails, CopyInfoRepModel, Copy, LogDetail, RepositoryPath, ScanRepositories
from common.const import SubJobStatusEnum, RepositoryDataTypeEnum, AuthType, BackupTypeEnum, \
    CopyDataTypeEnum, DBLogLevel
from common.parse_parafile import ParamFileUtil
from common.util.scanner_utils import scan_dir_size
from sqlserver import log
from sqlserver.commons.common import get_env_variable, output_execution_result_ex, output_tmp_info, \
    check_clean_dir, check_del_dir_or_file
from sqlserver.commons.const import ExecCmdResult, SQLServerJsonConstant, ParamConstant, SQLTmpFileName
from sqlserver.commons.const import SQLServerProgressFileType, SQLServerStrConstant, \
    VDIActionType, SqlServerReportLabel, BodyErr
from sqlserver.commons.sqlserver_db_model import SqlServerIndex
from sqlserver.commons.sqlserver_db_sqlite import SqlServerIndexFile
from sqlserver.commons.sqlserver_utils import SqlServerUtils


class SQLServerBase(object):

    def __init__(self, p_id, job_id, sub_job_id, json_param):
        """
        Shared state for one SQLServer protection (sub) job.
        :param p_id: process id string, used to name framework result files
        :param job_id: main job id
        :param sub_job_id: sub job id
        :param json_param: parsed job parameter dict delivered by the framework
        """
        self.backup_type = ParamFileUtil.parse_backup_type(json_param.get("job").get("jobParam"))
        self._job_id = job_id
        self._sub_job_id = sub_job_id
        self._p_id = p_id
        self._json_param = json_param
        # Repository paths; resolved lazily by the get_*_path() helpers.
        self._cache_path = ""
        self._data_path = ""
        self._log_path = ""
        self._meta_path = ""
        # Flag polled by report_backup_progress_thread(); clear it to stop the thread.
        self._report_progress_thread_start = True
        self._last_progress = 5
        self.data_rep = dict()
        self.meta_rep = dict()
        self._restore_numbers = 0
        self.auth_mode = str(AuthType.NO_AUTO.value)
        self.user_name = ''
        self.user_info = ""  # authentication info (sqlcmd connection arguments)
        self.pass_prefix = ""
        self.sql_utils = None
        self.vdi_type = VDIActionType.FULL_BACKUP.value
        self.vdi_info = ""
        self.instance_name = ""
        # Pre-built log details reused when reporting job status to the framework.
        self.error_logtail = LogDetail(logInfo=SqlServerReportLabel.SUB_JOB_FAILED_LABEL,
                                       logDetail=BodyErr.ERROR_INTERNAL.value,
                                       logInfoParam=[sub_job_id], logLevel=DBLogLevel.ERROR.value)
        self.success_logtail = LogDetail(logInfo=SqlServerReportLabel.BACKUP_SUB_JOB_SUCCESS_LABEL,
                                         logInfoParam=[sub_job_id], logLevel=DBLogLevel.INFO.value)
        self.running_tail = LogDetail(logInfo=SqlServerReportLabel.BACKUP_SUB_JOB_RUNNING_LABEL,
                                      logInfoParam=[sub_job_id], logLevel=DBLogLevel.INFO.value)

    @staticmethod
    def read_temp_file(file_path):
        """
        解析临时文件
        :return:
        """
        if not os.path.isfile(file_path):
            raise Exception(f"File:{file_path} not exist")
        try:
            with open(file_path, "r", encoding='UTF-8') as f:
                json_dict = json.loads(f.read())
        except Exception as e:
            raise Exception("parse param file failed") from e
        return json_dict

    @staticmethod
    def write_context_to_tmp_file(context, tmp_file_path):
        """
        Write *context* to a temp file, replacing any file already there.
        :param tmp_file_path: destination file path
        :param context: content to persist
        """
        already_there = os.path.exists(tmp_file_path)
        if already_there:
            check_del_dir_or_file(tmp_file_path)
        output_tmp_info(tmp_file_path, context)

    @staticmethod
    def get_path_time(path):
        return os.path.getmtime(path)

    @staticmethod
    def single_space_using_regex(text):
        return re.sub(r'\s+', ' ', text)

    def prepare_user_infos(self):
        """Hook for subclasses to populate authentication info; the base class does nothing."""
        pass

    def get_hostname(self):
        """
        Return the protected object's configured network name, falling back to
        the local ``hostname`` command ("" when that command fails).
        """
        protect_obj = self._json_param.get("job", {}).get("protectObject", {})
        net_name = protect_obj.get("extendInfo", {}).get("networkName", "")
        if net_name:
            return net_name
        ret, std_out, std_err = execute_cmd('hostname')
        if int(ret) == 0:
            return std_out.strip()
        return ""

    def get_latest_data_copy_id(self):
        """
        For log backup: fetch latestDataCopyId from the log repository extend info.
        :return: latest data copy id, or None when not present
        """
        job_info = self._json_param.get("job", {})
        log_rep = ParamFileUtil.get_rep_info(job_info,
                                             RepositoryDataTypeEnum.LOG_REPOSITORY.value)
        log_backup_info = log_rep.get("extendInfo", {}).get("logBackup", {})
        return log_backup_info.get("latestDataCopyId")

    def check_last_backup(self):
        """
        Before a log backup: validate that the full copy id handed down by the
        framework is usable and that the previous log backup succeeded.
        :raises Exception: when an outdated full copy id is supplied, or the
            last log backup failed (both cases force a new full backup).
        """
        # Check the full copy id and the result of the last log backup.
        self.get_meta_path()
        status_file = os.path.realpath(os.path.join(self._meta_path,
                                                    SQLServerProgressFileType.LAST_LOG_BACKUP_STATUS))
        if not os.path.exists(status_file):
            # No history recorded yet: nothing to validate.
            log.info("Status file not exist")
            return
        full_copy_id = self.get_latest_data_copy_id()
        status_info = self.read_temp_file(status_file)
        full_copy_list = status_info.get("fullCopy", [])
        # The delivered copy id is new and no log backup has used it yet.
        if full_copy_id not in full_copy_list:
            log.info("Latest copy id not in list, support backup")
            return
        # The delivered copy id is known but is not the most recent one.
        elif full_copy_id in full_copy_list and full_copy_id != full_copy_list[-1]:
            self.error_logtail.log_detail = BodyErr.BACKUP_FAIL_FOR_LAST_LOG_FAIL.value
            log.error(f"Need latest full copy id: {full_copy_list[-1]}")
            raise Exception("Need latest full copy id")
        elif full_copy_id == full_copy_list[-1]:
            # Check the result of the previous log backup.
            if status_info.get("result") is True:
                return
            self.error_logtail.log_detail = BodyErr.BACKUP_FAIL_FOR_LAST_LOG_FAIL.value
            log.error("Last log backup failed, need full backup")
            raise Exception('Last log backup failed, need full backup')

    def update_status_file(self, ret):
        """
        After a log backup: record the full copy id and the backup result in
        the status file consumed by check_last_backup().
        :param ret: True when the log backup succeeded
        """
        # Only log backups track the full copy id / result flag.
        if self.backup_type != BackupTypeEnum.LOG_BACKUP.value:
            return
        full_id = self.get_latest_data_copy_id()
        # NOTE(review): dirname() strips the last component of _meta_path;
        # presumably this yields the directory that holds the status file — verify
        # it matches the path check_last_backup() reads from.
        meta_dir = os.path.abspath(os.path.dirname(os.path.realpath(self._meta_path)))
        status_file = os.path.realpath(os.path.join(meta_dir, SQLServerProgressFileType.LAST_LOG_BACKUP_STATUS))
        if not os.path.exists(status_file):
            data = {"fullCopy": [full_id], "result": ret}
        else:
            data = self.read_temp_file(status_file)
            full_list = data.get("fullCopy", [])
            # Append the id only once; order preserves "latest is last".
            if full_id not in full_list:
                full_list.append(full_id)
            data["result"] = ret
        output_tmp_info(status_file, json.dumps(data))

    def check_and_prepare_user_info(self, instance_name):
        """
        Build the sqlcmd/VDI connection strings for *instance_name* and create
        the SqlServerUtils helper on self.sql_utils.
        :param instance_name: SQL Server instance name from the job parameters
        :raises Exception: on an empty/injection-suspect name, missing
            credentials, or an unsupported auth mode
        """
        if not instance_name or check_command_injection(instance_name):
            log.error("Failed to get instance name")
            raise Exception("Failed to get instance name")
        localhost = self.get_hostname()
        if instance_name.upper() == SQLServerStrConstant.SQLSERVER_DEFAULT_SERVICES.upper():
            # Default instance: connect by host name alone.
            instance = localhost
        else:
            # Named instance: quoted "host\\instance" form.
            instance = f'\"{localhost}\\\\{instance_name}\"'
        if self.auth_mode == str(AuthType.APP_PASSWORD.value):
            if not self.user_name and not get_env_variable(self.pass_prefix):
                log.error("Failed to get user info or user pass")
                raise Exception("Failed to get user info or user pass")
            self.user_info = f"-S {instance} -U {self.user_name}"
            self.vdi_info = self.user_info
        elif self.auth_mode == str(AuthType.OS_PASSWORD.value):
            # Windows (trusted) authentication: the VDI tool needs -E.
            self.user_info = f"-S {instance}"
            self.vdi_info = f"-S {instance} -E"
        else:
            log.error(f"Auth mode: {self.auth_mode} is not support")
            raise Exception("Auth mode not support")
        self.sql_utils = SqlServerUtils(self.auth_mode, self.user_info, self.pass_prefix, self._job_id)

    def convert_vdi_type(self):
        """
        Map the job's backup type onto the matching VDI action type.
        :raises Exception: for any backup type outside full/diff/log
        """
        action_by_backup = {
            BackupTypeEnum.FULL_BACKUP.value: VDIActionType.FULL_BACKUP.value,
            BackupTypeEnum.DIFF_BACKUP.value: VDIActionType.DIFF_BACKUP.value,
            BackupTypeEnum.LOG_BACKUP.value: VDIActionType.LOG_BACKUP.value,
        }
        if self.backup_type not in action_by_backup:
            raise Exception(f"Backup type: {self.backup_type} not support.")
        self.vdi_type = action_by_backup[self.backup_type]

    def get_cache_path(self):
        """
        Resolve the cache repository path from the job parameters.

        The result is memoized in ``self._cache_path``; a bare drive spec
        (e.g. "D:") gets a backslash appended so it is a usable directory path.
        :return: None (sets self._cache_path)
        """
        if self._cache_path:
            return
        if not self._json_param:
            return
        job_json = self._json_param.get(SQLServerJsonConstant.JOB, {})
        if job_json:
            repositories_json = job_json.get(SQLServerJsonConstant.REPOSITORIES, [])
        else:
            # Some callers pass the repositories at the top level instead of under "job".
            repositories_json = self._json_param.get(SQLServerJsonConstant.REPOSITORIES, [])
        # Iterate repositories directly instead of indexing by range(len(...)).
        for repository in repositories_json:
            if repository.get(SQLServerJsonConstant.REPOSITORY_TYPE, "") \
                    == RepositoryDataTypeEnum.CACHE_REPOSITORY.value:
                self._cache_path = repository.get(SQLServerJsonConstant.PATH, [""])[0]
                if self._cache_path.endswith(":"):
                    self._cache_path += "\\"
                break

    def get_data_path(self):
        """
        Resolve the data repository path from the job parameters.

        The result is memoized in ``self._data_path``; a bare drive spec
        (e.g. "D:") gets a backslash appended so it is a usable directory path.
        :return: None (sets self._data_path)
        """
        if self._data_path:
            return
        if not self._json_param:
            return
        job_json = self._json_param.get(SQLServerJsonConstant.JOB, {})
        if job_json:
            repositories_json = job_json.get(SQLServerJsonConstant.REPOSITORIES, [])
        else:
            # Some callers pass the repositories at the top level instead of under "job".
            repositories_json = self._json_param.get(SQLServerJsonConstant.REPOSITORIES, [])
        # Iterate repositories directly instead of indexing by range(len(...)).
        for repository in repositories_json:
            if repository.get(SQLServerJsonConstant.REPOSITORY_TYPE, "") \
                    == RepositoryDataTypeEnum.DATA_REPOSITORY.value:
                self._data_path = repository.get(SQLServerJsonConstant.PATH, [""])[0]
                if self._data_path.endswith(":"):
                    self._data_path += "\\"
                break

    def get_log_path(self):
        """
        Resolve the log repository path from the job parameters; memoized in
        ``self._log_path``, with "\\" appended to a bare drive spec.
        """
        if self._log_path or not self._json_param:
            return
        repositories = self._json_param.get(SQLServerJsonConstant.JOB, {}) \
            .get(SQLServerJsonConstant.REPOSITORIES, [])
        for repo in repositories:
            repo_type = repo.get(SQLServerJsonConstant.REPOSITORY_TYPE, "")
            if repo_type != RepositoryDataTypeEnum.LOG_REPOSITORY.value:
                continue
            log_path = repo.get(SQLServerJsonConstant.PATH, [""])[0]
            if log_path.endswith(":"):
                log_path += "\\"
            self._log_path = log_path
            break

    def get_meta_path(self):
        """
        Resolve the metadata repository path (base path joined with the job
        id) from the job parameters; memoized in ``self._meta_path``.
        """
        if self._meta_path or not self._json_param:
            return
        job_json = self._json_param.get(SQLServerJsonConstant.JOB, {})
        for repo in job_json.get(SQLServerJsonConstant.REPOSITORIES, [{}]):
            if repo.get(SQLServerJsonConstant.REPOSITORY_TYPE, "") != \
                    RepositoryDataTypeEnum.META_REPOSITORY.value:
                continue
            base_path = repo.get(SQLServerJsonConstant.PATH, [""])[0]
            meta_path = os.path.join(base_path, job_json.get(SQLServerJsonConstant.ID, ""))
            if meta_path.endswith(":"):
                meta_path += "\\"
            self._meta_path = meta_path
            break

    def prepare_path_param(self, delete_data=True, create_data_path=True):
        """
        Resolve and prepare the data/meta (or log) paths for this job.
        :param delete_data: remove a pre-existing data path first (restart support)
        :param create_data_path: create the data path after resolving it
        :return: True on success, False when any repository path is missing
        """
        self.get_cache_path()
        if not self._cache_path:
            log.error("Failed to get cache path.")
            return False
        if self.backup_type == BackupTypeEnum.LOG_BACKUP.value:
            self.get_log_path()
            self.get_meta_path()
            if not self._log_path or not self._meta_path:
                log.error("Failed to get path info.")
                return False
            # Log backup writes straight into the (already existing) log repository.
            self._data_path = self._log_path
            self._meta_path = os.path.join(self._meta_path, self._job_id)
        else:
            self.get_data_path()
            if not self._data_path:
                log.error("Failed to get data path.")
                return False
            self.get_meta_path()
            if not self._meta_path:
                log.error("Failed to get meta path.")
                return False
            path_prefix = "full" if self.backup_type == BackupTypeEnum.FULL_BACKUP.value else "diff"
            self._data_path = os.path.join(self._data_path, f"{path_prefix}_{self._job_id}")
            self._meta_path = os.path.join(self._meta_path, f"{path_prefix}_{self._job_id}")
        if delete_data and os.path.exists(self._data_path):
            # Restart support: drop the stale copy directory before re-creating it.
            try:
                shutil.rmtree(self._data_path, ignore_errors=False)
            except Exception as err:
                log.error(f"Delete data path failed: {err}!")
                return False
        if create_data_path:
            # exist_ok keeps a pre-existing directory (e.g. the log repo root,
            # or a kept dir when delete_data=False) from raising FileExistsError.
            os.makedirs(self._data_path, exist_ok=True)
        return True

    def output_other_result(self, json_str):
        """
        Write *json_str* into the per-process result file read by the framework.
        :param json_str: serializable result payload
        """
        result_file = os.path.join(ParamConstant.RESULT_PATH, f"result{self._p_id}")
        output_execution_result_ex(result_file, json_str)

    def check_dir_is_exist(self, database_list):
        """
        Verify the newest full-copy directory contains a .bak file for every
        database, then confirm that full copy is still the instance's latest.
        :param database_list: database names to verify
        :return: True when every check passes
        """
        log.info("Enter check_dir_is_exist")
        self.get_data_path()
        full_copy_dirs = [os.path.join(self._data_path, entry)
                          for entry in os.listdir(self._data_path)
                          if entry.startswith("full")]
        if not full_copy_dirs:
            log.error('Full copy is empty')
            return False
        newest_dir = sorted(full_copy_dirs, key=self.get_path_time)[-1]
        copy_files = os.listdir(newest_dir)
        for database in database_list:
            if f'{database}.bak' not in copy_files:
                log.error(f"Database: {database} copy not exist")
                return False
        log.info("Full copy exist")
        return self.check_full_copy(database_list)

    def check_full_copy(self, database_list: list):
        """
        Check that the newest full copy known to the manager is still the
        newest full backup recorded by the SQL Server instance; on mismatch,
        report a switch-to-full-backup detail and return False.
        :param database_list: databases to compare; an empty list compares all
        :return: True when the LSNs match for every relevant database
        """
        # Check whether the latest full copy on OP is the database's latest full copy.
        self.get_cache_path()
        last_full_copy = self.get_previous_copy_info(CopyDataTypeEnum.FULL_COPY.value)
        log.info(f'The latest full copy is : {last_full_copy.get("id")}')
        meta_info = last_full_copy.get("extendInfo", {}).get("meta_info", {})
        for key, value in meta_info.items():
            if key == "master":
                # System database: excluded from the comparison.
                continue
            if database_list and key not in database_list:
                continue
            current_meta_info = self.sql_utils.get_latest_backup_meta_info(key, VDIActionType.FULL_BACKUP.value)
            # Any LSN mismatch means a newer full backup was made outside this copy.
            if value.get("first_lsn") != current_meta_info.get("first_lsn") or value.get(
                    "last_lsn") != current_meta_info.get("last_lsn"):
                log.info(f'first lsn is: {value.get("first_lsn")}, '
                         f'latest full copy lsn is :{current_meta_info.get("first_lsn")}')
                log.info(f'copy :{last_full_copy.get("id")} is not latest in backup set')
                log_detail = LogDetail(logInfo=SqlServerReportLabel.SWITCH_TO_FULL_BACKUP_LABEL,
                                       logInfoParam=[last_full_copy.get("id")], logLevel=DBLogLevel.WARN.value)
                sub_job_details = SubJobDetails(taskId=self._job_id,
                                                progress=0, logDetail=[log_detail],
                                                taskStatus=SubJobStatusEnum.RUNNING.value)
                SqlServerUtils.report_job_details(self._job_id, sub_job_details.dict(by_alias=True))
                return False
        log.info(f"Do not need to convert to a full backup")
        return True

    def get_data_start_time(self, copy_extend_info):
        """
        Work out which previous copy's backupTime the copy being built chains
        from: the last log copy when the LSN chain is intact, otherwise the
        latest full copy.
        :param copy_extend_info: extend info of the copy under construction
            (exposes a meta_info mapping of database -> LSN info)
        :return: backupTime of the base copy (may be None when absent)
        """
        log.info("Start to get data copy time")
        out_info = self.get_previous_copy_info(CopyDataTypeEnum.LOG_COPY.value)
        if not out_info:
            # No log copy exists: chain from the latest full copy.
            out_info = self.get_previous_copy_info(CopyDataTypeEnum.FULL_COPY.value)
            return out_info.get("extendInfo", {}).get("backupTime")
        last_meta_info = out_info.get("extendInfo", {}).get("meta_info", {})
        if not last_meta_info:
            # Log copy has no LSN metadata: also fall back to the full copy.
            out_info = self.get_previous_copy_info(CopyDataTypeEnum.FULL_COPY.value)
            return out_info.get("extendInfo", {}).get("backupTime")
        current_meta = copy_extend_info.meta_info
        for key, value in current_meta.items():
            if key == "master":
                continue
            current_first_lsn = value["first_lsn"]
            last_last_lsn = last_meta_info.get(key, {}).get("last_lsn", "")
            # A gap between the previous log copy's end LSN and this copy's
            # start LSN breaks the chain: fall back to the latest full copy.
            if current_first_lsn != last_last_lsn:
                out_info = self.get_previous_copy_info(CopyDataTypeEnum.FULL_COPY.value)
                return out_info.get("extendInfo", {}).get("backupTime")
        return out_info.get("extendInfo", {}).get("backupTime")

    def get_previous_copy_info(self, backup_type):
        """
        Query the framework (QueryPreviousCopy via the RPC tool) for the most
        recent copy of the given type.
        :param backup_type: copy type value (CopyDataTypeEnum member value)
        :return: copy info dict, or {} when the RPC call fails
        """
        param = dict()
        param["application"] = self._json_param.get("job", {}).get("protectObject")
        param["types"] = [backup_type]
        param["copyId"] = self._job_id
        param["jobId"] = self._job_id
        # The RPC tool communicates via files placed in the cache repository.
        self.write_tmp_file(json.dumps(param), SQLServerProgressFileType.COPY_INFO_PARAM)
        param_file = os.path.join(self._cache_path, SQLServerProgressFileType.COPY_INFO_PARAM)
        out_file = os.path.join(self._cache_path, SQLServerProgressFileType.COPY_INFO)
        cmd = f"\"{ParamConstant.RPC_TOOL}\" QueryPreviousCopy \"{param_file}\" \"{out_file}\""
        code, out, err = execute_cmd(cmd)
        if code != ExecCmdResult.SUCCESS:
            log.error(f"Failed to QueryPreviousCopy, as code: {code}, out: {out}, err: {err}")
            return {}
        out_info = self.read_temp_file(out_file)
        return out_info

    def exec_rear_job(self):
        """
        Run the post-job step: remove the generated copy directories when the
        backup failed, always clean the cache directory, then abort any
        leftover backup processes.
        :return: True
        """
        self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, 5, "", SQLServerProgressFileType.COMMON)
        backup_job_ret = self._json_param.get(SQLServerJsonConstant.BACK_JOB_RESULT, 0)
        log.info(f"Backup job ret:{backup_job_ret}")
        # If the backup job failed, the directories it produced must be removed.
        if backup_job_ret != 0:
            log.info("Backup exec failed. delete copy.")
            self.get_meta_path()
            if self.backup_type == BackupTypeEnum.FULL_BACKUP.value:
                # Full backup failure: clear the data dir's contents but keep the directory.
                self.get_data_path()
                check_clean_dir(os.path.join(self._data_path, f"full_{self._job_id}"))
                check_del_dir_or_file(os.path.join(self._meta_path, f"full_{self._job_id}"))
            elif self.backup_type == BackupTypeEnum.DIFF_BACKUP.value:
                self.get_data_path()
                check_del_dir_or_file(os.path.join(self._data_path, f"diff_{self._job_id}"))
                check_del_dir_or_file(os.path.join(self._meta_path, f"diff_{self._job_id}"))
            else:
                # Log backup failure: clear the log repo and drop this job's meta dir.
                self.get_log_path()
                check_clean_dir(self._log_path)
                check_del_dir_or_file(os.path.join(self._meta_path, self._job_id))
        # Clean the cache directory.
        self.get_cache_path()
        if self._cache_path:
            check_clean_dir(self._cache_path)
        self.write_progress_to_file(SubJobStatusEnum.COMPLETED.value, 100, "", SQLServerProgressFileType.COMMON)
        self.abort_job()
        return True

    def write_progress_to_file(self, task_status, progress, log_detail, progress_type):
        """
        Persist a SubJobDetails progress record into a cache-repo file that
        the framework later reads via report_progress_comm().
        :param task_status: SubJobStatusEnum value
        :param progress: progress percentage (int-convertible)
        :param log_detail: optional LogDetail appended to the record
        :param progress_type: progress file type; COMMON files are per sub job
        """
        if not self._cache_path:
            self.get_cache_path()
        output = SubJobDetails(taskId=self._job_id, subTaskId=self._sub_job_id, progress=int(progress),
                               logDetail=list(), taskStatus=task_status)
        if log_detail:
            output.log_detail.append(log_detail)
        try:
            # On completion, attach the copy size scanned from the data path.
            if task_status == SubJobStatusEnum.COMPLETED.value and self._data_path:
                _, size = scan_dir_size(self._job_id, self._data_path)
                log.info(f"SQLServer copy size {size}")
                output.data_size = size
        except Exception as error:
            # Size reporting is best-effort: log and report 0 instead of failing the job.
            log.exception(error)
            log.error(f"failed to scan dir size,{error}")
            output.data_size = 0
        if progress_type == SQLServerProgressFileType.COMMON:
            # Common progress files are tracked per sub job.
            file_path = os.path.join(self._cache_path, progress_type + self._sub_job_id)
        else:
            file_path = os.path.join(self._cache_path, progress_type)
        log.info(f"Write file.path: {file_path} progress: {progress}, taskStatus: {task_status}, "
                 f"pid: {self._p_id} taskId: {self._job_id} subTaskId: {self._sub_job_id}")
        output_execution_result_ex(file_path, output.dict(by_alias=True))

    def report_progress_comm(self, file_name: str = None):
        """
        Report task progress by forwarding a cached progress file to the
        framework's result file.
        :param file_name: progress file name; defaults to this sub job's common file
        :return: True when the progress file was read and forwarded
        """
        self.get_cache_path()
        if not self._cache_path:
            log.error("get_cache_path_filed.")
            return False
        # By default, read the common progress file of this sub job.
        progress_file = file_name or (SQLServerProgressFileType.COMMON + self._sub_job_id)
        file_path_comm = os.path.join(self._cache_path, progress_file)
        log.info(f"file_path_comm:{file_path_comm}")
        if not os.access(file_path_comm, os.F_OK):
            log.error(f"Report progress comm failed, file path: {file_path_comm}")
            return False
        try:
            json_str = self.read_temp_file(file_path_comm)
        except Exception as e:
            log.error(f"Report progress_comm failed, err msg is:{str(e)}")
            return False
        self.output_other_result(json_str)
        log.info(f"Report progress_comm success. progress: {json_str.get('progress')}, taskStatus: "
                 f"{json_str.get('taskStatus')}")
        return True

    def calc_progress(self, database_name, job_type):
        """
        Query sys.dm_exec_requests for the running backup/restore progress of
        *database_name*.
        :param database_name: database being backed up or restored
        :param job_type: t.command value to match (e.g. a BACKUP command name)
        :return: (success_flag, progress_string); progress is capped at "95"
            until the job actually completes
        """
        tmp_path = os.path.join(self._cache_path, SQLTmpFileName.QUERY_PROGRESS_SQL)
        out_path = os.path.join(self._cache_path, SQLTmpFileName.DATABASE_PROGRESS_FILE)
        cmd = f"select t.percent_complete from sys.dm_exec_requests as t " \
              f"inner join sys.databases as d on t.database_id = d.database_id " \
              f"where d.name = '{database_name}' and t.command  in ('{job_type}') " \
              f"order by t.start_time desc;"
        self.write_tmp_file(cmd, SQLTmpFileName.QUERY_PROGRESS_SQL)
        ret, output, msg = self.sql_utils.get_command_result(f"sqlcmd {self.user_info} -i '{tmp_path}' -o '{out_path}'")
        if ret != ExecCmdResult.SUCCESS:
            log.error(f"Exec query progress cmd failed. output:{output} pid:{self._p_id} jobId:{self._job_id}")
            return False, "0"
        try:
            output = self.read_temp_file(out_path)
        except Exception as e:
            # The result file can be unreadable right when the backup finishes;
            # once progress has been reported before, assume it is nearly done.
            if self._last_progress != 0:
                return True, "95"
            else:
                log.warning(f"Read query result file failed, err msg is:{str(e)}")
                return False, "0"
        # Parsed result rows are ordered newest-first; take the current request.
        # (Was output[0].get(f"percent_complete") — pointless f-string prefix removed.)
        current_progress = output[0].get("percent_complete")
        self._last_progress = current_progress

        if current_progress < 95:
            return True, str(current_progress)
        else:
            return True, "95"

    def report_backup_progress_thread(self, database_name, job_type, filename):
        """
        Poll the backup progress once per second and persist it to the
        progress file until self._report_progress_thread_start is cleared.
        :param database_name: database whose progress is polled
        :param job_type: command type passed through to calc_progress
        :param filename: progress file type written by write_progress_to_file
        """
        while self._report_progress_thread_start:
            ret, progress = self.calc_progress(database_name, job_type)
            # calc_progress returns a boolean success flag; only persist valid
            # progress. (The previous `ret == 0` test matched the FAILURE case,
            # since False == 0, so progress was written only when polling failed.)
            if ret:
                self.write_progress_to_file(SubJobStatusEnum.RUNNING.value, progress, "", filename)
            time.sleep(1)

    def call_vdi_tool(self, action_type, database_name, copy_path, channel_number=4):
        """
        Invoke the VDI backup tool for one database.
        :param action_type: VDI action to run
        :param database_name: target database
        :param copy_path: copy file location passed to the tool
        :param channel_number: parallel channel count (default 4)
        :return: (True, "") on success, (False, std_out) on failure
        """
        cmd = (f"\"{ParamConstant.VDI_TOOL_PATH}\" {action_type} \"{database_name}\" \"{copy_path}\" "
               f"\"{self.vdi_info}\" \"\" \"\" {channel_number}")
        log.info(f"cmd: {cmd}")
        ret, std_out, str_err = self.sql_utils.get_command_result(cmd)
        if ret == ExecCmdResult.SUCCESS:
            log.info(f"Exec vdi tool success. std_out:{std_out}  pid: {self._p_id} jobId: {self._job_id}")
            return True, ""
        log.error(f"Exec vdi tool failed. std_out: {std_out} str_err:{str_err} "
                  f"pid: {self._p_id} jobId: {self._job_id}")
        return False, std_out

    def write_tmp_file(self, context, file_name):
        """
        Write *context* to a temp file under the cache repository, replacing
        any previous file of the same name.
        :param context: content to write
        :param file_name: file name inside the cache path
        """
        target_path = os.path.join(self._cache_path, file_name)
        if os.path.exists(target_path):
            check_del_dir_or_file(target_path)
        output_tmp_info(target_path, context)

    def save_meta_info(self, database_list, file_name, flag):
        """
        Persist per-database file metadata into the meta repository (or, for
        log backup, the data/log repository).
        :param database_list: databases whose file info is saved
        :param file_name: name of the output file
        :param flag: passed through to get_database_file_info
        """
        meta_info = {database: self.sql_utils.get_database_file_info(database, flag)
                     for database in database_list}
        if self.backup_type == BackupTypeEnum.LOG_BACKUP.value:
            dir_path = self._data_path
        else:
            dir_path = self._meta_path
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)
        target_file = os.path.realpath(os.path.join(dir_path, file_name))
        output_execution_result_ex(target_file, meta_info)

    def prepare_rep_param(self, backup_type):
        """
        Load the repository info needed for copy queries: the data repo for
        non-log backups, and the meta repo in every case.
        :param backup_type: backup type of the running job
        :raises Exception: when the cache path cannot be resolved
        """
        self.get_cache_path()
        if not self._cache_path:
            raise Exception("Failed to get cache path.")
        job_info = self._json_param.get("job")
        if backup_type != BackupTypeEnum.LOG_BACKUP.value:
            self.data_rep = ParamFileUtil.get_rep_info(job_info,
                                                       RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        self.meta_rep = ParamFileUtil.get_rep_info(job_info,
                                                   RepositoryDataTypeEnum.META_REPOSITORY.value)

    def query_copy_infos(self, backup_type):
        """
        Assemble the Copy record for this job from the cached copy-info temp
        file and write it to the framework result file.
        :param backup_type: backup type of the running job
        :return: True on success, False when repo params or the temp file are missing
        """
        log.info(f"Start to query copy info. pid: {self._p_id} jobId: {self._job_id}")
        try:
            self.prepare_rep_param(backup_type)
        except Exception as err:
            log.error(f"Exception when get path info as err: {err}. pid: {self._p_id} jobId: {self._job_id}")
            return False
        file_path = os.path.join(self._cache_path, SQLTmpFileName.COPY_INFO_FILE)
        try:
            extend_info = self.read_temp_file(file_path)
        except Exception as e:
            log.error(f"Failed to query copy info from tmp file, err: {str(e)}. "
                      f"pid: {self._p_id} jobId: {self._job_id}")
            return False
        copy_info = Copy(id=self._job_id)
        if backup_type != BackupTypeEnum.LOG_BACKUP.value:
            # Non-log backups report repository info along with the copy.
            if backup_type == BackupTypeEnum.FULL_BACKUP.value:
                path_prefix = "full"
            else:
                path_prefix = "diff"
            # Data/meta live in per-job sub-directories named <prefix>_<job_id>.
            extend_info["dataPath"] = f"{path_prefix}_{self._job_id}"
            extend_info["metaPath"] = f"{path_prefix}_{self._job_id}"
            rep_rsp = [
                CopyInfoRepModel(id=self.data_rep.get('id'), repositoryType=self.data_rep.get("repositoryType"),
                                 isLocal=self.data_rep.get("isLocal"), protocol=0,
                                 remotePath=f"{self.data_rep.get('remotePath')}/{path_prefix}_{self._job_id}",
                                 remoteHost=self.data_rep.get("remoteHost"),
                                 extendInfo=self.data_rep.get('extendInfo'), ),
                CopyInfoRepModel(id=self.meta_rep.get('id'), repositoryType=self.meta_rep.get("repositoryType"),
                                 isLocal=self.meta_rep.get("isLocal"), protocol=0,
                                 remotePath=f"{self.meta_rep.get('remotePath')}/{path_prefix}_{self._job_id}",
                                 remoteHost=self.meta_rep.get("remoteHost"),
                                 extendInfo=self.meta_rep.get('extendInfo'))
            ]
            copy_info.repositories = rep_rsp
        else:
            # Log backup only records where its metadata was stored.
            extend_info["metaPath"] = f"{self.meta_rep.get('remotePath')}/meta/{self._job_id}"
        copy_info.extend_info = extend_info
        self.output_other_result(copy_info.dict(by_alias=True))
        log.info(f"End to query copy info, pid: {self._p_id} jobId:{self._job_id}")
        return True

    def init_db(self):
        """
        Open the sqlite copy-metadata index under the meta path, creating the
        parent directory when needed.
        :return: SqlServerIndexFile session wrapper
        :raises Exception: when the db session cannot be created
        """
        db_files = os.path.join(self._meta_path, "sqlite/copymetadata.sqlite")
        os.makedirs(os.path.dirname(db_files), exist_ok=True)
        try:
            return SqlServerIndexFile(f'sqlite:///{db_files}')
        except Exception as err:
            log.error(f'Create db session err, msg: {err}')
            raise Exception("Failed to create db session") from err

    def get_file_size(self, database_name):
        file_size = 0
        file_list = os.listdir(self._data_path)
        for file_mbr in file_list:
            if file_mbr.startswith(database_name):
                file_path = os.path.join(self._data_path, file_mbr)
                file_size += os.path.getsize(file_path)
        return file_size

    def save_sqlite_info(self, database_list):
        """
        Insert one index record per backed-up user database into the sqlite
        copy-metadata file.
        :param database_list: databases contained in this copy
        """
        m_db = self.init_db()
        # Non-default instances use different ReportServer/ReportServerTempDB names.
        report_list = SqlServerUtils.get_report_service_database_info(self.instance_name)
        for database in database_list:
            # Skip system databases and reporting-service databases.
            if database in SQLServerStrConstant.SYS_DATABASE or database in report_list:
                continue
            size = self.get_file_size(database)
            tmp_id = str(uuid.uuid4())
            m_db.insert_record(SqlServerIndex(UUID=tmp_id, NAME=database,
                                              TYPE="SQLServer-database",
                                              PARENT_PATH='/', PARENT_UUID='',
                                              SIZE=size))

    def abort_job(self):
        """
        Abort the job: report ABORTING, kill every backupAPI process whose
        command line contains this job id, then report ABORTED.
        """
        log.info(f"Start to abort job. pid: {self._p_id} jobId: {self._job_id}")
        self.prepare_user_infos()
        self.write_progress_to_file(SubJobStatusEnum.ABORTING.value, 5, self.error_logtail,
                                    SQLServerProgressFileType.ABORT)
        # List processes whose command line references this job id (Windows wmic).
        cmd_str = f"wmic process where \"commandline like '%{self._job_id}%'\" get caption,processId"
        code, std_out, std_err = self.sql_utils.get_command_result(cmd_str)
        log.info(f"code:{code},std_out:{std_out},std_err:{std_err}")
        for out_str in std_out.split('\n'):
            if out_str.startswith("backupAPI"):
                log.info(f"out_str:{out_str}")
                # wmic output columns are space-padded; collapse before splitting.
                out_str = self.single_space_using_regex(out_str)
                process_id = str(out_str).split(" ")[1].strip()
                kill_cmd = f"taskkill /pid {process_id} -f"
                self.sql_utils.get_command_result(kill_cmd)
        self.write_progress_to_file(SubJobStatusEnum.ABORTED.value, 100, self.error_logtail,
                                    SQLServerProgressFileType.ABORT)
        log.info(f"End to abort job. pid: {self._p_id} jobId: {self._job_id}")

    def query_scan_repositories(self):
        """
        Report the repository paths the framework must scan for this copy
        (E6000 hardware adaptation).
        :return: True on success, False when the repo paths cannot be resolved
        """
        log.info(f"Query scan repositories, job_id: {self._job_id}.")
        if not self.prepare_path_param(False, False):
            log.error(f"Failed to scan repositories info. pid: {self._p_id} jobId: {self._job_id}")
            return False
        if self.backup_type == BackupTypeEnum.LOG_BACKUP.value:
            # Meta area of the log repo: /Database_{resource_id}_LogRepository_su{num}/{ip}/meta/{job_id}
            meta_copy_path = self._meta_path
            if not os.path.exists(meta_copy_path):
                os.makedirs(meta_copy_path)
            # Data area of the log repo: /Database_{resource_id}_LogRepository_su{num}/{ip}/{job_id}
            data_path = self._log_path
            # /Database_{resource_id}_LogRepository_su{num}/{ip}
            save_path = os.path.dirname(self._log_path)
            data_repo = RepositoryPath(repositoryType=RepositoryDataTypeEnum.LOG_REPOSITORY.value, scanPath=data_path)
        else:
            # meta/Database_{resource_id}_InnerDirectory_su{num}/source_policy_{job_id}/Context_Global_MD/{ip}
            meta_copy_path = os.path.dirname(self._meta_path)
            # data/Database_{resource_id}_InnerDirectory_su{num}/source_policy_{job_id}/Context/{ip}
            data_path = self._data_path
            # meta/Database_{resource_id}_InnerDirectory_su{num}/source_policy_{job_id}/Context_Global_MD/{ip}
            save_path = os.path.dirname(self._meta_path)
            data_repo = RepositoryPath(repositoryType=RepositoryDataTypeEnum.DATA_REPOSITORY.value, scanPath=data_path)
        meta_copy_repo = RepositoryPath(repositoryType=RepositoryDataTypeEnum.META_REPOSITORY.value,
                                        scanPath=meta_copy_path)
        scan_repos = ScanRepositories(scanRepoList=[data_repo, meta_copy_repo], savePath=save_path)
        self.output_other_result(scan_repos.dict(by_alias=True))
        log.info(f"Query scan repos success, return result {scan_repos}, job id: {self._job_id}")
        return True
