#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import json
import math
import multiprocessing
import os.path
import stat
import time
import warnings
import xml.etree.ElementTree as ET
from collections import namedtuple
from concurrent.futures import ThreadPoolExecutor, as_completed
from multiprocessing import Pool
from threading import Lock, Thread

from common.cleaner import clear
from common.common import convert_timestamp_to_time
from common.common_models import SubJobDetails, LogDetail
from exchange import log
from exchange.backup.exchange_backup_sqlite import ExchangeSqliteService, SqliteInputParam
from exchange.commons.common import read_temp_file, mailbox_anonymization, report_job_details_by_rpc, get_file_size, \
    convert_iso_time_to_timestamp
from exchange.commons.const import ExchangeCode, ExchangeMailBosConstants, BodyErr, SubJobStatusEnum, \
    ExchangeReportDBLabel
from exchange.commons.exchange_exception import ExchangeInternalException
from exchange.ntlm.ntlm_request import get_post_response_stream
from oceanbase.common.const import LogLevel

warnings.filterwarnings('ignore')

# EWS endpoint on the local Exchange server
URL = "https://localhost/ews/exchange.asmx"

HEADER = {
    "Content-type": "text/xml; charset=utf-8",
    "Accept": "text/xml",
}

# XML namespace prefixes used when parsing EWS SOAP responses
XMLNS_S = "{http://schemas.xmlsoap.org/soap/envelope/}"
XMLNS_M = "{http://schemas.microsoft.com/exchange/services/2006/messages}"
XMLNS_T = "{http://schemas.microsoft.com/exchange/services/2006/types}"
XMLNS_E = "{http://schemas.microsoft.com/exchange/services/2006/errors}"


# FindItem response: email_item_list: mail items, next_offset: next paging offset,
# total: total count, last_in_range: whether the end of the range was reached
FindItemsResponse = namedtuple('FindItemsResponse', ['email_item_list', 'next_offset', 'total', 'last_in_range'])

# Mail item: id: ID, size: size, last_modified_time: last modified time,
# subject: subject, total_path: full path of the parent folder
EmailItemModel = namedtuple('EmailItemModel', ['id', 'size', 'last_modified_time', 'subject', 'total_path'])

# FindFolder response: folder_list: folders, next_offset: next paging offset,
# total: total count, last_in_range: whether the end of the range was reached
FindFoldersResponse = namedtuple('FindFoldersResponse',
                                 ['folder_list', 'next_offset', 'total', 'last_in_range'])

# Folder: id: ID, name: folder name, type: folder type ("root" for a root folder,
#     "common" for an ordinary folder), total_path: full folder path,
#     ext_parameters: JSON string of extras, {"distinguish_id": "built-in folder id (drafts/inbox/sentitems)"}
EmailFolderModel = namedtuple('EmailFolderModel', ['id', 'name', 'type', "total_path", "ext_parameters"])

# seconds to wait between retries
RETRY_WAIT_SECONDS = 60

# page size for paged queries
QUERY_PAGE_LIMIT = 100

# number of threads used when exporting mails
BATCH_READ_THREAD_NUM = 4

# bytes read per chunk when exporting a single mail
BATCH_READ_BYTES = 1024 * 1024 * 20

# mailbox folder types handled by backup/restore
MAILBOX_TYPE_LIST = ["drafts", "inbox", "sentitems"]

# mailbox root folder type marker
MAILBOX_ROOT = "root"

# EWS response codes mapped to body error codes
ERROR_CODE_DICT = {
    "ErrorNonExistentMailbox": BodyErr.RESOURCE_NOT_EXIST.value,
    "ErrorMailboxStoreUnavailable": BodyErr.BACKUP_FAIL_FOR_DATABASE_STATE_ABNORMAL.value,
    "ErrorImpersonateUserDenied": BodyErr.MAILBOX_ERROR_NO_IMPERSONATE_USER.value
}

# folder path separator
SPLIT_CHAR = "/"

# file-name prefix for mail item files
ITEM_PREFIX = "item"

# file-name prefix for folder files
FOLDER_PREFIX = "folder"

# sqlite extended-parameter key: metadata file of a mail/folder
SQLITE_EXTINFO_META_FILE = "meta_file"

# upper limit on a backed-up mail's size = 1434 * 1MB, roughly 1.4GB
SIZE_LIMIT = 1434 * 1024 * 1024


def string_to_bool(raw_str: str):
    """Case-insensitively map the string "true" to True and anything else to False."""
    return raw_str.lower() == "true"


def error_callback(error):
    """Log a failure raised inside a worker of the multiprocessing pool."""
    worker_name = multiprocessing.current_process().name
    log.error(f"sub process: {worker_name} exec failed, err: {error}", exc_info=True)


def merge_mail_list(total_meta_dict):
    """
    Flatten per-mailbox-type metadata into one mail list and one folder list.

    :param total_meta_dict: {mailbox_type: [item_meta_list, folder_meta_list]}
    :return: (mail_list, folder_list) where each entry is
        (mailbox_type, meta, index-within-its-mailbox-type)
    """
    # merge the data of all mailbox folder types
    mail_list = [
        (mailbox_type, mail, mail_index)
        for mailbox_type, meta_list in total_meta_dict.items()
        for mail_index, mail in enumerate(meta_list[0])
    ]
    folder_list = [
        (mailbox_type, folder, folder_index)
        for mailbox_type, meta_list in total_meta_dict.items()
        for folder_index, folder in enumerate(meta_list[1])
    ]
    log.info(f"mailbox backup merge data finish, mail size: {len(mail_list)}, "
                f"folder size: {len(folder_list)}")
    return mail_list, folder_list


def wait_for_retry(ex, retry_count):
    """Pause before the next retry; re-raise ex once retry_count exceeds 3."""
    max_retries = 3
    if retry_count <= max_retries:
        # wait one minute and let the caller retry (at most 3 times)
        time.sleep(RETRY_WAIT_SECONDS)
        return
    raise ex


class ExchangeMailBoxService:
    def __init__(self, **kwargs):
        """
        Build a mailbox backup/restore service from keyword arguments.

        Recognized kwargs (all read with dict.get): mailbox_name, username,
        password, data_hub, meta_hub, repo_path, pub_meta_path,
        allow_half_success (default True), job_id, pid, sub_job_id,
        process_num, windows_version, compatibility_mode.
        """
        self.mailbox_name = kwargs.get('mailbox_name')
        # anonymized copy of the mailbox name (presumably for log output — confirm)
        self.anonymization_mailbox = mailbox_anonymization(self.mailbox_name)
        self.username = kwargs.get('username')
        self.password = kwargs.get('password')
        self.data_hub = kwargs.get('data_hub')
        self.meta_hub = kwargs.get('meta_hub')
        self.repo_path = kwargs.get('repo_path')
        self.pub_meta_path = kwargs.get("pub_meta_path")
        self.allow_half_success = kwargs.get("allow_half_success", True)
        self.job_id = kwargs.get('job_id')
        self.pid = kwargs.get('pid')
        self.sub_job_id = kwargs.get('sub_job_id')
        # number of worker processes in the export pool
        self.process_num = kwargs.get('process_num')
        # caches folder ids resolved during restore
        self.restore_folder_id_dict = dict()
        self.windows_version = kwargs.get('windows_version')
        # backup phase: 0: scan.. 1: backup.. 2: end
        self.backup_state = 0
        self.scan_email_count = 0

        # restore phase: 0: restore.. 1: end
        self.restore_state = 0
        self.restore_email_success_count = 0
        self.restore_email_fail_count = 0
        self.compatibility_mode = kwargs.get('compatibility_mode')

    @staticmethod
    def convert_folder_name(raw_str):
        return raw_str.replace("\\", "~2").replace("/", "~1").replace("~", "~0")

    @staticmethod
    def is_sub_path(email_path: str, restore_paths: list[str]):
        for path in restore_paths:
            if email_path.startswith(path):
                return True
        return False

    @staticmethod
    def check_find_item_response_message(find_item_response_message, response_class, extra_msg):
        """
        Validate an EWS response message element.

        Raises ExchangeInternalException unless ResponseCode is "NoError" and
        response_class is "Success"; the EWS ResponseCode is mapped to a body
        error code via ERROR_CODE_DICT when available.
        """
        code_node = ExchangeMailBoxService.get_sub_item(find_item_response_message, [f"{XMLNS_M}ResponseCode"])
        code_text = code_node.text if code_node is not None else None
        if code_text != "NoError":
            error_message = code_text if code_text is not None else ""
            log.error(f"find_item_response_message not valid, response_code: {error_message}")
            raise ExchangeInternalException(
                f"check_find_item_response_message response_code: {error_message}",
                ERROR_CODE_DICT.get(error_message, BodyErr.ERROR_INTERNAL.value))
        if response_class != "Success":
            log.error(f"find_item_response_message not valid, response_class:{response_class}")
            raise ExchangeInternalException(f"check_find_item_response_message response_class: {response_class}")

    @staticmethod
    def check_get_item_exist(get_item_response_message):
        """
        Check whether a GetItemResponseMessage reports an existing item.

        :param get_item_response_message: GetItemResponseMessage XML element
        :return: True when ResponseCode is "NoError" and ResponseClass is
            "Success"; False otherwise (including "does not exist" codes)
        """
        response_class = get_item_response_message.get("ResponseClass")
        response_code = ExchangeMailBoxService.get_sub_item(get_item_response_message, [f"{XMLNS_M}ResponseCode"])
        if response_code is None or response_code.text != "NoError":
            if response_code is not None:
                error_message = "" if response_code.text is None else response_code.text
                # BUGFIX: was `!= A or != B`, which is always true, so the two
                # expected "item/mailbox gone" codes were logged as errors too
                if error_message not in ("ErrorNonExistentMailbox", "ErrorCannotUseFolderIdForItemId"):
                    log.error(f"get_item_response_message not valid, response_code: {error_message}")
            # BUGFIX: a missing ResponseCode node previously fell through and
            # could report success; treat it as "item does not exist"
            return False
        if response_class is None or response_class != "Success":
            # BUGFIX: message was truncated; include the offending ResponseClass
            log.error(f"get_item_response_message not valid, response_class: {response_class}")
            return False
        return True

    @staticmethod
    def check_response_http_code(response, root, extra_msg):
        """
        Validate the HTTP status of an EWS response.

        401 -> login failure; 500 -> the SOAP Fault's ResponseCode is mapped
        through ERROR_CODE_DICT; any other non-200 -> generic internal error.
        """
        status = response.code
        if status == 401:
            raise ExchangeInternalException("unauthorized", BodyErr.LOGIN_FAILED.value)
        if status == 500:
            fault_code = ExchangeMailBoxService.get_sub_item(
                root,
                [f"{XMLNS_S}Body", f"{XMLNS_S}Fault", "detail", f"{XMLNS_E}ResponseCode"])
            error_message = fault_code.text
            log.error(f"check_response_http_code 500, response code {status}, message: {error_message}")
            raise ExchangeInternalException(
                f"unexpected response code {status}, message: {error_message}",
                ERROR_CODE_DICT.get(error_message, BodyErr.ERROR_INTERNAL.value))
        if status != 200:
            log.error(f"check_response_http_code not 200, response code {status}")
            raise ExchangeInternalException(f"unexpected response code {status}")

    @staticmethod
    def get_sub_item(parent, sub_item_tag_list: list[str], extra_msg=""):
        """
        获得xml子对象
        :param parent: 父
        :param sub_item_tag_list: tag列表
        :param extra_msg: 额外日志
        :return:
        """
        iter_item = parent
        for sub_item_tag in sub_item_tag_list:
            iter_item = iter_item.find(sub_item_tag)
            if iter_item is None:
                raise ExchangeInternalException(f"sub_item_tag is None")
        return iter_item

    @staticmethod
    def write_to_file(email_item_meta_dict, file_name, parent_path):
        """
        Serialize a metadata dict to file_name as JSON, creating parent_path
        if necessary. An existing file is overwritten.

        BUGFIX: the old exists()/O_EXCL dance was both racy (TOCTOU) and wrong:
        O_EXCL without O_CREAT has no effect, and opening an existing file
        without O_TRUNC leaves stale trailing bytes when the new JSON is
        shorter than the old content. O_CREAT|O_TRUNC implements
        "create or overwrite" atomically.
        """
        os.makedirs(parent_path, exist_ok=True)
        log.debug(f"save backup copy file_name:{file_name}")
        flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
        # owner read/write, world read (use | throughout: + on mode bits was a smell)
        modes = stat.S_IWUSR | stat.S_IRUSR | stat.S_IROTH
        with os.fdopen(os.open(file_name, flags, modes), 'w') as file_f:
            json.dump(email_item_meta_dict, file_f)

    @staticmethod
    def exec_find_items_get_restriction(greater_than_time, item_id):
        if greater_than_time is not None and greater_than_time != "":
            restriction = """<m:Restriction>
                    <t:And>
                        <t:IsGreaterThan>
                            <t:FieldURI FieldURI="item:LastModifiedTime"/>
                            <t:FieldURIOrConstant>
                                <t:Constant Value="{}"/>
                            </t:FieldURIOrConstant>
                        </t:IsGreaterThan>
                    </t:And>
                </m:Restriction>""".format(greater_than_time)
        elif item_id is not None and item_id != "":
            restriction = """<m:Restriction>
                <t:And>
                    <t:IsEqualTo>
                        <t:FieldURI FieldURI="item:ItemId"/>
                        <t:FieldURIOrConstant>
                            <t:Constant Value="{}"/>
                        </t:FieldURIOrConstant>
                    </t:IsEqualTo>
                </t:And>
            </m:Restriction>
            """.format(item_id)
        else:
            restriction = ""
        return restriction

    @staticmethod
    def exec_export_item(file_name, response_stream):
        """
        Stream an exported mail item from the EWS response into file_name,
        overwriting any existing file.

        BUGFIX 1: decoding each fixed-size byte chunk independently raised
        UnicodeDecodeError whenever a multi-byte UTF-8 character straddled a
        chunk boundary; an incremental decoder carries the partial character
        over to the next chunk.
        BUGFIX 2: O_EXCL without O_CREAT has no effect and an existing file
        was opened without O_TRUNC, leaving stale trailing bytes when the new
        content is shorter; O_CREAT|O_TRUNC is the correct create-or-overwrite.
        """
        import codecs
        flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
        modes = stat.S_IWUSR | stat.S_IRUSR | stat.S_IROTH
        decoder = codecs.getincrementaldecoder('utf-8')()
        with os.fdopen(os.open(file_name, flags, modes), 'w') as g:
            while True:
                chunk = response_stream.read(BATCH_READ_BYTES)
                # final=True flushes the decoder and surfaces a truncated
                # trailing character as an explicit error
                text = decoder.decode(chunk, final=not chunk)
                if text:
                    g.write(text)
                if not chunk:
                    break

    def exec_get_folder(self, folder_dir: EmailFolderModel) -> EmailFolderModel:
        """
        Fetch one folder's details via the EWS GetFolder SOAP operation.

        Root folders are addressed by DistinguishedFolderId (their built-in
        name), other folders by FolderId.

        :param folder_dir: folder to resolve
        :return: EmailFolderModel holding the server id and display name;
            ext_parameters carries the distinguished id for root folders
        :raises ExchangeInternalException: on HTTP/EWS errors or missing fields
        """
        # build the SOAP request body
        post_body = \
            """<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope
xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"
xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
    <soap:Header>
        <t:RequestServerVersion Version="Exchange2010"/>
        <t:ExchangeImpersonation>
            <t:ConnectingSID>
                <t:PrimarySmtpAddress><![CDATA[{}]]></t:PrimarySmtpAddress>
            </t:ConnectingSID>
        </t:ExchangeImpersonation>
    </soap:Header>
    <soap:Body>
        <m:GetFolder>
            <m:FolderShape>
                <t:BaseShape>Default</t:BaseShape>
                <t:AdditionalProperties>
                    <t:FieldURI FieldURI="folder:FolderClass" />
                    <t:FieldURI FieldURI="folder:DisplayName" />
                </t:AdditionalProperties>
            </m:FolderShape>
            <m:FolderIds>
              <t:{} Id="{}"/>
            </m:FolderIds>
        </m:GetFolder>
    </soap:Body>
</soap:Envelope>
            """
        if folder_dir.type == "root":
            mailbox_id_type, mailbox_id_value = "DistinguishedFolderId", folder_dir.name
            log.debug(f"exec_get_folders: folder_name:{folder_dir.name}")
        else:
            mailbox_id_type = "FolderId"
            mailbox_id_value = folder_dir.id
            log.debug(f"exec_get_folders: folder_id:{folder_dir.id}")
        post_body = post_body.format(self.mailbox_name, mailbox_id_type, mailbox_id_value)
        # send the request
        response_stream = get_post_response_stream(URL, post_body, self.username, self.password,
                                                   windows_version=self.windows_version,
                                                   compatibility_mode=self.compatibility_mode)
        # parse the response and validate the HTTP status code
        content_str = response_stream.read().decode()
        root = ET.XML(content_str) if content_str else None
        ExchangeMailBoxService.check_response_http_code(response_stream, root, post_body)
        # parse the response body and extract the folder data
        item_response = self.get_sub_item(root, [f"{XMLNS_S}Body", f"{XMLNS_M}GetFolderResponse",
                                                 f"{XMLNS_M}ResponseMessages", f"{XMLNS_M}GetFolderResponseMessage"],
                                          post_body)
        response_class = item_response.get("ResponseClass")
        self.check_find_item_response_message(item_response, response_class, "")
        folder_model = self.get_sub_item(item_response, [f"{XMLNS_M}Folders", f"{XMLNS_T}Folder"])
        folder_id = self.get_sub_item(folder_model, [f"{XMLNS_T}FolderId"]).get("Id")
        if folder_id is None:
            log.warn("get no item when exec_find_items")
            raise ExchangeInternalException(f"exec_find_folders get no item when exec_find_items")
        folder_display_name = self.get_sub_item(folder_model, [f"{XMLNS_T}DisplayName"]).text
        if folder_dir.type == "root":
            # root folders keep their distinguished (built-in) id in ext_parameters
            distinguish_id = folder_dir.name
        else:
            distinguish_id = ""
        return EmailFolderModel._make(
            [folder_id, folder_display_name, "common", "", json.dumps({"distinguish_id": distinguish_id})])

    def exec_find_folders(self, pre_dir: EmailFolderModel, limit: int, offset: int,
                          folder_name: str = None) -> FindFoldersResponse:
        """
        Find one page of direct child folders of pre_dir via the EWS
        FindFolder operation (shallow traversal).

        :param pre_dir: parent folder (root folders addressed by distinguished name)
        :param limit: maximum entries per page
        :param offset: paging offset from the beginning
        :param folder_name: optional exact DisplayName to filter by
        :return: FindFoldersResponse for this page
        :raises ExchangeInternalException: on HTTP/EWS errors or missing fields
        """
        # build the SOAP request body
        post_body = \
            """<?xml version="1.0" encoding="utf-8"?>
    <soap:Envelope
            xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
            xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"
            xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
        <soap:Header>
            <t:RequestServerVersion Version="Exchange2010"/>
            <t:ExchangeImpersonation>
                <t:ConnectingSID>
                    <t:PrimarySmtpAddress><![CDATA[{}]]></t:PrimarySmtpAddress>
                </t:ConnectingSID>
            </t:ExchangeImpersonation>
        </soap:Header>
        <soap:Body>
            <m:FindFolder Traversal="Shallow">
                <m:FolderShape>
                    <t:BaseShape>Default</t:BaseShape>
                    <t:AdditionalProperties>
                        <t:FieldURI FieldURI="folder:FolderClass" />
                        <t:FieldURI FieldURI="folder:DisplayName" />
                    </t:AdditionalProperties>
                </m:FolderShape>
                <m:IndexedPageFolderView MaxEntriesReturned="{}" Offset="{}" BasePoint="Beginning" />
                {}
                <m:ParentFolderIds>
                    <t:{} Id="{}"/>
                </m:ParentFolderIds>
            </m:FindFolder>
        </soap:Body>
    </soap:Envelope>
            """
        if pre_dir.type == "root":
            mailbox_id_type, mailbox_id_value = "DistinguishedFolderId", pre_dir.name
            log.debug(f"exec_find_folders: folder_name:{pre_dir.name}, limit:{limit}, offset:{offset}")
        else:
            mailbox_id_type = "FolderId"
            mailbox_id_value = pre_dir.id
            log.debug(f"exec_find_folders: folder_id:{pre_dir.id}, limit:{limit}, offset:{offset}")
        if folder_name:
            # restrict the search to folders whose DisplayName matches exactly
            restriction = """<m:Restriction>
                <t:And>
                    <t:IsEqualTo>
                        <t:FieldURI FieldURI="folder:DisplayName"/>
                        <t:FieldURIOrConstant>
                            <t:Constant Value="{}"/>
                        </t:FieldURIOrConstant>
                    </t:IsEqualTo>
                </t:And>
            </m:Restriction>
            """.format(folder_name)
        else:
            restriction = ""
        post_body = post_body.format(self.mailbox_name, limit, offset, restriction, mailbox_id_type, mailbox_id_value)
        # send the request
        response_stream = get_post_response_stream(URL, post_body, self.username, self.password,
                                                   windows_version=self.windows_version,
                                                   compatibility_mode=self.compatibility_mode)
        # parse the response and validate the HTTP status code
        content_str = response_stream.read().decode()
        root = ET.XML(content_str) if content_str else None
        ExchangeMailBoxService.check_response_http_code(response_stream, root, post_body)
        # parse the response body and extract the folder data
        folder_list = []
        item_response = self.get_sub_item(root, [f"{XMLNS_S}Body", f"{XMLNS_M}FindFolderResponse"], post_body)
        find_item_resp = self.get_sub_item(item_response, [f"{XMLNS_M}ResponseMessages",
                                                           f"{XMLNS_M}FindFolderResponseMessage"], post_body)
        response_class = find_item_resp.get("ResponseClass")
        self.check_find_item_response_message(find_item_resp, response_class, post_body)
        root_folder = self.get_sub_item(find_item_resp, [f"{XMLNS_M}RootFolder"])
        # build the folder models and return them with the paging state
        for folder_model in self.get_sub_item(root_folder, [f"{XMLNS_T}Folders"]):
            folder_id = self.get_sub_item(folder_model, [f"{XMLNS_T}FolderId"]).get("Id")
            if folder_id is None:
                log.warn("get no item when exec_find_items")
                raise ExchangeInternalException(f"exec_find_folders get no item when exec_find_items")
            folder_display_name = self.get_sub_item(folder_model, [f"{XMLNS_T}DisplayName"]).text
            folder_list.append(EmailFolderModel._make(
                [folder_id, folder_display_name, "common", "", json.dumps({"distinguish_id": ""})]))
        return FindFoldersResponse._make([folder_list, int(root_folder.get("IndexedPagingOffset")),
                                          int(root_folder.get("TotalItemsInView")),
                                          string_to_bool(root_folder.get("IncludesLastItemInRange"))])

    def exec_get_item(self, item_id: str):
        """
        Fetch one mail item's metadata (size, last modified time, subject)
        via the EWS GetItem operation.

        (The previous docstring was copied from exec_find_items and did not
        describe this method.)

        :param item_id: EWS id of the mail item
        :return: EmailItemModel on success, or None when the item/mailbox
            does not exist (per check_get_item_exist)
        """
        # build the SOAP request body
        post_body = \
            """<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope             
xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"             
xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"             
xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
    <soap:Header>
        <t:RequestServerVersion Version="Exchange2010"/>
        <t:ExchangeImpersonation>
            <t:ConnectingSID>
                <t:PrimarySmtpAddress><![CDATA[{}]]></t:PrimarySmtpAddress>
            </t:ConnectingSID>
        </t:ExchangeImpersonation>
    </soap:Header>
    <soap:Body>
        <m:GetItem>
            <m:ItemShape>
                <t:BaseShape>IdOnly</t:BaseShape>
                <t:AdditionalProperties>
                    <t:FieldURI FieldURI="item:LastModifiedTime" />
                    <t:FieldURI FieldURI="item:Size" />
                    <t:FieldURI FieldURI="item:Subject" />
                </t:AdditionalProperties>                
            </m:ItemShape>
            <m:ItemIds>
                <t:ItemId Id="{}"/>
            </m:ItemIds>
        </m:GetItem>
    </soap:Body>
</soap:Envelope>
            """
        post_body = post_body.format(self.mailbox_name, item_id)
        # send the request
        response_stream = get_post_response_stream(URL, post_body, self.username, self.password,
                                                   windows_version=self.windows_version,
                                                   compatibility_mode=self.compatibility_mode)
        # parse the response and validate the HTTP status code
        content_str = response_stream.read().decode()
        root = ET.XML(content_str) if content_str else None
        ExchangeMailBoxService.check_response_http_code(response_stream, root, post_body)
        # parse the response body and extract the mail data
        item_response = ExchangeMailBoxService.get_sub_item(root, [f"{XMLNS_S}Body", f"{XMLNS_M}GetItemResponse"])
        find_item_resp = ExchangeMailBoxService.get_sub_item(item_response,
                                                             [f"{XMLNS_M}ResponseMessages",
                                                              f"{XMLNS_M}GetItemResponseMessage"], post_body)
        # check the response code; a "does not exist" code yields None
        if not ExchangeMailBoxService.check_get_item_exist(find_item_resp):
            log.warn("get no item when exec_find_items")
            return None
        item_message = ExchangeMailBoxService.get_sub_item(find_item_resp, [f"{XMLNS_M}Items", f"{XMLNS_T}Message"])
        # build the mail model and return it
        item_id = ExchangeMailBoxService.get_sub_item(item_message, [f"{XMLNS_T}ItemId"]).get("Id")
        item_size = ExchangeMailBoxService.get_sub_item(item_message, [f"{XMLNS_T}Size"]).text
        item_modified_time = ExchangeMailBoxService.get_sub_item(item_message, [f"{XMLNS_T}LastModifiedTime"]).text
        try:
            item_subject = ExchangeMailBoxService.get_sub_item(item_message, [f"{XMLNS_T}Subject"]).text
        except ExchangeInternalException:
            # the Subject element is absent for subject-less mails
            item_subject = ""
        return EmailItemModel._make([item_id, int(item_size), item_modified_time, item_subject, ""])

    def exec_find_items(self, pre_dir: EmailFolderModel, greater_than_time: str, limit: int,
                        offset: int, item_id: str) -> FindItemsResponse:
        """
        Find one page of mail items in a folder via the EWS FindItem operation
        (shallow traversal).

        Note: at most one of greater_than_time and item_id takes effect;
        greater_than_time has priority.

        :param pre_dir: folder to search
        :param greater_than_time: lower bound on LastModifiedTime, e.g. 2024-01-25T00:00:00Z
        :param limit: maximum entries per page
        :param offset: paging offset from the beginning
        :param item_id: mail item id to filter by
        :return: FindItemsResponse for this page
        """
        # build the SOAP request body
        post_body = \
            """<?xml version="1.0" encoding="utf-8"?>
    <soap:Envelope
            xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
            xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"
            xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
        <soap:Header>
            <t:RequestServerVersion Version="Exchange2010"/>
            <t:ExchangeImpersonation>
                <t:ConnectingSID>
                    <t:PrimarySmtpAddress><![CDATA[{}]]></t:PrimarySmtpAddress>
                </t:ConnectingSID>
            </t:ExchangeImpersonation>
        </soap:Header>
        <soap:Body>
            <m:FindItem Traversal="Shallow">
                <m:ItemShape>
                    <t:BaseShape>IdOnly</t:BaseShape>
                    <t:AdditionalProperties>
                        <t:FieldURI FieldURI="item:LastModifiedTime" />
                        <t:FieldURI FieldURI="item:Size" />
                        <t:FieldURI FieldURI="item:Subject" />
                    </t:AdditionalProperties>
                </m:ItemShape>
                <m:IndexedPageItemView MaxEntriesReturned="{:d}" Offset="{:d}" BasePoint="Beginning" />
                {}
                <m:ParentFolderIds>
                    <t:{} Id="{}"/>
                </m:ParentFolderIds>
            </m:FindItem>
        </soap:Body>
    </soap:Envelope>
            """

        restriction = self.exec_find_items_get_restriction(greater_than_time, item_id)
        if pre_dir.type == "root":
            mailbox_id_type = "DistinguishedFolderId"
            mailbox_id_value = pre_dir.name
            log.debug(f"exec_find_items: name:{pre_dir.name}, time:{greater_than_time}, lim:{limit}, off:{offset}")
        else:
            mailbox_id_type = "FolderId"
            mailbox_id_value = pre_dir.id
            log.debug(f"exec_find_items: id:{pre_dir.id}, time:{greater_than_time}, lim:{limit}, off:{offset}")
        post_body = post_body.format(self.mailbox_name, limit, offset, restriction, mailbox_id_type, mailbox_id_value)
        # send the request
        response_stream = get_post_response_stream(URL, post_body, self.username, self.password,
                                                   windows_version=self.windows_version,
                                                   compatibility_mode=self.compatibility_mode)
        # parse the response and validate the HTTP status code
        content_str = response_stream.read().decode()
        root = ET.XML(content_str) if content_str else None
        ExchangeMailBoxService.check_response_http_code(response_stream, root, post_body)
        # parse the response body and extract the mail data
        email_list = []
        item_response = ExchangeMailBoxService.get_sub_item(root, [f"{XMLNS_S}Body", f"{XMLNS_M}FindItemResponse"])
        find_item_resp = ExchangeMailBoxService.get_sub_item(item_response,
                                                             [f"{XMLNS_M}ResponseMessages",
                                                              f"{XMLNS_M}FindItemResponseMessage"], post_body)
        response_class = find_item_resp.get("ResponseClass")
        ExchangeMailBoxService.check_find_item_response_message(find_item_resp, response_class, post_body)
        root_folder = ExchangeMailBoxService.get_sub_item(find_item_resp, [f"{XMLNS_M}RootFolder"])
        # build the mail models and return them with the paging state
        for item_model in ExchangeMailBoxService.get_sub_item(root_folder, [f"{XMLNS_T}Items"]):
            item_id = ExchangeMailBoxService.get_sub_item(item_model, [f"{XMLNS_T}ItemId"]).get("Id")
            if item_id is None:
                log.warn("get no item when exec_find_items")
            item_size = ExchangeMailBoxService.get_sub_item(item_model, [f"{XMLNS_T}Size"]).text
            item_modified_time = ExchangeMailBoxService.get_sub_item(item_model, [f"{XMLNS_T}LastModifiedTime"]).text
            try:
                item_subject = ExchangeMailBoxService.get_sub_item(item_model, [f"{XMLNS_T}Subject"]).text
            except ExchangeInternalException:
                # the Subject element is absent for subject-less mails
                item_subject = ""
            email_list.append(EmailItemModel._make([item_id, int(item_size), item_modified_time, item_subject, ""]))
        next_offset = root_folder.get("IndexedPagingOffset")
        total = root_folder.get("TotalItemsInView")
        last_in_range = root_folder.get("IncludesLastItemInRange")
        return FindItemsResponse._make([email_list, int(next_offset), int(total), string_to_bool(last_in_range)])

    def find_items(self, pre_dir: EmailFolderModel, greater_than_time_str: str) -> list[EmailItemModel]:
        """
        Collect every mail item under pre_dir by paging through exec_find_items.

        :param greater_than_time_str: lower bound on LastModifiedTime, e.g. 2024-01-25T00:00:00Z
        :param pre_dir: folder to search
        :return: all EmailItemModel entries across pages
        """
        collected: list[EmailItemModel] = []
        next_offset = 0
        while True:
            page = self.exec_find_items(pre_dir, greater_than_time_str, QUERY_PAGE_LIMIT, next_offset, "")
            collected.extend(page.email_item_list)
            next_offset = page.next_offset
            if page.last_in_range:
                return collected

    def find_folders(self, pre_dir: EmailFolderModel, folder_name: str = None) -> list[EmailFolderModel]:
        """
        Collect every child folder of pre_dir by paging through exec_find_folders.

        :param pre_dir: parent folder
        :param folder_name: optional exact display name to filter by
        :return: all EmailFolderModel entries across pages
        """
        folders: list[EmailFolderModel] = []
        next_offset = 0
        while True:
            page = self.exec_find_folders(pre_dir, QUERY_PAGE_LIMIT, next_offset, folder_name)
            folders.extend(page.folder_list)
            next_offset = page.next_offset
            if page.last_in_range:
                return folders

    def get_folder(self, folder: EmailFolderModel) -> EmailFolderModel:
        """
        Resolve one folder's details (server id, display name) via GetFolder.

        (The previous docstring wrongly said "find mails / returns mail-ID list".)

        :param folder: folder to resolve
        :return: the resolved EmailFolderModel
        """
        return self.exec_get_folder(folder)

    def backup_items(self, **kwargs) -> set[int]:
        """
        Back up the mailbox's mail items.

        Scans each mailbox folder type, then exports mails with a process pool
        while one extra pool process persists metadata and the sqlite data.

        :param kwargs: greater_than_time_str: incremental lower bound on
            LastModifiedTime; process_shared_dict: cross-process shared state
            holding the success/failure counter lists
        :return: set of error codes gathered while scanning and exporting
        :raises ExchangeInternalException: when saving metadata/sqlite fails
        """
        gt_time_str = kwargs.get('greater_than_time_str', None)
        process_shared_dict = kwargs.get("process_shared_dict")
        backup_success_count_list = process_shared_dict.get('backup_success_count_list')
        self.create_report_backup_progress_thread(backup_success_count_list)
        total_meta_dict = {}
        # error codes collected across folders/worker processes
        error_code_set = set({})
        for mailbox_type in MAILBOX_TYPE_LIST:
            folder_meta_list, item_meta_list = self.scan_folders(error_code_set, gt_time_str, mailbox_type)
            if not self.allow_half_success and error_code_set:
                self.backup_state = 2
                return error_code_set
            log.info(f"item size:{len(item_meta_list)}, "
                        f"folder size:{len(folder_meta_list)}, mailbox_type:{mailbox_type}")
            self.scan_email_count += len(item_meta_list)
            total_meta_dict.update({mailbox_type: [item_meta_list, folder_meta_list]})
        self.report_scan_finish()
        self.backup_state = 1
        # create the process pool (+1 process for the metadata/sqlite saver below)
        pool = Pool(processes=self.process_num + 1)
        error_code_list = None
        save_meta_and_sqlite_result = None
        try:
            # one child process saves the metadata and sqlite data
            save_meta_and_sqlite_result = pool.apply_async(self.save_meta_data_and_sqlite, (total_meta_dict,),
                                                           error_callback=error_callback)
            mail_list, folder_list = merge_mail_list(total_meta_dict)
            # only start exporting when there is at least one mail to back up
            log.info(f"start mailbox backup, mail total size: {len(mail_list)}, process num: {self.process_num}")
            if len(mail_list) > 0:
                error_code_list = self.exec_multi_process_backup(mail_list, pool, process_shared_dict)
        except Exception as exception:
            pool.terminate()
            log.error(f"mailbox backup occurred error: {exception}", exc_info=True)
            raise exception
        finally:
            # close the pool and wait for the workers to finish
            pool.close()
            pool.join()
        if save_meta_and_sqlite_result is not None and not save_meta_and_sqlite_result.get():
            raise ExchangeInternalException("save_meta_and_sqlite failed!")
        if error_code_list is not None:
            for error_code_set_export in error_code_list.get():
                error_code_set.update(error_code_set_export)
        if not self.allow_half_success and error_code_set:
            self.backup_state = 2
            return error_code_set

        self.backup_state = 2
        if error_code_set:
            backup_failed_count_list = process_shared_dict.get('backup_failed_count_list')
            self.report_backup_partial_success(backup_success_count_list, backup_failed_count_list)
        else:
            self.report_backup_success(backup_success_count_list)
        return error_code_set

    def exec_multi_process_backup(self, mail_list, pool, process_shared_dict):
        """
        Fan the mail list out across the process pool for export.
        :param mail_list: all mails to back up
        :param pool: shared multiprocessing pool
        :param process_shared_dict: shared state visible to every worker process
        :return: AsyncResult holding one error-code set per worker
        """
        # Chunk size rounded up so at most self.process_num chunks are produced.
        chunk_size = math.ceil(len(mail_list) / self.process_num)
        chunks = [mail_list[offset:offset + chunk_size]
                  for offset in range(0, len(mail_list), chunk_size)]
        # One task per chunk: (mail chunk, worker index, shared dict).
        task_args = [(chunk, worker_id, process_shared_dict)
                     for worker_id, chunk in enumerate(chunks)]
        return pool.starmap_async(self.export_entry, task_args, error_callback=error_callback)

    def save_meta_data_and_sqlite(self, total_meta_dict):
        """
        Persist scanned metadata to per-mail/per-folder files and to the sqlite index.
        Runs inside a dedicated worker process.
        :param total_meta_dict: {mailbox_type: [item_meta_list, folder_meta_list]}
        :return: True on success, False when anything failed (error is logged)
        """
        try:
            for mailbox_type, (item_metas, folder_metas) in total_meta_dict.items():
                started_at = time.time()
                target_dir = os.path.join(self.meta_hub, mailbox_type)
                # One JSON metadata file per mail item.
                for idx, item in enumerate(item_metas):
                    payload = {
                        "id": item.id,
                        "size": item.size,
                        "subject": item.subject,
                        "last_modified_time": item.last_modified_time,
                        "total_path": item.total_path,
                    }
                    self.write_to_file(payload, os.path.join(target_dir, ITEM_PREFIX + str(idx)), target_dir)
                # One JSON metadata file per folder.
                for idx, folder in enumerate(folder_metas):
                    payload = {
                        "id": folder.id,
                        "name": folder.name,
                        "type": folder.type,
                        "total_path": folder.total_path,
                    }
                    self.write_to_file(payload, os.path.join(target_dir, FOLDER_PREFIX + str(idx)), target_dir)
                self.write_data_to_sqlite(mailbox_type, folder_metas, item_metas)
                worker_name = multiprocessing.current_process().name
                log.info(f"process {worker_name} finish write data to sqlite, type: {mailbox_type}, "
                         f"time: {time.time() - started_at}")
            return True
        except Exception as err:
            log.error(f"save_meta_data_and_sqlite failed: {err}", exc_info=True)
            return False

    def scan_folders(self, error_code_set, gt_time_str, mailbox_type):
        """
        Breadth-first scan of one mailbox root, collecting mail and folder metadata.
        :param error_code_set: accumulates error codes from failed folder queries
        :param gt_time_str: lower time bound for mail modification time (may be None)
        :param mailbox_type: mailbox root folder type
        :return: (all folder metas, all mail metas)
        """
        all_items = []
        all_folders = []
        # Level-order traversal: each pass expands every folder found by the previous one.
        current_level = [EmailFolderModel._make(["", mailbox_type, "root", "", ""])]
        while current_level:
            next_level = []
            for folder in current_level:
                code, found_items, found_folders = self.get_email_and_folder(gt_time_str, folder)
                if code != ExchangeCode.SUCCESS.value:
                    error_code_set.add(code)
                all_items.extend(found_items)
                all_folders.extend(found_folders)
                next_level.extend(found_folders)
            current_level = next_level
        return all_folders, all_items

    def get_email_and_folder(self, gt_time_str, pre_dir) -> tuple[int, list[EmailItemModel], list[EmailFolderModel]]:
        """
        Query the mails and sub-folders directly under one folder.
        :param gt_time_str: lower time bound, e.g. 2024-01-25T00:00:00Z
        :param pre_dir: the folder to query
        :return: (error code, mail meta list, sub-folder meta list); lists are empty on failure
        """
        try:
            return self.get_mail_list_with_retry(gt_time_str, pre_dir)
        except ExchangeInternalException as ex:
            # Retries exhausted: surface the internal error code to the caller.
            log.error(f"{pre_dir} get item or get folder failed", exc_info=True)
            return ex.code, [], []
        except Exception as ex:
            # Unexpected failure: map to the generic internal error code.
            log.error(f"{pre_dir} get item or get folder failed", exc_info=True)
            return BodyErr.ERROR_INTERNAL.value, [], []

    def get_mail_list_with_retry(self, gt_time_str, pre_dir):
        """
        Query one folder's mails and sub-folders, retrying on failure until
        wait_for_retry either sleeps and returns or gives up and raises.
        :param gt_time_str: lower time bound for mail modification time
        :param pre_dir: folder being expanded
        :return: (ExchangeCode.SUCCESS.value, mail metas, sub-folder metas)
        """
        attempt = 0
        while True:
            try:
                item_metas = [
                    EmailItemModel._make([item.id, item.size, item.last_modified_time,
                                          item.subject, pre_dir.total_path])
                    for item in self.find_items(pre_dir, gt_time_str)
                ]
                folder_metas = [
                    EmailFolderModel._make(
                        [folder.id, folder.name, folder.type,
                         pre_dir.total_path + SPLIT_CHAR + self.convert_folder_name(folder.name), ""])
                    for folder in self.find_folders(pre_dir)
                ]
                return ExchangeCode.SUCCESS.value, item_metas, folder_metas
            except Exception as ex:
                log.error(f"get_email_and_folder failed, pre dir: {pre_dir}, err: {ex}, and retry: {attempt}.",
                          exc_info=True)
                attempt += 1
                wait_for_retry(ex, attempt)

    def write_data_to_sqlite(self, mailbox_type, total_email_folder_meta_list, total_email_item_meta_list):
        """
        Write folder and mail metadata of one mailbox root into the sqlite index.
        :param mailbox_type: root folder type (e.g. drafts/inbox/sentitems)
        :param total_email_folder_meta_list: all folders under this root
        :param total_email_item_meta_list: all mails under this root
        """
        path_prefix = SPLIT_CHAR + self.mailbox_name
        # Row for the root folder itself, flushed to sqlite immediately.
        temp_info = SqliteInputParam(data_name=mailbox_type, data_type="Folder",
                                     data_parent_path=path_prefix,
                                     create_time="",
                                     modify_time="",
                                     data_size=0,
                                     extend_info=json.dumps({SQLITE_EXTINFO_META_FILE: mailbox_type}))
        ExchangeSqliteService.write_metadata_to_sqlite_file(self.pub_meta_path, temp_info, True)
        # Folder rows, buffered and flushed in batches (see write_batch_metadata_to_sqlite_file).
        sql_data = []
        for index, email_folder_meta in enumerate(total_email_folder_meta_list):
            # sqlite paths are rooted with the split char; derive the parent path.
            if email_folder_meta.total_path.rfind(SPLIT_CHAR) == -1:
                parent_path = ""
            else:
                parent_path = email_folder_meta.total_path[0:email_folder_meta.total_path.rfind(SPLIT_CHAR)]
            sqlite_path = path_prefix + SPLIT_CHAR + mailbox_type + parent_path
            # Must match the FOLDER_PREFIX file names written by save_meta_data_and_sqlite.
            meta_file = mailbox_type + SPLIT_CHAR + FOLDER_PREFIX + str(index)
            temp_info = SqliteInputParam(data_name=email_folder_meta.name, data_type="Folder",
                                         data_parent_path=sqlite_path,
                                         create_time="",
                                         modify_time="",
                                         data_size=0,
                                         extend_info=json.dumps({SQLITE_EXTINFO_META_FILE: meta_file}))
            sql_data.append(temp_info)
            self.write_batch_metadata_to_sqlite_file(sql_data)
        self.write_batch_metadata_to_sqlite_file(sql_data, True)
        # Mail rows, same batching; sql_data is empty again after the forced flush above.
        for index, email_item_meta in enumerate(total_email_item_meta_list):
            # sqlite paths are rooted with the split char.
            sqlite_path = path_prefix + SPLIT_CHAR + mailbox_type + email_item_meta.total_path
            # Must match the ITEM_PREFIX file names written by save_meta_data_and_sqlite.
            meta_file = mailbox_type + SPLIT_CHAR + ITEM_PREFIX + str(index)
            extend_info_dict = {SQLITE_EXTINFO_META_FILE: meta_file, "has_subject": True}
            timestamp = convert_iso_time_to_timestamp(email_item_meta.last_modified_time)
            temp_info = SqliteInputParam(data_name=email_item_meta.subject, data_type="Email",
                                         data_parent_path=sqlite_path,
                                         create_time=convert_timestamp_to_time(timestamp),
                                         modify_time=convert_timestamp_to_time(timestamp),
                                         data_size=email_item_meta.size,
                                         extend_info="")
            # NOTE(review): fields are assigned after construction below, so
            # SqliteInputParam is assumed to be a mutable model — confirm.
            if not email_item_meta.subject:
                temp_info.data_name = "(no subject)"
                extend_info_dict.update({"has_subject": False})
            temp_info.extend_info = json.dumps(extend_info_dict)
            sql_data.append(temp_info)
            self.write_batch_metadata_to_sqlite_file(sql_data)
        self.write_batch_metadata_to_sqlite_file(sql_data, True)

    def write_batch_metadata_to_sqlite_file(self, sql_data: list, force_write=False):
        """
        Flush the buffered sqlite rows once the batch is full (>=100 entries)
        or when the caller forces a flush; the buffer is emptied after writing.
        :param sql_data: buffered SqliteInputParam rows (cleared in place on flush)
        :param force_write: True to flush regardless of batch size
        """
        if not force_write and len(sql_data) < 100:
            return
        ExchangeSqliteService.write_metadata_list_to_sqlite_file(self.pub_meta_path, sql_data, True)
        sql_data.clear()

    def get_export_item(self, email_item: EmailItemModel, mailbox_type, index):
        """
        Export one mail through the EWS ExportItems operation.
        Mails may be up to 2 GB, so the SOAP response is streamed into a data
        file instead of being read fully into memory for XML parsing.
        :param email_item: mail meta to export
        :param mailbox_type: root folder type the mail belongs to
        :param index: internal mail number (a string; becomes the data file suffix)
        :return: (error code, the mail meta, data file path or "" on failure)
        """
        if email_item.size > SIZE_LIMIT:
            # Oversized mails are reported as an error instead of exported.
            log.warn(f"save email subject:{email_item.subject} failed, size:{email_item.size} exceed {SIZE_LIMIT}")
            return BodyErr.EMAIL_EXCEED_SIZE_LIMIT.value, email_item, ""
        # ExportItems request template; placeholders: mailbox SMTP address, item id.
        post_body = \
            """<?xml version="1.0" encoding="utf-8" ?>
    <soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
                   xmlns:xsd="http://www.w3.org/2001/XMLSchema"
                   xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"
                   xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"
                   xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages">
        <soap:Header>
            <t:RequestServerVersion Version="Exchange2010_SP1"/>
            <t:ExchangeImpersonation>
                <t:ConnectingSID>
                    <t:PrimarySmtpAddress><![CDATA[{}]]></t:PrimarySmtpAddress>
                </t:ConnectingSID>
            </t:ExchangeImpersonation>
        </soap:Header>
        <soap:Body>
            <m:ExportItems>
                <m:ItemIds>
                    <t:ItemId Id="{}"/>
                </m:ItemIds>
            </m:ExportItems>
        </soap:Body>
    </soap:Envelope>
            """
        retry_count = 0
        try:
            # Retry loop: wait_for_retry presumably sleeps and returns while the
            # retry budget lasts, then raises — the outer handlers translate
            # that into an error code. TODO confirm against wait_for_retry.
            while True:
                try:
                    response_stream = get_post_response_stream(URL, post_body.format(self.mailbox_name, email_item.id),
                                                               self.username, self.password,
                                                               windows_version=self.windows_version,
                                                               compatibility_mode=self.compatibility_mode)
                    parent_path = os.path.join(self.data_hub, mailbox_type)
                    os.makedirs(parent_path, exist_ok=True)
                    # index is a string (concatenated directly onto "item").
                    file_name = os.path.join(parent_path, "item" + index)
                    log.debug(f"save backup copy file_name:{file_name}, subject:{email_item.subject} start")
                    self.exec_export_item(file_name, response_stream)
                    log.debug(f"save backup copy file_name:{file_name}, subject:{email_item.subject} end")
                    return ExchangeCode.SUCCESS.value, email_item, file_name
                except Exception as ex:
                    log.error(f"export {email_item} failed, err: {ex}, and retry: {retry_count}.", exc_info=True)
                    retry_count += 1
                    wait_for_retry(ex, retry_count)
        except ExchangeInternalException as ex:
            # Internal error carrying its own error code.
            log.error(f"export {email_item} failed., err: {ex}", exc_info=True)
            return ex.code, email_item, ""
        except Exception as ex:
            log.error(f"export {email_item} failed, err: {ex}.", exc_info=True)
            return BodyErr.ERROR_INTERNAL.value, email_item, ""

    def full_backup_entry(self, process_shared_dict):
        """
        Full backup entry point: back up every mail, scrubbing the password afterwards.
        :param process_shared_dict: shared counters for progress reporting
        :return: set of error codes collected during the backup
        """
        try:
            result = self.backup_items(process_shared_dict=process_shared_dict)
        finally:
            # Always wipe the in-memory password, success or failure.
            clear(self.password)
        return result

    def inc_backup_entry(self, latest_copy_id, process_shared_dict):
        """
        Incremental backup: back up mails modified since the previous copy's start time.
        Falls back to a full backup when no previous copy id is available.
        :param latest_copy_id: id of the most recent copy, or None
        :param process_shared_dict: shared counters for progress reporting
        :return: set of error codes collected during the backup
        """
        try:
            if latest_copy_id is None:
                log.warn(f"increment backup but cannot find latest_copy_id")
                return self.full_backup_entry(process_shared_dict)
            meta_dir = os.path.join(self.repo_path, latest_copy_id, ExchangeMailBosConstants.META_DIR)
            last_backup_meta_file = os.path.join(meta_dir, ExchangeMailBosConstants.FULL_BACKUP)
            if not os.path.exists(last_backup_meta_file):
                # The previous copy was itself incremental.
                last_backup_meta_file = os.path.join(meta_dir, ExchangeMailBosConstants.INC_BACKUP)
            backup_start_time = read_temp_file(last_backup_meta_file).get('backup_start_time')
            greater_than_time_str = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(backup_start_time))
            log.info(f"inc_backup_entry greater_than_time_str:{greater_than_time_str}")
            return self.backup_items(greater_than_time_str=greater_than_time_str,
                                     process_shared_dict=process_shared_dict)
        finally:
            # Always wipe the in-memory password, success or failure.
            clear(self.password)

    def create_folder(self, parent_dir: EmailFolderModel, folder_name: str):
        """
        Create a folder under the given parent via the EWS CreateFolder operation.
        See https://learn.microsoft.com/zh-cn/exchange/client-developer/
        exchange-web-services/ews-schema-versions-in-exchange
        :param parent_dir: parent folder
        :param folder_name: display name of the folder to create
        :return: the id of the newly created folder
        """
        # CreateFolder request template; placeholders: mailbox SMTP address,
        # parent-id element name, parent-id value, display name.
        post_body = """<?xml version="1.0" encoding="utf-8"?>
        <soap:Envelope
                xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
                xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"
                xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
            <soap:Header>
                <t:RequestServerVersion Version="Exchange2010"/>
                <t:ExchangeImpersonation>
                    <t:ConnectingSID>
                        <t:PrimarySmtpAddress><![CDATA[{}]]></t:PrimarySmtpAddress>
                    </t:ConnectingSID>
                </t:ExchangeImpersonation>
            </soap:Header>
            <soap:Body>
              <CreateFolder xmlns="http://schemas.microsoft.com/exchange/services/2006/messages">
                <ParentFolderId>
                  <t:{} Id="{}"/>
                </ParentFolderId>
                <Folders>
                  <t:Folder>
                    <t:DisplayName>{}</t:DisplayName>
                  </t:Folder>
                </Folders>
              </CreateFolder>
            </soap:Body>
        </soap:Envelope>
                """
        # Mailbox roots are addressed by their well-known DistinguishedFolderId
        # name; ordinary folders by their FolderId.
        if parent_dir.type == "root":
            mailbox_id_type = "DistinguishedFolderId"
            mailbox_id_value = parent_dir.name
        else:
            mailbox_id_type = "FolderId"
            mailbox_id_value = parent_dir.id
        log.info(f"Create folder, mailbox_id_type: {mailbox_id_type}: mailbox_id_value: {mailbox_id_value}")
        post_body = post_body.format(self.mailbox_name, mailbox_id_type, mailbox_id_value, folder_name)
        # Issue the request.
        response_stream = get_post_response_stream(URL, post_body, self.username, self.password,
                                                   windows_version=self.windows_version,
                                                   compatibility_mode=self.compatibility_mode)
        # Parse the response and validate the HTTP status code.
        content_str = response_stream.read().decode()
        root = ET.XML(content_str) if content_str else None
        ExchangeMailBoxService.check_response_http_code(response_stream, root, post_body)

        # Walk the response body down to the CreateFolderResponseMessage.
        create_folder_response = self.get_sub_item(root, [f"{XMLNS_S}Body", f"{XMLNS_M}CreateFolderResponse"],
                                                   post_body)
        find_item_resp = self.get_sub_item(create_folder_response, [f"{XMLNS_M}ResponseMessages",
                                                                    f"{XMLNS_M}CreateFolderResponseMessage"], post_body)
        response_class = find_item_resp.get("ResponseClass")
        self.check_find_item_response_message(find_item_resp, response_class, post_body)
        # Return the id of the first (only) created folder.
        for folder_model in self.get_sub_item(find_item_resp, [f"{XMLNS_M}Folders"]):
            folder_id = self.get_sub_item(folder_model, [f"{XMLNS_T}FolderId"]).get("Id")
            return folder_id

    def get_root_folder_id_map(self):
        """
        Resolve the folder id of each mailbox root and cache it for restore.
        FindFolder does not return DistinguishedFolderId on every Exchange
        version, so the roots ("drafts", "inbox", "sentitems") are looked up
        one by one here; only these three roots are backed up.
        :return: set of error codes (empty on success)
        """
        error_code = set({})
        try:
            for mailbox_type in MAILBOX_TYPE_LIST:
                root_model = EmailFolderModel._make(["", mailbox_type, "root", "", ""])
                folder: EmailFolderModel = self.get_folder(root_model)
                self.restore_folder_id_dict[f"/{mailbox_type}"] = folder.id
        except ExchangeInternalException as ex:
            log.error(f"get_root_folder_id_map failed", exc_info=True)
            error_code.add(ex.code)
        except Exception as ex:
            log.error(f"get_root_folder_id_map failed", exc_info=True)
            error_code.add(BodyErr.ERROR_INTERNAL.value)
        return error_code

    def restore_entry(self, restore_strategy, restore_sub_objects, process_shared_dict) -> set[int]:
        """
        Restore entry point.
        :param restore_strategy: "0": replace existing mails, "2": skip existing mails
        :param restore_sub_objects: fine-grained restore objects
        :param process_shared_dict: shared counters used by restore worker processes
        :return: set of error codes (empty on full success)
        """
        log.info(
            f"get restore task, restore_strategy is:{restore_strategy}, restore_sub_objects is:{restore_sub_objects}")
        restore_item_list = [sub_object.get('name', '') for sub_object in restore_sub_objects]
        # Resolve the mailbox roots first; bail out early when that already fails.
        return_code_set = self.get_root_folder_id_map()
        if return_code_set:
            clear(self.password)
            return return_code_set
        # Restore the folder tree, then the mails inside it.
        self.create_report_restore_progress_thread(process_shared_dict)
        return_code_set.update(self.restore_folders())
        skip_existing = restore_strategy == "2"
        return_code_set.update(self.restore_items(skip_existing, restore_item_list, process_shared_dict))
        self.restore_state = 1
        if return_code_set:
            self.report_restore_partial_success(process_shared_dict)
        else:
            self.report_restore_success(process_shared_dict)
        clear(self.password)
        return return_code_set

    def restore_items(self, skip_occur, restore_item_list, process_shared_dict) -> set[int]:
        """
        Restore mails.
        :param skip_occur: True to skip mails that already exist on the server
        :param restore_item_list: fine-grained restore object names
        :param process_shared_dict: shared restore counters
        :return: set of error codes
        """
        return self.restore_items_for_one_root_folder(skip_occur, restore_item_list, process_shared_dict)

    def get_folder_paths_from_folder_ids(self, mailbox_type, folder_ids):
        """Map each folder meta-file id to the full path recorded in its metadata file."""
        return [
            read_temp_file(os.path.join(self.meta_hub, mailbox_type, folder_id)).get("total_path")
            for folder_id in folder_ids
        ]

    def restore_items_for_one_root_folder(self, skip_occur, restore_item_list, process_shared_dict=None) -> set[int]:
        """
        Restore the selected mails with a pool of worker processes.
        :param skip_occur: True to skip mails that already exist on the server
        :param restore_item_list: fine-grained restore object names
        :param process_shared_dict: shared restore counters
        :return: union of the error-code sets returned by every worker
        """
        files_list = self.get_restore_file_list(restore_item_list)
        async_result = None
        if files_list:
            # Split the mail files into at most self.process_num chunks (size rounded up).
            chunk_size = math.ceil(len(files_list) / self.process_num)
            chunks = [files_list[offset:offset + chunk_size]
                      for offset in range(0, len(files_list), chunk_size)]
            pool = Pool(processes=self.process_num)
            try:
                async_result = pool.starmap_async(self.restore_item_with_multi_processes,
                                                  [(chunk, skip_occur, worker_id, process_shared_dict)
                                                   for (worker_id, chunk) in enumerate(chunks)],
                                                  error_callback=error_callback)
            except Exception as exception:
                pool.terminate()
                log.error(f"mailbox restore occurred error: {exception}")
                raise exception
            finally:
                # Always release the pool; join waits for submitted work to drain.
                pool.close()
                pool.join()
        merged_codes = set({})
        if async_result is not None:
            for worker_codes in async_result.get():
                merged_codes.update(worker_codes)
        return merged_codes

    def get_restore_file_list(self, restore_item_list):
        """
        Collect (meta-file name, mailbox type) pairs for every mail to restore.
        :param restore_item_list: fine-grained restore names; empty means everything
        :return: list of (file name, mailbox type) tuples
        """
        files_list = []
        for mailbox_type in MAILBOX_TYPE_LIST:
            copy_folder = os.path.join(self.meta_hub, mailbox_type)
            if not os.path.exists(copy_folder):
                log.info(f"copy_folder:{copy_folder} is not found")
                continue
            item_files = [name for name in os.listdir(copy_folder) if name.startswith("item")]
            # No filter, the whole mailbox, or a whole root folder selected:
            # restore every mail under this root.
            restore_all = (not restore_item_list
                           or MAILBOX_ROOT in restore_item_list
                           or mailbox_type in restore_item_list)
            if restore_all:
                log.info(f"mailbox_type:{mailbox_type}, restore all data")
                files_list.extend((name, mailbox_type) for name in item_files)
            else:
                # Fine-grained restore of individually selected folders/mails.
                files_list.extend(
                    self.get_fine_grained_restore(mailbox_type, item_files, restore_item_list))
            log.info(f"mailbox restore mail list size: {len(files_list)}, mailbox type: {mailbox_type}")
        return files_list

    def get_fine_grained_restore(self, mailbox_type, mailbox_type_files_list, restore_item_list):
        """
        Pick the mail files matching a fine-grained restore selection.
        A mail is selected when it is named directly (item<N>) or lives under
        one of the selected folders (folder<N>).
        :param mailbox_type: root folder type being processed
        :param mailbox_type_files_list: all item meta-file names under this root
        :param restore_item_list: raw selection entries prefixed with "<mailbox_type><SPLIT_CHAR>"
        :return: list of (file name, mailbox type) tuples
        """
        files_list = []
        # Hoist the prefix; keep only this root's entries and strip the prefix once.
        prefix = mailbox_type + SPLIT_CHAR
        restore_data = [entry.replace(prefix, "", 1)
                        for entry in restore_item_list if entry.startswith(prefix)]
        # [_ for _ in filter(...)] replaced with direct comprehensions (idiom).
        restore_items = [entry for entry in restore_data if entry.startswith(ITEM_PREFIX)]
        restore_folder_ids = [entry for entry in restore_data if entry.startswith(FOLDER_PREFIX)]
        # Resolve selected folder ids to their full paths for the sub-path test.
        restore_folder_paths = self.get_folder_paths_from_folder_ids(mailbox_type, restore_folder_ids)
        for file_name in mailbox_type_files_list:
            file_json = read_temp_file(os.path.join(self.meta_hub, mailbox_type, file_name))
            total_path = file_json.get("total_path", "")
            if file_name in restore_items or self.is_sub_path(total_path, restore_folder_paths):
                files_list.append((file_name, mailbox_type))
        log.info(f"mailbox_type:{mailbox_type}, restore items:{restore_items}, "
                 f"folder ids:{restore_folder_ids}, restore_folder_paths:{restore_folder_paths}")
        return files_list

    def restore_item_with_multi_processes(self, files_list, skip_occur, index, process_shared_dict):
        """
        Restore one chunk of mails inside a worker process, using a thread pool
        to overlap the per-mail requests.
        :param files_list: list of (meta-file name, mailbox type) pairs to restore
        :param skip_occur: True to skip mails that already exist on the server
        :param index: worker index used to address the shared counters
        :param process_shared_dict: shared restore counters
        :return: set of int error codes produced by the per-mail restores
        """
        result = set({})
        start_time = time.time()
        identifier = multiprocessing.current_process().name
        restore_email_success_count = process_shared_dict.get("restore_email_success_count")
        restore_email_fail_count = process_shared_dict.get("restore_email_fail_count")
        # Shared slot counter (1-element list) throttling concurrent submissions.
        running_thread_num = [0]
        running_thread_num_lock = Lock()
        result_count_lock = Lock()
        future_list = []
        with ThreadPoolExecutor(max_workers=BATCH_READ_THREAD_NUM) as thread_pool:
            for file_name, mailbox_type in files_list:
                future_list.append(self.multi_thread_import_entry(file_name, mailbox_type, skip_occur,
                                                                  process_shared_dict=process_shared_dict,
                                                                  index=index,
                                                                  thread_pool=thread_pool,
                                                                  running_thread_num=running_thread_num,
                                                                  running_thread_num_lock=running_thread_num_lock,
                                                                  running_thread_result_count_lock=result_count_lock,
                                                                  max_workers=BATCH_READ_THREAD_NUM))
        # A failed submit yields None instead of a Future; skip those so
        # as_completed does not raise on a non-future entry.
        for future in as_completed([item for item in future_list if item is not None]):
            code = future.result()
            if int(code) != ExchangeCode.SUCCESS.value:
                # Keep the code as an int: callers merge this into set[int]
                # error collections (see restore_entry / restore_iter_folder).
                result.add(code)
        log.info(f"process {identifier} finished restore, size: {len(files_list)}, "
                 f"restore_email_success_count: {restore_email_success_count}, "
                 f"restore_email_fail_count: {restore_email_fail_count}, time: {time.time() - start_time}")
        return result

    def multi_thread_import_entry(self, file_name, mailbox_type, skip_occur, **kwargs):
        """
        Submit one mail restore to the thread pool, throttling so at most
        ``max_workers`` restores run at once.
        :param file_name: mail meta-file name
        :param mailbox_type: root folder type the mail belongs to
        :param skip_occur: True to skip mails that already exist on the server
        :param kwargs: thread_pool, index, process_shared_dict, max_workers,
                       running_thread_num (1-element list used as shared counter)
                       and the two locks guarding counters
        :return: the submitted Future, or None when the submit itself failed
        """
        index = kwargs.get("index")
        restore_email_success_count = kwargs.get("process_shared_dict").get("restore_email_success_count")
        restore_email_fail_count = kwargs.get("process_shared_dict").get("restore_email_fail_count")
        thread_pool = kwargs.get("thread_pool")
        running_thread_num_lock = kwargs.get("running_thread_num_lock")
        running_thread_result_count_lock = kwargs.get("running_thread_result_count_lock")
        running_thread_num = kwargs.get("running_thread_num")
        future = None

        def restore_metadata_and_data_done_callback(future_result):
            # Record per-worker success/fail counts.
            code = future_result.result()
            with running_thread_result_count_lock:
                if int(code) == ExchangeCode.SUCCESS.value:
                    restore_email_success_count[index] += 1
                else:
                    restore_email_fail_count[index] += 1
            # Free the slot under the SAME lock that guards the increment below;
            # mixing two locks around the shared counter makes += / -= racy.
            with running_thread_num_lock:
                running_thread_num[0] = running_thread_num[0] - 1

        while True:
            running_thread_num_lock.acquire()
            if running_thread_num[0] < kwargs.get('max_workers'):
                running_thread_num[0] = running_thread_num[0] + 1
                running_thread_num_lock.release()
                try:
                    future = thread_pool.submit(self.do_import_entry, mailbox_type, file_name, skip_occur)
                    future.add_done_callback(restore_metadata_and_data_done_callback)
                except Exception as error:
                    log.error("multi_thread_import_entry error")
                    log.error(error, exc_info=True)
                    # The callback will never run for a failed submit: release
                    # the reserved slot or the throttle deadlocks at max_workers.
                    with running_thread_num_lock:
                        running_thread_num[0] = running_thread_num[0] - 1
                break
            else:
                running_thread_num_lock.release()
                # All slots busy: back off and retry.
                time.sleep(1)
        return future

    def do_import_entry(self, mailbox_type, file_name, skip_occur):
        """
        Restore a single mail described by one meta file.
        :param mailbox_type: root folder type the mail belongs to
        :param file_name: mail meta-file name (the data file shares the name)
        :param skip_occur: True to skip the mail when it already exists
        :return: ExchangeCode.SUCCESS.value, or an error code on failure
        """
        try:
            meta = read_temp_file(os.path.join(self.meta_hub, mailbox_type, file_name))
            total_path = meta.get("total_path", "")
            email_id = meta.get("id", "")
            log.debug(f"start restore {file_name}")
            # The mail body lives in the data repository under the same file name.
            data_file_name = os.path.join(self.data_hub, mailbox_type, file_name)
            target_folder_id = self.restore_folder_id_dict.get(f"/{mailbox_type}{total_path}", "")
            self.restore_iter_item(data_file_name, email_id, target_folder_id, total_path, skip_occur)
            return ExchangeCode.SUCCESS.value
        except ExchangeInternalException as ex:
            log.error(f"{file_name} restore_iter_email failed: {ex}", exc_info=True)
            return ex.code
        except Exception as ex:
            log.error(f"{file_name} restore_iter_email failed: {ex}", exc_info=True)
            return BodyErr.ERROR_INTERNAL.value

    def restore_iter_item(self, data_file_name, email_id, last_parent_id, last_parent_path, skip_occur):
        """
        Restore one mail into its target folder.
        :param data_file_name: path of the exported mail data file
        :param email_id: original item id of the mail
        :param last_parent_id: id of the folder the mail is restored into
        :param last_parent_path: folder path (kept for the caller's signature)
        :param skip_occur: True to leave already-existing mails untouched
        """
        if not skip_occur:
            # Replace mode: upload unconditionally.
            log.debug(f"{data_file_name} will update or create")
            self.upload_email(data_file_name, last_parent_id, email_id, "UpdateOrCreate")
            return
        # Skip mode: upload only when the mail is absent on the server.
        # get_item is used because find_items is not compatible here.
        mail_item = self.exec_get_item(email_id)
        if mail_item:
            log.debug(f"{data_file_name} exist, skip")
        else:
            log.debug(f"{data_file_name} not exist, wi create")
            self.upload_email(data_file_name, last_parent_id, email_id, "CreateNew")

    def restore_folders(self) -> set[int]:
        """
        Recreate the folder tree of every mailbox root, processing folders in
        the order the backup scan created them (parents before children).
        :return: set of error codes (empty on success)
        """
        result = set({})
        # The three mailbox roots are handled independently.
        for mailbox_type in MAILBOX_TYPE_LIST:
            copy_folder = os.path.join(self.meta_hub, mailbox_type)
            if not os.path.exists(copy_folder):
                log.info(f"copy_folder:{copy_folder} is not found")
                continue
            files = os.listdir(copy_folder)
            # Keep only the folder meta files.
            files_list = list(filter(lambda x: x.startswith("folder"), files))
            # Sort by creation index: plain lexicographic order puts "folder10"
            # before "folder2", losing the breadth-first parent-before-child
            # order. With a common prefix and numeric suffixes, (len, name)
            # sorts the indices numerically.
            files_list = sorted(files_list, key=lambda x: (len(x), x))
            # NOTE(review): last_parent_id/path are never updated in this loop
            # (restore_iter_folder keeps them local), so the parent is looked
            # up again for every folder — confirm whether the cache was meant
            # to carry over between iterations.
            last_parent_path = None
            last_parent_id = None
            for file_name in files_list:
                result.update(
                    self.restore_iter_folder(file_name, last_parent_id, last_parent_path, mailbox_type))
        return result

    def restore_iter_folder(self, file_name, last_parent_id, last_parent_path, mailbox_type):
        """
        Restore a single folder described by one folder meta file.
        Finds (or recursively creates) the parent folder, then reuses or creates
        the folder itself and records its new id for the mail restore phase.
        :param file_name: folder meta-file name
        :param last_parent_id: cached id of the previously used parent folder
        :param last_parent_path: cached path of the previously used parent folder
        :param mailbox_type: root folder type being processed
        :return: empty set on success, else a one-element set with the error code
        """
        pre_parent_folder_path = ""
        try:
            file_json = read_temp_file(os.path.join(self.meta_hub, mailbox_type, file_name))
            total_path = file_json.get("total_path")
            if total_path.rfind(SPLIT_CHAR) == -1:
                pre_parent_folder_path = ""
            else:
                pre_parent_folder_path = total_path[0:total_path.rfind(SPLIT_CHAR)]  # full path, e.g. /drafts/test1/test2
            pre_name = file_json.get("name")
            log.info(f"pre_parent_folder_path:{pre_parent_folder_path},pre_name:{pre_name}")
            if pre_parent_folder_path != last_parent_path:
                # Parent differs from the cached one: walk/create the parent chain first.
                last_parent_id, last_parent_path = self.restore_iter_create_folder(mailbox_type,
                                                                                   pre_parent_folder_path)
            # Reuse the folder when it already exists on the server, else create it.
            pre_folders = self.find_folders(
                EmailFolderModel._make([last_parent_id, file_name, "common", last_parent_path, ""]), pre_name)
            if not pre_folders:
                pre_folder_id = self.create_folder(
                    EmailFolderModel._make([last_parent_id, file_name, "common", last_parent_path, ""]), pre_name)
            else:
                pre_folder_id = pre_folders[0].id
            # Remember each folder's new id; looked up when restoring its mails.
            self.restore_folder_id_dict[f"/{mailbox_type}{total_path}"] = pre_folder_id
            return set({})
        except ExchangeInternalException as ex:
            log.error(f"{pre_parent_folder_path} restore_iter_email failed", exc_info=True)
            return {ex.code}
        except Exception as ex:
            log.error(f"{pre_parent_folder_path} restore_iter_email failed", exc_info=True)
            return {BodyErr.ERROR_INTERNAL.value}

    def restore_iter_create_folder(self, mailbox_type, pre_parent_folder_path):
        """
        Restore helper: create/resolve the folder chain one level at a time.

        :param mailbox_type: root folder name
        :param pre_parent_folder_path: full path of the parent folder
        :return: (id, full path) of the deepest folder found or created
        """
        # Full path, grown level by level, e.g. /drafts/test1/test2.
        current_path = ""
        current_id = self.restore_folder_id_dict.get(f"/{mailbox_type}")
        for segment in pre_parent_folder_path.split(SPLIT_CHAR):
            if not segment:
                continue
            existing = self.find_folders(
                EmailFolderModel._make([current_id, segment, "common", current_path, ""]), segment)
            if existing:
                # Folder already present at this level: reuse it.
                segment_id = existing[0].id
            else:
                segment_id = self.create_folder(
                    EmailFolderModel._make([current_id, segment, "common", current_path, ""]), segment)
            current_path = current_path + SPLIT_CHAR + segment
            current_id = segment_id
        return current_id, current_path

    def export_entry(self, total_email_item_meta_list: list[(str, EmailItemModel, int)], process_index: int,
                     process_shared_dict) -> set[int]:
        """
        Backup entry for one worker process: export every mail of this mailbox
        through a bounded thread pool and collect per-mail error codes.

        :param total_email_item_meta_list: (mailbox_type, email item meta, mail index) tuples
        :param process_index: index of this process in the shared counter lists
        :param process_shared_dict: cross-process shared counters
        :return: set of error codes from failed exports (empty on success)
        """
        start_time = time.time()
        identifier = multiprocessing.current_process().name
        backup_success_count_list = process_shared_dict.get("backup_success_count_list")
        # One-element list so the counter can be mutated from worker threads.
        running_thread_num = [0]
        running_thread_num_lock = Lock()
        future_list = []
        # Export the mail data with multiple threads.
        with ThreadPoolExecutor(max_workers=BATCH_READ_THREAD_NUM) as pool:
            log.info(
                f"mailbox_name:{self.anonymization_mailbox}, username:{self.username}, "
                f"mail size:{len(total_email_item_meta_list)}")
            # Iterate the tuples directly; the previous enumerate() index was unused.
            for mailbox_type, email_item_meta, mail_index in total_email_item_meta_list:
                future_list.append(self.
                                   multi_thread_export_entry(running_thread_num_lock, running_thread_num, pool,
                                                             email_item=email_item_meta,
                                                             index=str(mail_index),
                                                             mailbox_type=mailbox_type,
                                                             process_index=process_index,
                                                             process_shared_dict=process_shared_dict,
                                                             max_workers=BATCH_READ_THREAD_NUM))
        error_code_set = set()
        for future in as_completed(future_list):
            code, res, _ = future.result()
            if int(code) != ExchangeCode.SUCCESS.value:
                log.error(f"errorcode:{future.result()}, res:{res}, will go on job!")
                error_code_set.add(int(code))
        log.info(f"process {identifier} finished backup, size: {len(total_email_item_meta_list)}, "
                 f"backup_success_count_list: {backup_success_count_list}, "
                 f"time: {time.time() - start_time}")
        return error_code_set

    def multi_thread_export_entry(self, running_thread_num_lock, running_thread_num, pool, **kwargs):
        """
        Backup data entry: submits one export task to the pool, blocking until
        a worker slot is free so that at most ``max_workers`` tasks run at once.

        :param running_thread_num_lock: lock protecting the running-thread counter
        :param running_thread_num: one-element list holding the running-thread count
        :param pool: thread pool executor
        :param kwargs: keyword arguments (email_item, mailbox_type, index,
                       process_index, process_shared_dict, max_workers)
        :return: the submitted Future, or None if submission raised
        """
        future = None
        process_index = kwargs.get("process_index")
        backup_success_count_list = kwargs.get("process_shared_dict").get("backup_success_count_list")
        backup_failed_count_list = kwargs.get("process_shared_dict").get("backup_failed_count_list")
        backup_total_size_list = kwargs.get("process_shared_dict").get("total_size_list")

        def backup_metadata_and_data_done_callback(future_result):
            # Runs when one export finishes: update the shared per-process
            # success/failure counters and release the worker slot.
            running_thread_num_lock.acquire()
            code, res, file_name = future_result.result()
            if int(code) != ExchangeCode.SUCCESS.value:
                backup_failed_count_list[process_index] += 1
            else:
                backup_success_count_list[process_index] += 1
                backup_total_size_list[process_index] += get_file_size(file_name)
            running_thread_num[0] = running_thread_num[0] - 1
            running_thread_num_lock.release()

        # Poll (1s sleep) until a worker slot is available, then submit exactly once.
        while True:
            running_thread_num_lock.acquire()
            if running_thread_num[0] < kwargs.get('max_workers'):
                running_thread_num[0] = running_thread_num[0] + 1
                running_thread_num_lock.release()
                try:
                    future = pool.submit(self.get_export_item, kwargs.get("email_item"), kwargs.get("mailbox_type"),
                                         kwargs.get("index"))
                    future.add_done_callback(backup_metadata_and_data_done_callback)
                except Exception as error:
                    log.error("multi_thread_export_entry error")
                    log.error(error, exc_info=True)
                break
            else:
                running_thread_num_lock.release()
                time.sleep(1)
        return future

    def upload_email(self, file_name, parent_folder_id, item_id, restore_type):
        """
        Upload one mail via the EWS UploadItems operation, streaming the data
        file between a SOAP prefix and suffix so the whole mail is never held
        in memory.

        :param file_name: mail data file name
        :param parent_folder_id: parent folder ID
        :param item_id: mail item ID
        :param restore_type:
        "UpdateOrCreate": directly replace a mail that already exists;
        "CreateNew": used for mails that do not exist yet (the caller checks
        first and skips existing ones)
        :return: raw response body bytes
        """
        prefix_template = """<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope
        xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
        xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"
        xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
    <soap:Header>
        <t:RequestServerVersion Version="Exchange2010_SP1"/>
        <t:ExchangeImpersonation>
            <t:ConnectingSID>
                <t:PrimarySmtpAddress><![CDATA[{}]]></t:PrimarySmtpAddress>
            </t:ConnectingSID>
        </t:ExchangeImpersonation>
    </soap:Header>
    <soap:Body>
<m:UploadItems>
      <m:Items>
        <t:Item CreateAction="{}">
                    <t:ParentFolderId Id="{}"/>
                    {}
                    <t:Data>"""
        # CreateNew must not carry an ItemId element; other actions reference the original id.
        item_id_template = "" if restore_type == "CreateNew" else """<t:ItemId Id="{}"/>""".format(item_id)
        prefix = prefix_template.format(self.mailbox_name, restore_type, parent_folder_id, item_id_template).encode()
        suffix_template = """</t:Data>
                </t:Item>
            </m:Items>
        </m:UploadItems>
    </soap:Body>
</soap:Envelope>"""
        suffix = suffix_template.encode()
        retry_count = 0
        # Retry until success; wait_for_retry decides the delay and when to give up.
        while True:
            try:
                res = get_post_response_stream(URL, "", self.username, self.password,
                                               file_obj=CustomFileLikeObject(file_name, prefix, suffix),
                                               windows_version=self.windows_version,
                                               compatibility_mode=self.compatibility_mode)
                # The response content is not processed for now.
                res_data = res.read()
                return res_data
            except Exception as ex:
                log.error(f"upload email failed: {ex}, retry time: {retry_count}")
                retry_count += 1
                wait_for_retry(ex, retry_count)

    def report_scan_finish(self):
        """Report to the framework that mailbox scanning has finished."""
        scan_detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_SCAN_SUCCESS,
                                logInfoParam=[str(self.scan_email_count)],
                                logLevel=LogLevel.INFO.value)
        job_detail = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=5,
                                   logDetail=[scan_detail], taskStatus=SubJobStatusEnum.RUNNING.value,
                                   dataSize=0)
        report_job_details_by_rpc(self.job_id, self.pid, self.sub_job_id, job_detail.dict(by_alias=True))

    def report_backup_success(self, backup_success_count_list):
        """Report that the backup finished with all mails exported successfully."""
        success_total = sum(backup_success_count_list)
        success_detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_BACKUP_SUCCESS,
                                   logInfoParam=[str(success_total)],
                                   logLevel=LogLevel.INFO.value)
        payload = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=5,
                                logDetail=[success_detail], taskStatus=SubJobStatusEnum.RUNNING.value,
                                dataSize=0).dict(by_alias=True)
        report_job_details_by_rpc(self.job_id, self.pid, self.sub_job_id, payload)

    def report_backup_partial_success(self, backup_success_count_list, backup_failed_count_list):
        """Report that the backup finished with some mails failing to export."""
        params = [str(sum(backup_success_count_list)), str(sum(backup_failed_count_list))]
        partial_detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_BACKUP_PARTIAL_SUCCESS,
                                   logInfoParam=params,
                                   logLevel=LogLevel.WARN.value)
        payload = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=5,
                                logDetail=[partial_detail], taskStatus=SubJobStatusEnum.RUNNING.value,
                                dataSize=0).dict(by_alias=True)
        report_job_details_by_rpc(self.job_id, self.pid, self.sub_job_id, payload)

    def report_backup_progress(self, backup_success_count_list):
        """
        Periodically (every 30s) report scan/backup progress until
        ``backup_state`` reaches 2 (finished).

        :param backup_success_count_list: shared per-process success counters
        """
        while self.backup_state < 2:
            if self.backup_state == 0:
                # Still scanning: report the number of scanned mails.
                detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_SCAN_PROGRESS,
                                   logInfoParam=[str(self.scan_email_count)],
                                   logLevel=LogLevel.INFO.value)
            else:
                # Backing up: report scanned and successfully exported counts.
                detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_BACKUP_PROGRESS,
                                   logInfoParam=[str(self.scan_email_count),
                                                 str(sum(backup_success_count_list))],
                                   logLevel=LogLevel.INFO.value)
            payload = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=5,
                                    logDetail=[detail], taskStatus=SubJobStatusEnum.RUNNING.value,
                                    dataSize=0).dict(by_alias=True)
            report_job_details_by_rpc(self.job_id, self.pid, self.sub_job_id, payload)
            log.info(f"report_backup_progress backup_success_count_list: "
                        f"{backup_success_count_list}, {sum(backup_success_count_list)}")
            time.sleep(30)

    def create_report_backup_progress_thread(self, backup_success_count_list):
        """
        Start a dedicated background thread that reports backup progress.

        :param backup_success_count_list: shared per-process success counters
        """
        # daemon=True replaces the deprecated Thread.setDaemon() (deprecated
        # since Python 3.10); the thread must not block interpreter exit.
        progress_thread = Thread(target=self.report_backup_progress,
                                 args=(backup_success_count_list,),
                                 daemon=True)
        progress_thread.start()

    def report_restore_success(self, process_shared_dict):
        """Report that the restore finished with all mails restored successfully."""
        restored_total = sum(process_shared_dict.get('restore_email_success_count'))
        success_detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_RESTORE_SUCCESS,
                                   logInfoParam=[str(restored_total)],
                                   logLevel=LogLevel.INFO.value)
        payload = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=5,
                                logDetail=[success_detail], taskStatus=SubJobStatusEnum.RUNNING.value,
                                dataSize=0).dict(by_alias=True)
        report_job_details_by_rpc(self.job_id, self.pid, self.sub_job_id, payload)

    def report_restore_partial_success(self, process_shared_dict):
        """
        Report that the restore finished with some mails failing.

        :param process_shared_dict: shared counters across restore processes
        """
        # Use .get() for both counters, consistent with the sibling report_* methods
        # (the success counter previously used subscript access, the fail counter .get()).
        log_detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_RESTORE_PARTIAL_SUCCESS,
                               logInfoParam=[str(sum(process_shared_dict.get("restore_email_success_count"))),
                                             str(sum(process_shared_dict.get("restore_email_fail_count")))],
                               logLevel=LogLevel.WARN.value)
        output = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=5,
                               logDetail=[log_detail], taskStatus=SubJobStatusEnum.RUNNING.value, dataSize=0).dict(
            by_alias=True)
        report_job_details_by_rpc(self.job_id, self.pid, self.sub_job_id, output)

    def report_restore_progress(self, process_shared_dict):
        """
        Periodically (every 30s) report restore progress until
        ``restore_state`` reaches 1 (finished).

        :param process_shared_dict: shared counters across restore processes
        """
        while self.restore_state < 1:
            restored_total = sum(process_shared_dict.get('restore_email_success_count'))
            progress_detail = LogDetail(logInfo=ExchangeReportDBLabel.MAILBOX_RESTORE_PROGRESS,
                                        logInfoParam=[str(restored_total)],
                                        logLevel=LogLevel.INFO.value)
            payload = SubJobDetails(taskId=self.job_id, subTaskId=self.sub_job_id, progress=5,
                                    logDetail=[progress_detail], taskStatus=SubJobStatusEnum.RUNNING.value,
                                    dataSize=0).dict(by_alias=True)
            report_job_details_by_rpc(self.job_id, self.pid, self.sub_job_id, payload)
            time.sleep(30)

    def create_report_restore_progress_thread(self, process_shared_dict):
        """
        Start a dedicated background thread that reports restore progress.

        :param process_shared_dict: shared counters across restore processes
        """
        # daemon=True replaces the deprecated Thread.setDaemon(); a tuple is the
        # conventional type for Thread args (was a list literal).
        progress_thread = Thread(target=self.report_restore_progress,
                                 args=(process_shared_dict,),
                                 daemon=True)
        progress_thread.start()


# File-like class exposing read(); used during restore to stream the SOAP
# request (prefix + mail body + suffix) to the server in chunks.
class CustomFileLikeObject(object):
    """
    Wraps a backed-up mail data file as a read-only byte stream of
    ``prefix`` + the bytes between ``<m:Data>`` and ``</m:Data>`` + ``suffix``.
    """

    def __init__(self, file_name, prefix, suffix):
        """
        :param file_name: data file; must contain an <m:Data>...</m:Data> section
                          (opening tag within the first 2048 bytes, closing tag
                          within the last 300 bytes — raises ValueError otherwise)
        :param prefix: bytes emitted before the mail body
        :param suffix: bytes emitted after the mail body
        """
        self.prefix = prefix
        self.suffix = suffix
        self.file_length = os.path.getsize(file_name)
        with open(file_name, "rb") as f:
            head = f.read(2048)
            # Body starts immediately after the <m:Data> tag.
            self.start_length = head.index(b"<m:Data>") + len(b"<m:Data>")
            # The closing tag is expected within the last 300 bytes of the file.
            f.seek(-300, 2)
            tail = f.read(1024)
            self.end_length = len(tail) - tail.index(b"</m:Data>")
        # 0: prefix pending, 1: body pending, 2: suffix pending, 3: end
        self.status = 0
        self.file = None
        self.file_name = file_name
        self.remain_read_size = 0
        self.total_byte_len = len(self.prefix) + len(self.suffix) + (
                self.file_length - self.start_length - self.end_length)

    def read(self, length):
        """
        Return the next chunk of at most ``length`` body bytes (the prefix and
        the suffix are each returned whole in one call); b'' signals EOF.
        """
        if self.status == 0:
            self.status = 1
            self.file = open(self.file_name, "rb")
            self.file.seek(self.start_length, 0)
            self.remain_read_size = self.file_length - self.start_length - self.end_length
            if self.remain_read_size <= 0:
                # Bug fix: with an empty body the old code returned b'' (EOF)
                # from the body state before the suffix was ever sent. Jump
                # straight to the suffix state instead.
                self.status = 2
            return self.prefix
        elif self.status == 2:
            if self.file is not None:
                self.file.close()
                self.file = None
            self.status = 3
            return self.suffix
        elif self.status == 3:
            return b''
        else:
            if self.remain_read_size > length:
                chunk = self.file.read(length)
                self.remain_read_size -= length
            else:
                chunk = self.file.read(self.remain_read_size)
                self.remain_read_size = 0
                self.status = 2
            return chunk

    def close(self):
        """Release the underlying file handle (safe to call more than once)."""
        if self.file is not None:
            self.file.close()
            self.file = None

    def __del__(self):
        # Best effort: do not leak the handle if the consumer stops mid-stream.
        self.close()
