# -*- coding: utf-8 -*-
"""
@File:    import_service.py
@Author:  jk.dong
@Desc:    None
"""
import copy
import shutil
import uuid
# from decimal import Decimal
from enum import Enum

from docwork.service.base_service import BaseService
from docwork.utils import *
from docwork.dao.receive_dao import RECV_CONF, RECV_DB, ReceiveDao
from docwork.dao.import_dao import *


# Object-storage (OSS) settings block. `.get('use-oss')` returns None when the
# key is absent from the config, but downstream code calls
# `oss_region.get(...)` unconditionally — coalesce to an empty dict so those
# lookups degrade to "OSS disabled" (falsy) instead of raising AttributeError.
oss_region = RECV_CONF['import-setting'].get('use-oss') or {}
# A web attachment path without an explicit date pattern defaults to one
# sub-directory per day (e.g. "20240131").
if RECV_CONF['import-setting'].get('webapp_attachment_path') and \
        not RECV_CONF['import-setting'].get('webapp_attachment_path_pattern'):
    RECV_CONF['import-setting']['webapp_attachment_path_pattern'] = '%Y%m%d'


class AttachmentStoreType(Enum):
    """Where an attachment's content is persisted.

    Note the gap in values: 3 and 4 are unused here (presumably reserved
    elsewhere — TODO confirm against the consuming schema).
    """

    Database = 0  # content stored as a BLOB in the attachment table
    File = 1      # content stored on the web-app attachment path
    Backup = 2    # content stored in a backup directory
    S3 = 5        # content uploaded to AWS S3 / S3-compatible OSS


class DocImportService(BaseService):
    def __init__(self, name: str):
        """Create an import service instance.

        :param name: service name, forwarded to :class:`BaseService`.
        """
        super().__init__(name)
        # Primary key of the docreport row created by the current import
        # (set by table_insert when the 'docreport' insert succeeds).
        self.doc_id: int = 0
        # Primary key of the raw-mail log row currently being processed.
        self.rawmaillog_id: int = 0

    @staticmethod
    def check_summary_length(content):
        """Truncate the report summary to the configured length limit.

        :param content: summary text extracted from the mail.
        :return: the summary, cut to ``summary_limits`` characters when a
            positive limit is configured; otherwise unchanged.
        """
        logging.info(f"报告摘要文本长度：{len(content)}字")
        if len(content) > 0:
            # `summary_limits` may be absent from the config, in which case
            # `.get()` returns None and `None > 0` would raise TypeError —
            # treat a missing value as "no limit".
            summary_length_limit = RECV_CONF['import-setting'].get('summary_limits') or 0
            if summary_length_limit > 0 and (len(content) > summary_length_limit):
                logging.info(
                    f"报告摘要长度超出摘要长度限制（{summary_length_limit}字），报告摘要将被切割。")
                content = content[:summary_length_limit]
                logging.info(f"切割后的新摘要内容：{content}\t长度：{len(content)}")
        else:
            logging.warning("邮件摘要内容为空！")

        return content

    def table_insert(self, conn, table_name, param1, *args):
        """Insert *param1* into the named table and report the outcome.

        When the insert does not affect exactly one row, the raw-mail record
        is cleaned up (deleted, or flagged with -10, depending on the
        ``exception_sync`` setting), the transaction is rolled back and the
        connection is closed.

        :param conn: open database connection used for the insert.
        :param table_name: one of ``'docreport'``, ``'docgeneral'``,
            ``'km_recvsendinfo'``, ``'km_docrecievelog'``.
        :param param1: message object handed down to the DAO layer.
        :param args: optional positional flag (attachment flag / send flag);
            weakly typed — original author noted a parameter-type risk here.
        :return: ``'success'`` when exactly one row was inserted,
            ``'error'`` otherwise.
        """
        affected = 0
        if table_name == 'docreport':
            affected, new_doc_id = ImportDao.insert_docreport(conn=conn, msg=param1)
            # Remember the new primary key for the follow-up inserts.
            if new_doc_id > 0:
                self.doc_id = new_doc_id
        elif table_name == 'docgeneral':
            if args:
                affected = ImportDao.insert_docgeneral(conn=conn, msg=param1, objid=self.doc_id, attachment_flag=args[0])
            else:
                affected = ImportDao.insert_docgeneral(conn=conn, msg=param1, objid=self.doc_id)
        elif table_name == 'km_recvsendinfo':
            if args:
                affected, _ = ImportDao.insert_km_recvsendinfo(conn=conn, msg=param1, docid=self.doc_id, send_flag=args[0])
            else:
                # Caller must supply the send flag for this table.
                logging.error(f"当前{table_name}表传参有误，请检查！")
        elif table_name == 'km_docrecievelog':
            affected, _ = ImportDao.insert_km_docrecievelog(conn=conn, msg=param1, docid=self.doc_id, status=0)

        if affected == 1:
            logging.info(f'{table_name}表插入成功')
            return 'success'

        logging.error(f'导入{table_name}表失败')
        if RECV_CONF['import-setting'].get('exception_sync'):
            ReceiveDao.delete_rawmaillog(self.rawmaillog_id)
        else:
            update_rawmaillog_xinfo(conn, self.rawmaillog_id, -10)
        conn.rollback()
        conn.close()
        return 'error'

    def import_doc(self, msg):
        """
        导入报告，使用数据库线程池连接、异常触发事务回滚操作，保持入库数据的一致性
        :param msg:
        :return:
        """
        checked_file_types = ['pdf', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx']
        import_conn = RECV_DB.get_conn()
        self.rawmaillog_id = msg.rawmaillog_id
        try:
            self.check_summary_length(msg.content)

            # 服务端执行逻辑
            xml_attachment_file = ""
            write_time = ""
            if not RECV_CONF['receive-setting'].get('client_mode'):
                logging.info("(Import service)Server process")
                pos = -1
                logging.info(f"附件数量:{len(msg.attachments)}")
                for i in range(len(msg.attachments)):
                    logging.info(f"attach name:{msg.attachments[i].get('attachName')}")
                    if msg.attachments[i].get("attachName") == "recvinfo.xml":
                        # 尝试从该附件中获取UID
                        if Path(msg.attachments[i].get("path")).exists():
                            xml_attachment_file = msg.attachments[i].get("path")
                        ri = read_xml(msg.attachments[i].get("path"))
                        msg.uid = ri.get("uid")
                        msg.sellside_mailfrom = ri.get("mailfrom")
                        msg.sellside_docid = ri.get("sellsidedocid")
                        msg.buyside_docid = ri.get("buysidedocid")
                        if ri.get("writetime"):
                            write_time = ri.get("writetime")
                        pos = i
                        logging.info(
                            f"Client UID={msg.uid}\tSellSideMailFrom:{msg.sellside_mailfrom}\t"
                            f"BuySideDocId:{msg.buyside_docid}\tSellSideDocId:{msg.sellside_docid}")
                if pos >= 0:
                    msg.attachments.pop(pos)
                    # 检查该uid是否已经存在，如果存在，则说明是重复邮件，再检查是否为有效附件，无有效附件就跳过
                    sqla = f"""SELECT * FROM {table_km_recvsendinfo} WHERE "UID" = '{msg.uid}'"""
                    logging.info(sqla)
                    _dt = IMPORT_DB.search(sqla)
                    if _dt:
                        logging.info(f"UID[{msg.uid}] 已存在, 检查是否为有效附件...")
                        # valid_attachment
                        valid_attachment = False
                        if len(msg.attachments) == 1:
                            attachInfo = msg.attachments[0]
                            logging.info(
                                f'find 1 Attachment when check UID duplicate: {attachInfo["attachName"]}')
                            if attachInfo["attachName"] and check_suffix_compatible(
                                    attachInfo["attachName"], checked_file_types):
                                valid_attachment = True

                        elif len(msg.attachments) > 1:
                            logging.info(
                                f"find {len(msg.attachments)} Attachments when check UID duplicate")
                            _validAttachmentCount = 0
                            for ai in msg.attachments:
                                if ai["attachname"] and check_suffix_compatible(ai["attachname"], checked_file_types):
                                    _validAttachmentCount += 1
                            if _validAttachmentCount > 0:
                                valid_attachment = True

                        else:
                            logging.info("no Attachment when check UID duplicate")
                        '''
                        待添加：
                            是否公司端强制删除UID重复的报告而不论是否有附件，默认关闭
                        '''
                        logging.info(f"validAttachment: {valid_attachment}")

                        # 除recvinfo.xml外，无其他有效附件的情况：跳过当前邮件的处理，执行下一封的处理
                        if not valid_attachment:
                            # 删除本地邮件
                            logging.info(f"UID duplicate and have no valid attachment, not import: {msg.subject}")

                            doc_file = Path(RECV_CONF['receive-setting'].get('rawmail_path')).joinpath(f"{self.rawmaillog_id}.eml")
                            if doc_file.exists():
                                os.remove(doc_file)
                                logging.warning(f"Delete: {doc_file}")
                            # 删除本地附件
                            for attachInfo in msg.attachments:
                                if os.path.exists(attachInfo["path"]):
                                    os.remove(attachInfo["path"])
                                    logging.warning(f"delete attachment: {attachInfo['path']}")
                            # 删除xml附件
                            if xml_attachment_file:
                                if os.path.exists(xml_attachment_file):
                                    os.remove(xml_attachment_file)
                                    logging.warning(f"delete xml attachment: {xml_attachment_file}")
                            if self.rawmaillog_id > 0:
                                update_rawmaillog_xinfo(import_conn, self.rawmaillog_id, -3)
                            # 不进行数据库导入，跳出
                            return
                        # 删除重复UID对应km_recvsendinfo表的记录
                        for _dr in _dt:
                            # logging.info("begin to delete old! Last RECVTIME:"+_dr['OBJID']+"\tdoc:"+_dr["DOCID"]+"\tcustomer:"+_dr["CUSTOMERID"]+"\trecvtime:"+_dr["RECVTIME"])
                            sql1 = f"DELETE FROM {table_km_recvsendinfo} WHERE OBJID={_dr[00]}"
                            logging.info("ready to execute SQL:" + sql1)
                            rowcount = IMPORT_DB.delete(sql1)
                            if rowcount == 1:
                                logging.info("km_recvsendinfo deleted!")
                            else:
                                logging.info("km_recvsendinfo delete failed.")
                    # km_recvsendinfo表无记录，不是重复邮件
                    else:
                        logging.info("Server: table km_recvsendinfo no record. Not duplicate!")
                    # 无recvinfo.xml的情况
                else:
                    logging.warning(f"Continue! pos为: {pos}, there is no attachment named recvinfo.xml")
                    if self.rawmaillog_id > 0:
                        update_rawmaillog_xinfo(import_conn, self.rawmaillog_id, -10)
                    return
            # 客户端执行逻辑
            else:
                uid = uuid.uuid4()
                logging.info(f"(Import service)Client process: Generate UID==> {uid}")
                msg.uid = str(uid)

            # ====================附件处理========================
            is_duplicated = False
            # 只有一封附件(目前只对一封有效附件做重复逻辑判断)
            if len(msg.attachments) == 1:
                attachInfo = msg.attachments[0]
                exist = False
                existAttInfo = {}
                # 新增->XNCF_开头的邮件不作重复处理
                if msg.subject.startswith('XNCF_'):
                    logging.info("邮件标题以XNCF_开头，不做重复判断")
                else:
                    logging.info(f'存在一封附件: {attachInfo["attachName"]}')
                    validAttachment = False
                    if attachInfo['attachName'] and check_suffix_compatible(
                            attachInfo['attachName'], checked_file_types):
                        validAttachment = True
                    if validAttachment:
                        try:
                            logging.info(f'attachment need check md5: {attachInfo["attachName"]}')
                            md5file = RECV_CONF['receive-setting'].get('rawattach_path').joinpath(attachInfo["attachName"]) if oss_region.get('oss_name') else attachInfo['path']
                            md5 = get_md5(md5file)
                            logging.info(f'检查附件MD5值 (2):{md5}\t{attachInfo["attachName"]}\t邮件标题:{msg.subject}')
                            # logging.info("start check md5 if match in attachment table record 1")
                            existAttInfo = find_attachment_by_md5(import_conn, attachInfo['attachName'],
                                                                  md5)
                            foundValidDup = True
                            if existAttInfo:
                                logging.info("从attachment表匹配到md5相同的附件 1")
                                existedAttId = existAttInfo['Id']
                                existedDocId = existAttInfo['DocId']
                                attname = existAttInfo['Name']
                                logging.info(
                                    f"Attachment id in database:{existedAttId}\tDocId:{existedDocId}\tAttName:{attname}\tOriginalTitle:{existAttInfo['DocOriginalTitle']}")

                                if md5.lower() != existAttInfo['Digest'].lower():
                                    logging.info(
                                        f"MD5值不匹配，new: {md5}, \t existed: {existAttInfo['Digest']}")
                                    if abs(attachInfo['attachSize'] - existAttInfo['Size']) > 1024:
                                        logging.info(
                                            f"文件大小不同(size difference more than 1024 in attachment table record):{attachInfo['attachSize']}==>{existAttInfo['Size']}")
                                        foundValidDup = False
                                    else:
                                        logging.info(
                                            f"文件大小相近(size difference less than 1024 in attachment table record):{attachInfo['attachSize']}==>{existAttInfo['Size']}")
                                else:
                                    logging.info("MD5值匹配，判断为重复！")
                            else:
                                logging.info(
                                    "attachment表未匹配到md5相同的附件 1, 再根据附件名和附件大小(小于10000)检查是否重复")
                                existAttInfo = find_attachment_by_size(import_conn,
                                                                       attachInfo['attachName'],
                                                                       attachInfo['attachSize'])
                                if existAttInfo:
                                    logging.info(
                                        f"根据附件名和文件大小，找到一封重复附件 1:{existAttInfo['DocOriginalTitle']}\tAttName:{attachInfo['attachName']}\tSize:{attachInfo['attachSize']}")
                                else:
                                    logging.info("根据附件名和文件大小，未找到重复附件 1")
                                    foundValidDup = False
                            if foundValidDup:
                                if RECV_CONF['receive-setting'].get('client_mode') and existAttInfo['BrokerId'] > 1:
                                    exist = True
                                    is_duplicated = True
                                else:
                                    logging.info("内部邮件，不判重!")
                            else:
                                logging.info("attachment表无匹配，不是重复邮件")
                        except Exception as err:
                            logging.error(f"附件判重存在异常，Error:{err}")
                    else:
                        # 通过附件的MD5值查询数据库中是否存在来判重，不存在==>不重复
                        logging.info(f"附件：{attachInfo['attachName']}不是有效附件，不进行重复附件检查")

                # 查询数据库后，没有重复附件的情况
                if not exist:
                    logging.info('开始保存docreport/docgeneral表')
                    if self.table_insert(import_conn, 'docreport', msg) == 'error':
                        return
                    attachment_flag = 0 if existAttInfo else 1
                    if self.table_insert(import_conn, 'docgeneral', msg, attachment_flag) == 'error':
                        return

                    logging.info(f"邮件不存在重复附件的情况，save new doc 1:{self.doc_id}")

                    if not attachInfo['attachName']:
                        attachInfo['attachName'] = msg.subject + "附件(1)"
                    attachInfo['storeType'] = AttachmentStoreType.Database.value
                    if not os.path.exists(attachInfo['path']):
                        logging.error(f"不存在路径:{attachInfo['path']}")
                        # 若有备份目录的配置项，添加上这个备份目录
                    logging.info(f"attachment path:{attachInfo['path']}")
                    attachInfo['digest'] = get_md5(attachInfo['path'])
                    logging.info(
                        f"开始附件表数据导入\tattachmentStoreType:{attachInfo['storeType']}")
                    rowcount, attachmentid = ImportDao.insert_attachment(import_conn, self.doc_id, attachInfo)
                    if rowcount == 1:
                        attachInfo['id'] = attachmentid
                        logging.info(f"附件表导入成功:\tID:{attachInfo['id']}\tSize:{attachInfo['attachSize']}\tMD5:{attachInfo['digest']}")
                    else:
                        logging.error("attachment表数据插入失败")
                    secondDbOk = False
                    if RECV_CONF['import-setting'].get('save_attachment_second'):
                        logging.info(
                            f"开始附件表数据导入(second database)\tattachid:{attachInfo['id']}\tattachmentStoreType:{attachInfo['storeType']}")
                        rowcount = ImportDao.insert_attachment2(self.doc_id, attachInfo)
                        if rowcount == 1:
                            logging.info(
                                f"附件表导入(second database)成功:\tattachid:{attachInfo['id']}\tSize:{attachInfo['attachSize']}\tMD5:{attachInfo['digest']}")
                            secondDbOk = True
                        else:
                            logging.error("附件表导入(second database)失败")

                    attachContentSaved2AwsS3OSS = False
                    if oss_region.get('oss_name') == 's3':
                        from docwork.utils.oss import S3Client
                        logging.info("开始保存附件至AWS S3.")
                        try:
                            attach_fn = attachInfo['path'].split('\\')[-1]
                            keyname = f"/sinitek/attachment/{datetime.datetime.now().strftime('%Y%m%d')}/{attach_fn}"
                            attachInfo['storeType'] = AttachmentStoreType.S3.value
                            s3 = S3Client(oss_config=oss_region)
                            if s3.upload(bucket_name=oss_region.get('bucket_name'), file_data=attachInfo['path'],
                                         key_name=keyname, type='file'):
                                attachInfo[
                                    'path'] = oss_region.get('endpoint_url') + '|' + oss_region.get('bucket_name') + '|' + keyname
                                attachContentSaved2AwsS3OSS = True
                        except Exception as err:
                            logging.info(traceback.format_exc())
                            logging.error(f"上传AWS S3异常，Error:{err}")

                    attachContentSaved2File = False
                    if (RECV_CONF['import-setting'].get('webapp_attachment_path')) and not attachContentSaved2AwsS3OSS:
                        logging.info(f"开始保存附件至WebAppAttachmentPath:{RECV_CONF['import-setting'].get('webapp_attachment_path')}")

                        try:
                            webpath = Path(RECV_CONF['import-setting'].get('webapp_attachment_path')) / datetime.datetime.now().strftime(
                                RECV_CONF['import-setting'].get('webapp_attachment_path_pattern'))
                            check_directory_exist(webpath, not_empty=True, create=True)
                            attachName = attachInfo['attachName']
                            websavepath = str(attachInfo['id'])
                            if attachName and attachName.find('.') >= 0:
                                websavepath += '.' + attachName.split('.')[-1]
                            else:
                                websavepath += '.dat'
                            websavepath = webpath.joinpath(websavepath)
                            shutil.copyfile(attachInfo['path'], websavepath)
                            logging.info(f"已将附件拷贝至websavepath，删除{attachInfo['path']}")
                            os.remove(attachInfo['path'])
                            attachInfo['path'] = websavepath
                            logging.info(f"附件保存至WebAppAttachmentPath路径:{websavepath}")
                            attachContentSaved2File = True

                            if RECV_CONF['import-setting'].get('webapp_attachment_path2'):
                                logging.info(
                                    f"开始保存附件至WebAppAttachmentPath2: {RECV_CONF['import-setting'].get('webapp_attachment_path2')}")
                                webpath2 = Path(RECV_CONF['import-setting'].get('webapp_attachment_path2')) / datetime.datetime.now().strftime(
                                    RECV_CONF['import-setting'].get('webapp_attachment_path_pattern'))
                                check_directory_exist(webpath2, not_empty=True, create=True)
                                attachName = attachInfo['attachName']
                                websavepath2 = str(attachInfo['id'])
                                if attachName is not None and attachName.find('.') >= 0:
                                    websavepath2 += attachName[attachName.find('.'):]
                                else:
                                    websavepath2 += '.dat'
                                websavepath2 = webpath2.joinpath(websavepath2)
                                shutil.copyfile(attachInfo['path'], websavepath2)
                                logging.info(
                                    f"附件保存至WebAppAttachmentPath2路径:{websavepath2}")
                        except Exception as err:
                            logging.error(f"保存附件时出现异常, Error: {err}")

                    if RECV_CONF['import-setting'].get('save_attachment_content_db') or (
                            not attachContentSaved2File and not attachContentSaved2AwsS3OSS):
                        logging.info(f"开始保存附件至数据库:{attachInfo['id']}")
                        ImportDao.update_attachment(import_conn, attachInfo, save_content=True)
                        logging.info(f"附件内容已保存，attachmentID:{attachInfo['id']}")

                        if RECV_CONF['import-setting'].get('save_attachment_second') and secondDbOk:
                            logging.info(
                                f"开始保存附件至数据库2:{attachInfo['id']}")
                            rowcount = ImportDao.update_attachment(import_conn, attachInfo, save_content=True,
                                                                        database=DB.db_Second.value)
                            if rowcount == 1:
                                logging.info(
                                    f"附件内容已保存数据库2，attachmentID:{attachInfo['id']}")
                            else:
                                logging.error("第二数据库保存失败, 改为保存第一数据库!")
                                logging.info(
                                    f"开始附件内容的第一数据库保存:{attachInfo['id']}")
                                ImportDao.update_attachment(import_conn, attachInfo, save_content=True)
                                logging.info("附件内容数据库保存成功！")

                    else:
                        if attachContentSaved2AwsS3OSS:
                            attachInfo['storeType'] = AttachmentStoreType.S3.value
                        else:
                            attachInfo['storeType'] = AttachmentStoreType.File.value
                        logging.info(f"开始更新附件数据库:{attachInfo['id']}")
                        ImportDao.update_attachment(import_conn, attachInfo)
                        logging.info(f"更新附件数据库成功, attachment id:{attachInfo['id']}\tattachment path:{attachInfo['path']}")
                        if RECV_CONF['import-setting'].get('save_attachment_second') and secondDbOk:
                            logging.info(
                                f"开始更新附件数据库2，attachment id:{attachInfo['id']}")
                            rowcount = ImportDao.update_attachment(import_conn, attachInfo,
                                                                        database=DB.db_Second.value)
                            if rowcount == 1:
                                logging.info(f"更新附件数据库2成功, attachment id:{attachInfo['id']}")
                            else:
                                logging.error("更新附件数据库2失败!")

                    # msg.attachments.append(attachInfo)
                    logging.info(f"Single attachment save doc 2.\tDocId:{self.doc_id}")
                else:  # 存在重复附件
                    logging.info("邮件被判为重复，不再导入docreport、docgeneral和attachment表。")
                    if existAttInfo:  # 有效附件
                        logging.info(
                            f"单附件被判为重复:{existAttInfo['Name']}\tdocid:{existAttInfo['DocId']}")
                        self.doc_id = existAttInfo['DocId']
                    else:
                        logging.error("无有效附件")
                    if write_time != "":
                        logging.info(f"DocId={self.doc_id}，更新docreport表的writetime字段")
                        if not ImportDao.update_docreport_time(self.doc_id, write_time):
                            return
            # 两封及以上附件
            elif len(msg.attachments) > 1:
                exist = False
                existAttInfo = {}
                # 新增->XNCF_开头的邮件不作重复处理
                if msg.subject.startswith('XNCF_'):
                    logging.info("邮件原始标题以'XNCF_'开头, 不查重")
                else:
                    logging.info(f"存在{len(msg.attachments)}封附件, 检查是否重复")
                    checkAttach = {}
                    validAttachmentCount = 0
                    for attachment in msg.attachments:
                        if attachment['attachName'] and check_suffix_compatible(
                                attachment['attachName'], checked_file_types):
                            validAttachmentCount += 1
                            checkAttach = attachment

                    # 多附件的情况：只有单封有效附件重复才认定该邮件为重复邮件
                    if validAttachmentCount == 1:
                        logging.info(f"多附件查重. name:{checkAttach['attachName']}")
                        try:
                            md5file = RECV_CONF['receive-setting'].get('rawattach_path').joinpath(checkAttach['attachName']) if oss_region.get('oss_name') else checkAttach['path']
                            md5 = get_md5(md5file)
                            logging.info(
                                f"检查附件MD5 (1):{md5}\t{checkAttach['attachName']}\tMail Subject:{msg.subject}")
                            existAttInfo = find_attachment_by_md5(import_conn, checkAttach['attachName'], md5)
                            foundValidDup = True
                            if existAttInfo:
                                logging.info("find md5 match record in attachment table 2")
                                existedAttId = existAttInfo['Id']
                                existedDocId = existAttInfo['DocId']
                                attname = existAttInfo['Name']
                                logging.info(
                                    f"Attachment id in database:{existedAttId}\tDocId:{existedDocId}\tAttName:{attname}\t"
                                    f"OriginalTitle:{existAttInfo['DocOriginalTitle']}")
                                if md5.lower() != existAttInfo['Digest'].lower():
                                    logging.info(
                                        f"md5值不匹配，new:{md5}\texisted:{existAttInfo['Digest']}")
                                    if abs(checkAttach['attachSize'] - existAttInfo['Size']) > 1024:
                                        logging.info(
                                            "文件大小不同(size difference more than 1024 in attachment table record):"
                                            f"{checkAttach['attachSize']}==>{existAttInfo['Size']}")
                                        foundValidDup = False
                                    else:
                                        logging.info(
                                            "文件大小相近(size difference less than 1024 in attachment table record): "
                                            f"{checkAttach['attachSize']}==>{existAttInfo['Size']}")
                                else:
                                    logging.info("md5值匹配，判断为重复！")
                            else:
                                logging.info(
                                    "attachment表未匹配到相同MD5 2, 再根据附件名和文件大小来查重(size difference less than 10000)")
                                existAttInfo = find_attachment_by_size(import_conn,
                                                                       checkAttach['attachName'],
                                                                       checkAttach['attachSize'])
                                if existAttInfo:
                                    logging.info(
                                        "find duplicated attachment according to name and size 2:"
                                        f"{existAttInfo['DocOriginalTitle']}\tAttName:{checkAttach['attachName']}\tSize:{checkAttach['attachSize']}")
                                else:
                                    logging.info("根据附件名和文件大小，未发现重复附件 2")
                                    foundValidDup = False
                            if foundValidDup:
                                exist = True
                                is_duplicated = True
                                logging.info("附件重复!")
                            else:
                                logging.info("未发现有效附件, 当前邮件不判重 2")
                        except Exception as e:
                            logging.error(e)

                    else:
                        logging.info("存在多封有效附件或无有效附件, 不再进行重复检查.")

                # 查询数据库后，没有重复附件的情况
                if not exist:
                    logging.info('开始保存 docreport/docgeneral')
                    if self.table_insert(import_conn, 'docreport', msg) == 'error':
                        return
                    attachment_flag = 0 if existAttInfo else 1
                    if self.table_insert(import_conn, 'docgeneral', msg, attachment_flag) == 'error':
                        return

                    # logging.info(
                    #     "Doc not exists duplicate attachments or doc title start with XNCF_, then save new doc3:{}".format(
                    #         docid))
                    # 对无有效附件的情况不入附件库
                    if not existAttInfo:
                        index = 0
                        attachments = copy.deepcopy(msg.attachments)
                        msg.attachments.clear()
                        for attachment in attachments:
                            index += 1
                            logging.info(f"Attachment_{index}:{attachment['attachName']}")
                            if not attachment['attachName']:
                                attachment['attachName'] = f"{msg.subject}附件({index})"
                            attachment['storeType'] = AttachmentStoreType.Database.value
                            if not os.path.exists(attachment['path']):
                                logging.error(f"not existed:{attachment['path']}")
                                continue
                            logging.info(f"attachment path:{attachment['path']}")
                            attachment['digest'] = get_md5(attachment['path'])
                            logging.info(
                                f"开始附件表数据导入\tattachmentStoreType:{attachment['storeType']}")
                            rowcount, attachmentid = ImportDao.insert_attachment(import_conn, self.doc_id, attachment)
                            if rowcount == 1:
                                attachment['id'] = attachmentid
                                logging.info(
                                    f"附件表导入成功:{index}\tID:{attachment['id']}\t"
                                    f"Size:{attachment['attachSize']}\tMD5:{attachment['digest']}")
                            else:
                                logging.error("附件表导入失败")
                                continue
                            secondDbOk = False
                            if RECV_CONF['import-setting'].get('save_attachment_second'):
                                logging.info(
                                    f"开始附件表数据导入(second database)\tattachid:{attachment['id']}\tattachmentStoreType:{attachment['storeType']}")
                                rowcount = ImportDao.insert_attachment2(self.doc_id, attachment)
                                if rowcount == 1:
                                    logging.info(
                                        f"附件表导入(second database)成功:{index}\tattachid:{attachment['id']}\t"
                                        f"Size:{attachment['attachSize']}\tMD5:{attachment['digest']}")
                                    secondDbOk = True
                                else:
                                    logging.error("附件表导入(second database)失败")

                            attachContentSaved2AwsS3OSS = False
                            if oss_region.get('oss_name') == 's3':
                                from docwork.utils.oss import S3Client
                                logging.info("开始保存附件至AWS S3 OSS.")
                                try:
                                    attach_fn = attachment['path'].split('\\')[-1]
                                    keyname = RECV_CONF['IMPORT-SETTING'].get('use-oss').get('key_name')
                                    if not keyname:
                                        keyname = f"/sinitek/attachment/{datetime.datetime.now().strftime('%Y%m%d')}/{attach_fn}"
                                    attachment['storeType'] = AttachmentStoreType.S3.value
                                    s3 = S3Client(oss_config=oss_region)
                                    if s3.upload(oss_region.get('bucket_name'), file_data=attachment['path'],
                                                 key_name=keyname, type='file'):
                                        attachment[
                                            'path'] = oss_region.get('endpoint_url') + '|' + oss_region.get('bucket_name') + '|' + keyname
                                        attachContentSaved2AwsS3OSS = True
                                except Exception as err:
                                    logging.info(traceback.format_exc())
                                    logging.error(f"上传AWS S3异常, Error:{err}")

                            attachContentSaved2File = False
                            if RECV_CONF['import-setting'].get('webapp_attachment_path') and not attachContentSaved2AwsS3OSS:
                                logging.info(f"开始保存附件至WebAppAttachmentPath:{RECV_CONF['import-setting'].get('webapp_attachment_path')}")
                                try:
                                    webpath = Path(RECV_CONF['import-setting'].get('webapp_attachment_path')) / datetime.datetime.now().strftime(
                                        RECV_CONF['import-setting'].get('webapp_attachment_path_pattern'))
                                    check_directory_exist(webpath, not_empty=True, create=True)
                                    attachName = attachment['attachName']
                                    websavepath = str(attachment['id'])
                                    if attachName and attachName.find('.') >= 0:
                                        websavepath += '.' + attachName.split('.')[-1]
                                    else:
                                        websavepath += '.dat'
                                    websavepath = webpath.joinpath(websavepath)
                                    shutil.copyfile(attachment['path'], websavepath)
                                    logging.info(f"已将附件拷贝至websavepath，删除{attachment['path']}")
                                    os.remove(attachment['path'])
                                    attachment['path'] = websavepath
                                    logging.info(
                                        f"附件已存储至WebAppAttachmentPath路径:{websavepath}")
                                    attachContentSaved2File = True

                                    if RECV_CONF['import-setting'].get('webapp_attachment_path2'):
                                        logging.info(
                                            f"开始保存附件至WebAppAttachmentPath2: {RECV_CONF['import-setting'].get('webapp_attachment_path2')}")
                                        webpath2 = Path(RECV_CONF['import-setting'].get('webapp_attachment_path2')) / datetime.datetime.now().strftime(
                                            RECV_CONF['import-setting'].get('webapp_attachment_path_pattern'))
                                        check_directory_exist(webpath2, not_empty=True, create=True)
                                        attachName = attachment['attachName']
                                        websavepath2 = str(attachment['id'])
                                        if attachName is not None and attachName.find('.') >= 0:
                                            websavepath2 += attachName[attachName.find('.'):]
                                        else:
                                            websavepath2 += '.dat'
                                        websavepath2 = Path(webpath2).joinpath(websavepath2)
                                        shutil.copyfile(attachment['path'], websavepath2)
                                        logging.info(
                                            f"附件保存至WebAppAttachmentPath2路径:{websavepath2}")
                                except Exception as err:
                                    logging.error(f"保存附件时出现异常,Error{err}")

                            if RECV_CONF['import-setting'].get('save_attachment_content_db') or (
                                    not attachContentSaved2File and not attachContentSaved2AwsS3OSS):
                                logging.info(f"开始保存附件至数据库:{attachment['id']}")
                                ImportDao.update_attachment(import_conn, attachment, save_content=True)
                                logging.info(f"附件内容导入数据库成功，attachment id:{attachment['id']}")

                                if RECV_CONF['import-setting'].get('save_attachment_second') and secondDbOk:
                                    logging.info(
                                        f"开始保存附件至数据库2:{attachment['id']}")
                                    rowcount = ImportDao.update_attachment(import_conn, attachment,
                                                                                save_content=True,
                                                                                database=DB.db_Second.value)
                                    if rowcount == 1:
                                        logging.info(
                                            f"附件内容导入第二数据库成功，attachment id:{attachment['id']}")
                                    else:
                                        logging.error("save second db failed, try to save 1st db!")
                                        logging.info(
                                            f"开始保存附件内容至数据库:{attachment['id']}")
                                        ImportDao.update_attachment(import_conn, attachment, save_content=True)
                                        logging.info(f"附件内容导入数据库成功，attachment id:{attachment['id']}")
                            else:
                                if attachContentSaved2AwsS3OSS:
                                    attachment['storeType'] = AttachmentStoreType.S3.value
                                else:
                                    attachment['storeType'] = AttachmentStoreType.File.value
                                logging.info(f"开始更新附件数据库，attachment id:{attachment['id']}")
                                ImportDao.update_attachment(import_conn, attachment)
                                logging.info(f"更新附件数据库成功，attachment path:{attachment['path']}")
                                if RECV_CONF['import-setting'].get('save_attachment_second') and secondDbOk:
                                    logging.info(
                                        f"开始更新附件数据库2，attachment id:{attachment['id']}")
                                    rowcount = ImportDao.update_attachment(import_conn, attachment,
                                                                                database=DB.db_Second.value)
                                    if rowcount == 1:
                                        logging.info("更新附件数据库2成功")
                                    else:
                                        logging.error("更新附件数据库2失败")

                            # msg.attachments.append(attachment)
                        logging.info(f"多附件 save new doc 4\tDocId:{self.doc_id}")
                else:  # 存在重复附件
                    if existAttInfo:
                        logging.info(f"多附件查重流程，存在重复附件:{existAttInfo['Name']}\t{existAttInfo['DocId']}")
                        self.doc_id = existAttInfo['DocId']
                    else:
                        logging.error("无有效附件")
                    logging.info(f"DocId={self.doc_id}")
                    if write_time != "":
                        if not ImportDao.update_docreport_time(self.doc_id, write_time):
                            return
            # 无附件
            else:
                logging.info('开始保存docreport/docgeneral表')
                if self.table_insert(import_conn, 'docreport', msg) == 'error':
                    return
                if self.table_insert(import_conn, 'docgeneral', msg) == 'error':
                    return
                logging.info(f"no attachment save new doc 5\tDocId:{self.doc_id}")

            # ====================KM_RECVSENDINFO 和 KM_DOCRECIEVELOG表入库处理========================
            logging.info("开始保存KM_RECVSENDINFO/KM_DOCRECIEVELOG表")
            send_flag = 0
            if is_duplicated and RECV_CONF['import-setting'].get(''):
                logging.info(f"邮件判断为重复，并且为客户端程序，send_flag值设为-1")
                send_flag = -1
            if self.table_insert(import_conn, 'km_recvsendinfo', msg, send_flag) == 'error':
                return
            # if rsid and not RECV_CONF['receive-setting'].get('client_mode'):
            #     # Server版的数据库km_docrecievelog表id是根据km_recvsendinfo表的id字段来的。
            #     pass
            if self.table_insert(import_conn, 'km_docrecievelog', msg) == 'error':
                return

            if not is_duplicated:
                update_rawmaillog_xinfo(import_conn, self.rawmaillog_id, 2)
                logging.info("邮件不重复，更新km_rawmaillog表的xinfo字段为2")
            else:
                update_rawmaillog_xinfo(import_conn, self.rawmaillog_id, -1)
                logging.info("邮件重复！更新km_rawmaillog表的xinfo字段为-1")
            import_conn.commit()
            import_conn.close()
            logging.info(f"数据入库成功! Current rawmaillog id:{self.rawmaillog_id}")
        except Exception as err:
            logging.info(traceback.format_exc())
            logging.error(f"数据入库失败，Error:{err}")
            if RECV_CONF['import-setting'].get('exception_sync'):
                ReceiveDao.delete_rawmaillog(self.rawmaillog_id)
            import_conn.rollback()
            import_conn.close()

    def logic1(self) -> None:
        """Placeholder hook; intentionally a no-op (not yet implemented)."""
        pass



