# -*- coding: utf-8 -*-
"""
@File:    import_dao.py
@Author:  jk.dong
@Desc:    邮件入库的sql模块
"""
import traceback
from enum import Enum
from docwork.dao.base_dao import (BaseDao, fit_sql, RUN_CONF, db1_type, db1_address,
                                  db1_name, db1_port, db1_username, db1_password)
from docwork.dao.receive_dao import get_entityconfig
from docwork import logging, datetime
from docwork.config import (second_db, db2_type, db2_address, db2_name,
                        db2_port, db2_username, db2_password, table_docreport,
                        table_docgeneral, table_attachment, table_km_recvsendinfo,
                        table_km_docrecievelog, table_km_rawmaillog)


# Initialize database access objects.
# IMPORT_DB always points at the primary database; IMPORT_DB2 only exists when
# `second_db` is enabled in the config — NOTE(review): code that touches
# IMPORT_DB2 will raise NameError if second_db is False; verify callers guard it.
IMPORT_DB = BaseDao(db1_type, username=db1_username, password=db1_password, address=db1_address, dbname=db1_name,
                 port=db1_port)
if second_db:
    IMPORT_DB2 = BaseDao(db2_type, username=db2_username, password=db2_password, address=db2_address, dbname=db2_name,
                      port=db2_port)
# cx_Oracle is only needed for the Oracle-specific LOB bind types used below.
if db1_type == 1:
    import cx_Oracle


class DB(Enum):
    """Selects which configured database an operation should target."""

    db_Main = 1  # primary database
    db_Second = 2  # secondary database


class ImportDao:
    """SQL helpers that write imported-mail records into the document tables.

    Every method builds a dialect-specific SQL string keyed off ``db1_type``.
    From the branches below, 1 appears to be Oracle (TO_DATE, cx_Oracle LOBs),
    2 MySQL-style (%(name)s binds), 3 a TO_TIMESTAMP dialect, 4 DM/Dameng
    (values inlined via str.format, dmPython), 5 Oracle-compatible —
    NOTE(review): confirm this mapping against BaseDao / the deploy config.
    """

    @staticmethod
    def insert_docreport(conn, msg):
        """Insert one row into the docreport table.

        :param conn: open connection/session passed through to IMPORT_DB.
        :param msg: parsed mail object; reads stk_code, industry_rank_origin,
                    invest_rank_origin, broker_id, subject, content,
                    receive_time.
        :return: tuple ``(rowcount, objid)``; objid is 0 when the insert
                 did not affect exactly one row.
        """
        current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # New OBJID allocated from the entity-config sequence for DOCREPORT.
        current_id = get_entityconfig('DOCREPORT', con=conn)
        sql = "INSERT INTO " + table_docreport + " (OBJID,CLSID,VERSION,SHAREMODE,REMOVETAG,CREATETIMESTAMP,UPDATETIMESTAMP," \
                                                 "STKCODE,INDUSTRYCODE,LASTSTOCKPRICE,LASTINDUSTRYPRICE,LASTMARKETPRICE,ISEVALUATE,INDUSTRYRANK," \
                                                 "INDUSTRYRANKORIGIN,LASTINDUSTRYRANK,LASTINDUSTRYRANKORIGIN,INVESTRANK,INVESTRANKORIGIN,LASTINVESTRANK," \
                                                 "LASTINVESTRANKORIGIN,TARGETPRICE,TARGETPRICEHIGH,ISESTIMATE,YEARE,NETINCOMET,NETINCOMET1,NETINCOMET2,OPCASHFLOWT,OPCASHFLOWT1,OPCASHFLOWT2,TOTALEQUITYT,TOTALEQUITYT1,TOTALEQUITYT2,CASHDIVIDENTST,CASHDIVIDENTST1,CASHDIVIDENTST2,NETDEBTT,NETDEBTT1,NETDEBTT2,EBITDAT,EBITDAT1,EBITDAT2,TOTALSHAREST,TOTALSHAREST1,TOTALSHAREST2,SALEST,SALEST1,SALEST2,MAINOPPROFITT,MAINOPPROFITT1,MAINOPPROFITT2,MAINOPINCOMINGT,MAINOPINCOMINGT1,MAINOPINCOMINGT2," \
                                                 "BROKERID,ORIGINALTITLE,SECONDTITLE,SUMMARY,WRITETIME,SUBMITTIME,APPROVETIME,ARCHIVETIME," \
                                                 "APPROVESTATUS,ISADJUSTSTKPOOL,TARGETSTKPOOL,TARGETSTKPOOLLEVEL) " \
                                                 "VALUES (" + current_id + ",157,1,0,'0','" + current_time + "','" + current_time + "'," \
              + fit_sql('stkcode') + "," + fit_sql('industrycode') + ",0,0,0,'1',0," \
              + fit_sql('industryrankorigin') + ",0," + fit_sql(
            'lastindustryrankorigin') + ",0," + fit_sql('investrankorigin') + ",0," + fit_sql(
            'lastinvestrankorigin') + ",0,0,'1',0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0," \
              + fit_sql('brokerid') + "," + fit_sql('originaltitle') + "," + fit_sql(
            'secondtitle') + "," + fit_sql('summary')

        # Date/time literal syntax differs per database dialect.
        if db1_type == 1 or db1_type == 4 or db1_type == 5:
            sql += ",TO_DATE(" + fit_sql('writetime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_DATE(" + fit_sql(
                'submittime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_DATE(" + fit_sql(
                'approvetime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_DATE(" + fit_sql(
                'archivetime') + ",'YYYY-MM-DD HH24:MI:SS'),0,'0',0,0)"
        elif db1_type == 2:
            sql += ",%(writetime)s,%(submittime)s,%(approvetime)s,%(archivetime)s,0,'0',0,0)"
        elif db1_type == 3:
            sql += ",TO_TIMESTAMP(" + fit_sql('writetime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP(" + fit_sql(
                'submittime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP(" + fit_sql(
                'approvetime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP(" + fit_sql(
                'archivetime') + ",'YYYY-MM-DD HH24:MI:SS'),0,'0',0,0)"
        values = {
            'stkcode': str(msg.stk_code),
            'industrycode': '',
            'industryrankorigin': str(msg.industry_rank_origin),
            'lastindustryrankorigin': '',  # previous raw industry rating (not curated for now)
            'investrankorigin': str(msg.invest_rank_origin),
            'lastinvestrankorigin': '',  # previous raw invest rating (not curated for now)
            'brokerid': int(msg.broker_id),
            'originaltitle': str(msg.subject),
            'secondtitle': '',  # second title (unused)
            # NOTE(review): quotes are doubled even though the value is bound
            # via params on some dialects — confirm fit_sql's escaping rules.
            'summary': str(msg.content.replace("'", "''")),
            'writetime': msg.receive_time.strftime('%Y-%m-%d %H:%M:%S') if msg.receive_time else None,
            # write time (actually the mail server receive time; must not be used as the report time)
            'submittime': msg.receive_time.strftime('%Y-%m-%d %H:%M:%S') if msg.receive_time else None,  # report curation submit time
            'approvetime': current_time,  # report curation approval time
            'archivetime': current_time  # report curation approval time (same value reused for archive)
        }
        if db1_type == 1:
            # Oracle needs an explicit CLOB bind type for the large summary field.
            args = {'summary': cx_Oracle.CLOB}
        elif db1_type == 2 or db1_type == 3 or db1_type == 5:
            args = {}
        else:
            # db1_type == 4 (DM): inline the values into the SQL text instead of binding.
            args = {}
            sql = sql.format(**values)
            values = {}
        rowcount = IMPORT_DB.update_info(conn, sql, params=values, args=args)
        if rowcount == 1:
            return rowcount, int(current_id)
        else:
            logging.info("插入docreport表异常,rowcount为:%s" % rowcount)
            return rowcount, 0

    @staticmethod
    def insert_docgeneral(conn, msg, objid, attachment_flag=0):
        """Insert one row into the docgeneral table for an existing docreport.

        :param conn: open connection/session.
        :param msg: parsed mail object; reads type_id, analysis_title, researcher.
        :param objid: OBJID of the docreport row this record belongs to.
        :param attachment_flag: 1 when the document carries attachments, else 0.
        :return: rowcount of the insert (1 on success).
        """
        current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        sql = "INSERT INTO " + table_docgeneral + " (OBJID,CLSID,VERSION,SHAREMODE,REMOVETAG,CREATETIMESTAMP,UPDATETIMESTAMP,DOCTYPEID,TITLE,INPUTID,ORIGINALAUTHOR,KEYWORD,STATUS,DOCVERSION,ACCESSCOUNT,SECRET,IMPORTANCE,ATTACHMENTFLAG,COMMENTFLAG) " \
                                                  "VALUES (" + fit_sql(
            'objid') + ",157,1,0,'0','" + current_time + "','" + current_time + "'," + fit_sql(
            "doctypeid") + "," + fit_sql('title') + "," + fit_sql('inputid') \
              + "," + fit_sql('originalauthor') + "," + fit_sql('keyword') + "," + fit_sql(
            'status') + "," + fit_sql('docversion') + ",0," + fit_sql('secret') + ",3," + fit_sql(
            'attachmentflag') + ",0)"
        # Static column defaults (input_id, doc_status, doc_secret) come from config.
        doc_fields = RUN_CONF['import-setting'].get('doc-fields')
        values = {
            'objid': int(objid),
            'doctypeid': int(msg.type_id),
            'title': str(msg.analysis_title),
            'inputid': doc_fields.get('input_id'),
            'originalauthor': str(msg.researcher),  # author (the curated author is also stored in this field)
            'keyword': '',  # keywords (not curated for now)
            'status': doc_fields.get('doc_status'),
            'docversion': '',
            'secret': doc_fields.get('doc_secret'),
            'attachmentflag': int(attachment_flag)
        }
        if db1_type == 4:
            # DM: inline the values into the SQL text instead of binding.
            sql = sql.format(**values)
            values = {}
        rowcount = IMPORT_DB.update_info(conn, sql, params=values)
        if rowcount != 1:
            logging.warning(f"插入docgeneral表异常, rowcount:{rowcount}")
        return rowcount

    @staticmethod
    def insert_attachment(conn, docid, attachment):
        """Insert one attachment row (primary database), allocating a new OBJID.

        :param conn: open connection/session.
        :param docid: OBJID of the owning document.
        :param attachment: dict with keys attachName, attachSize, digest,
                           storeType, path.
        :return: tuple ``(rowcount, objid)``; objid is None on failure.
        """
        current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        current_id = get_entityconfig('ATTACHMENT', con=conn)
        sql = "INSERT INTO " + table_attachment + " (OBJID,CLSID,VERSION,SHAREMODE,REMOVETAG,CREATETIMESTAMP,UPDATETIMESTAMP,DOCID,NAME,CONTENTSIZE,DIGEST,STORETYPE,STOREPATH,CONVERTSTATUS,FILETYPE) VALUES (" \
              + current_id + ",186,1,0,'0','" + current_time + "','" + current_time + "'," + fit_sql(
            'docid') + "," + fit_sql(
            'name') + "," + fit_sql('contentsize') \
              + "," + fit_sql('digest') + "," + fit_sql('storetype') + "," + fit_sql('storepath') + "," + fit_sql(
            'convertstatus') + "," + fit_sql('filetype') + ")"
        values = {
            'docid': int(docid),
            'name': str(attachment['attachName']),
            'contentsize': int(attachment['attachSize']),
            'digest': str(attachment['digest']),
            'storetype': int(attachment['storeType']),
            'storepath': str(attachment['path']),
            'convertstatus': 0,  # conversion status: original document vs. converted document
            'filetype': 0  # attachment type (model, template or other)
        }

        if db1_type == 4:
            # DM: inline the values into the SQL text instead of binding.
            sql = sql.format(**values)
            values = {}
        rowcount = IMPORT_DB.update_info(conn, sql, params=values)
        if rowcount == 1:
            return rowcount, int(current_id)
        else:
            logging.info(f"插入attachment表异常, rowcount:{rowcount}")
            return rowcount, None

    @staticmethod
    def insert_attachment2(docid, attachment):
        """Mirror an attachment row into the SECOND database (IMPORT_DB2).

        Unlike insert_attachment, the OBJID is taken from ``attachment['id']``
        (so the two databases share the same id) and no connection is passed.
        NOTE(review): contentsize is bound as str here but as int in
        insert_attachment — confirm whether this difference is intentional.

        :return: rowcount of the insert (1 on success).
        """
        current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        sql = "INSERT INTO " + table_attachment + " (OBJID,CLSID,VERSION,SHAREMODE,REMOVETAG,CREATETIMESTAMP,UPDATETIMESTAMP,DOCID,NAME,CONTENTSIZE,DIGEST,STORETYPE,STOREPATH,CONVERTSTATUS,FILETYPE) " \
                                                  "VALUES (" + fit_sql(
            'objid') + ",186,1,0,'0','" + current_time + "','" + current_time + "'," + fit_sql('docid') + "," + fit_sql(
            'name') + "," + fit_sql('contentsize') \
              + "," + fit_sql('digest') + "," + fit_sql('storetype') + "," + fit_sql(
            'storepath') + "," + fit_sql('convertstatus') + "," + fit_sql('filetype') + ")"

        values = {
            'objid': int(attachment['id']),
            'docid': int(docid),
            'name': str(attachment['attachName']),
            'contentsize': str(attachment['attachSize']),
            'digest': str(attachment['digest']),
            'storetype': int(attachment['storeType']),
            'storepath': str(attachment['path']),
            'convertstatus': 0,  # conversion status: original document vs. converted document
            'filetype': 0  # attachment type (model, template or other)
        }
        if db1_type == 4:
            # DM: inline the values into the SQL text instead of binding.
            sql = sql.format(**values)
            values = {}
        rowcount = IMPORT_DB2.update(sql, values)
        if rowcount == 1:
            return rowcount
        else:
            logging.info(f"插入attachment表异常(second database), rowcount:{rowcount}")
            return rowcount

    @staticmethod
    def insert_km_recvsendinfo(conn, docid, send_flag, msg):
        """Insert a receive/send bookkeeping row for an imported document.

        :param conn: open connection/session.
        :param docid: OBJID of the owning document.
        :param send_flag: send status flag stored in SENDFLAG.
        :param msg: parsed mail object; reads customer_id and uid.
        :return: tuple ``(rowcount, objid)``; objid is None on failure.
        """
        current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        current_id = get_entityconfig('KM_RECVSENDINFO', con=conn)
        sql = "INSERT INTO " + table_km_recvsendinfo + " ("
        sql += 'OBJID,CLSID,VERSION,SHAREMODE,REMOVETAG,CREATETIMESTAMP,UPDATETIMESTAMP,CUSTOMERID,DOCID,RECVTIME,IMPORTTIME,PROCESSTIME,SENDTIME,SENDFLAG,'
        # The UID column is double-quoted on Oracle/DM — presumably because UID
        # is a reserved identifier there; verify before renaming the column.
        if db1_type == 1 or db1_type == 4:
            sql += '"UID") VALUES (' + current_id + ','
        elif db1_type == 2 or db1_type == 3 or db1_type == 5:
            sql += 'UID) VALUES (' + current_id + ','
        sql += "104,1,0,'0','" + current_time + "','" + current_time + "'," + fit_sql('customerid') + "," + fit_sql(
            'docid')
        if db1_type == 1 or db1_type == 4 or db1_type == 5:
            sql += ",TO_DATE(" + fit_sql('recvtime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_DATE(" + fit_sql(
                'importtime') + ",'YYYY-MM-DD HH24:MI:SS')," \
                                "TO_DATE(" + fit_sql(
                'processtime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_DATE(" + fit_sql(
                'sendtime') + ",'YYYY-MM-DD HH24:MI:SS')," + fit_sql('sendflag') + "," + fit_sql(
                'msguid') + ")"
        elif db1_type == 2:
            sql += "," + fit_sql('recvtime') + "," + fit_sql('importtime') + "," + fit_sql(
                'processtime') + "," + fit_sql('sendtime') + "," + fit_sql(
                'sendflag') + "," + fit_sql('msguid') + ")"
        elif db1_type == 3:
            sql += ",TO_TIMESTAMP(" + fit_sql('recvtime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP(" + fit_sql(
                'importtime') + ",'YYYY-MM-DD HH24:MI:SS')," \
                                "TO_TIMESTAMP(" + fit_sql(
                'processtime') + ",'YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP(" + fit_sql(
                'sendtime') + ",'YYYY-MM-DD HH24:MI:SS')," + fit_sql('sendflag') + "," + fit_sql(
                'msguid') + ")"

        values = {'customerid': int(msg.customer_id),
                  'docid': int(docid),
                  'sendflag': int(send_flag),
                  'msguid': str(msg.uid),
                  'recvtime': current_time,
                  'importtime': current_time,
                  # '1980-01-01 00:00:00' acts as the "not yet happened" sentinel.
                  'processtime': '1980-01-01 00:00:00',
                  'sendtime': '1980-01-01 00:00:00'
                  }

        # rowcount = RecvDB.update(sql, values)
        if db1_type == 4:
            # DM: inline the values into the SQL text instead of binding.
            sql = sql.format(**values)
            values = {}
        rowcount = IMPORT_DB.update_info(conn, sql, params=values)
        if rowcount == 1:
            return rowcount, int(current_id)
        else:
            logging.warning(f"插入km_recvsendinfo表异常, rowcount:{rowcount}")
            return rowcount, None

    @staticmethod
    def insert_km_docrecievelog(conn, docid, status, msg):
        """Insert a document-receive log row linking the mail to its document.

        :param conn: open connection/session.
        :param docid: OBJID of the owning document.
        :param status: receive status stored in STATUS.
        :param msg: parsed mail object; reads sellside_docid, customer_id,
                    uid, from_, rawmaillog_id.
        :return: tuple ``(rowcount, objid)``; objid is None on failure.
        """
        current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        current_id = get_entityconfig('KM_DOCRECIEVELOG', con=conn)
        sql = f"INSERT INTO {table_km_docrecievelog} (OBJID,CLSID,VERSION,SHAREMODE,REMOVETAG,CREATETIMESTAMP,UPDATETIMESTAMP,SELLSIDEDOCID,CUSTOMERID,DOCID,"
        # UID quoted on Oracle/DM, unquoted elsewhere (see insert_km_recvsendinfo).
        if db1_type == 1 or db1_type == 4:
            sql += '"UID"'
        elif db1_type == 2 or db1_type == 3 or db1_type == 5:
            sql += 'UID'
        sql += ',RECIEVETIME,UPDATETIME,STATUS,MAILFROM,RAWMAILLOGID) VALUES ('
        sql += current_id + ",10047,1,0,'0','" + current_time + "','" + current_time + "'," + fit_sql(
            'sellsidedocid') + "," \
               + fit_sql('customerid') + "," + fit_sql('docid') + "," + fit_sql('msguid')

        if db1_type == 1 or db1_type == 4 or db1_type == 5:
            sql += ",TO_DATE('" + current_time + "','YYYY-MM-DD HH24:MI:SS'),TO_DATE('1980-01-01 00:00:00','YYYY-MM-DD HH24:MI:SS'),"
        elif db1_type == 2:
            sql += ",'" + current_time + "','1980-01-01 00:00:00',"
        elif db1_type == 3:
            sql += ",TO_TIMESTAMP('" + current_time + "','YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP('1980-01-01 00:00:00','YYYY-MM-DD HH24:MI:SS'),"

        sql += fit_sql('status') + ',' + fit_sql('mailfrom') + ',' + fit_sql('rawmaillogid') + ')'

        values = {
            'sellsidedocid': int(msg.sellside_docid),
            'customerid': int(msg.customer_id),
            'docid': int(docid),
            'msguid': str(msg.uid),
            'status': int(status),
            'mailfrom': str(msg.from_),
            'rawmaillogid': int(msg.rawmaillog_id)
        }

        if db1_type == 4:
            # DM: inline the values into the SQL text instead of binding.
            sql = sql.format(**values)
            values = {}
        rowcount = IMPORT_DB.update_info(conn, sql, params=values)
        if rowcount == 1:
            return rowcount, int(current_id)
        else:
            logging.info(f"插入km_docrecievelog表异常, rowcount:{rowcount}")
            return rowcount, None

    @staticmethod
    def update_attachment(conn, attachment, save_content=False, database=DB.db_Main.value):
        """Update an attachment row's storage info (and optionally its content).

        :param conn: open connection/session (used for the main database).
        :param attachment: dict with keys id, storeType, path.
        :param save_content: when True, read the attachment file from disk and
                             write it into the BLOB CONTENT column, clearing
                             STOREPATH.
        :param database: DB.db_Main.value or DB.db_Second.value, selecting
                         IMPORT_DB vs IMPORT_DB2.
        :return: rowcount of the update (1 on success).
        """
        current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        sql = "UPDATE " + table_attachment + " SET VERSION=VERSION+1,STORETYPE=" + fit_sql(
            'storetype') + ",STOREPATH=" + fit_sql('storepath') + ",UPDATETIMESTAMP='" + current_time + "' "

        values = {
            'storetype': int(attachment['storeType']),
            'storepath': str(attachment['path']),
            'objid': int(attachment['id'])
        }

        args = {}
        if save_content:
            # When the attachment content is stored in the DB, clear storepath.
            values['storepath'] = ''
            sql += ",CONTENT=" + fit_sql('content')
            if db1_type == 1:
                args['content'] = cx_Oracle.BLOB
            elif db1_type == 4:  # DM handles binary data much like Oracle
                import dmPython
                args['content'] = dmPython.BLOB
            # http paths refer to files already downloaded into rawattach_path.
            attachment_file = RUN_CONF['receive-setting'].get('rawattach_path') + str(attachment['path']).split('/')[-1] if str(attachment[
                'path']).startswith('http') else str(attachment['path'])
            with open(attachment_file, 'rb') as fp:
                content = fp.read()
                values['content'] = content

        sql += " WHERE OBJID=" + fit_sql('objid') + " AND CLSID=186"
        rowcount = 0
        if db1_type == 1 or db1_type == 3 or db1_type == 5:
            if database == DB.db_Main.value:
                rowcount = IMPORT_DB.update_info(conn, sql, params=values, args=args)
            else:
                rowcount = IMPORT_DB2.update(sql, values, args=args)
        elif db1_type == 2:
            if database == DB.db_Main.value:
                rowcount = IMPORT_DB.update_info(conn, sql, params=values)
            else:
                rowcount = IMPORT_DB2.update(sql, values)
        elif db1_type == 4:
            if save_content:  # gotcha: the CONTENT column is a BLOB, so the SQL must be rebuilt with positional binds
                sql = f"UPDATE {table_attachment} SET VERSION=VERSION+1,STORETYPE=:1,STOREPATH=:2,UPDATETIMESTAMP=:3,CONTENT=:4 WHERE OBJID=:5 AND CLSID=186"
                values = (attachment['storeType'], str(attachment['path']), current_time, content, attachment['id'])
                logging.info(f"dmDB:更新数据库附件内容")
                if database == DB.db_Main.value:
                    rowcount = IMPORT_DB.update_info(conn, sql, params=values, args=args)
                else:
                    rowcount = IMPORT_DB2.update(sql, values, args=args)
            else:
                # DM without content: inline values into the SQL text.
                sql = sql.format(**values)
                values = {}
                if database == DB.db_Main.value:
                    rowcount = IMPORT_DB.update_info(conn, sql, params=values, args=args)
                else:
                    rowcount = IMPORT_DB2.update(sql, values, args=args)
        if rowcount == 1:
            return rowcount
        else:
            logging.info(f"更新attachment表异常, rowcount:{rowcount}")
            return rowcount

    @staticmethod
    def update_docreport_time(docid, writetime):
        """Move a docreport's WRITETIME earlier, but never later.

        The WHERE clause only matches when the stored WRITETIME is greater
        than ``writetime``, so this can only pull the time backwards.
        NOTE(review): the table name DOCREPORT is hardcoded here instead of
        using table_docreport, and IMPORT_DB.update is called (no conn)
        rather than update_info as elsewhere — confirm both are intended.

        :param docid: OBJID of the docreport row.
        :param writetime: 'YYYY-MM-DD HH:MM:SS' string.
        :return: True when exactly one row was updated, else False.
        """
        try:
            if db1_type == 1 or db1_type == 4 or db1_type == 5:
                sql = "UPDATE DOCREPORT SET WRITETIME = TO_DATE('" + writetime + "','yyyy-mm-dd hh24:mi:ss') WHERE OBJID=" + str(
                    docid) + " AND WRITETIME > TO_DATE('" + writetime + "','yyyy-mm-dd hh24:mi:ss')"
            elif db1_type == 2:
                sql = "UPDATE DOCREPORT SET WRITETIME = " + str(writetime) + " WHERE OBJID=" + str(
                    docid) + " AND WRITETIME > " \
                      + str(writetime)
            elif db1_type == 3:
                sql = "UPDATE DOCREPORT SET WRITETIME = TO_TIMESTAMP('" + writetime + "','yyyy-mm-dd hh24:mi:ss') WHERE OBJID=" + str(
                    docid) + " AND WRITETIME > TO_TIMESTAMP('" + writetime + "','yyyy-mm-dd hh24:mi:ss')"
            p1 = IMPORT_DB.update(sql)
            if p1 == 1:
                logging.info("docreport表writetime字段更新成功")
                return True
            else:
                logging.error(f"docreport表writetime字段更新失败\trowcount={p1}")
                return False
        except Exception as err:
            logging.error(f"更新docreport表writetime字段时出现异常. Error:{err}")
            return False


def find_attachment_by_md5(conn, name, md5):
    """Find a previously-imported attachment with the same MD5 digest.

    Rows matching the digest are scanned newest-document-first; the first row
    whose attachment name equals ``name`` and whose document carries exactly
    one office-type attachment is considered a duplicate.

    :param conn: open connection/session.
    :param name: attachment file name to match exactly.
    :param md5: digest string to look up.
    :return: info dict for the duplicate, or None when no duplicate is found
             (also None after logging if any error occurs).
    """
    try:
        sql = "SELECT a.objid as attachmentid, a.name as attachmentname,a.contentsize,b.title,b.objid as docid,a.digest,c.brokerid,c.originaltitle " \
              f"FROM {table_attachment} a,{table_docgeneral} b,{table_docreport} c WHERE a.docid=b.objid AND a.docid=c.objid AND a.removetag='0' AND b.title IS NOT NULL AND b.removetag='0' AND a.digest=" + fit_sql(
            'digest') + " ORDER BY b.objid DESC"
        values = {'digest': str(md5)}

        if db1_type == 4:
            # DM: inline the parameter instead of binding it.
            sql = sql.format(**values)
            values = {}
        rows = IMPORT_DB.get_info(conn, sql, params=values)
        if not rows:
            return None

        # Several rows may share the digest; only accept one whose document
        # has a single attachment, so a doc-level duplicate is unambiguous.
        for row in rows:
            existed_attid = row[0] if row[0] else -1
            logging.info(f"existedAttId:{existed_attid}")
            existed_docid = row[4] if row[4] else -1
            logging.info(f"existedDocId:{existed_docid}")
            existed_digest = row[5] if row[5] else ''
            logging.info(f"existedDigest:{existed_digest}")
            existed_size = row[2] if row[2] else -1
            logging.info(f"existedSize:{existed_size}")
            attname = row[1] if row[1] else ''
            logging.info(f"attname:{attname}")
            doc_title = row[3] if row[3] else ''
            logging.info(f"doctitle:{doc_title}")
            existed_broker_id = row[6] if row[6] else -1
            logging.info(f"existedBrokerId:{existed_broker_id}")
            doc_original_title = row[7] if row[7] else ''
            logging.info(f"docoriginaltitle:{doc_original_title}")

            attachment_info = {
                'Id': existed_attid,
                'DocId': existed_docid,
                'BrokerId': existed_broker_id,
                'Name': attname,
                'Size': existed_size,
                'Digest': existed_digest,
                'DocTitle': doc_title,
                'DocOriginalTitle': doc_original_title,
            }

            if name != attname:
                logging.info(
                    f"附件MD5匹配但附件名不一致({name}==>db:{attname}), 继续查重...")
                continue

            _existDocAttachmentCount = find_attachment_count(conn, existed_docid)
            logging.info(
                f"查询到相似附件 docid: {existed_docid}\t附件数量: {_existDocAttachmentCount}")
            if _existDocAttachmentCount == 1:
                logging.info("附件MD5匹配、附件名一致且附件数为1")
                logging.info(f"存在重复附件 attachmentId: {existed_attid}\t{attname}")
                return attachment_info
            logging.info("附件数量不匹配(not 1 attachment)")

        return None
    except Exception as err:
        logging.info(traceback.format_exc())
        logging.error(f"附件MD5查重时发生异常. Error: {err}")


def find_attachment_by_size(conn, name, content_size):
    """Find a recently-imported attachment with the same name and similar size.

    Searches attachments created within the last 5 days whose name equals
    ``name`` (newest document first). A row is a duplicate when its stored
    size is within 10000 bytes of ``content_size`` and its document carries
    exactly one office-type attachment.

    Bug fix: the original `break`-ed out of the loop on a confirmed match and
    fell through to ``return {}``, while returning the info dict when the
    attachment count did NOT equal 1 — the opposite of the intended behavior
    (and of find_attachment_by_md5). A confirmed match now returns its info
    dict; non-matches continue scanning.

    :param conn: open connection/session.
    :param name: attachment file name to match exactly.
    :param content_size: size in bytes of the candidate attachment.
    :return: info dict for the duplicate, or {} when none is found.
    """
    sql = "SELECT a.objid as attachmentid, a.name as attachmentname,a.contentsize,b.title,b.objid as docid,a.digest,c.brokerid,c.originaltitle " \
          f" FROM {table_attachment} a, {table_docgeneral} b, {table_docreport} c WHERE a.docid=b.objid AND a.docid=c.objid AND a.removetag='0' AND  a.createtimestamp>"
    # The "5 days ago" expression differs per database dialect.
    if db1_type == 1 or db1_type == 4 or db1_type == 5:
        sql += "to_char( sysdate-5,'yyyy-mm-dd hh24:mi:ss')"
    elif db1_type == 2:
        sql += "sysdate()-INTERVAL 5 DAY"
    elif db1_type == 3:
        sql += "to_char(now()-INTERVAL '5 DAY','yyyy-mm-dd hh24:mi:ss')"
    sql += " AND b.title IS NOT NULL AND b.removetag='0' AND  a.name=" + fit_sql('name')
    sql += " ORDER BY b.objid DESC"

    values = {'name': name}

    if db1_type == 4:
        # DM: inline the parameter instead of binding it.
        sql = sql.format(**values)
        values = {}
    results = IMPORT_DB.get_info(conn, sql, params=values)
    if not results:
        return {}
    for result in results:
        existed_attid = result[0] if result[0] else -1
        logging.info(f"existedAttId:{existed_attid}")
        existed_docid = result[4] if result[4] else -1
        logging.info(f"existedDocId::{existed_docid}")
        existed_digest = result[5] if result[5] else ''
        logging.info(f"existedDigest::{existed_digest}")
        existed_size = result[2] if result[2] else -1
        logging.info(f"existedSize:{existed_size}")
        attname = result[1] if result[1] else ''
        logging.info(f"attname:{attname}")
        doc_title = result[3] if result[3] else ''
        logging.info(f"doctitle:{doc_title}")
        existed_broker_id = result[6] if result[6] else -1
        logging.info(f"existedBrokerId:{existed_broker_id}")
        doc_original_title = result[7] if result[7] else ''
        logging.info(f"docoriginaltitle:{doc_original_title}")

        attachment_info = {
            'Id': existed_attid,
            'DocId': existed_docid,
            'BrokerId': existed_broker_id,
            'Name': attname,
            'Size': existed_size,
            'Digest': existed_digest,
            'DocTitle': doc_title,
            'DocOriginalTitle': doc_original_title,
        }

        if abs(content_size - existed_size) <= 10000:
            logging.info("exist the same attach")
            # Only treat it as a duplicate when the candidate document has
            # exactly one office-type attachment.
            _existDocAttachmentCount = find_attachment_count(conn, existed_docid)
            logging.info(
                f"查询到相似附件 docid:{existed_docid}\t附件数量:{_existDocAttachmentCount}")
            if _existDocAttachmentCount == 1:
                logging.info("附件名匹配、附件大小匹配且附件数为1")
                logging.info(f"存在重复附件 attachmentId:{existed_attid}\tAttachment Name:{attname}")
                return attachment_info  # fixed: was `break` followed by `return {}`
            logging.info("附件数量不匹配(not 1 attachment)")

    return {}


def find_attachment_count(conn, docid):
    """Count live office-format attachments (pdf/xls/doc/ppt and x-variants)
    belonging to ``docid``; returns None when the query yields no rows."""
    count_sql = (
        f"SELECT COUNT(*) FROM {table_attachment} a WHERE a.removetag='0' AND a.docid={docid} AND (lower(name) LIKE '%.pdf' OR lower(name) LIKE '%.xls' OR lower(name) LIKE '%.doc' OR lower(name) LIKE '%.ppt' "
        "OR lower(name) LIKE '%.xlsx' OR lower(name) LIKE '%.docx' OR lower(name) LIKE '%.pptx')"
    )
    rows = IMPORT_DB.get_info(conn, count_sql)
    return rows[0][0] if rows else None


def update_rawmaillog_xinfo(conn, rawmaillog_id, status):
    """Best-effort update of the raw-mail log's XINFO status column.

    Any exception is logged and swallowed so the caller's flow continues.
    """
    try:
        statement = (
            f"UPDATE {table_km_rawmaillog} SET XINFO={status} "
            f"WHERE ID={rawmaillog_id}"
        )
        IMPORT_DB.update_info(conn, statement)
    except Exception as err:
        logging.error(err)

