# import json
# from config import config
# from scrapy.exceptions import DropItem
# from crawler.utils.functools import md5
# from crawler.connection.mongo_connection import ConnectMongo
#
#
# class DetectionPipeline:
#     def __init__(self):
#         self.mongo_conn = ConnectMongo(host=config.MONGO_HOST, port=config.MONGO_PORT, db='detection',
#                                        collection="dcd_detection")
#
#     def process_item(self, dcd_item, spider):
#         if dcd_item.get('receiveTime'):  # store into the error-records DB
#             try:
#                 dcd_item['_id'] = md5(dcd_item['url'] + str(dcd_item['receiveTime']))
#                 self.mongo_conn.insert_data(dict(dcd_item))
#                 spider.logger.info('Publish Insert Detection MongoDB : {}'.format(json.dumps(dict(dcd_item))))
#             except Exception as e:
#                 spider.logger.error(e)
#                 spider.logger.info('Publish Duplicate Detection MongoDB : {}'.format(json.dumps(dict(dcd_item))))
#             raise DropItem  # do not pass the item on to the next pipeline
#         else:
#             return dcd_item  # store into the normal DB
