# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import hashlib
import json

import pymongo
from redis import StrictRedis
from scrapy.exceptions import DropItem


class CheckExist:
    """Deduplication pipeline: drops items whose MD5 fingerprint already exists in Redis.

    Each item is serialized to JSON, hashed, and the hash is stored as a Redis
    key (db=1) so duplicates are skipped both within and across crawl runs.
    """

    def open_spider(self, spider):
        # NOTE(review): connection parameters are hard-coded — consider reading
        # them from spider.settings like the Mongo pipeline does.
        self.redis_client = StrictRedis(host='192.168.80.137', port=6379, db=1, password='123456')
        print('-----------------------> redis服务开启！！！')

    def close_spider(self, spider):
        self.redis_client.close()
        print('-----------------------> redis服务关闭！！！')

    def process_item(self, item, spider):
        """Drop the item if its fingerprint is already in Redis, else record it.

        Raises:
            DropItem: when an identical item has been seen before.
        """
        # Serialize deterministically: Scrapy Item objects are not directly
        # JSON-serializable (dict(item) is), and without sort_keys the key
        # order — and therefore the MD5 fingerprint — would not be stable.
        item_json_str = json.dumps(dict(item), sort_keys=True).encode("utf-8")
        md5 = hashlib.md5()
        md5.update(item_json_str)
        hash_val = md5.hexdigest()

        # SET with nx=True is atomic: it stores the value only when the key is
        # absent and returns a falsy result when it already exists, avoiding
        # the get-then-set race of a separate existence check.
        if not self.redis_client.set(hash_val, item_json_str, nx=True):
            raise DropItem("该对象已存在redis...")
        return item


# class DownImg:
#     def process_item(self, item, spider):
#         pass


class FoodPipeline:
    """Pass-through pipeline: hands every item on unchanged."""

    def process_item(self, item, spider):
        # Debug dump kept for reference:
        # print(json.dumps(item, indent=2))
        return item

class MongoDdPipeline:
    """Buffers scraped items in memory and bulk-inserts them into MongoDB
    when the spider closes.

    Connection parameters are read from the spider settings:
    MONGODB_HOST, MONGODB_PORT, MONGODB_NAME, MONGODB_DOCNAME.
    """

    def open_spider(self, spider):
        host = spider.settings['MONGODB_HOST']
        port = spider.settings['MONGODB_PORT']
        db_name = spider.settings['MONGODB_NAME']
        self.client = pymongo.MongoClient(host=host, port=port)
        self.db = self.client[db_name]
        self.collection = self.db[spider.settings['MONGODB_DOCNAME']]
        # In-memory buffer; flushed in a single bulk insert on close.
        self.item_list = []

    def process_item(self, item, spider):
        # Copy into a plain dict so later mutation of the item object cannot
        # affect the buffered document.
        self.item_list.append(dict(item))
        return item

    def close_spider(self, spider):
        # insert_many raises InvalidOperation on an empty document list, so
        # only insert when the spider actually yielded items; the client is
        # closed either way.
        if self.item_list:
            self.collection.insert_many(self.item_list)
        print('{}条数据已存入数据库'.format(len(self.item_list)))
        self.client.close()
        print('数据库已关闭')
