# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import hashlib
import json

import pymongo
import redis
# useful for handling different item types with a single interface
from scrapy.exceptions import DropItem


class WinShangPipeline:
    """Default no-op pipeline: passes every item through unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform here; hand the item to the next pipeline stage.
        return item


class WinShangCheckPipeline:
    """Deduplicate scraped items with a Redis set.

    Each item is serialized to JSON (sorted keys, so field order cannot
    change the fingerprint), hashed with MD5, and SADDed into the
    ``win_shop_filter`` set.  A duplicate fingerprint drops the item;
    anything else is passed along unchanged.
    """

    def __init__(self):
        # Created lazily in open_spider; only used for the 'WinShop' spider.
        self.redis_client = None

    def open_spider(self, spider):
        # Connect only for the target spider so other spiders pay no cost.
        if spider.name == 'WinShop':
            self.redis_client = redis.Redis(host='127.0.0.1', port=6379, db=1)

    def process_item(self, item, spider):
        """Drop duplicates; always return non-duplicate items.

        Raises:
            DropItem: when the item's fingerprint already exists in Redis.
        """
        if spider.name == 'WinShop':
            # dict(item) handles both plain dicts and scrapy Item objects
            # (a raw Item is not JSON-serializable); sort_keys makes the
            # MD5 fingerprint independent of key insertion order.
            serialized = json.dumps(dict(item), ensure_ascii=False, sort_keys=True)
            hash_value = hashlib.md5(serialized.encode()).hexdigest()
            # sadd returns 0 when the member was already present -> duplicate.
            if self.redis_client.sadd("win_shop_filter", hash_value) == 0:
                raise DropItem('数据已存在...')
        # Bug fix: the original returned None for other spiders, which would
        # feed None into every later pipeline; a pipeline must always return
        # the item (or raise DropItem).
        return item

    def close_spider(self, spider):
        if spider.name == 'WinShop':
            self.redis_client.close()


class MongoPipeline:
    """Persist items from the 'WinShop' spider into MongoDB
    (database ``py_spider``, collection ``win_shop_info``).
    """

    def __init__(self):
        # Both are created in open_spider and only for the 'WinShop' spider.
        self.collection = None
        self.mongo_client = None

    def open_spider(self, spider):
        if spider.name == 'WinShop':
            self.mongo_client = pymongo.MongoClient(host='127.0.0.1', port=27017)
            self.collection = self.mongo_client['py_spider']['win_shop_info']

    def process_item(self, item, spider):
        """Insert the item and always return it for downstream pipelines."""
        if spider.name == 'WinShop':
            # Insert a copy: insert_one mutates its argument by injecting
            # '_id', which would otherwise leak into later pipelines.
            self.collection.insert_one(dict(item))
            print('数据保存成功:', item)
        # Bug fix: the original returned None for other spiders; a pipeline
        # must always return the item (or raise DropItem).
        return item

    def close_spider(self, spider):
        if spider.name == 'WinShop':
            self.mongo_client.close()
