# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import json, logging
from itemadapter import ItemAdapter
import pymongo


class CrawlcenterspidersPipeline:
    """Item pipeline that counts processed items.

    MongoDB persistence is currently disabled; the commented lines below
    show the intended wiring and can be restored together with the write
    in ``process_item``.
    """

    # Disabled MongoDB wiring, kept for reference:
    # MONGODB_CONFIG = 'mongodb://192.168.1.6:27017/'
    # myclient = pymongo.MongoClient(MONGODB_CONFIG)
    # mydb = myclient["wow_spider_db"]
    # db = mydb["tb_product"]

    # Number of items seen; reset per run in open_spider().
    crawl_count = 0

    def open_spider(self, spider):
        # Bug fix: the original assigned a throwaway local (`count = 0`)
        # instead of resetting the counter, so a reused pipeline instance
        # carried counts over from a previous spider run.
        self.crawl_count = 0

    def process_item(self, item, spider):
        """Count the item and pass it through to the next pipeline stage."""
        # Disabled persistence (restore with the client setup above):
        # self.db.update_one({'_id': item['_id']}, {'$setOnInsert': item}, upsert=True)
        self.crawl_count += 1
        return item

    def close_spider(self, spider):
        # Nothing to release while the MongoDB client is disabled.
        # self.myclient.close()
        pass
