# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import pymongo
from itemadapter import ItemAdapter
from scrapycompositedemo.items import BookItem

class ScrapycompositedemoPipeline:
    """Default no-op pipeline: every item passes through unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform; hand the item to the next pipeline stage.
        return item


class MongoDBPipline(object):
    """Persist scraped items into a MongoDB collection via upsert.

    Connection parameters come from the crawler settings:
    ``MONGODB_CONNECTION_STRING``, ``MONGODB_DATABASE``,
    ``MONGODB_COLLECTION``.

    Note: the class name keeps its original spelling ("Pipline") because
    the ITEM_PIPELINES setting references it by that name.
    """

    # Item fields used to build the upsert filter (same 15 keys the
    # original hand-written filter dict listed).
    _FILTER_FIELDS = (
        "authors", "catalog", "cover", "id", "introduction", "isbn",
        "name", "page_number", "price", "published_at", "publisher",
        "score", "tags", "translators", "comments",
    )

    def __init__(self, connection_string, database, collection):
        # Store configuration on the instance, not on the class: the
        # original assigned these to ``cls`` in from_crawler, which is
        # shared mutable state and clobbers settings if more than one
        # pipeline instance exists in the same process.
        self.connection_string = connection_string
        self.database = database
        self.collection = collection

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the pipeline from crawler settings."""
        return cls(
            connection_string=crawler.settings.get("MONGODB_CONNECTION_STRING"),
            database=crawler.settings.get("MONGODB_DATABASE"),
            collection=crawler.settings.get("MONGODB_COLLECTION"),
        )

    def open_spider(self, spider):
        # One client per spider run; released in close_spider.
        self.client = pymongo.MongoClient(self.connection_string)
        self.db = self.client[self.database]

    def process_item(self, item, spider):
        """Upsert *item*, matching on the full set of item fields.

        NOTE(review): filtering on every field means a re-scraped item
        with ANY changed field inserts a new document instead of updating
        the existing one — consider matching on a unique key (e.g. ``id``
        or ``isbn``) instead; behavior kept as-is here.
        """
        query = {field: item[field] for field in self._FILTER_FIELDS}
        self.db[self.collection].update_one(
            query,
            {"$set": dict(item)},
            upsert=True,  # was a bare positional ``True`` — same semantics, explicit
        )
        return item

    def close_spider(self, spider):
        self.client.close()