# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
from pymongo import MongoClient
import pymongo
class TutorialPipeline(object):
    """Pass-through item pipeline (MongoDB persistence not yet enabled).

    Earlier revisions sketched a MongoDB sink: ``from_crawler`` reading
    ``MONGO_DB_URI`` / ``MONGO_DB_PORT`` / ``MONGO_DB_NAME`` from settings,
    ``open_spider``/``close_spider`` managing a ``pymongo.MongoClient``, and
    an upsert keyed on the item title in ``process_item``.  That code was
    commented out; until it is reinstated, items flow through unchanged.
    """

    def process_item(self, item, spider):
        """Return *item* unchanged so downstream pipelines/exporters see it.

        Scrapy requires ``process_item`` to return the item (or raise
        ``DropItem``); returning nothing would silently discard every
        scraped item.

        :param item: the scraped item (dict-like).
        :param spider: the spider that produced the item (unused here).
        :returns: the same item, unmodified.
        """
        # Removed: a debug print and an unused `post = dict(item)` left over
        # from the abandoned MongoDB insert experiments.
        return item



