# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import re
import pymongo
import json

class DoubanPipeline(object):
    """Scrapy item pipeline that persists scraped items into MongoDB.

    A document is inserted only when no document with the same ``c_name``
    already exists in the collection (simple de-duplication).
    """

    def open_spider(self, spider):
        """Open the MongoDB connection when the spider starts."""
        self.client = pymongo.MongoClient('mongodb://mongo:mongo123456@127.0.0.1:27017/')
        # Database "nongye" (equivalently: self.client['nongye']).
        db = self.client.nongye
        # Collection holding the scraped documents.
        self.mongo_collection_data = db.data
        print("init db")

    def process_item(self, item, spider):
        """Insert *item* unless a document with the same ``c_name`` exists.

        Returns the item unchanged so downstream pipelines still see it.
        """
        # Fix 1: Cursor.count() was deprecated in pymongo 3.7 and removed in
        # pymongo 4.x; Collection.count_documents() is the supported API.
        # Fix 2 (NOTE(review)): the original filtered on {'name': ...} while
        # the inserted documents carry the item's 'c_name' key, so the
        # duplicate check could never match — dedup now keys on 'c_name'
        # consistently. Confirm no spider stores a separate 'name' field.
        if self.mongo_collection_data.count_documents({'c_name': item['c_name']}) == 0:
            self.mongo_collection_data.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        """Release the MongoDB connection when the spider closes."""
        self.client.close()
        print("close db")