# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import json

# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from pymongo import MongoClient
import csv
import redis

class sczPipeline:
    """Producer pipeline: pushes JSON request descriptors onto a Redis list.

    Only active for the spider named '生产者' (producer); items from other
    spiders pass through untouched.
    """

    def open_spider(self, spider):
        if spider.name == '生产者':
            # Connect to the local Redis database.
            self.c = redis.Redis(host='localhost', port=6379, db=0)
            # Drop any leftover queue entries from a previous run.
            self.c.delete('scz')

    def process_item(self, item, spider):
        """Wrap *item* in a request descriptor and enqueue it as JSON.

        Returns the item unchanged so later pipelines still receive it.
        """
        if spider.name == '生产者':
            self.c.lpush('scz', json.dumps({
                'url': 'https://httpbin.org/get',
                # BUG FIX: Scrapy Item objects are not JSON serializable;
                # go through ItemAdapter so dict / Item / dataclass items
                # all serialize uniformly.
                'item': ItemAdapter(item).asdict(),
                'method': "POST",
                'meta': {},
            }, ensure_ascii=False))
        return item

    def close_spider(self, spider):
        # Release the Redis connection when the producer spider finishes.
        if spider.name == '生产者':
            self.c.close()


class csv11:
    """Pipeline that writes 'quotes' spider items to people.csv in batches.

    Rows are buffered and flushed to disk every BATCH_SIZE items; any
    remainder is flushed when the spider closes.
    """

    # Number of buffered rows before a flush to the CSV writer.
    BATCH_SIZE = 15

    def open_spider(self, spider):
        if spider.name == 'quotes':
            self.start_id = 100  # row ids start at 101 (incremented before use)
            self.filename = 'people.csv'
            self.file = open(self.filename, 'w', newline='', encoding='utf-8')
            self.writer = csv.writer(self.file)
            self.writer.writerow(["id", "author", "text", "tags"])
            self.items = []  # pending batch of rows

    def process_item(self, item, spider):
        """Buffer one item as a CSV row; flush when the batch is full."""
        if spider.name == 'quotes':
            self.start_id += 1
            self.items.append(
                [self.start_id, item['author'], item['text'], "|".join(item['tags'])]
            )
            # >= (not ==) so an oversized buffer can never get stuck unflushed.
            if len(self.items) >= self.BATCH_SIZE:
                self.writer.writerows(self.items)
                self.items.clear()
        return item

    def close_spider(self, spider):
        """Flush any remaining rows and close the file."""
        if spider.name == 'quotes':
            if self.items:
                self.writer.writerows(self.items)
                self.items.clear()
            # BUG FIX: close() was previously inside the `if self.items` branch,
            # so the file stayed open (and buffered data could be lost) whenever
            # the pending batch happened to be empty at shutdown.
            self.file.close()


class ScrapyzzfPipeline:
    """Pipeline that stores every item in the local MongoDB 'quotes' collection."""

    def open_spider(self, spider):
        # Connect to the local MongoDB instance; db name '管道' ("pipeline").
        self.client = MongoClient('localhost', 27017)
        self.db = self.client['管道']
        self.collection = self.db['quotes']

    def process_item(self, item, spider):
        """Insert the item into MongoDB and pass it on unchanged."""
        # BUG FIX: removed leftover debug print(item, '11111', spider.name).
        self.collection.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        # Release the MongoDB connection.
        self.client.close()