# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html


import json

class ItcastJsonPipeline(object):
    """Scrapy item pipeline that appends each item as one JSON line to itcast.json."""

    def open_spider(self, spider):
        """Called once when the spider starts (unlike __init__, which runs
        once when the pipeline object is created)."""
        # Open the file once for the whole crawl; opening/closing it per item
        # in process_item() would be wasteful.
        # encoding="utf-8" makes the output independent of the platform's
        # default encoding, so non-ASCII (e.g. Chinese) text always writes cleanly.
        self.f = open("itcast.json", "w", encoding="utf-8")

    def process_item(self, item, spider):
        """Called once for every item the engine passes in.

        The ``spider`` argument identifies which spider produced the item,
        so one pipeline can distinguish requests from multiple spiders.
        Returns the item unchanged so the engine hands it on to the next
        pipeline (and it still shows up in the console log).
        """
        # dict(item) converts the scrapy Item object to a plain dict;
        # ensure_ascii=False keeps non-ASCII characters human-readable
        # instead of \uXXXX escapes.
        json_str = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.f.write(json_str)
        return item

    def close_spider(self, spider):
        """Called once when the spider closes (unlike __del__, which runs
        only when the object is destroyed)."""
        self.f.close()


# The methods above execute in the following order:

# pipeline = ItcastJsonPipeline()

# pipeline.open_spider(spider)

# pipeline.process_item(item, spider)
# pipeline.process_item(item, spider)
# pipeline.process_item(item, spider)
# pipeline.process_item(item, spider)
# pipeline.process_item(item, spider)
# pipeline.process_item(item, spider)
# pipeline.process_item(item, spider)
# pipeline.process_item(item, spider)

# pipeline.close_spider(spider)