# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import json

import scrapy
# useful for handling different item types with a single interface


class ScrapyDdwPipeline:
    """Item pipeline that appends each scraped item to ``book.json``.

    Writes one JSON object per line (JSON Lines), so the output stays
    parseable no matter how many items are written.
    """

    def open_spider(self, spider):
        # Called once when the spider starts: open the output file.
        # Explicit UTF-8 so non-ASCII titles are written correctly on
        # every platform regardless of the locale default.
        self.fp = open('book.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        # dict(item) is required: a scrapy.Item is not JSON-serializable
        # directly and json.dumps(item) would raise TypeError.
        # ensure_ascii=False keeps non-ASCII text readable; the trailing
        # newline separates records (plain concatenation of objects is
        # not valid JSON).
        self.fp.write(json.dumps(dict(item), ensure_ascii=False) + '\n')
        return item

    def close_spider(self, spider):
        # Called once when the spider finishes: release the file handle.
        self.fp.close()


import  urllib.request
class ScrapyDdwDownloadSpider:
    """Item pipeline that downloads each item's cover image to ./books.

    NOTE: despite the legacy "Spider" in the name, this is an item
    pipeline (it implements ``process_item`` and is registered via
    ``ITEM_PIPELINES``).  It must NOT inherit ``scrapy.Spider``:
    ``Spider.__init__`` raises ``ValueError`` when constructed without a
    ``name``, so Scrapy could never instantiate the pipeline.
    """

    def process_item(self, item, spider):
        # The scraped 'src' is protocol-relative (starts with //), so
        # prefix the scheme to get a fetchable URL.
        # NOTE(review): assumes item always carries a 'src' key — if it
        # can be missing, item.get('src') returns None and the string
        # concatenation below raises TypeError; confirm against the spider.
        url = "http:" + item.get('src')
        filename = "./books" + item.get('src') + ".jpg"
        # Synchronous download; the ./books directory must already exist
        # or urlretrieve raises FileNotFoundError.
        urllib.request.urlretrieve(url, filename)
        return item