# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


import json
import os

# useful for handling different item types with a single interface
from itemadapter import ItemAdapter

#如果想使用管道的话，需要先在setting.py中配置
# To use a pipeline, it must first be enabled in settings.py (ITEM_PIPELINES).
class ScrapyDangdangDomePipeline:
    """Write every scraped product to product.json, one JSON object per line.

    The file is opened once when the spider starts and closed when it
    finishes, so each item only costs a single write() call.
    """

    def open_spider(self, spider):
        # Open the output file once per crawl; opening it per item
        # (the commented-out pattern below) would be far too frequent.
        self.f = open("product.json", "w", encoding="utf-8")

    # `item` is the product object handed over by the spider.
    def process_item(self, item, spider):
        # str(item) would dump a Python repr, which is NOT valid JSON even
        # though the file is named product.json — serialize as real JSON
        # instead (JSON Lines: one object per line, Unicode kept readable).
        self.f.write(json.dumps(dict(item), ensure_ascii=False) + "\n")
        return item

    def close_spider(self, spider):
        # Close the file when the crawl ends.
        self.f.close()
import urllib.request
#多条管道同时开启
#1.定义管道类
#2.在settings中开启管道 "scrapy_dangdang_Dome.pipelines.productDownloadPipeline":301

class productDownloadPipeline:
    """Second pipeline: download each item's cover image into ./products/.

    Enable it in settings.py alongside the JSON pipeline, e.g.
    "scrapy_dangdang_Dome.pipelines.productDownloadPipeline": 301
    """

    # Characters removed from the product name to build a safe filename
    # (spaces, CJK brackets 【】, and the colon).
    _STRIP_CHARS = str.maketrans("", "", " 【】:")

    def process_item(self, item, spider):
        src = item.get('src')
        name = item.get('name')
        # A missing field would otherwise raise TypeError ('https:' + None /
        # None.replace); skip the download but keep the item in the pipeline.
        if not src or not name:
            print('跳过下载，缺少字段', item)
            return item
        # Page markup gives protocol-relative URLs ('//...'), so prefix scheme.
        url = 'https:' + src
        # One C-level translate pass instead of four chained .replace calls.
        safe_name = name.translate(self._STRIP_CHARS)
        print(safe_name)
        # Make sure the target directory exists instead of crashing with
        # FileNotFoundError on the first download.
        os.makedirs('./products', exist_ok=True)
        filename = './products/' + safe_name + '.jpg'
        try:
            urllib.request.urlretrieve(url=url, filename=filename)
            print('下载成功', filename)
        except OSError as e:
            # urllib.error.URLError subclasses OSError; one bad image
            # should not kill the whole crawl — log and continue.
            print('下载失败', url, e)
        return item