# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import os
from urllib.parse import urlsplit

from itemadapter import ItemAdapter

class MyspiderPipeline:
    """Default item pipeline: passes every yielded item through unchanged.

    Enable it via the ITEM_PIPELINES setting. To persist items, add
    open_spider/close_spider for resource management (e.g. an output
    file) and extend process_item to write each item.
    """

    def process_item(self, item, spider):
        """Return the item unchanged so downstream pipelines receive it.

        Scrapy calls this once per item the spider yields; every enabled
        pipeline must implement it (omitting it raises at crawl time).

        :param item: the item (or dict) yielded by the spider
        :param spider: the spider that produced the item (unused here)
        :return: the same item, unmodified
        """
        return item
# Image download pipeline
from scrapy.pipelines.images import ImagesPipeline
from scrapy.exceptions import DropItem
import scrapy
class PicturePipeline(ImagesPipeline):
    """Downloads every URL in item["image_urls"] via Scrapy's image pipeline.

    Requires IMAGES_STORE to be configured in settings; downloaded files
    are saved under that directory using the name returned by file_path().
    """

    def get_media_requests(self, item, info):
        """Yield one download Request per URL in item["image_urls"].

        Assumes item["image_urls"] is an iterable of absolute URL strings
        (the conventional Scrapy image-pipeline field) — confirm against
        the spider that populates it.
        """
        for img_url in item["image_urls"]:
            print("管道获取的url为：", img_url)
            yield scrapy.Request(img_url)

    def file_path(self, request, response=None, info=None, *, item=None):
        """Return the storage path (relative to IMAGES_STORE) for one image.

        Uses only the URL's path component so that query strings and
        fragments (e.g. ".../a.jpg?token=x") do not leak into the
        filename; the plain split('/')[-1] approach would keep them.
        """
        img_name = os.path.basename(urlsplit(request.url).path)
        return img_name
    
    