# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
from scrapy.pipelines.images import ImagesPipeline
from scrapy import Request
from scrapy.exceptions import DropItem

class DangdangPipeline:
    """Default no-op pipeline: passes every item through unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform or filter — hand the item to the next pipeline.
        return item

class mysqlPipeline:
    """Pipeline that persists scraped book items into a MySQL table.

    Connection parameters are read from the Scrapy settings via
    ``from_crawler``; one row is inserted into ``dangdang`` per item.
    """

    def __init__(self, host, user, password, database, port):
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.port = port

    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor: build the pipeline from crawler settings."""
        return cls(
            host=crawler.settings.get("MYSQL_HOST"),
            # NOTE(review): "MYSQL_USE" is probably a typo for "MYSQL_USER",
            # but changing the key here would break the existing settings
            # file — confirm against settings.py before renaming.
            user=crawler.settings.get("MYSQL_USE"),
            password=crawler.settings.get("MYSQL_PASSWORD"),
            database=crawler.settings.get("MYSQL_DATABASE"),
            # getint: settings values may arrive as strings (e.g. from env),
            # but pymysql requires an int port.
            port=crawler.settings.getint("MYSQL_PORT"),
        )

    def open_spider(self, spider):
        # Open the connection once per spider run, not per item.
        self.db = pymysql.connect(
            host=self.host,
            user=self.user,
            password=self.password,
            database=self.database,
            port=self.port,
            charset="utf8",
        )
        self.cursor = self.db.cursor()

    def process_item(self, item, spider):
        """Insert one book record; returns the item for downstream pipelines.

        Uses a parameterized query — the previous str.format version was
        vulnerable to SQL injection and broke on values containing quotes.
        """
        sql = (
            "insert into dangdang(title,author,pic,publish,price) "
            "values(%s,%s,%s,%s,%s)"
        )
        self.cursor.execute(
            sql,
            (item["title"], item["author"], item["pic"], item["publish"], item["price"]),
        )
        self.db.commit()
        return item

    def close_spider(self, spider):
        # Release the connection when the spider finishes.
        self.db.close()

class ImagePipeline(ImagesPipeline):
    """Custom image-storage pipeline: downloads each item's cover picture."""

    def get_media_requests(self, item, info):
        # The scraped 'pic' field is a protocol-relative URL ("//...");
        # prepend the scheme and queue the download request.
        yield Request("https:" + item['pic'])

    def file_path(self, request, response=None, info=None, *, item=None):
        # Store each image under the last path segment of its URL.
        return request.url.split("/")[-1]

    def item_completed(self, results, item, info):
        # results is a list of (success, detail) pairs; keep successful paths.
        downloaded = [detail['path'] for ok, detail in results if ok]
        if not downloaded:
            # No image could be fetched — discard the whole item.
            raise DropItem("contain no images")
        item['image_paths'] = downloaded
        return item

