# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import pymysql
from itemadapter import ItemAdapter


class TaobaoSpiderPipeline:
    """Scrapy item pipeline that buffers scraped Taobao items and bulk-inserts
    them into a MySQL ``taobao`` table.

    Items are accumulated in memory and flushed every ``BATCH_SIZE`` items;
    any remainder is flushed when the spider closes.
    """

    # Number of items to buffer before each bulk INSERT.
    BATCH_SIZE = 5

    def open_spider(self, spider):
        """Open the MySQL connection and reset the item buffer."""
        # NOTE(review): credentials are hard-coded — consider moving them to
        # Scrapy settings (e.g. MYSQL_HOST / MYSQL_PASSWORD) instead.
        self.conn = pymysql.connect(
            host='localhost',
            port=3306,
            user='root',
            password='111111',
            database='spiderdemo',
            # NOTE(review): MySQL 'utf8' is the 3-byte subset; if titles may
            # contain emoji, 'utf8mb4' is needed — confirm the table charset.
            charset='utf8'
        )
        self.cursor = self.conn.cursor()
        self.data = []  # buffered rows: (title, price, shop_name, pay_people)

    def process_item(self, item, spider):
        """Buffer one item; flush to the database once the batch is full.

        Returns the item unchanged so later pipelines still receive it.
        """
        title = item.get('title', '')
        price = item.get('price', '')
        shop_name = item.get('shop_name', '')
        pay_people = item.get('pay_people', '')
        print('item里面的', item)
        self.data.append(
            (title, price, shop_name, pay_people))
        # '>=' rather than '==' so the buffer can never silently grow past
        # the batch size and then stop flushing forever.
        if len(self.data) >= self.BATCH_SIZE:
            self._write_db()
            self.data.clear()
        return item

    def close_spider(self, spider):
        """Flush any remaining buffered items, then release DB resources."""
        try:
            if self.data:
                self._write_db()
            print('数据库已经写入完成')
            print('爬虫已结束：', spider.name)
        finally:
            # Always close the cursor and connection, even if the final
            # flush raises, so the connection is never leaked.
            self.cursor.close()
            self.conn.close()

    def _write_db(self):
        """Bulk-insert the buffered rows into the ``taobao`` table."""
        # Parameterized executemany — values are bound by the driver,
        # never interpolated into the SQL string.
        sql = 'INSERT INTO taobao(`title`,`price`,`shop_name`,`pay_people`) ' \
              'values (%s,%s,%s,%s)'
        self.cursor.executemany(sql, self.data)
        print('正在写入数据库')
        self.conn.commit()
