# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import openpyxl
import pymysql
from scrapy.crawler import Crawler


# Excel 表格数据管道
class ExcelPipeline:
    """Pipeline that collects scraped JD items into an Excel workbook.

    Rows are appended in memory as items arrive and the workbook is
    saved to disk once, when the spider closes.
    """

    def __init__(self):
        """Create the workbook, activate the default worksheet and write the header row."""
        self.wb = openpyxl.Workbook()
        self.ws = self.wb.active
        self.ws.title = 'JD_Data'
        # Header column order: price, title, comment count, shop name, product label.
        self.ws.append(('价格', '标题', '评价数', '店铺名称', '商品标签'))

    def close_spider(self, spider):
        """Scrapy hook invoked when the spider stops; persists the workbook to disk."""
        self.wb.save('京东商品数据.xlsx')

    def process_item(self, item, spider):
        """Append one item as a worksheet row and pass the item downstream.

        FIX: the original swapped the 'title' and 'price' keys
        (price = item.get('title'), title = item.get('price')), so the
        price column contained titles and vice versa, contradicting the
        header row written in __init__.
        """
        price = item.get('price', 'None')
        title = item.get('title', 'None')
        comment = item.get('comment', 'None')
        shop = item.get('shop', 'None')
        label = item.get('label', 'None')
        self.ws.append((price, title, comment, shop, label))
        return item


# 数据库管道
class DbPipeLine:
    """Pipeline that buffers scraped JD items and writes them to MySQL in batches.

    FIX: the original called _write_to_db() after every single item, which
    defeated the purpose of the self.data buffer (one executemany round-trip
    per row) and made the residual-flush branch in close_spider dead code.
    Rows are now flushed once BATCH_SIZE items accumulate; close_spider
    flushes whatever remains.
    """

    # Number of buffered rows that triggers a batch insert.
    BATCH_SIZE = 100

    @classmethod
    def from_crawler(cls, crawler: Crawler):
        """Alternate constructor: read connection parameters from Scrapy settings."""
        host = crawler.settings['DB_HOST']
        port = crawler.settings['DB_PORT']
        username = crawler.settings['DB_USER']
        password = crawler.settings['DB_PASS']
        database = crawler.settings['DB_NAME']
        return cls(host, port, username, password, database)

    def __init__(self, host, port, username, password, database):
        """Open the MySQL connection, create a cursor and an empty write buffer."""
        self.conn = pymysql.connect(host=host, port=port, user=username, password=password, database=database,
                                    charset='utf8mb4', autocommit=True)
        self.cursor = self.conn.cursor()
        self.data = []

    def close_spider(self, spider):
        """Scrapy hook invoked when the spider stops.

        Flushes any rows still in the buffer, then releases the cursor and
        connection even if the final flush raises.
        """
        try:
            if self.data:
                self._write_to_db()
        finally:
            self.cursor.close()
            self.conn.close()

    def _write_to_db(self):
        """Insert all buffered rows with one executemany call and clear the buffer."""
        self.cursor.executemany(
            'insert into `tb_jd_goods` '
            '(`title`, `price`, `comment`, `shop`, `label`) '
            'values (%s, %s, %s, %s, %s)',
            self.data
        )
        self.conn.commit()
        self.data.clear()

    def process_item(self, item, spider):
        """Buffer one item; flush to the database once BATCH_SIZE rows accumulate."""
        self.data.append((item['title'], item['price'], item['comment'], item['shop'], item['label']))
        if len(self.data) >= self.BATCH_SIZE:
            self._write_to_db()
        return item
