# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql.cursors
from .items import MyDoubanMusicItem, MyItem
import settings

# Pipelines post-process the data carried by each item,
# typically to persist it to an external system (here: MySQL).
class MyspidersPipeline:
    """Persist scraped items into MySQL.

    ``MyDoubanMusicItem`` instances are inserted into the ``music_info``
    table; ``MyItem`` (equipment) instances are recognized but not yet
    persisted. One database connection is opened per spider run.
    """

    # Runs once when the spider starts. Opening the connection here
    # (instead of per item) avoids creating too many connections.
    def open_spider(self, spider):
        # NOTE(review): relies on the top-of-file ``import settings``
        # resolving to the project's settings module — confirm this works
        # when the project runs as a package (``from . import settings``
        # may be required).
        self.connection = pymysql.connect(host=settings.DB_HOST,
                                          user=settings.DB_USER,
                                          password=settings.DB_PASSWORD,
                                          database=settings.DB,
                                          # Explicit encoding: the table stores
                                          # Chinese text (utf8 charset in DDL).
                                          charset='utf8mb4',
                                          # Return rows as dicts, not tuples.
                                          cursorclass=pymysql.cursors.DictCursor)

        self.cursor = self.connection.cursor()

        # MySQL DDL for the target table:
        #
        # DROP TABLE IF EXISTS `music_info`;
        # CREATE TABLE `music_info` (
        #   `id` int(11) NOT NULL AUTO_INCREMENT,
        #   `name` varchar(255) DEFAULT NULL COMMENT 'music title',
        #   `author` varchar(255) DEFAULT NULL COMMENT 'artist',
        #   `date` varchar(255) DEFAULT NULL COMMENT 'release date',
        #   `category` varchar(255) DEFAULT NULL COMMENT 'category',
        #   `quality` varchar(255) DEFAULT NULL COMMENT 'quality',
        #   `style` varchar(255) DEFAULT NULL COMMENT 'style/genre',
        #   `rating_nums` varchar(500) DEFAULT NULL COMMENT 'rating',
        #   PRIMARY KEY (`id`)
        # ) ENGINE=InnoDB DEFAULT CHARSET=utf8 AUTO_INCREMENT=1;
        self.insert_sql = 'insert into music_info (name,author,date,category,quality,style,rating_nums) values (%s,%s,%s,%s,%s,%s,%s)'

    # Called once for every item yielded by the spider.
    def process_item(self, item, spider):
        # Dispatch on the concrete item type.
        if isinstance(item, MyDoubanMusicItem):
            values = (
                item['name'],
                item['author'],
                item['date'],
                item['category'],
                item['quality'],
                item['style'],
                item['rating_nums'],
            )
            # Use the spider's logger instead of print() so output goes
            # through Scrapy's logging configuration.
            spider.logger.debug('正在将数据写入MySQL %s', values)
            try:
                # Parameterized query — values are escaped by the driver.
                self.cursor.execute(self.insert_sql, values)
                # Commit per item so a mid-crawl crash does not discard
                # everything scraped so far (previously the only commit
                # was in close_spider).
                self.connection.commit()
            except pymysql.MySQLError:
                # Leave the transaction in a clean state, surface the
                # failure to Scrapy, and keep the offending row visible
                # in the log.
                self.connection.rollback()
                spider.logger.exception('Failed to insert row: %s', values)
                raise
        # Equipment items: recognized but not persisted yet.
        elif isinstance(item, MyItem):
            pass
        return item

    # Runs once when the spider finishes; release database resources.
    def close_spider(self, spider):
        try:
            # Flush anything still pending (a no-op with per-item commits,
            # kept for safety if batching is reintroduced).
            self.connection.commit()
        finally:
            # Always close, even if the final commit raises.
            self.cursor.close()
            self.connection.close()
