# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import os
import csv
import pymysql
import logging


class CsvPipeline(object):
    """Write scraped book items to ``dangdang.csv`` one level above this file."""

    def __init__(self):
        # CSV file location (no need to create it beforehand).
        # os.path.join instead of string '+' so the path is built portably.
        store_file = os.path.join(
            os.path.dirname(os.path.dirname(__file__)), 'dangdang.csv')
        # utf_8_sig writes a BOM so Excel recognizes UTF-8; newline='' lets
        # the csv module manage line endings itself.
        self.file = open(store_file, 'w', encoding='utf_8_sig', newline='')
        # Header row — column order must match the tuple in process_item.
        self.csv_header = ['name', 'author', 'pub', 'pub_date', 'price', 'url', 'desc']
        self.writer = csv.writer(self.file)
        self.writer.writerow(self.csv_header)

    def process_item(self, item, spider):
        """Append one CSV row per item; items without a name are skipped.

        Uses .get() so an unset field becomes an empty cell instead of
        raising KeyError (Scrapy Items raise KeyError for unset fields;
        this also matches MySQLPipeline.process_item).
        """
        if item.get('name') is not None:
            self.writer.writerow((
                item.get('name'), item.get('author'), item.get('pub'),
                item.get('pub_date'), item.get('price'), item.get('url'),
                item.get('desc'),
            ))
        return item

    def close_spider(self, spider):
        """Close the CSV file when the spider finishes (flushes buffers)."""
        self.file.close()


class MySQLPipeline(object):
    """Buffer scraped book items in memory and bulk-insert them into MySQL
    when the spider closes."""

    def __init__(self):
        # Rows are buffered here and written with a single executemany().
        self.book_list = list()
        # Connection settings were placed on the class by from_crawler().
        db_params = {
            'host': self.DB_URI,
            'port': self.DB_PORT,
            'database': self.DB_NAME,
            'user': self.DB_USER,
            'password': self.DB_PASSWORD,
            'charset': self.DB_CHARSET
        }
        self.conn = pymysql.connect(**db_params)
        self.cursor = self.conn.cursor()
        self._sql = None

    # Pull the configuration from Scrapy settings.
    @classmethod
    def from_crawler(cls, crawler):
        cls.DB_URI = crawler.settings.get('MYSQL_DB_URI', 'localhost')
        cls.DB_USER = crawler.settings.get('MYSQL_DB_USER')
        cls.DB_PASSWORD = crawler.settings.get('MYSQL_DB_PASSWORD')
        cls.DB_NAME = crawler.settings.get('MYSQL_DB_NAME')
        cls.DB_CHARSET = crawler.settings.get('MYSQL_DB_CHARSET', 'utf8')
        cls.DB_PORT = crawler.settings.get('MYSQL_DB_PORT', 3306)
        return cls()

    def process_item(self, item, spider):
        """Buffer one item as a tuple; items without a name are skipped.

        Tuple order must match the column list in the INSERT statement
        built by the ``sql`` property.
        """
        if item.get('name') is not None:
            item_tup = (item.get('name'), item.get('desc'), item.get('author'),
                        item.get('pub'), item.get('pub_date'), item.get('price'),
                        item.get('url'))
            self.book_list.append(item_tup)
        return item

    @property
    def sql(self):
        # Build the INSERT template once and cache it on the instance.
        if not self._sql:
            self._sql = """
                insert into book_info(`name`,`desc`,author,pub,pub_date,price,url) values (%s,%s,%s,%s,%s,%s,%s)
                """
        return self._sql

    def close_spider(self, spider):
        """
        Bulk-insert the buffered rows when the spider closes, then always
        release the cursor and connection (the original leaked both).
        :param spider: the closing spider (unused)
        :return: None
        """
        try:
            row_count = 0
            # Skip the round-trip entirely when nothing was scraped.
            if self.book_list:
                row_count = self.cursor.executemany(self.sql, self.book_list)
                self.conn.commit()
            logging.info('###################################')
            logging.info('成功插入 {} 条记录! '.format(row_count))
        finally:
            self.cursor.close()
            self.conn.close()


