# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import shortuuid
from twisted.enterprise import adbapi


class MySQLAsyncPipeline(object):
    """Scrapy item pipeline that inserts scraped items into MySQL asynchronously.

    Uses Twisted's ``adbapi.ConnectionPool`` so inserts run in a thread pool
    and never block the crawler's reactor loop. Remember to register this
    class in the ITEM_PIPELINES setting.
    """

    def open_spider(self, spider):
        """Create the DB connection pool when the spider starts.

        Connection parameters come from the spider settings, with defaults
        that target the local/dev environment.
        """
        db = spider.settings.get("MYSQL_DB_NAME", "htcloud_dev")
        host = spider.settings.get("MYSQL_HOST", "hts-mysql")
        port = spider.settings.get("MYSQL_PORT", 33060)
        user = spider.settings.get("MYSQL_USER", "root")
        passwd = spider.settings.get("MYSQL_PASSWORD", "root@2020")
        # "pymysql" is the DB-API driver module name (MySQLdb would also work).
        self.dbpool = adbapi.ConnectionPool("pymysql", host=host, db=db, user=user, passwd=passwd, port=port,
                                            charset="utf8")

    def process_item(self, item, spider):
        """Schedule an asynchronous insert for *item* and pass the item on."""
        query = self.dbpool.runInteraction(self.insert_record, item)
        # BUG FIX: the original used addCallback, which attached
        # handle_error to the Deferred's *success* path; real database
        # failures were left unhandled ("Unhandled error in Deferred").
        # addErrback routes failures to the handler as intended.
        query.addErrback(self.handle_error)
        return item

    def insert_record(self, tx, item):
        """Insert one item row; runs inside a pool worker thread.

        :param tx: adbapi transaction object (cursor-like, has ``execute``).
        :param item: scraped item providing asin / ranking / type /
            ranking_val / keyword / platform fields.
        """
        values = (
            item["asin"],
            item["ranking"],
            item["type"],
            item["ranking_val"],
            item["keyword"],
            item["platform"],
        )
        # id is generated server-side: '1' prefixed to a random integer.
        # Values are bound via %s placeholders (parameterized, not string-built).
        sql = """
            INSERT INTO t_ad_top_detail(`id`, `asin`, `ad_top`, `ad_type`, `week_top`, `keywords`, `store_code`, `update_time`) 
            VALUES (CONCAT('1', CEILING(RAND() * 100000000000000000)), %s, %s, %s, %s, %s, %s, now());
        """
        tx.execute(sql, values)

    def handle_error(self, failure):
        """Errback: report a failed database interaction."""
        if failure:
            print("处理异常: ", failure)

    def close_spider(self, spider):
        """Release all pooled connections when the spider closes."""
        self.dbpool.close()
