# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html


from twisted.enterprise import adbapi
from . import settings
import logging
import pymysql
import copy

class ScrapyDoubanPipeline(object):
    """Scrapy item pipeline that persists crawled top-list items to MySQL.

    Inserts run through twisted's adbapi connection pool, so the database
    work happens asynchronously and does not block the crawler's reactor.
    """

    def __init__(self):
        # MySQL connection parameters, taken from the project settings module.
        dbparms = dict(
            host=settings.MYSQL_HOST,
            db=settings.MYSQL_DB,
            user=settings.MYSQL_USER,
            passwd=settings.MYSQL_PASSWORD,
            charset=settings.MYSQL_CHARSET,
            cursorclass=pymysql.cursors.DictCursor,  # rows come back as dicts
            use_unicode=True,
        )
        # Open the asynchronous database connection pool.
        self.db_pool = adbapi.ConnectionPool("pymysql", **dbparms)
        # Named module logger instead of logging on the root logger.
        self.logger = logging.getLogger(__name__)

    def process_item(self, item, spider):
        """Schedule an asynchronous insert of *item* and return it unchanged.

        A deep copy is taken because the pool executes the insert later on a
        worker thread; without the copy, a subsequently yielded item could
        overwrite the shared item object before the insert runs.
        """
        asyn_item = copy.deepcopy(item)
        query = self.db_pool.runInteraction(self.do_insert_line, asyn_item, spider)
        query.addErrback(self.on_error, spider)
        return item

    def do_insert_line(self, cursor, item, spider):
        """Insert one row into ``top``, skipping ids that already exist.

        Runs inside a pool-managed transaction; *cursor* is a DictCursor.
        """
        # Alias the aggregate (AS cnt) so the result key is stable instead of
        # depending on the driver echoing the raw "count(t.id)" expression.
        select_sql = """SELECT count(t.id) AS cnt FROM top t WHERE t.id=%s"""
        # Query parameters must be a sequence: (x,) — a bare (x) is just x.
        cursor.execute(select_sql, (item["id"],))
        result = cursor.fetchone()
        if int(result["cnt"]) == 0:
            insert_sql = """INSERT INTO top(`id`, `name`, `score`, `num`, `synopsis`, `ranking`) VALUES (%s,%s,%s,%s,%s,%s)"""
            cursor.execute(insert_sql, (item["id"], item["name"], item["score"], item["num"], item["synopsis"], item["ranking"]))
        else:
            # Duplicate id: log and skip (lazy %s formatting, message unchanged).
            self.logger.debug("此条信息已经存在：%s", item["id"])

    def on_error(self, failure, spider):
        """Log a failed asynchronous insert; errors are deliberately swallowed."""
        self.logger.error(failure)

    def close_spider(self, spider):
        """Release the connection pool when the spider shuts down."""
        self.logger.debug("爬虫关闭：%s", spider.name)
        self.db_pool.close()
