# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import redis
from twisted.enterprise import adbapi
from utils import common
import time
import settings


class MysqlTwistedPipline(object):
    """Scrapy item pipeline that inserts items into MySQL asynchronously
    via Twisted's adbapi thread pool, and tracks a per-task crawled
    counter in Redis.

    NOTE(review): the (misspelled) class name "Pipline" is kept as-is so
    existing ITEM_PIPELINES settings that reference it keep working.
    """

    def __init__(self):
        # DB/Redis connection parameters come from the project settings module.
        self.dbparms = settings.DB_CONN_DICT
        self.dbpool = adbapi.ConnectionPool("pymysql", **self.dbparms)
        self.redis_cli = redis.StrictRedis(**settings.REDIS_CONN)
        self.std_spiders = settings.STD_SPIDERS
        # Idle threshold (seconds) after which the pool is rebuilt, to avoid
        # stale connections ("MySQL server has gone away").
        self.max_idle_time = settings.DB_MAX_IDLE_TIME
        self.conn_last_use_time = time.time()

    def process_item(self, item, spider):
        """Schedule an asynchronous INSERT for `item` and return the item.

        BUG FIX: the original returned None, which handed None to every
        downstream pipeline stage; Scrapy pipelines must return the item.
        """
        # First check whether the MySQL connection pool has been idle too long.
        curr_time = time.time()
        idle_time = curr_time - self.conn_last_use_time
        if idle_time > self.max_idle_time:
            # Reconnect silently: log the last-use time and rebuild the pool.
            time_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.conn_last_use_time))
            spider.log.logger.info("MySQL conn_last_use_time:{0}".format(time_str))
            log_info = "MySQL idle_time:{0}s > max_idle_time:{1}s, reconnect silently...".format(idle_time, self.max_idle_time)
            spider.log.logger.warning(log_info)
            # BUG FIX: close the stale pool before replacing it so its worker
            # threads and sockets are released (the original leaked the pool
            # on every reconnect). Best-effort: a failed close must not block
            # the reconnect.
            try:
                self.dbpool.close()
            except Exception as exc:
                spider.log.logger.warning("Error closing stale MySQL pool: {0}".format(exc))
            self.dbpool = adbapi.ConnectionPool("pymysql", **self.dbparms)
        # Always record the last time the pool was used.
        self.conn_last_use_time = curr_time
        # Run the insert asynchronously on Twisted's DB thread pool.
        query = self.dbpool.runInteraction(self.do_insert, item, spider)
        query.addErrback(self.handle_error, item, spider)  # log async failures
        return item

    def handle_error(self, failure, item, spider):
        # Log failures raised by the asynchronous insert.
        spider.log.logger.error(failure)

    def do_insert(self, cursor, item, spider):
        """Build the item-specific INSERT statement and execute it.

        Runs inside the adbapi interaction (worker thread); `cursor` is the
        DB-API cursor supplied by runInteraction.
        """
        # Spiders listed in STD_SPIDERS always insert; others skip once the
        # task has already been marked done.
        if not spider.is_task_done(item['crawler_task_id']) or spider.name in self.std_spiders:
            insert_sql, params = item.get_insert_sql()
            spider.log.logger.debug(insert_sql)
            spider.log.logger.debug(params)
            cursor.execute(insert_sql, params)

            # Increment the per-task crawled counter in Redis.
            # NOTE(review): assumes params[0] is the crawler task id — confirm
            # against each item's get_insert_sql() implementation.
            task_id = params[0]
            self.redis_cli.incr("task_id:{0}:crawled_cnt".format(task_id))
        else:
            spider.set_task_done(item['crawler_task_id'])