# -*- coding: utf-8 -*-
# @Time    : 2021/6/5 10:54
# @Author  : ZSQ
# @Email   : zsq199170918@163.com
# @FileName: extentions.py
# @Software: PyCharm
from scrapy import signals
from scrapy.exceptions import NotConfigured


class CustomRedisSpiderIdleClosedExtensions(object):
    """Scrapy extension that closes a RedisSpider once it has been idle too long.

    Scrapy fires the ``spider_idle`` signal roughly every 5 seconds while a
    spider has no pending requests, so the total allowed idle time is
    ``IDLE_NUM * 5`` seconds (default 360 * 5 = ~30 minutes without the
    spider's ``redis_key`` existing in Redis).
    """

    def __init__(self, idle_num, crawler):
        # idle_num: number of consecutive idle signals tolerated before closing.
        self.crawler = crawler
        self.idle_num = idle_num
        # Consecutive idle signals observed so far without the redis key.
        self.idle_count = 0

    @classmethod
    def from_crawler(cls, crawler):
        """Build the extension from *crawler* settings, or raise NotConfigured.

        Raises:
            NotConfigured: if ``EXT_ENABLED`` is falsy, or the spider class
                does not declare ``redis_key`` itself (i.e. it is not a
                scrapy-redis RedisSpider-style spider).
        """
        # First check if the extension should be enabled and raise
        # NotConfigured otherwise.
        if not crawler.settings.getbool('EXT_ENABLED'):
            raise NotConfigured
        # Deliberately inspect the class's own __dict__ (not hasattr) so only
        # spiders that declare redis_key on the class itself are supported.
        if 'redis_key' not in crawler.spidercls.__dict__:
            raise NotConfigured('Only supports RedisSpider')

        # Allowed number of idle signals; defaults to 360 (~30 minutes at
        # one idle signal every ~5 seconds).
        idle_num = crawler.settings.getint('IDLE_NUM', 360)

        # instantiate the extension object
        ext = cls(idle_num, crawler)

        # connect the extension object to signals
        crawler.signals.connect(ext.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
        crawler.signals.connect(ext.spider_idle, signal=signals.spider_idle)
        return ext

    def spider_opened(self, spider):
        # Log the configured idle budget (in seconds) when the spider starts.
        spider.logger.info("Opened spider {}, allow waiting time:{} seconds".format(spider.name, self.idle_num * 5))

    def spider_closed(self, spider):
        # Log how long the spider was allowed to wait before it was closed.
        spider.logger.info(
            "Closed spider {}, waiting time exceeded {} seconds".format(spider.name, self.idle_num * 5))

    def spider_idle(self, spider):
        """Count consecutive idle signals and close the spider past the limit.

        Scrapy calls this once on startup and then roughly every 5 seconds
        while the spider is idle. If ``spider.redis_key`` has been absent from
        Redis for more than ``idle_num`` consecutive checks (default: about
        half an hour), the spider is closed.
        """
        if not spider.server.exists(spider.redis_key):
            self.idle_count += 1
        else:
            # The key reappeared: new work is queued, reset the idle streak.
            self.idle_count = 0

        if self.idle_count > self.idle_num:
            # Shut the spider down with an explanatory close reason.
            self.crawler.engine.close_spider(spider, 'Waiting time exceeded')
