from scrapy import signals
from crawler.monitor import IdleLogRecorder
from crawler.connection import redis_conn
from scrapy.exceptions import NotConfigured


class ContainerIdle:
    """Scrapy extension that records periodic container liveness signals.

    Hooks spider lifecycle signals to write "I'm alive" records via
    :class:`IdleLogRecorder`, and cleans up the container's monitor entry in
    Redis when the spider closes. Enabled only when the
    ``CONTAINER_IDLE_INTERVAL`` setting is set to a non-zero value.
    """

    def __init__(self, stats, interval=60.0, project_type='victor'):
        """
        :param stats: the crawler's stats collector.
        :param interval: logging interval in seconds (kept for interface
            compatibility; see note on ``self.task`` below).
        :param project_type: project label appended to the spider name when
            building monitor keys.
        """
        self.stats = stats
        self.interval = interval
        # NOTE(review): `task` is never started anywhere in this block; the
        # stop() call in spider_closed is defensive leftover — confirm whether
        # a periodic LoopingCall was intended.
        self.task = None
        self.idle_times = 0
        # Per-instance configuration — previously this was written onto the
        # class from from_crawler, leaking state across instances.
        self.project_type = project_type
        self.idle_logger = IdleLogRecorder()

    @classmethod
    def from_crawler(cls, crawler):
        """Build the extension from crawler settings and wire up signals.

        :raises NotConfigured: when ``CONTAINER_IDLE_INTERVAL`` is unset/zero,
            disabling the extension with no side effects.
        """
        interval = crawler.settings.getfloat('CONTAINER_IDLE_INTERVAL')
        if not interval:
            raise NotConfigured
        # Read PROJECT_TYPE only after the extension is known to be enabled.
        project_type = crawler.settings.get('PROJECT_TYPE', 'victor')
        o = cls(crawler.stats, interval, project_type)
        crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
        crawler.signals.connect(o.spider_idle, signal=signals.spider_idle)
        return o

    def spider_opened(self, spider):
        """Record an activity signal when the spider starts."""
        self.idle_logger.record_idle_log(spider, self.project_type, step="spider_open")

    def spider_idle(self, spider):
        """Record a supplementary activity signal while idle, rate-limited.

        spider_idle fires very frequently, so only every 50th occurrence is
        logged. ``>=`` (rather than ``==``) guarantees the counter cannot
        silently skip the threshold and count up forever.
        """
        self.idle_times += 1
        if self.idle_times >= 50:
            self.idle_logger.record_idle_log(spider, self.project_type, step="spider_idle")
            self.idle_times = 0

    def spider_closed(self, spider, reason):
        """Remove this container's monitor entry from Redis on shutdown.

        :param reason: close reason supplied by Scrapy (unused, required by
            the signal signature).
        """
        spider_name = spider.name + '_' + self.project_type
        # Refresh the recorder's hostname before building the key.
        self.idle_logger.get_hostname()
        monitor_key = spider_name + '|' + self.idle_logger.hostname
        redis_conn.hdel('crawler_container_monitor', monitor_key)
        if self.task and self.task.running:
            self.task.stop()