#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/5/8 09:22
# @Author  : 王凯
# @File    : crawlall.py
# @Project : scrapy_spider

import concurrent.futures

from scrapy.commands import ScrapyCommand
from scrapy.crawler import logger

from components.config import WFQ_DEV_REDIS_CONFIG
from utils.db.redisdb import RedisDB


def run_spider():
    """Drain the ``<NEWSPIDER_MODULE>:spider_all`` Redis set and run every
    spider found there inside a single :class:`CrawlerProcess`.

    Bug fixed: the previous version called ``process.start()`` inside the
    ``while`` loop.  Twisted's reactor cannot be restarted, so the second
    iteration raised ``ReactorNotRestartable``.  All crawls are now scheduled
    first and the reactor is started exactly once at the end.

    NOTE(review): this assumes ``RedisDB.sget`` *pops* members from the set
    (otherwise the drain loop would never terminate) — TODO confirm against
    utils.db.redisdb.
    """
    # Imported lazily so merely importing this module does not load the
    # Scrapy project settings (run_spider may be dispatched to a worker).
    from scrapy.crawler import CrawlerProcess
    from scrapy.utils.project import get_project_settings

    process = CrawlerProcess(get_project_settings())
    redis_db = RedisDB(
        ip_ports=WFQ_DEV_REDIS_CONFIG['REDISDB_IP_PORTS'],
        db=WFQ_DEV_REDIS_CONFIG['REDISDB_DB'],
        user_pass=WFQ_DEV_REDIS_CONFIG['REDISDB_USER_PASS'],
    )
    redis_key = process.settings.get("NEWSPIDER_MODULE") + ":spider_all"

    # Loop-invariant settings: stop a spider after one error, silence the
    # periodic LogStats output, and disable the Telnet extension.
    process.settings.set('CLOSESPIDER_ERRORCOUNT', 1)
    process.settings.set('LOGSTATS_INTERVAL', None)
    process.settings.set('EXTENSIONS', {'scrapy.extensions.telnet.TelnetConsole': None})

    scheduled_any = False
    while True:
        one_spider_list = redis_db.sget(redis_key)
        if not one_spider_list:
            logger.info("redis key: {} is empty".format(redis_key))
            break
        # Schedule the crawl; the reactor is not running yet, so crawl()
        # only queues the spider.
        process.crawl(one_spider_list[0])
        scheduled_any = True

    if scheduled_any:
        # Start the reactor once; it blocks until every queued crawl is done.
        process.start()


class Command(ScrapyCommand):
    """Custom Scrapy command ``crawlall``.

    Registers every spider of the project in a Redis set (so external
    workers can consume it) and then schedules all spiders on the shared
    crawler process.
    """

    requires_project = True

    def __init__(self):
        super().__init__()
        # Connection to the dev Redis instance that tracks the spider set.
        self.redis_db = RedisDB(
            ip_ports=WFQ_DEV_REDIS_CONFIG['REDISDB_IP_PORTS'],
            db=WFQ_DEV_REDIS_CONFIG['REDISDB_DB'],
            user_pass=WFQ_DEV_REDIS_CONFIG['REDISDB_USER_PASS'],
        )
        self.logger = logger

    def syntax(self):
        """Usage string shown by ``scrapy -h``."""
        return '[options] [spider ...]'

    def short_desc(self):
        """One-line description shown in the command list."""
        return 'Runs all of the spiders'

    def add_options(self, parser):
        """Extend the standard options with a batch-size flag."""
        super().add_options(parser)
        parser.add_argument("-b", "--batch-size", dest="batch_size", default=5, type=int, help="set batch size, default is 5")

    def run(self, args, opts):
        """Seed the Redis spider set if absent, then schedule every spider."""
        redis_key = self.settings.get("NEWSPIDER_MODULE") + ":spider_all"
        spider_list = self.crawler_process.spider_loader.list()

        if not self.redis_db.exists_key(redis_key):
            # First run: publish the full spider list to Redis.
            spider_num = len(spider_list)
            self.redis_db.sadd(redis_key, spider_list)
            self.logger.info("redis key: {} is add done and it has {} spiders".format(redis_key, spider_num))
        else:
            spider_num = self.redis_db.sget_count(redis_key)
            self.logger.info("redis key: {} exists and it has {} spiders".format(redis_key, spider_num))

        # Schedule every known spider, forwarding the parsed CLI options
        # as keyword arguments, then start the reactor once.
        for spider_name in spider_list:
            self.crawler_process.crawl(spider_name, **vars(opts))
        self.crawler_process.start()


if __name__ == '__main__':
    # Manual entry point: invoke this command exactly as the CLI would.
    from scrapy import cmdline

    command_line = "scrapy crawlall -b=10"
    cmdline.execute(command_line.split())
