from settings import PROXIES_SPIDERS, RUN_SPIDERS_INTERVAL, MAX_WORKERS_OF_THREAD_POOL
import importlib
from core.proxy_validate.httpbin_validator import check_proxy
from core.db.dbeaver_pool import DbeaverPool
from utils.log import logger
import schedule
import time
from concurrent.futures import ThreadPoolExecutor


class RunSpider(object):
    """Run every configured proxy spider, validate the proxies they
    produce, and persist the usable ones to the database.

    Spiders are listed as dotted class paths in ``PROXIES_SPIDERS`` and
    executed concurrently in a thread pool; the whole batch is re-run
    every ``RUN_SPIDERS_INTERVAL`` hours by :meth:`start`.
    """

    def __init__(self):
        # Connection pool used to persist validated proxies.
        self.dbeaver_pool = DbeaverPool()

    def get_spiders_from_settings(self):
        """Yield one spider instance per dotted class path in PROXIES_SPIDERS.

        Example entry: 'core.proxy_spider.proxy_spiders.KuaiSpider'
        """
        for full_class_name in PROXIES_SPIDERS:
            # Split once from the right so the full module path stays intact.
            module_name, class_name = full_class_name.rsplit('.', maxsplit=1)
            # Import the module, look up the class, instantiate the spider.
            module = importlib.import_module(module_name)
            cls = getattr(module, class_name)
            yield cls()

    def __execute_one_spider_task(self, spider):
        """Validate every proxy yielded by *spider* and store the working ones.

        Runs as one task inside the thread pool (note: a thread pool, not a
        coroutine pool).  All exceptions are caught and logged so a single
        failing spider cannot abort the rest of the batch.
        """
        try:
            for proxy in spider.get_proxies():
                # Probe the proxy; check_proxy marks unusable ones with speed == -1.
                proxy = check_proxy(proxy)
                if proxy.speed != -1:
                    self.dbeaver_pool.insert_one(proxy)
        except Exception as e:
            logger.exception(e)

    def run(self):
        """Execute all configured spiders concurrently and wait for completion."""
        with ThreadPoolExecutor(max_workers=MAX_WORKERS_OF_THREAD_POOL) as pool:
            # map() accepts any iterable, so feed it the generator directly —
            # no need to materialize a list first.  Exiting the `with` block
            # waits for every submitted task to finish.
            pool.map(self.__execute_one_spider_task,
                     self.get_spiders_from_settings())
        logger.info("The 'run' function call ends.")

    @classmethod
    def start(cls):
        """Entry point: run once immediately, then re-run on a fixed schedule.

        Blocks forever, polling the scheduler every 10 seconds for a
        pending run (interval: RUN_SPIDERS_INTERVAL hours).
        """
        rs = cls()
        rs.run()
        # Register the recurring job.
        schedule.every(RUN_SPIDERS_INTERVAL).hours.do(rs.run)
        while True:
            # Execute any job whose scheduled time has arrived.
            schedule.run_pending()
            time.sleep(10)


if __name__ == '__main__':
    # Blocks forever: runs all spiders once, then re-runs them on a schedule.
    RunSpider.start()
