from loguru import logger
from proxypool.storages.storages import StorageClient
from proxypool.settings import PROXY_NUMBER_MAX
from proxypool.crawlers import __all__ as crawlers_cls


class Getter(object):
    """
    Build the proxy pool.

    Instantiates every registered crawler class and, on each run, feeds
    the proxies they yield into the storage backend until the pool holds
    at least PROXY_NUMBER_MAX proxies.
    """

    def __init__(self):
        """
        Initialize the storage client and instantiate every crawler class.
        """
        self.sc = StorageClient()
        self.crawlers_cls = crawlers_cls
        self.crawlers = [crawler_cls() for crawler_cls in self.crawlers_cls]

    def is_full(self):
        """
        Return True if the pool already holds at least PROXY_NUMBER_MAX proxies.
        """
        return self.sc.count() >= PROXY_NUMBER_MAX

    @logger.catch
    def run(self):
        """
        Run all crawlers and store every proxy they yield.

        Capacity is re-checked before each crawler (not just once up
        front), so a long crawl session cannot overfill the pool far
        past PROXY_NUMBER_MAX.
        """
        if self.is_full():
            return
        for crawler in self.crawlers:
            # Stop early once the pool has reached capacity; each
            # crawler may add many proxies, so a single up-front check
            # is not enough.
            if self.is_full():
                break
            logger.info(f'爬取者 {crawler} 在爬取代理')
            # NOTE(review): method is spelled `crawle` — presumably matches
            # the crawler base class in this project; confirm it is not a
            # typo for `crawl`.
            for proxy in crawler.crawle():
                self.sc.add(proxy)