# Apply the gevent monkey patch so blocking stdlib calls become cooperative
from gevent import monkey

monkey.patch_all()
# Import the gevent coroutine pool
from gevent.pool import Pool

from settings import PROXY_SPIDERS
import importlib
from core.proxy_validate.httpbin_validate import check_proxy
from core.db.mongo_pool import MongoPool
from utils.log import logger


class RunSpider(object):
    """Run every configured proxy spider concurrently on a gevent pool,
    validate each harvested proxy, and persist the usable ones to MongoDB."""

    def __init__(self):
        # Shared MongoDB connection pool and gevent coroutine pool.
        self.mongo_pool = MongoPool()
        self.coroutine_pool = Pool()

    @staticmethod
    def get_spiders_from_settings():
        """Yield spider instances built from the dotted paths in PROXY_SPIDERS.

        Each entry looks like "package.module.ClassName"; the module is
        imported dynamically and the class is instantiated with no arguments.
        """
        for item in PROXY_SPIDERS:
            module_name, class_name = item.rsplit(".", maxsplit=1)
            module = importlib.import_module(module_name)
            cls = getattr(module, class_name)
            yield cls()

    def run(self):
        """Schedule one coroutine per configured spider and wait for all."""
        for spider in RunSpider.get_spiders_from_settings():
            self.coroutine_pool.apply_async(self.__execute_spider, args=(spider,))
        # Block until every spider coroutine has finished.
        self.coroutine_pool.join()

    def __execute_spider(self, spider):
        """Run a single spider: fetch its proxies, check each one, and store
        the usable ones in MongoDB.

        Exceptions are logged (with traceback) instead of propagated, so one
        failing spider does not take down the whole run.
        """
        try:
            for proxy in spider.get_proxies():
                # check_proxy fills in speed; speed == -1 marks an unusable proxy.
                checked = check_proxy(proxy)
                if checked.speed != -1:
                    self.mongo_pool.insert_one(checked)
        except Exception:
            # logger.exception already appends the active traceback; a single
            # call is sufficient (the original logged the traceback twice).
            logger.exception("爬虫{}出现错误".format(spider))


if __name__ == '__main__':
    # Entry point: build the runner and launch every configured spider.
    runner = RunSpider()
    runner.run()
