from scrapy.downloadermiddlewares.retry import RetryMiddleware
from scrapyProject.spiders.proxy.proxyManage import proxyManageForMySQL
from scrapy.exceptions import NotConfigured
import logging

class ProxyRetryMiddleware(RetryMiddleware):
    """Retry middleware that also evicts a failing proxy.

    For the ``self_proxy`` spider, when a request fails with one of the
    retryable exceptions, the proxy that was used is marked as timed out in
    the MySQL-backed proxy pool before the standard retry logic runs.
    """

    logger = logging.getLogger(__name__)

    def __init__(self, settings):
        super().__init__(settings)
        # MySQL-backed proxy pool manager used to flag dead/slow proxies.
        self.manage = proxyManageForMySQL()

    def process_exception(self, request, exception, spider):
        """Evict the failing proxy, then fall through to the normal retry.

        The original override returned ``None`` unconditionally, which
        dropped the retry behaviour inherited from ``RetryMiddleware``;
        delegating to ``super().process_exception`` restores it for every
        spider while keeping the proxy eviction for ``self_proxy``.
        """
        if (spider.name == "self_proxy"
                and isinstance(exception, self.EXCEPTIONS_TO_RETRY)):
            # Guard: not every request necessarily carries a proxy.
            proxy = request.meta.get('proxy')
            if proxy is not None:
                # Connection timed out — drop this proxy from rotation.
                self.logger.warning(
                    '连接超时,删除链接 ...%(request)s %(proxy)s',
                    {'request': request, 'proxy': proxy},
                )
                # 300 is presumably a cooldown/penalty in seconds — TODO confirm
                # against proxyManageForMySQL.proxyUpdateTimeOut.
                self.manage.proxyUpdateTimeOut(
                    proxy, request.meta['lastCheckTime'], 300)
        # Preserve the inherited retry behaviour (re-schedules the request
        # or gives up after max retries).
        return super().process_exception(request, exception, spider)

