from scrapy.downloadermiddlewares.retry import RetryMiddleware
from scrapy.utils.response import response_status_message
from scrapyProject.spiders.proxy.proxyPool import proxyPool

import logging
import time
import random

class MyRetryMiddleware(RetryMiddleware):
    """Retry middleware that rotates out the failing proxy before retrying.

    On a retryable HTTP status or a retryable connection exception, the
    proxy used by the request is removed from the shared pool, the retry
    counter is reset so the retried request gets a fresh budget with a new
    proxy, and the request is re-scheduled after a short random back-off.
    """

    logger = logging.getLogger(__name__)
    # Shared pool manager; class attribute, so one pool instance is shared
    # by every middleware instance in the process.
    m_proxyManage = proxyPool()

    def delete_proxy(self, proxy):
        """Remove *proxy* from the proxies pool; no-op for falsy values."""
        if proxy:
            self.m_proxyManage.remove_proxy(proxy=proxy)

    def process_response(self, request, response, spider):
        """Retry responses whose status is in ``retry_http_codes``.

        Passes the response through unchanged for the proxy-harvesting
        spider, for requests flagged ``dont_retry``, and for non-retryable
        status codes.
        """
        if spider.name == "self_proxy":
            # The proxy-harvesting spider must not consume pool proxies.
            return response
        if request.meta.get('dont_retry', False):
            return response
        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)
            # Drop the failing proxy from the pool and from the request so
            # a new one can be assigned on retry.  pop() avoids the KeyError
            # the original `del request.meta['proxy']` raised when the
            # request carried no proxy.
            proxy = request.meta.pop('proxy', None)
            self.delete_proxy(proxy)
            # Reset the counter so the retry gets a full budget with the new
            # proxy instead of inheriting the exhausted count.
            request.meta['retry_times'] = 0

            # Random back-off before retrying to avoid hammering the target.
            time.sleep(random.randint(3, 5))
            self.logger.warning('返回值异常, 进行重试...')
            return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exception, spider):
        """Retry requests that failed with a retryable connection exception."""
        if spider.name == "self_proxy":
            return
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):
            # Drop the failing proxy.  pop() is safe when no proxy was set;
            # the original code read request.meta['proxy'] unguarded (both
            # in the log call and the `del`), which raised KeyError for
            # proxy-less requests.
            proxy = request.meta.pop('proxy', None)
            self.delete_proxy(proxy)
            time.sleep(random.randint(3, 5))
            self.logger.warning('连接异常, 进行重试...%(request)s %(proxy)s',
                                {'request': request, 'proxy': proxy})
            request.meta['retry_times'] = 0
            # NOTE(review): presumably read by a downstream proxy middleware
            # to assign a fresh proxy — confirm against that middleware.
            request.meta['reset_proxy'] = 1
            return self._retry(request, exception, spider)