from scrapy.downloadermiddlewares.retry import RetryMiddleware

'''The imports below are not strictly required here; they mirror the ones
used by the parent implementation and are kept for reference.'''
# Taken from scrapy.downloadermiddlewares.retry
import time
import random
import logging
from twisted.internet import defer
from twisted.internet.error import TimeoutError, DNSLookupError, \
        ConnectionRefusedError, ConnectionDone, ConnectError, \
        ConnectionLost, TCPTimedOutError
from twisted.web.client import ResponseFailed
from scrapy.exceptions import NotConfigured
from scrapy.utils.response import response_status_message
from scrapy.core.downloader.handlers.http11 import TunnelError
from scrapy.utils.python import global_object_name

'''Provides decrease() used to lower a failing proxy's score in the pool'''
from ProxyPool.proxypool.storages.redis import RedisClient

class MyRetryMiddleware(RetryMiddleware):
    """Retry middleware that also penalizes the proxy behind a failure.

    Whenever a request comes back with a retryable HTTP status, or fails
    with a retryable exception, the proxy stored in
    ``request.meta['proxy']`` has its score decreased in the Redis-backed
    proxy pool before the request is re-scheduled.
    """

    # IndexError is appended to the stock exception list so that requests
    # that blow up during redirects (surfacing as IndexError here) are
    # retried as well.
    EXCEPTIONS_TO_RETRY = (defer.TimeoutError, TimeoutError, DNSLookupError,
                           ConnectionRefusedError, ConnectionDone, ConnectError,
                           ConnectionLost, TCPTimedOutError, ResponseFailed,
                           IOError, TunnelError, IndexError)

    logger = logging.getLogger(__name__)

    # Shared RedisClient for every instance of this middleware.
    # NOTE(review): this constructor runs at import time, even when the
    # middleware is disabled; consider building it lazily in from_crawler.
    redis = RedisClient()

    def delete_proxy(self, proxy):
        """Decrease the pool score of ``proxy``.

        ``proxy`` is the value taken from ``request.meta['proxy']``; the
        call is a no-op when it is falsy (the request used no proxy).
        """
        if proxy:
            # Route the message through the configured logger instead of
            # print() so it honors the project's logging setup.
            self.logger.info('代理%s就8行！！！(减分)', proxy)
            self.redis.decrease(proxy)

    def process_response(self, request, response, spider):
        """Retry (and penalize the proxy) on a retryable HTTP status.

        Returns the original response when retrying is disabled or not
        applicable, otherwise the retry request produced by ``_retry``.
        """
        if request.meta.get('dont_retry', False):
            return response
        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)

            # Penalize the proxy that produced the bad status code.
            self.delete_proxy(request.meta.get('proxy'))

            # NOTE(review): time.sleep() blocks the Twisted reactor and
            # stalls every in-flight request; prefer DOWNLOAD_DELAY or
            # AutoThrottle for pacing retries.
            time.sleep(random.randint(3, 5))
            self.logger.warning('返回值异常, 进行重试...')
            return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exception, spider):
        """Retry (and penalize the proxy) on a retryable exception.

        Returns the retry request from ``_retry``, or ``None`` to let
        other middlewares / default exception handling proceed.
        """
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):

            # Penalize the proxy associated with the failed connection.
            self.delete_proxy(request.meta.get('proxy'))

            # NOTE(review): blocking sleep — see process_response.
            time.sleep(random.randint(3, 5))
            self.logger.warning('连接异常, 进行重试...')

            return self._retry(request, exception, spider)
        return None
