import redis
import logging
import random
from twisted.internet.error import ConnectionRefusedError

# Module-level logger named after this module (standard logging convention).
logger = logging.getLogger(__name__)

class RandomWaiwangProxy(object):
    """Downloader middleware that assigns a random proxy per domain.

    Proxies are drawn from a Redis set whose key is the domain name; only
    domains listed in the NEED_PROXY_LIST setting are given a proxy.
    """

    def __init__(self, settings):
        # REDIS_HOST: hostname of the Redis server holding the proxy sets.
        redis_host = settings.get('REDIS_HOST')
        self.redis_cli = redis.Redis(redis_host)
        # Domains that require a proxy; all other requests pass through.
        self.need_proxy_list = settings.get('NEED_PROXY_LIST')

    def get_random_proxy(self, domain):
        """Return a random member of the Redis set named *domain*.

        NOTE(review): redis-py returns bytes unless the client was created
        with decode_responses=True -- confirm, because the result is later
        concatenated with the str prefix 'http://' in process_request.
        """
        return self.redis_cli.srandmember(domain)

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the middleware from crawler settings."""
        return cls(crawler.settings)

    def need_proxy(self, request):
        """Return True when the request's domain is configured for a proxy.

        Requests lacking a 'domain' meta key are logged and treated as not
        needing a proxy.
        """
        domain = request.meta.get('domain')
        if not domain:
            logger.error('no domain in request meta, url : %s' % request.url)
            # Explicit False instead of the implicit None the original
            # returned -- same truthiness for callers, clearer intent.
            return False
        return domain in self.need_proxy_list

    def process_request(self, request, spider):
        # Don't overwrite with a random one (server-side state for IP)
        if not self.need_proxy(request):
            return

        domain = request.meta['domain']
        proxy = self.get_random_proxy(domain)
        if not proxy:
            # logger.warn is a deprecated alias; use warning() instead.
            logger.warning('failed to get proxy for domain : %s' % domain)
            return
        logger.debug('using get proxy [%s] for domain : %s' % (proxy, domain))

        request.meta['proxy'] = 'http://' + proxy

    def process_exception(self, request, exception, spider):
        # Intentionally a no-op: failures are left to other middlewares.
        pass

class RandomProxy(object):
    """Downloader middleware that assigns a random proxy from a static list
    (PROXY_IPS) and rotates away from proxies that refuse connections."""

    def __init__(self, settings):
        # PROXY_IPS: mutable list of proxy addresses to draw from.
        self.proxy_ips = settings.get('PROXY_IPS')
        # USE_PROXY: master switch; when falsy the middleware is inert.
        self.use_proxy = settings.get('USE_PROXY')

    def get_random_proxy(self):
        """Return a random proxy address, or None when the pool is empty.

        The original swallowed IndexError/TypeError from random.choice via a
        broad 'except Exception: pass'; the empty/None pool case is now
        handled explicitly.
        """
        if not self.proxy_ips:
            return None
        return random.choice(self.proxy_ips)

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the middleware from crawler settings."""
        return cls(crawler.settings)

    def process_request(self, request, spider):
        if not self.use_proxy:
            return
        proxy = self.get_random_proxy()
        # Bug fix: the original assigned unconditionally, so an exhausted
        # pool set request.meta['proxy'] = None. Skip instead.
        if proxy is None:
            return
        request.meta['proxy'] = proxy

    def process_exception(self, request, exception, spider):
        """On a refused connection, drop the failing proxy from the pool,
        assign a fresh one and return the request so Scrapy retries it."""
        if 'proxy' not in request.meta:
            return
        proxy = request.meta.get('proxy')
        logger.error('exception : %s, url :%s,  proxy : %s' % (repr(exception), request.url, proxy))
        if isinstance(exception, ConnectionRefusedError):
            try:
                self.proxy_ips.remove(proxy)
            except (ValueError, AttributeError):
                # Already removed by another request, or pool is None.
                pass
            replacement = self.get_random_proxy()
            if replacement is None:
                # Pool exhausted: retry without a proxy rather than with
                # the invalid value None (original behavior).
                request.meta.pop('proxy', None)
            else:
                request.meta['proxy'] = replacement
            return request