# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals
import random
import re
import time

import requests
from requests.exceptions import RequestException
from twisted.internet.error import ConnectInProgressError


class InterGameSpiderSpiderMiddleware(object):
    """Default spider-middleware skeleton: passes everything through unchanged.

    Scrapy only calls the hooks that are defined; each hook below implements
    the identity behaviour, so this middleware has no effect on the crawl.
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Instantiate the middleware and hook the spider_opened signal."""
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened,
                                signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        """Accept every response unmodified (returning None means 'continue')."""
        return None

    def process_spider_output(self, response, result, spider):
        """Forward each item/request produced by the spider as-is."""
        for item in result:
            yield item

    def process_spider_exception(self, response, exception, spider):
        """Let spider exceptions propagate (None keeps Scrapy's default handling)."""
        return None

    def process_start_requests(self, start_requests, spider):
        """Forward each start request unchanged (must yield only requests)."""
        for request in start_requests:
            yield request

    def spider_opened(self, spider):
        """Log the spider's name when it is opened."""
        spider.logger.info('Spider opened: %s' % spider.name)


class RandomUserAgent(object):
    """Downloader middleware that sets a random User-Agent on each request.

    The pool of agents comes from the ``USER_AGENTS`` settings list.
    """

    def __init__(self, agents):
        # List of user-agent strings to rotate through.
        self.agents = agents

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from the crawler's USER_AGENTS setting."""
        return cls(crawler.settings.getlist('USER_AGENTS'))

    def process_request(self, request, spider):
        """Attach one randomly chosen User-Agent to the outgoing request.

        Draw the agent ONCE so the logged value matches the header actually
        sent (the previous code drew two independent random choices, so the
        printed agent usually differed from the one set), and use the spider
        logger instead of a Python-2-only ``print`` statement.
        """
        agent = random.choice(self.agents)
        # Lazy %-style args: the string is only formatted if DEBUG is enabled.
        spider.logger.debug("Using User-Agent: %s", agent)
        # setdefault: an explicitly pre-set User-Agent header is respected.
        request.headers.setdefault('User-Agent', agent)


class ProxyMiddleware(object):
    """Downloader middleware that attaches a fresh proxy to every request."""

    def process_request(self, request, spider):
        """Set ``request.meta['proxy']``, or abort when none is available.

        Raises ConnectInProgressError when no proxy could be fetched, so
        Scrapy treats the request like a failed connection attempt.
        """
        proxy = get_proxy_retry()
        if not proxy:
            raise ConnectInProgressError()
        request.meta['proxy'] = proxy


def get_proxy_retry(retry=3):
    """Try up to *retry* times to obtain a proxy.

    Returns the first truthy proxy string from get_proxy(), or None when
    every attempt came back empty.
    """
    attempts = 0
    while attempts < retry:
        candidate = get_proxy()
        if candidate:
            return candidate
        attempts += 1
    return None


def get_proxy():
    """Fetch one proxy address from the goubanjia rotation endpoints.

    Makes up to 3 attempts (sleeping 1s between failures). A response is
    accepted when it starts with a full dotted-quad IPv4 address and carries
    a port number greater than 100.

    Returns:
        "http://ip:port" on success, or None when all attempts failed.
    """
    # Full IPv4 validation: the original pattern `(?:octet\.){3}` stopped at
    # the third trailing dot and never checked the last octet.
    octet = r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"
    ip_re = r"^(?:%s\.){3}%s" % (octet, octet)
    url = [u"http://dynamic.goubanjia.com/dynamic/get/4a6f4b2146e71e5d343716849f6a1468.html?random=yes",
           u'http://dynamic.goubanjia.com/dynamic/get/e9cb48131fcea3972d1ea24627330259.html?random=yes']
    for _ in range(3):
        data = request_get(random.choice(url))
        if data and re.search(ip_re, data):
            # The service may append extra fields after a comma; the proxy
            # itself is the first "ip:port" field.  Parse the port from that
            # field, not from the raw response: the original
            # int(data.split(":")[1]) raised an uncaught IndexError when no
            # colon was present, and ValueError on "ip:port,extra" responses.
            ip_port = data.split(",")[0]
            try:
                port = int(ip_port.split(":", 1)[1])
            except (IndexError, ValueError):
                port = -1  # malformed line -> treat as rejected
            if port > 100:
                return "http://%s" % ip_port
        # Back off briefly before retrying a bad/empty response.
        time.sleep(1)
    return None


def request_get(url, timeout=10):
    """GET *url* and return the stripped response body, or None on failure.

    Args:
        url: address to fetch.
        timeout: seconds before the request is aborted (new, defaulted
            parameter -- the original call had no timeout and could hang
            the crawl forever on a stalled endpoint).

    Returns:
        The whitespace-stripped response text, or None when the request
        failed or the response could not be decoded.
    """
    try:
        # requests.Timeout subclasses RequestException, so a timed-out
        # request falls into the existing handler below.
        r = requests.get(url, timeout=timeout)
        data = r.text.strip()
        return data
    except ValueError:
        return None
    except RequestException:
        return None
