import base64
import random
import requests
from config import config
from scrapy.utils.python import to_bytes
from urllib.parse import unquote, urlunparse
from crawler.connection import redis_conn
from scrapy.utils.response import response_status_message
from scrapy.downloadermiddlewares.retry import RetryMiddleware
from twisted.internet.error import TimeoutError, ConnectionLost, TCPTimedOutError

try:
    from urllib2 import _parse_proxy
except ImportError:
    from urllib.request import _parse_proxy
__all__ = ['ProxyMiddleware']


class ProxyMiddleware(RetryMiddleware):
    """Assign an outbound proxy to every request according to
    ``request.meta['proxy_type']`` and retry failures through a freshly
    selected proxy.

    Supported ``proxy_type`` values: ``local`` (no proxy, the default),
    ``custom``, ``tunnel``, ``abroad`` and ``clash``; anything else falls
    back to ``local``.
    """

    single_proxy = ''

    def _basic_auth_header(self, username, password):
        """Return the base64 ``user:password`` payload for a
        Proxy-Authorization header."""
        user_pass = to_bytes(
            '%s:%s' % (unquote(username), unquote(password)),
            encoding='latin-1')
        return base64.b64encode(user_pass)

    def _get_proxy(self, url, orig_type):
        """Split a proxy URL into ``(creds, proxy_url)``.

        ``creds`` is the base64 auth blob, or ``None`` when the URL does
        not embed user info; ``proxy_url`` is the scheme+host portion.
        """
        proxy_type, user, password, hostport = _parse_proxy(url)
        proxy_url = urlunparse((proxy_type or orig_type, hostport, '', '', '', ''))
        creds = self._basic_auth_header(user, password) if user else None
        return creds, proxy_url

    def _set_proxy_authorization(self, request, spider):
        """Attach a Proxy-Authorization header when the selected proxy URL
        carries credentials; no-op when no proxy is set."""
        proxy = request.meta.get('proxy')
        if not proxy:
            # 'local' mode stores '' and a failed _get_random_ip lookup may
            # store None; bail out instead of feeding them to _parse_proxy
            # (which raises on None).
            return
        creds, proxy_url = self._get_proxy(proxy, '')
        if creds:
            request.headers['Proxy-Authorization'] = b'Basic ' + creds

    @staticmethod
    def _distributed_index(redis_key, length):
        """Return the next round-robin index from a shared Redis counter.

        Falls back to a random index when Redis is unreachable so a cache
        outage never blocks proxy selection.

        :raises ValueError: when the proxy list is empty.
        """
        if length <= 0:
            raise ValueError('proxy list is empty; cannot pick a proxy')
        try:
            counter = redis_conn.incr(redis_key, 1)
        except Exception:  # was a bare except; keep the best-effort fallback
            counter = random.randint(0, length - 1)
        return counter % length

    @staticmethod
    def _get_distributed_proxy(proxy_list, redis_key):
        """Pick the next ``(username, password, host)`` entry via the
        distributed counter so requests spread evenly across proxies."""
        index = ProxyMiddleware._distributed_index(redis_key, len(proxy_list))
        proxy_info = proxy_list[index]
        return 'http://%(username)s:%(password)s@%(host)s' % {
            'username': proxy_info[0],
            'password': proxy_info[1],
            'host': proxy_info[2]
        }

    @staticmethod
    def _get_distributed_abroad(proxy_list, redis_key):
        """Pick the next host from Redis ``ips_pool:*`` keys via the
        distributed counter.

        NOTE(review): the credentials below are hard-coded — they should be
        moved into config alongside the other proxy settings.
        """
        index = ProxyMiddleware._distributed_index(redis_key, len(proxy_list))
        host = proxy_list[index].split('ips_pool:')[1]
        return 'http://%(username)s:%(password)s@%(host)s' % {
            'username': "T8R0DFJ5",
            'password': "B14B0C5BAFB5",
            'host': host
        }

    @staticmethod
    def _apply_proxy(request, spider, proxy, proxy_type, label):
        """Record the chosen proxy on ``request.meta`` and log the choice."""
        request.meta['proxy'] = proxy
        request.meta['monitor_proxy_type'] = proxy_type
        request.meta['proxy_type'] = proxy_type
        request.meta['requests_proxies'] = proxy
        spider.logger.info('Use %s Proxy: %s' % (label, request.url))

    def _local_proxy(self, request, spider):
        """Use the local IP (no proxy)."""
        self._apply_proxy(request, spider, '', 'local', 'Local')

    def _custom_proxy(self, request, spider):
        """Use a random IP from the in-house proxy-pool service.

        NOTE(review): _get_random_ip returns None on failure and that None
        is stored as the proxy; _set_proxy_authorization skips it, but the
        downloader still sees a None proxy — confirm this is intended.
        """
        proxy = self._get_random_ip(spider)
        self._apply_proxy(request, spider, proxy, 'custom', 'Custom')

    def _abroad_proxy(self, request, spider):
        """Use an entry from the abroad proxy list.

        NOTE(review): this shares the 'tunnel_proxy_number' counter key with
        _realtime_proxy, so both rotations advance together.
        """
        proxy = self._get_distributed_proxy(config.ABROAD_PROXY_LIST, 'tunnel_proxy_number')
        self._apply_proxy(request, spider, proxy, 'abroad', 'Abroad')

    @staticmethod
    def _clash_proxy(request, spider):
        """Route through a local Clash instance."""
        ProxyMiddleware._apply_proxy(
            request, spider, 'http://127.0.0.1:7890', 'clash', 'Clash')

    def _realtime_proxy(self, request, spider):
        """Use a real-time (tunnel) proxy from the configured list."""
        proxy = self._get_distributed_proxy(config.REALTIME_PROXY_LIST, 'tunnel_proxy_number')
        self._apply_proxy(request, spider, proxy, 'tunnel', 'Tunnel')

    def _test_proxy(self, request, spider):
        """Use an IP from the local test proxy-pool service."""
        res_json = requests.get('http://127.0.0.1:5010/get/').json()
        proxy = 'http://' + res_json['proxy']
        self._apply_proxy(request, spider, proxy, 'test', 'Test')

    @staticmethod
    def _get_random_ip(spider):
        """Fetch a random universal proxy from the proxy-pool REST service.

        Returns ``'http://<ip>'`` on success, or ``None`` when the response
        carries no ``ip`` field (the failure is logged).
        """
        headers = {
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Referer': 'https://rest.innodealing.com/proxy-pool-server/swagger-ui.html',
            'Sec-Fetch-Dest': 'empty',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36',
            'accept': 'application/json',
        }
        params = (
            ('proxy_type', 'universal_proxy'),
        )
        response = requests.get('https://rest.innodealing.com/proxy-pool-server/internal/random-proxy/random',
                                headers=headers, params=params, timeout=(15, 15))
        if 'ip' in response.text:
            return 'http://{}'.format(response.json()['ip'])
        spider.logger.info('GET RANDOM IP ERROR!!!: {}'.format(response.text))
        return None

    def _set_proxy(self, request, spider):
        """Dispatch to the setter matching ``request.meta['proxy_type']``;
        unknown or missing types fall back to the local IP."""
        handlers = {
            'custom': self._custom_proxy,
            'local': self._local_proxy,
            'tunnel': self._realtime_proxy,
            'abroad': self._abroad_proxy,
            'clash': self._clash_proxy,
        }
        proxy_type = request.meta.get('proxy_type', 'local')
        handlers.get(proxy_type, self._local_proxy)(request, spider)

    def process_request(self, request, spider):
        """Scrapy hook: choose a proxy, attach auth, disable dedup."""
        self._set_proxy(request, spider)
        self._set_proxy_authorization(request, spider)
        # Never dedup-filter any request.
        request.dont_filter = True

    def process_response(self, request, response, spider):
        """Scrapy hook: retry configured HTTP status codes through a fresh
        proxy; otherwise pass the response through."""
        if request.meta.get('dont_retry', False):
            return response
        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)
            # Rotate the proxy before scheduling the retry.
            self._set_proxy(request, spider)
            return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exception, spider):
        """Scrapy hook: classify the failure, rotate the proxy, retry."""
        if not exception:
            return None
        exc_text = str(exception)
        if ('Too Many Requests' in exc_text
                or 'Could not open CONNECT tunnel' in exc_text
                or (isinstance(exception, self.EXCEPTIONS_TO_RETRY)
                    and not request.meta.get('dont_retry', False))):
            error_msg = exc_text
            # Restore the original URL (it may have been rewritten) before
            # the retry is scheduled.
            if request.meta.get('request_url'):
                request._set_url(request.meta.get('request_url'))
        elif isinstance(exception, (TimeoutError, TCPTimedOutError)):
            error_msg = '请求超时，错误信息：{}'.format(exc_text)
        elif (isinstance(exception, ConnectionLost)
                or 'lost in a non-clean fashion' in exc_text
                or 'Connection was closed cleanly' in exc_text):
            error_msg = '请求连接丢失，错误信息：{}'.format(exc_text)
        else:
            error_msg = '代理中间件其他错误：' + exc_text
        self._set_proxy(request, spider)
        return self._retry(request, error_msg, spider)
