# -*- coding:utf8 -*-

import random
import base64

from twisted.internet import defer
from twisted.internet.error import TimeoutError, DNSLookupError, \
                ConnectionRefusedError, ConnectionDone, ConnectError, \
                ConnectionLost, TCPTimedOutError
#from scrapy.xlib.tx import ResponseFailed
from scrapy.core.downloader.handlers.http11 import TunnelError
from OpenSSL.SSL import Error as OpenSSLError

from gaokaopai.dao import *
from scrapy.conf import settings

# Abuyun HTTP tunnel proxy configuration (credentials come from Scrapy settings).
proxyServer = "http://proxy.abuyun.com:9020"
proxyUser = settings['ABUYUN_USER']
proxyPass = settings['ABUYUN_PASS']

# Pre-compute the Proxy-Authorization header value once at import time.
_credentials = proxyUser + ":" + proxyPass
proxyAuth = "Basic " + base64.urlsafe_b64encode(_credentials.encode("ascii")).decode("utf8")


class RandomUserAgentMiddleware(object):
    """Downloader middleware that assigns a random User-Agent to each request.

    The pool of agent strings is taken from the ``USER_AGENTS`` setting.
    """

    def __init__(self, agents):
        # Pool of User-Agent strings to choose from.
        self.agents = agents

    @classmethod
    def from_crawler(cls, crawler):
        # Alternate constructor wired into Scrapy's middleware machinery.
        return cls(crawler.settings.getlist('USER_AGENTS'))

    def process_request(self, request, spider):
        # Only fill in the header if the request does not already carry one.
        request.headers.setdefault('User-Agent', random.choice(self.agents))

class ProxyMiddleware(object):
    """Downloader middleware that routes requests through a paid proxy.

    The backend is selected by the ``PROXY_NAME`` setting; ``'abuyun'``
    (the default) is the only backend currently implemented.
    """

    def __init__(self, settings):
        # Name of the proxy backend to use; defaults to 'abuyun'.
        self.proxy = settings.get('PROXY_NAME', 'abuyun')

    @classmethod
    def from_crawler(cls, crawler):
        # Alternate constructor wired into Scrapy's middleware machinery.
        return cls(crawler.settings)

    def process_request(self, request, spider):
        # Abuyun tunnel proxy: point the request at the proxy endpoint and
        # attach the Basic-auth header pre-computed at module import time.
        # (Removed a leftover debug print that fired on every request.)
        if self.proxy == 'abuyun':
            request.meta["proxy"] = proxyServer
            request.headers["Proxy-Authorization"] = proxyAuth