# Importing base64 library because we'll need it ONLY in case if the proxy we are going to use requires authentication
import base64


# Start your middleware class
import random

from multiprocessing import TimeoutError

from twisted.internet.error import ConnectionRefusedError, ConnectError
from twisted.web._newclient import ResponseNeverReceived

from MySpiders.proxy.agents import AGENTS
from MySpiders.proxy.proxy import PROXIES

class ProxyMiddleware(object):
    """Downloader middleware that routes each request through a randomly
    chosen proxy from the PROXIES pool and rotates to a different proxy
    when the current one fails with a connection-level error.
    """

    # Connection-level failures that indicate a dead or blocked proxy
    # rather than a problem with the request itself; on these we swap to
    # a fresh proxy instead of retrying through the same one.
    # NOTE: TimeoutError here is multiprocessing.TimeoutError (see imports).
    DONT_RETRY_ERRORS = (TimeoutError, ConnectionRefusedError, ResponseNeverReceived, ConnectError, ValueError)

    def process_request(self, request, spider):
        """Attach a randomly selected proxy to the outgoing request.

        Rotating per request (instead of pinning one hard-coded address)
        is the whole point of keeping a PROXIES pool: a single fixed proxy
        is a single point of failure and is trivially rate-limited.
        """
        # assumes each PROXIES entry is a dict with an 'ip_port' key
        # (e.g. {'ip_port': '123.56.74.13:8080'}) — confirm against
        # MySpiders.proxy.proxy
        proxy = random.choice(PROXIES)
        request.meta['proxy'] = 'http://' + proxy['ip_port']

    def process_exception(self, request, exception, spider):
        """On a connection-level failure, reissue the request through a
        different proxy.

        Returns a copy of the request with a new proxy and
        dont_filter=True (so the scheduler's duplicate filter does not
        drop the retry), or None to fall through to default handling.

        A blanket retry of every non-200 response is deliberately NOT
        implemented here: unconditionally re-issuing such requests can
        loop forever on a genuinely unavailable page.
        """
        if isinstance(exception, self.DONT_RETRY_ERRORS):
            new_request = request.copy()
            proxy = random.choice(PROXIES)
            new_request.meta['proxy'] = 'http://' + proxy['ip_port']
            new_request.dont_filter = True
            return new_request

class RandomUserAgent(object):
    """Downloader middleware that sets a random User-Agent header on
    every outgoing request to make the crawler harder to fingerprint.
    """

    def process_request(self, request, spider):
        """Pick a fresh User-Agent for this request.

        A single hard-coded UA string defeats the purpose of this class
        (its name promises randomness) and makes every request trivially
        identifiable as coming from the same client.
        """
        # assumes AGENTS is a sequence of User-Agent strings — confirm
        # against MySpiders.proxy.agents
        request.headers['User-Agent'] = random.choice(AGENTS)