# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
import random
from followUserSpider.settings import IPPOOL
from scrapy.utils.response import response_status_message
from scrapy.contrib.downloadermiddleware.retry import RetryMiddleware

class UserAgentMiddleware(object):
    """Randomly rotate the User-Agent header from a predefined list.

    The list of agents is read from the ``USER_AGENTS`` setting through
    ``from_crawler``.
    """

    def __init__(self, agents):
        # agents: list of user-agent strings to choose from (may be empty
        # when the USER_AGENTS setting is missing).
        self.agents = agents

    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor: build the middleware from crawler settings."""
        return cls(crawler.settings.getlist('USER_AGENTS'))

    def process_request(self, request, spider):
        """Attach a random User-Agent unless the request already has one.

        Guarded so that an empty agent list leaves the request untouched
        instead of raising IndexError from random.choice.
        """
        if self.agents:
            request.headers.setdefault('User-Agent', random.choice(self.agents))


class CookiesMiddleware(object):
    """Attach a randomly chosen cookie set from a pool to each request.

    The pool is a list of raw ``"k1=v1; k2=v2"`` cookie strings read from
    the ``COOKIEPOOL`` setting.
    """

    def __init__(self, cookiesPool):
        # cookiesPool: list of raw cookie header strings.
        self.cookiesPool = cookiesPool

    def get_random_cookies(self):
        """Pick one cookie string from the pool and parse it into a dict.

        Returns an empty dict when the pool is empty (instead of letting
        random.choice raise IndexError).  Keys and values are stripped
        because browser-style strings carry a space after each ';'
        ("a=1; b=2" would otherwise yield the key " b"), and segments
        lacking '=' are skipped instead of crashing the unpack.
        """
        if not self.cookiesPool:
            return {}
        cookie_str = random.choice(self.cookiesPool)
        cookie = {}
        for part in cookie_str.split(';'):
            if '=' not in part:
                continue
            key, value = part.split('=', 1)
            cookie[key.strip()] = value.strip()
        return cookie

    def process_request(self, request, spider):
        """Set ``request.cookies`` to a random cookie dict when one is available."""
        cookies = self.get_random_cookies()
        if cookies:
            request.cookies = cookies

    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor: build the middleware from the COOKIEPOOL setting."""
        return cls(
            cookiesPool=crawler.settings.getlist('COOKIEPOOL')
        )


# class ProxyMiddleware(object):
#
#     def __init__(self):
#         pass
#         # self.proxy_url = proxy_url
#
#     def process_request(self, request, spider):
#         ip = random.choice(IPPOOL)
#         ip = ip['ipaddr']
#         # uri=  'https://140.143.96.216:80'
#         request.meta['proxy'] = ip
#         pass
#
#     @classmethod
#     def from_crawler(cls, crawler):
#         settings = crawler.settings
#         return cls(
#             # proxy_url=settings.get('PROXY_URL')
#         )

# On request failure, retry; on normal handling, put the IP back into the pool
# class LocalRetryMiddleWare(RetryMiddleware):
#
#     def process_response(self, request, response, spider):
#
#         if str(response.status).startswith('3') or str(response.status).startswith('4') or str(response.status).startswith('5'):
#             proxy = request.meta.get('proxy', False)
#             if proxy:
#                 print("访问失败-ip" + str(proxy))
#             reason = response_status_message(response.status)
#             return self._retry(request, reason, spider) or response
#         if str(response.status).startswith('200'):
#             proxy = request.meta.get('proxy', False)
#             if proxy:
#                 print("访问成功-ip" + str(proxy))
#         return response
#
#     def process_exception(self, request, exception, spider):
#         proxy = request.meta.get('proxy', False)
#         if proxy:
#             print("异常请求ip"+str(proxy))
#         if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
#                 and not request.meta.get('dont_retry', False):
#             return self._retry(request, exception, spider)