from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapyProject.spiders.proxy.proxyPool import proxyPool

import logging

from fake_useragent import UserAgent

from collections import defaultdict



class randomHttpProxyMiddleware(HttpProxyMiddleware):
    """Scrapy downloader middleware that attaches a random proxy
    (drawn from a ``proxyPool`` instance) to each outgoing request.
    """

    logger = logging.getLogger(__name__)

    def __init__(self, auth_encoding='latin-1', proxy_list_file=None):
        """
        :param auth_encoding: encoding used for proxy auth credentials
            (forwarded to ``HttpProxyMiddleware``).
        :param proxy_list_file: path of the proxy-list (JSON) file taken
            from settings.
            NOTE(review): currently accepted but never used — ``proxyPool()``
            appears to load its own data; confirm whether this path should
            be forwarded to it.
        """
        super().__init__(auth_encoding)

        self.m_proPool = proxyPool()

        # Pre-parse every proxy from the pool, grouped by URL scheme.
        # ``_get_proxy`` returns a (credentials, proxy_url) tuple.
        self.proxies = defaultdict(list)
        for scheme, entries in self.m_proPool.getData().items():
            for entry in entries:
                self.proxies[scheme].append(self._get_proxy(entry, scheme))

    @classmethod
    def from_crawler(cls, crawler):
        # Encoding for user-auth credentials, from the project settings.
        auth_encoding = crawler.settings.get('HTTPPROXY_AUTH_ENCODING', 'latin-1')

        # Path of the proxy-server list file (JSON), from the project settings.
        proxy_list_file = crawler.settings.get("HTTPPROXY_PROXY_LIST_FILE")

        return cls(auth_encoding, proxy_list_file)

    def _set_proxy(self, request, scheme):
        """Pick a random proxy for *scheme* and install it on *request*,
        adding a Proxy-Authorization header when credentials are present.
        """
        creds, proxy = self._get_proxy(self.m_proPool.get_random_proxy(scheme), scheme)
        request.meta['proxy'] = proxy
        if creds:
            request.headers['Proxy-Authorization'] = b'Basic ' + creds

        # A retry may flag the request with reset_proxy=1 to ask for a
        # fresh proxy; log that the proxy was re-assigned.
        if request.meta.get('reset_proxy') == 1:
            self.logger.info('重新设置代理...%(request)s %(proxy)s',
                             {'request': request, 'proxy': request.meta['proxy']})
        request.meta['reset_proxy'] = 0


class RandomUserAgentMiddleware(object):
    """Downloader middleware that sets a random User-Agent header.

    Which attribute of the ``fake_useragent.UserAgent`` instance is read
    (e.g. ``"random"``, ``"chrome"``) comes from the ``RANDOM_UA_TYPE``
    setting, defaulting to ``"random"``.
    """

    def __init__(self, crawler):
        super(RandomUserAgentMiddleware, self).__init__()
        self.ua = UserAgent()
        settings = crawler.settings
        self.ua_type = settings.get("RANDOM_UA_TYPE", "random")

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler)

    def process_request(self, request, spider):
        # Look up the configured attribute on the UserAgent object and
        # install it unless the request already carries a User-Agent.
        agent = getattr(self.ua, self.ua_type)
        request.headers.setdefault('User-Agent', agent)

# NOTE(review): removed a dead, commented-out Faker-based
# RandomUserAgentMiddleware implementation that was parked here inside a
# module-level triple-quoted string (it also contained an indentation
# error and referenced an unimported `signals`). Recover it from version
# control if it is ever needed again.