# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
import logging
import re

import scrapy
from scrapy import signals
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
from scrapy.http.headers import Headers

# NOTE: the star import is kept ahead of fake_useragent to preserve the
# original name-resolution order of the module.
from crawler.core.service_owner import *
from fake_useragent import UserAgent

class UserAgentProxy(UserAgentMiddleware):
    """Downloader middleware that assigns a random desktop User-Agent.

    Re-rolls the generated agent until it contains neither 'Android' nor
    'Linux', then rebuilds the request headers with that agent plus fixed
    Accept / Accept-Language values (and an optional referer taken from
    ``request.meta['referer']``).

    @author Hongv
    """

    # Local cache file for fake_useragent's browser data.
    FAKE_UA_PATH = './fake_useragent.json'

    def __init__(self, user_agent):
        # Bug fix: the original ignored the injected `user_agent` argument
        # and always built a second UserAgent from the cache file. Honor the
        # argument when it is already a UserAgent instance.
        if isinstance(user_agent, UserAgent):
            self.user_agent = user_agent
        else:
            self.user_agent = UserAgent(verify_ssl=False, path=self.FAKE_UA_PATH)

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy entry point: build the middleware with a UserAgent backed
        # by the local cache file.
        return cls(user_agent=UserAgent(verify_ssl=False, path=cls.FAKE_UA_PATH))

    def process_request(self, request, spider):
        """Replace the request headers with a randomized desktop browser profile."""
        # Re-roll until we get a non-Android, non-Linux agent string.
        agent = self.user_agent.random
        while 'Android' in agent or 'Linux' in agent:
            agent = self.user_agent.random

        header = {
            'User-Agent': agent,
            'Accept-Language': 'zh-CN,zh;q=0.8',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        }
        if request.meta.get('referer'):
            header['referer'] = request.meta.get('referer')

        # NOTE(review): this replaces ALL existing request headers, not just
        # the User-Agent — behavior preserved from the original implementation.
        request.headers = Headers(header)

class ProxyPoolsFirst(HttpProxyMiddleware):
    """Downloader middleware attaching a proxy from the first Redis pool.

    Every outgoing request gets ``request.meta['proxy']`` populated from
    ``BaseService.get_ip_proxies`` using the ``REDIS_PROXIES_FIRST`` table.
    On any failure the request proceeds without a proxy (best-effort).

    @author Hongv
    """

    def __init__(self, auth_encoding='latin-1'):
        # Bug fix: the original dropped `auth_encoding` and skipped base-class
        # initialisation; keep HttpProxyMiddleware properly initialised.
        super().__init__(auth_encoding=auth_encoding)
        self.esf_service = BaseService()

    def process_request(self, request, spider):
        """Attach a fresh proxy; never block the request on failure."""
        try:
            redis_table = settings.REDIS_PROXIES_FIRST
            request.meta['proxy'] = 'http://{proxy}'.format(
                proxy=self.esf_service.get_ip_proxies(redis_table))
            logging.info('\n--->\n url: {0}\n header: {1}\n meta: {2}\n<---'.format(
                request.url, str(request.headers), str(request.meta)))
        except Exception as e:
            # Best-effort: log and let the request continue without a proxy.
            logging.exception('\n*** ***\n {0}\n proxy error {1}\n*** ***'.format(
                request.url, str(e)))

    def process_exception(self, request, exception, spider):
        """Log the failing proxy; robust when the request never got one."""
        # Bug fix: the original assumed meta['proxy'] exists and matches the
        # ip:port pattern, raising KeyError/AttributeError otherwise.
        match = re.search(r'\d+\.\d+\.\d+\.\d+:\d+', request.meta.get('proxy', ''))
        proxy = match.group() if match else 'unknown'
        logging.error('*** ***\n {0} [{1}]\n process exception {2}\n*** ***'.format(
            request.url, proxy, str(exception)))

class ProxyPoolsSecond(HttpProxyMiddleware):
    """Downloader middleware for the second proxy strategy.

    Attaches a proxy drawn from ``REDIS_PROXIES_FIRST``. When a request was
    redirected to a verification/captcha page ('verify' or 'callback' in the
    URL), the original request (``meta['first_call']``) is rebuilt with a
    different proxy and rescheduled.

    @author Hongv
    """

    def __init__(self, auth_encoding='latin-1'):
        # Keep the base class properly initialised (the original skipped it).
        super().__init__(auth_encoding=auth_encoding)
        self.esf_service = EsfOwnerService()

    def process_request(self, request, spider):
        """Attach a proxy; rebuild and reschedule requests caught by verification."""
        try:
            # Proxy that led to the verification page (if any) — avoid reusing it.
            bad_proxy = re.search(r'\d+\.\d+\.\d+\.\d+:\d+', request.meta.get('proxy', ''))
            rebuilt = False
            if 'verify' in request.url or 'callback' in request.url:
                # Redirected to a verification page: rebuild the original
                # request. dont_filter=True so the dupefilter does not drop
                # the retry of an already-seen URL.
                request = request.replace(url=request.meta.get('first_call'),
                                          dont_filter=True)
                rebuilt = True

            # Draw a proxy, skipping the one that triggered verification.
            redis_table = settings.REDIS_PROXIES_FIRST
            proxy = self.esf_service.get_ip_proxies(redis_table)
            while bad_proxy and proxy == bad_proxy.group():
                proxy = self.esf_service.get_ip_proxies(redis_table)
            request.meta['proxy'] = 'http://{proxy}'.format(proxy=proxy)
            logging.info('\n--->\n url: {0}\n header: {1}\n meta: {2}\n<---'.format(
                request.url, str(request.headers), str(request.meta)))
            if rebuilt:
                # Bug fix: the original only rebound the local name, so the
                # URL replacement never took effect (process_request returned
                # None and Scrapy kept the original request). Returning the
                # rebuilt request makes Scrapy reschedule it.
                return request
        except Exception as e:
            logging.exception('\n*** ***\n {0}\n proxy error {1}\n*** ***'.format(
                request.url, str(e)))

    def process_exception(self, request, exception, spider):
        """Log the failing proxy and retry the request."""
        # Bug fix: guard against a missing/non-matching meta['proxy'] instead
        # of crashing on .group() / KeyError.
        match = re.search(r'\d+\.\d+\.\d+\.\d+:\d+', request.meta.get('proxy', ''))
        proxy = match.group() if match else 'unknown'
        logging.error('*** ***\n {0} [{1}]\n process exception {2}\n*** ***'.format(
            request.url, proxy, str(exception)))
        # Returning the request asks Scrapy to retry it (a new proxy will be
        # assigned on the next pass through process_request).
        return request

class ProxyPoolsThird(HttpProxyMiddleware):
    """Downloader middleware for the third proxy strategy.

    Same flow as ProxyPoolsSecond but draws from the ``REDIS_PROXIES_SECOND``
    table with ``pool_type='set'``: attach a proxy, and rebuild/reschedule
    requests that were redirected to a verification/captcha page.

    @author Hongv
    """

    def __init__(self, auth_encoding='latin-1'):
        # Keep the base class properly initialised (the original skipped it).
        super().__init__(auth_encoding=auth_encoding)
        self.esf_service = EsfOwnerService()

    def process_request(self, request, spider):
        """Attach a proxy; rebuild and reschedule requests caught by verification."""
        try:
            # Proxy that led to the verification page (if any) — avoid reusing it.
            bad_proxy = re.search(r'\d+\.\d+\.\d+\.\d+:\d+', request.meta.get('proxy', ''))
            rebuilt = False
            if 'verify' in request.url or 'callback' in request.url:
                # Redirected to a verification page: rebuild the original
                # request. dont_filter=True so the dupefilter does not drop
                # the retry of an already-seen URL.
                request = request.replace(url=request.meta.get('first_call'),
                                          dont_filter=True)
                rebuilt = True

            # Draw a proxy, skipping the one that triggered verification.
            redis_table = settings.REDIS_PROXIES_SECOND
            proxy = self.esf_service.get_ip_proxies(redis_table, pool_type='set')
            while bad_proxy and proxy == bad_proxy.group():
                proxy = self.esf_service.get_ip_proxies(redis_table, pool_type='set')
            request.meta['proxy'] = 'http://{proxy}'.format(proxy=proxy)
            logging.info('\n--->\n url: {0}\n header: {1}\n meta: {2}\n<---'.format(
                request.url, str(request.headers), str(request.meta)))
            if rebuilt:
                # Bug fix: the original only rebound the local name, so the
                # URL replacement never took effect (process_request returned
                # None and Scrapy kept the original request). Returning the
                # rebuilt request makes Scrapy reschedule it.
                return request
        except Exception as e:
            logging.exception('\n*** ***\n {0}\n proxy error {1}\n*** ***'.format(
                request.url, str(e)))

    def process_exception(self, request, exception, spider):
        """Log the failing proxy and retry the request."""
        # Bug fix: guard against a missing/non-matching meta['proxy'] instead
        # of crashing on .group() / KeyError.
        match = re.search(r'\d+\.\d+\.\d+\.\d+:\d+', request.meta.get('proxy', ''))
        proxy = match.group() if match else 'unknown'
        logging.error('*** ***\n {0} [{1}]\n process exception {2}\n*** ***'.format(
            request.url, proxy, str(exception)))
        # Returning the request asks Scrapy to retry it (a new proxy will be
        # assigned on the next pass through process_request).
        return request

