# Proxy / user-agent middlewares for Scrapy

from fake_useragent import UserAgent
from user import settings
import random
import base64
from user.pm import PM
import pymysql

# Random proxy middleware (proxies come from the database via PM)
class RandomProxy(object):
    """Downloader middleware that attaches a random proxy to each request.

    Proxies are fetched from the database through the project-local ``PM``
    helper (see user/pm.py).
    """

    def __init__(self):
        # PM encapsulates the proxy-pool database lookup.
        self.pm = PM()

    def process_request(self, request, spider):
        # Pull a random proxy record; PM.random() is implemented in pm.py.
        # Record layout assumed to be (id, host, port) based on the indices
        # used below — TODO(review): confirm against pm.py.
        proxy = self.pm.random()
        # Hand the proxy to Scrapy's downloader via the request meta.
        request.meta['proxy'] = 'http://%s:%s' % (proxy[1], proxy[2])


# Random authenticated-proxy middleware
class RandomAuthProxy(object):
    """Downloader middleware that picks one entry from
    ``settings.AUTH_PROXIES`` and attaches it, together with its HTTP
    Basic credentials, to the outgoing request."""

    def process_request(self, request, spider):
        chosen = random.choice(settings.AUTH_PROXIES)
        # Proxy credentials travel in the Proxy-Authorization header,
        # base64-encoded per the HTTP Basic scheme.
        token = base64.b64encode(chosen['auth'].encode("utf-8"))
        request.meta['proxy'] = 'http://%s' % chosen['host']
        request.headers['Proxy-Authorization'] = b'Basic ' + token


# Spider middlewares
from scrapy.exceptions import CloseSpider

# Spider error handling: abort the crawl on non-success responses
class MySpiderMiddleware(object):
    """Spider middleware that shuts the crawl down on non-success responses.

    Any response whose status is outside the 2xx range raises
    :class:`CloseSpider`, which stops the whole spider.
    """

    def process_spider_input(self, response, spider):
        # Success statuses are 200-299.  The original check used
        # ``<= 300``, wrongly accepting 300 (Multiple Choices, a redirect);
        # the debug print of every status has also been removed.
        if not 200 <= response.status < 300:
            raise CloseSpider('爬虫异常，退出')
        # Returning None tells Scrapy to keep processing this response.
        return None

    def process_spider_output(self, response, result, spider):
        # Pass every scraped item/request through unchanged.
        yield from result

class MyUserAgentMiddleware(object):
    """Downloader middleware that stamps every outgoing request with a
    User-Agent string picked at random from a fixed pool."""

    def __init__(self):
        # Pool of browser User-Agent strings to rotate through.
        self.user_agent_list = [
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.26 Safari/537.36 Core/1.63.6726.400 QQBrowser/10.2.2265.400",
            "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:61.0) Gecko/20100101 Firefox/61",
        ]

    def process_request(self, request, spider):
        # A fresh agent string is chosen for every request.
        request.headers['User-Agent'] = random.choice(self.user_agent_list)