# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
import random

from scrapy import signals

# 1. Use downloader middleware to work around anti-scraping measures:
#    a User-Agent middleware that rewrites each outgoing request
from .settings import USER_AGENT_LIST


class UserAgentMiddleware(object):
    """Downloader middleware that assigns a random User-Agent to every request.

    The candidate strings come from ``USER_AGENT_LIST`` in the project
    settings; rotating them makes the crawler harder to fingerprint.
    """

    def process_request(self, request, spider):
        # Pick one user-agent at random and stamp it onto the request
        # headers before the request is sent downstream.
        request.headers['User-Agent'] = random.choice(USER_AGENT_LIST)

    def process_response(self, request, response, spider):
        # Demo/debug hook: print the User-Agent that was actually sent,
        # confirming that process_request took effect for this request.
        print('#' * 100)
        print(request.headers['User-Agent'])

        return response


# Proxy downloader middleware (rewrites the request before it is sent)
class ProxyMiddleware(object):
    """Downloader middleware that routes every request through an HTTP proxy.

    The proxy endpoint was previously hard-coded inside ``process_request``;
    it is now an ``__init__`` parameter whose default preserves the original
    behavior, so existing no-argument instantiation keeps working while
    other proxies can be configured without editing the method body.
    """

    def __init__(self, proxy='http://110.110.120.11:8888'):
        # proxy: full proxy URL, e.g. 'http://host:port'
        self.proxy = proxy

    def process_request(self, request, spider):
        # Scrapy's HttpProxyMiddleware honors request.meta['proxy'],
        # so setting it here redirects this request through the proxy.
        request.meta['proxy'] = self.proxy
