# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals
import random

class MiddlieproDownloaderMiddleware(object):
    """Downloader middleware that rotates the User-Agent header and the
    proxy IP on every outgoing request (and again on request failure).

    Enable it in ``settings.py`` via ``DOWNLOADER_MIDDLEWARES``.
    """

    # Pool of User-Agent strings to rotate through.
    User_Agent_list = ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.64 Safari/537.36',
                       ]

    # Proxy pool for plain-HTTP requests.
    # BUG FIX: the original list was missing the comma between elements,
    # so Python concatenated the two literals into one invalid entry.
    PROXY_http = [
        '122.9.101.6:8888',
        '124.220.0.182:40001',
    ]

    # Proxy pool for HTTPS requests.
    PROXY_https = [
        '122.9.101.6:8888',
        ]

    def _apply_rotation(self, request):
        """Set a random User-Agent and a scheme-matched random proxy on *request*.

        BUG FIX: the original used ``==`` (a no-op comparison) instead of
        ``=``, so neither header nor proxy was ever actually assigned.
        """
        request.headers['User-Agent'] = random.choice(self.User_Agent_list)

        # Pick the proxy pool that matches the request's URL scheme, and
        # include the scheme prefix that Scrapy's HttpProxyMiddleware expects.
        if request.url.startswith('https'):
            request.meta['proxy'] = 'https://' + random.choice(self.PROXY_https)
        else:
            request.meta['proxy'] = 'http://' + random.choice(self.PROXY_http)

    # Intercept every outgoing request.
    def process_request(self, request, spider):
        """Attach a random User-Agent and proxy, then let the request proceed."""
        self._apply_rotation(request)
        # None -> continue through the remaining downloader middlewares.
        return None

    # Intercept every response.
    def process_response(self, request, response, spider):
        # Called with the response returned from the downloader.
        # NOTE(review): these prints look like leftover debugging; kept to
        # preserve observable behavior — consider switching to spider.logger.
        print(request)
        print(response)
        return response

    # Intercept requests that raised an exception (e.g. a dead proxy).
    def process_exception(self, request, exception, spider):
        """Re-randomize the User-Agent and proxy, then reschedule the request."""
        self._apply_rotation(request)
        # Returning the request puts it back on the download queue for a retry.
        return request




