# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals
import random
import requests

# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter
from dishonest.settings import USER_AGENTS

"""
实现随机User-Agent下载器中间
    1. 准备User-Agent列表
    2. 定义RandomUserAgent类
    3. 实现process_request方法, 设置随机的User-Agent
"""
class RandomUserAgent(object):
    """Downloader middleware that sets a random ``User-Agent`` header.

    Each outgoing request gets a User-Agent picked at random from the
    ``USER_AGENTS`` list configured in the project settings. Requests
    issued by the gsxt (公示系统) spider are left untouched — presumably
    that spider manages its own headers (TODO confirm with the spider).
    """

    def process_request(self, request, spider):
        """Assign a random User-Agent to *request*.

        Always returns ``None`` so Scrapy keeps passing the request
        through the remaining downloader middlewares.
        """
        # Skip the gsxt spider.
        # BUGFIX: the original code used `isinstance(spider, GsxtSpider)`,
        # but GsxtSpider is never imported in this module, so every call
        # raised NameError. Matching the class name anywhere in the MRO
        # preserves the intended isinstance semantics (subclasses included)
        # without the missing import.
        if any(cls.__name__ == 'GsxtSpider' for cls in type(spider).__mro__):
            return None

        # 3. Set a random User-Agent from the configured pool.
        request.headers['User-Agent'] = random.choice(USER_AGENTS)

        return None


"""
实现代理IP下载器中间件
    1. 定义ProxyMiddleware类
    2. 实现process_request方法, 设置代理IP
"""
class ProxyMiddleware(object):
    """Downloader middleware that routes requests through a random proxy.

    For every outgoing request, a local proxy-pool service
    (``http://localhost:16888``) is asked for a random proxy matching the
    request's URL scheme, and the result is stored in
    ``request.meta['proxy']`` for Scrapy's HttpProxyMiddleware to use.
    Requests from the gsxt (公示系统) spider are skipped.
    """

    def process_request(self, request, spider):
        """Fetch a protocol-matching proxy and attach it to *request*.

        Always returns ``None`` so Scrapy continues normal processing.
        """
        # Skip the gsxt spider.
        # BUGFIX: the original code used `isinstance(spider, GsxtSpider)`,
        # but GsxtSpider is never imported in this module, so every call
        # raised NameError. Matching the class name anywhere in the MRO
        # preserves the intended isinstance semantics without the import.
        if any(cls.__name__ == 'GsxtSpider' for cls in type(spider).__mro__):
            return None

        # 1. Extract the URL scheme (e.g. 'http' or 'https') so the pool
        #    can hand back a protocol-compatible proxy.
        protocol = request.url.split('://')[0]
        # 2. Build the proxy-pool request URL.
        proxy_url = 'http://localhost:16888/random?protocol={}'.format(protocol)
        # 3. Ask the local proxy-pool service for a random proxy.
        #    BUGFIX: the original call had no timeout, so a down or hung
        #    proxy service would block the whole crawl indefinitely.
        response = requests.get(proxy_url, timeout=10)
        # 4. Hand the proxy to Scrapy via request.meta['proxy'].
        request.meta['proxy'] = response.content.decode()

        return None

