# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
import random
from .utils.proxy_ip import ProxyIpSpider

# Module-level proxy pool helper, shared by the middlewares below.
# Currently only referenced from commented-out proxy-rotation code;
# kept so re-enabling rotation needs no further wiring.
proxy_ip = ProxyIpSpider()


class UaAndProxyMiddleware:
    """Downloader middleware that rotates the User-Agent header per request.

    Picks a random entry from the ``USER_AGENTS_LIST`` setting for every
    outgoing request. Proxy rotation (via the module-level ``proxy_ip``
    helper) is currently disabled.
    """

    def process_request(self, request, spider):
        """Assign a random User-Agent to *request*.

        :param request: the outgoing Scrapy request; its ``headers`` mapping
            is mutated in place.
        :param spider: the running spider, used to reach project settings.
        :returns: ``None`` so Scrapy continues processing the request chain.
        """
        ua_list = spider.settings.get('USER_AGENTS_LIST')
        # Guard: random.choice would raise TypeError/IndexError if the
        # setting is missing or empty; in that case leave the header as-is.
        if ua_list:
            request.headers['User-Agent'] = random.choice(ua_list)
        # NOTE: proxy rotation is disabled; it previously used
        # proxy_ip.get_random_ip(type='https') to set request.meta['proxy'].
        return None


class CheckUserAgent:
    """Downloader middleware that prints the User-Agent actually sent."""

    def process_response(self, request, response, spider):
        """Print the request's User-Agent for debugging, pass *response* on.

        :param request: the request that produced *response*; its
            ``User-Agent`` header is read (stored as bytes by Scrapy).
        :param response: returned unchanged so the middleware chain continues.
        :param spider: unused, required by the middleware interface.
        :returns: *response*, unmodified.
        """
        ua = request.headers.get('User-Agent')
        # Guard: .get() returns None when the header is absent, and calling
        # .decode() on None would raise AttributeError.
        print('请求ua:' + (ua.decode() if ua is not None else ''))
        # Proxy logging disabled; previously printed request.meta['proxies'].
        return response
