# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
import time

import requests
from fake_useragent import UserAgent

class GzSpiderDownloaderMiddleware:
    """Downloader middleware that sets a random User-Agent on every request.

    Rotating the User-Agent per request makes the crawler harder to
    fingerprint/block by the target site.
    """

    def __init__(self):
        # Build the UserAgent object once. Constructing it per request (as the
        # previous version did) re-loads fake_useragent's browser-data cache on
        # every call, which is needlessly slow.
        self.ua = UserAgent()

    def process_request(self, request, spider):
        """Assign a freshly randomized User-Agent header to `request`.

        Returns None implicitly, which tells Scrapy to keep processing the
        request through the remaining downloader middlewares.
        """
        request.headers['User-Agent'] = self.ua.random


class IPProxyDownloadMiddleware:
    """Downloader middleware that routes each request through a proxy IP
    obtained from the Zhima (zhimacangku) proxy API.

    A new proxy is fetched for every request; failures to obtain one raise,
    letting Scrapy's retry/exception machinery handle the request.
    """

    # API endpoint returning a single proxy entry as JSON (num=1, type=2).
    ZHIMA_PROXY_API = 'http://webapi.http.zhimacangku.com/getip?num=1&type=2&pro=&city=0&yys=0&port=11&time=1&ts=1&ys=0&cs=1&lb=1&sb=0&pb=4&mr=1&regions='

    def process_request(self, request, spider):
        """Fetch a fresh proxy and attach it to `request.meta['proxy']`."""
        # Throttle calls to the proxy API — presumably the provider
        # rate-limits; TODO confirm the required interval.
        time.sleep(2)
        # timeout is mandatory: requests.get with no timeout can block the
        # whole crawl indefinitely if the proxy API stalls.
        r = requests.get(self.ZHIMA_PROXY_API, timeout=10)
        # Fail fast on HTTP errors instead of parsing an error body as JSON
        # and getting an opaque KeyError further down.
        r.raise_for_status()
        data = r.json()
        entry = data['data'][0]
        ip = entry['ip']
        port = entry['port']
        request.meta['proxy'] = f'http://{ip}:{port}'
