# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
import time

import requests
from fake_useragent import UserAgent


# useful for handling different item types with a single interface


class RandomUserAgentMiddleware(object):
    """Downloader middleware that stamps a randomized User-Agent header
    onto every outgoing request, using the fake_useragent library.

    The kind of UA produced ('random', 'chrome', 'firefox', ...) is the
    attribute name looked up on the UserAgent object and is taken from
    the RANDOM_UA_TYPE setting (default: 'random').
    """

    def __init__(self, crawler):
        super(RandomUserAgentMiddleware, self).__init__()
        # Which attribute of the UA generator to read for each request.
        self.ua_type = crawler.settings.get('RANDOM_UA_TYPE', 'random')
        self.ua = UserAgent()

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy hook: build the middleware with access to crawler settings.
        return cls(crawler)

    def process_request(self, request, spider):
        # Respect a User-Agent the request already carries; otherwise
        # fill in a freshly generated one.
        request.headers.setdefault('User-Agent', getattr(self.ua, self.ua_type))


class ProxyMiddleware(object):
    """Downloader middleware that routes requests through a rotating HTTP
    proxy obtained from the xiaoxiangdaili API.

    A fetched proxy is cached on the instance and reused until
    ``fetch_interval`` seconds have passed, after which a fresh one is
    requested from the API.
    """

    def __init__(self):
        self.last_fetch_time = 0   # epoch seconds of the last successful fetch
        self.proxy = None          # cached proxy URL, e.g. "http://ip:port"
        self.fetch_interval = 10   # interval (seconds) between fetching new proxies

    def get_proxy(self):
        """Return a proxy URL, fetching a new one when the cache is empty
        or older than ``fetch_interval`` seconds.

        Returns:
            str: proxy URL of the form ``http://<ip>:<port>``.

        Raises:
            requests.RequestException: on network failure, timeout, or a
                non-2xx HTTP response from the proxy API.
            Exception: when the API replies with a non-200 business code
                or an empty proxy list.
        """
        if not self.proxy or time.time() - self.last_fetch_time > self.fetch_interval:
            # NOTE(review): API credentials are hard-coded here; consider
            # moving appKey/appSecret into Scrapy settings or env vars.
            url = "https://api.xiaoxiangdaili.com/ip/get?appKey=1018437372628848640&appSecret=DlUFVYgz&cnt=&wt=json"
            # Timeout so a stalled proxy API cannot hang the whole crawl.
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            data = response.json()

            # Guard against a "successful" reply carrying an empty list,
            # which previously crashed with a bare IndexError.
            if data.get('code') == 200 and data.get('data'):
                proxy_data = data['data'][0]
                ip = proxy_data['ip']
                port = proxy_data['port']
                self.proxy = f"http://{ip}:{port}"
                self.last_fetch_time = time.time()
            else:
                raise Exception(f"Failed to fetch proxy. Code: {data.get('code')}, Message: {data.get('msg')}")
        return self.proxy

    def process_request(self, request, spider):
        # Attach the current proxy to the request; Scrapy's HTTP proxy
        # middleware picks it up from request.meta["proxy"].
        proxy = self.get_proxy()
        print(f"set proxy: {proxy}")
        request.meta["proxy"] = proxy
