import random
import time
import requests
from scrapy import signals
from scrapy.downloadermiddlewares.retry import RetryMiddleware
from scrapy.utils.response import response_status_message

class RandomUserAgentMiddleware:
    """Downloader middleware that assigns a random User-Agent to each request.

    The pool of user-agent strings comes from the ``USER_AGENTS`` setting;
    a small built-in list of common desktop browsers is used as a fallback.
    """

    def __init__(self, user_agents):
        # List of User-Agent strings to pick from on every request.
        self.user_agents = user_agents

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings (Scrapy entry point)."""
        return cls(
            user_agents=crawler.settings.get('USER_AGENTS', [
                'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
                'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
                'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
                'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0',
                'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15'
            ])
        )

    def process_request(self, request, spider):
        # Guard: an explicitly-empty USER_AGENTS setting would make
        # random.choice raise IndexError; leave the request untouched instead.
        if self.user_agents:
            request.headers['User-Agent'] = random.choice(self.user_agents)

class ProxyMiddleware:
    """Downloader middleware that routes requests through a random proxy.

    Proxies are taken from the ``PROXY_LIST`` setting. A request that already
    carries a ``proxy`` key in its meta (e.g. set by a retry or by the spider)
    is left untouched.
    """

    def __init__(self, proxy_list):
        # Candidate proxy URLs; an empty list disables the middleware.
        self.proxy_list = proxy_list

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings (Scrapy entry point)."""
        return cls(
            proxy_list=crawler.settings.get('PROXY_LIST', [])
        )

    def process_request(self, request, spider):
        # Only assign a proxy when one is configured and the request has none.
        if self.proxy_list and not request.meta.get('proxy'):
            proxy = random.choice(self.proxy_list)
            request.meta['proxy'] = proxy
            spider.logger.debug(f"使用代理: {proxy}")

class DelayMiddleware:
    """Downloader middleware that sleeps a random interval before each request.

    The bounds come from the ``RANDOM_DELAY_MIN`` / ``RANDOM_DELAY_MAX``
    settings, defaulting to the previous hard-coded 1-3 seconds.

    NOTE(review): ``time.sleep`` blocks the calling thread, which in Scrapy is
    the Twisted reactor — this stalls ALL concurrent requests, not just this
    one. Prefer the built-in ``DOWNLOAD_DELAY`` + ``RANDOMIZE_DOWNLOAD_DELAY``
    settings for non-blocking throttling.
    """

    def __init__(self, min_delay=1, max_delay=3):
        # Lower/upper bound (seconds) for the random sleep.
        self.min_delay = min_delay
        self.max_delay = max_delay

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings (Scrapy entry point)."""
        return cls(
            min_delay=crawler.settings.getfloat('RANDOM_DELAY_MIN', 1),
            max_delay=crawler.settings.getfloat('RANDOM_DELAY_MAX', 3),
        )

    def process_request(self, request, spider):
        # Blocking sleep; returns None so Scrapy continues processing.
        time.sleep(random.uniform(self.min_delay, self.max_delay))

class CustomRetryMiddleware(RetryMiddleware):
    """Retry middleware that additionally retries anti-bot responses (403/429).

    Everything else is delegated to the stock ``RetryMiddleware`` (which
    handles ``RETRY_HTTP_CODES``, retry counting, and priority adjustment).
    """

    def process_response(self, request, response, spider):
        # Honor the standard dont_retry meta flag, exactly as the stock
        # RetryMiddleware does — the original code bypassed it for 403/429.
        if (response.status in (403, 429)
                and not request.meta.get('dont_retry', False)):
            spider.logger.warning(f"被禁止访问: {response.url}, 状态码: {response.status}")
            reason = response_status_message(response.status)
            # _retry returns None once max retries are exhausted; fall back
            # to returning the response so the spider still sees it.
            return self._retry(request, reason, spider) or response
        return super().process_response(request, response, spider)