# -*- coding: utf-8 -*-

import random
import time
from scrapy import signals
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapy.http import HtmlResponse
from scrapy.utils.response import response_status_message
import logging

class RandomUserAgentMiddleware(UserAgentMiddleware):
    """Random User-Agent middleware.

    Overwrites each outgoing request's ``User-Agent`` header with a string
    picked at random from a pool of real desktop browser UAs, making the
    crawler's traffic look less uniform.
    """

    #: Built-in pool of current Chrome / Firefox / Safari desktop UA strings.
    DEFAULT_USER_AGENTS = [
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/121.0',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/120.0',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15',
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/121.0',
    ]

    def __init__(self, user_agent='Scrapy', user_agents=None):
        """
        :param user_agent: fallback UA, forwarded to ``UserAgentMiddleware``.
        :param user_agents: optional custom UA pool; defaults to
            :attr:`DEFAULT_USER_AGENTS` (generalised from the previously
            hard-coded list).
        """
        super(RandomUserAgentMiddleware, self).__init__(user_agent)
        self.user_agents = list(user_agents) if user_agents else list(self.DEFAULT_USER_AGENTS)

    def process_request(self, request, spider):
        """Assign a random UA to the request; returns None so processing continues."""
        ua = random.choice(self.user_agents)
        request.headers['User-Agent'] = ua
        # Lazy %-args: the message is only formatted when DEBUG is enabled.
        spider.logger.debug('使用User-Agent: %s', ua)
        return None

class RandomDelayMiddleware:
    """Random delay middleware.

    Sleeps a random interval before each request to mimic human pacing.

    NOTE(review): ``time.sleep`` blocks Scrapy's single-threaded reactor,
    stalling *all* in-flight requests, not just this one. Kept for
    behavioural compatibility; the non-blocking alternative is Scrapy's
    ``DOWNLOAD_DELAY`` + ``RANDOMIZE_DOWNLOAD_DELAY`` settings.
    """

    def __init__(self, delay=1, min_delay=0.5):
        """
        :param delay: scale factor; the sleep is uniform in
            ``[min_delay, delay * 2]`` seconds.
        :param min_delay: lower bound of the sleep interval (generalised
            from the previously hard-coded 0.5 s; default is unchanged).
        """
        self.delay = delay
        self.min_delay = min_delay

    def process_request(self, request, spider):
        """Sleep a random amount, log it, and return None to continue."""
        delay = random.uniform(self.min_delay, self.delay * 2)
        time.sleep(delay)
        spider.logger.debug(f'随机延迟: {delay:.2f}秒')
        return None

class ProxyMiddleware:
    """Proxy middleware: routes each request through a proxy picked at
    random from the configured pool (no-op when the pool is empty).

    Bug fix: this class previously subclassed ``HttpProxyMiddleware``
    without calling its ``__init__``. The *inherited* ``from_crawler``
    instantiates the class as ``cls(auth_encoding)``, so Scrapy would have
    constructed it with ``proxies='latin-1'`` — and ``random.choice`` on
    that string picks single characters as "proxies". None of the parent's
    behaviour was actually used, so the inheritance is dropped.
    """

    def __init__(self, proxies=None):
        """
        :param proxies: list of proxy URLs (e.g. ``http://host:port``);
            when empty or None, requests go out directly.
        """
        self.proxies = proxies or []
        # Last proxy handed out, kept for inspection/debugging.
        self.current_proxy = None

    def process_request(self, request, spider):
        """Attach a random proxy via ``request.meta['proxy']``; returns None."""
        if self.proxies:
            self.current_proxy = random.choice(self.proxies)
            request.meta['proxy'] = self.current_proxy
            spider.logger.debug(f'使用代理: {self.current_proxy}')
        return None

class RetryMiddleware:
    """Custom retry middleware.

    Re-schedules requests whose responses carry throttling / transient
    server-error status codes, up to ``max_retry_times`` attempts.

    NOTE(review): the backoff uses ``time.sleep``, which blocks Scrapy's
    single-threaded reactor and stalls all concurrent requests. Kept for
    behavioural compatibility.
    """

    #: Statuses worth retrying: 403/429 usually indicate throttling,
    #: 5xx are transient server errors.
    RETRY_STATUSES = frozenset({403, 429, 500, 502, 503, 504})

    def __init__(self, max_retry_times=5, backoff_range=(2, 5)):
        """
        :param max_retry_times: maximum number of retries per request.
        :param backoff_range: ``(min, max)`` seconds of random delay before
            a retry (generalised from the previously hard-coded 2–5 s).
        """
        self.max_retry_times = max_retry_times
        self.backoff_range = backoff_range

    def process_response(self, request, response, spider):
        """Return the request again for retryable statuses (until the retry
        budget is exhausted); otherwise pass the response through unchanged."""
        if response.status in self.RETRY_STATUSES:
            retry_times = request.meta.get('retry_times', 0)
            if retry_times < self.max_retry_times:
                retry_times += 1
                request.meta['retry_times'] = retry_times
                spider.logger.warning(f'重试请求 {request.url} (第{retry_times}次)')
                # Random backoff before re-scheduling (blocks the reactor, see above).
                time.sleep(random.uniform(*self.backoff_range))
                # Bug fix: without dont_filter the scheduler's dupefilter
                # would drop the retried request as already-seen.
                request.dont_filter = True
                return request
        return response

class AntiDetectionMiddleware:
    """Anti-detection middleware.

    Fills in browser-like request headers (plus a fallback Referer) so the
    crawler's requests look less like an automated client, and keeps cookie
    merging enabled for session continuity.
    """

    # Headers a real browser would normally send; applied only when the
    # request does not already carry them (insertion order preserved).
    _DEFAULT_HEADERS = {
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Accept-Encoding': 'gzip, deflate, br',
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'Sec-Fetch-Dest': 'document',
        'Sec-Fetch-Mode': 'navigate',
        'Sec-Fetch-Site': 'none',
        'Sec-Fetch-User': '?1',
    }

    def process_request(self, request, spider):
        """Decorate ``request`` in place; always returns None so the
        request continues through the middleware chain."""
        for header_name, header_value in self._DEFAULT_HEADERS.items():
            request.headers.setdefault(header_name, header_value)

        # Supply a plausible Referer when none is present.
        if not request.headers.get('Referer'):
            request.headers['Referer'] = 'https://www.cnhnb.com/'

        # Leave cookie merging on unless the request already opted out.
        request.meta.setdefault('dont_merge_cookies', False)

        spider.logger.debug(f'添加反检测头: {request.url}')
        return None

class CustomHttpErrorMiddleware:
    """Custom HTTP error middleware.

    Logs anti-bot-looking status codes (403/429) at WARNING level while
    always passing the response along unchanged.
    """

    # Statuses that typically mean the crawler has been blocked/throttled.
    _BLOCKED_STATUSES = (403, 429)

    def process_response(self, request, response, spider):
        """Log blocked-looking responses; never swallows the response."""
        if response.status in self._BLOCKED_STATUSES:
            spider.logger.warning(f'HTTP {response.status} 错误: {request.url}')
            # Hook point for special handling, e.g. rotating the proxy
            # or increasing the delay.
        return response

class LoggingMiddleware:
    """Logging middleware.

    Records every outgoing request, incoming response, and download
    exception. Uses lazy ``%s`` logger arguments (instead of the previous
    eager f-strings) so messages are only formatted when the log level is
    actually enabled; the rendered messages are identical.
    """

    def process_request(self, request, spider):
        """Log the outgoing request; returns None to continue processing."""
        spider.logger.info('发送请求: %s', request.url)
        return None

    def process_response(self, request, response, spider):
        """Log the received response and pass it through unchanged."""
        spider.logger.info('收到响应: %s - %s', response.url, response.status)
        return response

    def process_exception(self, request, exception, spider):
        """Log the download exception; returning None defers to other
        middlewares / the default error handling."""
        spider.logger.error('请求异常: %s - %s', request.url, exception)
        return None