# -*- coding: utf-8 -*-

import logging
import random

from fake_useragent import UserAgent
from scrapy.downloadermiddlewares.retry import RetryMiddleware as BaseRetryMiddleware
from scrapy.exceptions import NotConfigured
from scrapy.utils.response import response_status_message





class ProxyMiddleware:
    """Downloader middleware that routes requests through a random proxy.

    Driven by two settings: ``PROXY_ENABLED`` (bool) and ``PROXY_LIST``
    (list of proxy URLs). When enabled, each outgoing request is tagged
    with one proxy chosen uniformly at random from the list.
    """

    def __init__(self, proxy_list=None, proxy_enabled=False):
        self.proxy_list = proxy_list if proxy_list else []
        self.proxy_enabled = proxy_enabled
        self.logger = logging.getLogger(__name__)

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings.

        Raises:
            NotConfigured: proxying is switched on but no proxies were
                supplied, so Scrapy disables the middleware loudly
                instead of silently sending unproxied traffic.
        """
        settings = crawler.settings
        enabled = settings.getbool('PROXY_ENABLED', False)
        proxies = settings.getlist('PROXY_LIST', [])

        if enabled and not proxies:
            raise NotConfigured('PROXY_ENABLED is True but PROXY_LIST is empty')

        return cls(proxy_list=proxies, proxy_enabled=enabled)

    def process_request(self, request, spider):
        """Attach a randomly selected proxy to the request, when enabled."""
        if not (self.proxy_enabled and self.proxy_list):
            return
        chosen = random.choice(self.proxy_list)
        request.meta['proxy'] = chosen
        self.logger.debug(f'Using proxy {chosen} for {request.url}')


class UserAgentMiddleware:
    """Downloader middleware that sets a random User-Agent on each request.

    Prefers fake_useragent's rotating UA pool; whenever fake_useragent is
    unavailable or fails (at construction or per-request), falls back to a
    small built-in list of common browser User-Agent strings.
    """

    def __init__(self):
        self.logger = logging.getLogger(__name__)

        # fake_useragent loads/downloads its UA database lazily and is known
        # to raise on init (network error, stale cache). Don't let that kill
        # the crawler at startup — degrade to the built-in fallback list.
        try:
            self.ua = UserAgent()
        except Exception as exc:
            self.ua = None
            self.logger.warning(
                'fake_useragent unavailable (%s); using fallback User-Agent list', exc
            )

        # Built-in User-Agent strings used whenever fake_useragent fails.
        self.fallback_user_agents = [
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:90.0) Gecko/20100101 Firefox/90.0',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15'
        ]

    def process_request(self, request, spider):
        """Set a random User-Agent header on the outgoing request."""
        try:
            # Raises if the UA database lookup fails (also covers self.ua
            # being None after a failed construction above).
            user_agent = self.ua.random
        except Exception:
            user_agent = random.choice(self.fallback_user_agents)

        request.headers['User-Agent'] = user_agent
        self.logger.debug(f'Using User-Agent: {user_agent[:50]}...')


class RetryMiddleware(BaseRetryMiddleware):
    """Retry middleware with human-readable logging of retry reasons.

    Extends Scrapy's built-in RetryMiddleware: retries responses whose
    status is in RETRY_HTTP_CODES and requests that failed with a
    retryable exception, logging a friendly reason for each retry.

    Bug fixed at file level: ``response_status_message`` was used in
    ``process_response`` but never imported, so the first retryable
    status code raised NameError.
    """

    def __init__(self, settings):
        super().__init__(settings)
        self.logger = logging.getLogger(__name__)

        # HTTP status codes that trigger a retry (from RETRY_HTTP_CODES).
        self.retry_http_codes = set(int(x) for x in settings.getlist('RETRY_HTTP_CODES'))

        # Friendly descriptions keyed by a substring of the exception class
        # name; matched case-insensitively in _get_exception_reason().
        self.special_errors = {
            'timeout': '请求超时',
            'connection': '连接错误',
            'dns': 'DNS解析错误'
        }

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory: build the middleware from settings."""
        return cls(crawler.settings)

    def process_response(self, request, response, spider):
        """Retry responses whose status code is configured as retryable.

        Returns the retried request, or the original response when
        retrying is disabled (request.meta['dont_retry']) or exhausted.
        """
        if request.meta.get('dont_retry', False):
            return response

        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)
            self.logger.warning(f'Retrying {request.url} (status {response.status}): {reason}')
            # _retry() returns None once max retries are exceeded; hand the
            # response on unchanged in that case.
            return self._retry(request, reason, spider) or response

        return response

    def process_exception(self, request, exception, spider):
        """Retry requests that failed with a retryable download exception."""
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) and not request.meta.get('dont_retry', False):
            reason = self._get_exception_reason(exception)
            self.logger.warning(f'Retrying {request.url} (exception {exception.__class__.__name__}): {reason}')
            return self._retry(request, reason, spider)

    def _get_exception_reason(self, exception):
        """Return a friendly description for *exception*.

        Matches special_errors keys as substrings of the lowercased
        exception class name; falls back to str(exception).
        """
        exception_name = exception.__class__.__name__.lower()

        for key, description in self.special_errors.items():
            if key in exception_name:
                return description

        return str(exception)


 