import random
from bs4 import BeautifulSoup

class ProxyMiddleware:
    """Rotating-proxy downloader middleware.

    Uses a fixed example proxy list; in real deployments the list should
    be loaded from an external proxy pool (file, database, API, ...).
    """

    def __init__(self):
        # NOTE: example data only — swap in proxies from a real pool.
        self.proxies = [
            'http://user1:pass1@123.45.67.89:8080',
            'http://user2:pass2@98.76.54.32:3128',
        ]

    def process_request(self, request, spider):
        """Attach a randomly chosen proxy to the outgoing request."""
        if not self.proxies:
            return
        chosen = random.choice(self.proxies)
        request.meta['proxy'] = chosen
        spider.logger.info(f'使用代理: {chosen}')

class UserAgentMiddleware:
    """Downloader middleware that stamps a random User-Agent on each request."""

    def __init__(self):
        # Candidate UA strings covering desktop and mobile browsers.
        self.user_agents = [
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/114.0.0.0',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) Safari/605.1.15',
            'Mozilla/5.0 (iPhone; CPU iPhone OS 16_0 like Mac OS X) Mobile/15E148',
        ]

    def process_request(self, request, spider):
        """Pick one UA string at random and set it as the request header."""
        chosen = random.choice(self.user_agents)
        request.headers['User-Agent'] = chosen
        spider.logger.info(f'使用User-Agent: {chosen}')

class DataCleaningMiddleware:
    """Response-cleaning middleware.

    Strips boilerplate tags (scripts, styles, headers, footers) from HTML
    responses and replaces the body with the extracted plain text. Non-HTML
    responses (JSON, images, ...) pass through untouched.
    """

    def process_response(self, request, response, spider):
        """Return a cleaned copy of *response*, or the original on failure.

        Fixes vs. the previous version:
        - uses the public ``Response.replace(body=...)`` API instead of
          mutating the private ``_body`` attribute (Response objects are
          designed to be immutable in Scrapy);
        - skips non-HTML responses instead of feeding binary/JSON bodies
          to the HTML parser.
        """
        # Only attempt to clean textual HTML; Scrapy header values are bytes.
        content_type = response.headers.get(b'Content-Type', b'') or b''
        if b'html' not in content_type.lower():
            return response
        try:
            # Parse and drop tags that carry no useful text content.
            soup = BeautifulSoup(response.text, 'html.parser')
            for tag in soup(['script', 'style', 'noscript', 'header', 'footer']):
                tag.decompose()
            # Collapse the remaining document to newline-separated text.
            cleaned_text = soup.get_text(separator='\n', strip=True)
            # replace() returns a new immutable Response with the new body.
            response = response.replace(
                body=cleaned_text.encode(response.encoding or 'utf-8'))
            spider.logger.info('响应数据清洗完成')
        except Exception as e:
            # Best-effort: on any parsing/encoding error, log and pass the
            # (possibly original) response through unchanged.
            spider.logger.error(f'数据清洗失败: {str(e)}')
        return response
