import random
from scrapy import signals

# Downloader middleware
class CustomDownloadMiddleware:
    """Scrapy downloader middleware that rotates the User-Agent header.

    Each outgoing request gets a User-Agent picked at random from
    ``USER_AGENTS``; responses and exceptions are passed through unchanged.
    """

    # Pool of browser user-agent strings; class-level so the list is built
    # once instead of on every request.
    USER_AGENTS = [
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0'
    ]

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory: build the middleware and hook spider_opened."""
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_request(self, request, spider):
        """Assign a random User-Agent to the request.

        Returns None so Scrapy continues processing the request normally.
        """
        # BUG FIX: the header key was 'User - Agent' (with spaces), which set a
        # meaningless header and left the real User-Agent untouched.
        request.headers['User-Agent'] = random.choice(self.USER_AGENTS)
        print("Download Middleware: Processed request")
        return None

    def process_response(self, request, response, spider):
        """Pass the response through unchanged."""
        print("Download Middleware: Processed response")
        return response

    def process_exception(self, request, exception, spider):
        """Log the download exception; returning None lets other middlewares handle it."""
        print(f"Download Middleware: Exception occurred - {exception}")
        return None

    def spider_opened(self, spider):
        """Signal handler: record that the spider has started."""
        spider.logger.info('Spider opened: %s' % spider.name)

# Spider middleware
class CustomSpiderMiddleware:
    """Scrapy spider middleware that logs each stage and forwards everything
    unchanged: spider input/output and exceptions pass straight through."""

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory: create the middleware and subscribe to
        the spider_opened signal."""
        middleware = cls()
        crawler.signals.connect(
            middleware.spider_opened, signal=signals.spider_opened
        )
        return middleware

    def process_spider_input(self, response, spider):
        """Announce the incoming response; None means continue processing."""
        print("Spider Middleware: Processed spider input")
        return None

    def process_spider_output(self, response, result, spider):
        """Forward every item/request produced by the spider untouched."""
        print("Spider Middleware: Processed spider output")
        yield from result

    def process_spider_exception(self, response, exception, spider):
        """Log the spider exception; None defers to other middlewares."""
        print(f"Spider Middleware: Exception occurred - {exception}")
        return None

    def spider_opened(self, spider):
        """Signal handler: record that the spider has started."""
        spider.logger.info('Spider opened: %s' % spider.name)