import random

from scrapy import signals
from scrapy.http import HtmlResponse
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.chrome.service import Service
import time
from .settings import USER_AGENT_LIST

class SeleniumMiddleware:
    """Scrapy downloader middleware that renders pages with a Selenium Chrome driver.

    A single Chrome instance is created when the middleware is built and reused
    for every request; it is shut down via the ``spider_closed`` signal.
    """

    def __init__(self, driver_name, driver_executable_path, driver_arguments):
        """Create the Chrome driver.

        Args:
            driver_name: Driver name from settings. Stored but currently unused —
                Chrome is hard-coded below.
            driver_executable_path: Filesystem path to the chromedriver binary.
            driver_arguments: Iterable of Chrome command-line arguments, or None.
        """
        self.driver_name = driver_name
        self.driver_executable_path = driver_executable_path
        self.driver_arguments = driver_arguments

        # Build browser options. Guard against a missing/None
        # SELENIUM_DRIVER_ARGUMENTS setting (settings.get returns None),
        # which would otherwise raise TypeError when iterated.
        options = webdriver.ChromeOptions()
        for arg in (driver_arguments or []):
            options.add_argument(arg)
        # Pick one random User-Agent for the lifetime of this driver instance.
        options.add_argument(f'user-agent={random.choice(USER_AGENT_LIST)}')

        # Create the browser instance.
        self.driver = webdriver.Chrome(
            service=Service(driver_executable_path),
            options=options,
        )

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings and hook driver cleanup."""
        middleware = cls(
            driver_name=crawler.settings.get('SELENIUM_DRIVER_NAME'),
            driver_executable_path=crawler.settings.get('SELENIUM_DRIVER_EXECUTABLE_PATH'),
            driver_arguments=crawler.settings.get('SELENIUM_DRIVER_ARGUMENTS'),
        )
        # Ensure the browser is quit when the spider finishes.
        crawler.signals.connect(middleware.spider_closed, signal=signals.spider_closed)
        return middleware

    def process_request(self, request, spider):
        """Fetch ``request.url`` in the browser and short-circuit the download.

        Returning an HtmlResponse here makes Scrapy skip its own downloader
        and feed the rendered page source to the spider.
        """
        self.driver.get(request.url)

        # Hand the rendered page source back to Scrapy.
        body = self.driver.page_source
        response = HtmlResponse(
            self.driver.current_url,
            body=body.encode('utf-8'),
            encoding='utf-8',
            request=request,
        )
        # Response.meta proxies request.meta, so this exposes the driver to
        # the spider via either object's meta.
        response.meta['driver'] = self.driver
        return response

    def spider_closed(self):
        # Quit the browser when the spider closes (connected in from_crawler).
        self.driver.quit()


class RandomUserAgentMiddleware:
    """Downloader middleware that stamps each request with a random User-Agent."""

    def process_request(self, request, spider):
        # Choose a fresh UA per outgoing request; returning None (implicitly)
        # lets Scrapy continue normal request processing.
        user_agent = random.choice(USER_AGENT_LIST)
        request.headers['User-Agent'] = user_agent