# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html

import random
from scrapy import signals

# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter


class RandomUserAgentMiddleware:
    """Downloader middleware that stamps each outgoing request with a
    User-Agent chosen at random from the USER_AGENT_LIST setting."""

    def __init__(self, user_agent):
        # Despite the singular parameter name, this is the full sequence
        # of UA strings loaded from settings.
        self.user_agents = user_agent

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings.

        Raises:
            ValueError: if USER_AGENT_LIST is missing or empty.
        """
        ua_pool = crawler.settings.get('USER_AGENT_LIST')
        if not ua_pool:
            raise ValueError("USER_AGENT_LIST must be set in settings")
        return cls(ua_pool)

    def process_request(self, request, spider):
        """Attach a random User-Agent unless the request already has one."""
        if not self.user_agents:
            return None
        chosen = random.choice(self.user_agents)
        # setdefault keeps any UA explicitly set by the spider.
        request.headers.setdefault('User-Agent', chosen)
        return None

# Attach a fixed cookie jar to every outgoing request.
class FixedCookieMiddleware:
    """Downloader middleware that sets a hard-coded Cookie header on each
    request (unless the request already carries one).

    The cookies are stored as a list of ``(name, value)`` pairs instead of
    a dict: the original dict literal repeated the keys ``__utma``,
    ``__utmz``, ``__utmb`` and ``__utmc`` (one entry per Google Analytics
    property, 30149280 and 223695111), and Python silently keeps only the
    last duplicate in a dict literal — so half of those cookies were being
    dropped. A Cookie header may legitimately carry repeated names, so the
    list form preserves every entry in order.
    """

    def __init__(self):
        # (name, value) pairs; the repeated names are intentional
        # (two separate GA properties).
        self.fixed_cookies = [
            ('ll', "108302"),
            ('bid', 'z4by299OTsQ'),
            ('__utma', '30149280.2015858714.1708392712.1718354495.1718456965.11'),
            ('__utmz', '30149280.1718456965.11.4.utmcsr=accounts.douban.com|utmccn=(referral)|utmcmd=referral|utmcct=/'),
            ('__utmv', '30149280.27849'),
            ('_pk_id.100001.4cf6', '5acb4ef24a15ca14.1708586440.'),
            ('__yadk_uid', '20pdzlNh7C0y6rRdq3VtIK6UVh5pZT65'),
            ('__utma', '223695111.744377072.1708586441.1718354495.1718456965.8'),
            ('__utmz', '223695111.1718456965.8.3.utmcsr=accounts.douban.com|utmccn=(referral)|utmcmd=referral|utmcct=/'),
            # NOTE(review): this value looks truncated in the source
            # ("…" in the middle) — confirm against a fresh browser session.
            ('_vwo_uuid_v2', 'D5F62A22825379F59F1…=ALNI_Mb2JSbvK0FCCGpQdocDEN9nJwy1yw'),
            ('__eoi', "ID=b7f47c1c96ed4f25:T=1713772248:RT=1714099878:S=AA-AfjYhGc-7_2AzkNqWCAvHRIGc"),
            ('_pk_ref.100001.4cf6', "%5B%22%22%2C%22%22%2C1718456966%2C%22https%3A%2F%2Faccounts.douban.com%2F%22%5D"),
            ('_ga_RXNMP372GL', 'GS1.1.1716539227.3.1.1716539529.60.0.0'),
            ('_ga', 'GA1.1.2063939202.1716533523'),
            ('dbcl2', '278496185:EC7cBhPQ8oY'),
            ('ck', 'Lv4Z'),
            ('__utmb', '30149280.0.10.1718456965'),
            ('__utmc', '30149280'),
            ('__utmb', '223695111.0.10.1718456965'),
            ('__utmc', '223695111'),
            ('_pk_ses.100001.4cf6', '1'),
            ('push_noty_num', '0'),
            ('push_doumail_num', '0'),
        ]

    def process_request(self, request, spider):
        """Serialize the fixed cookies into the Cookie header.

        Uses ``setdefault`` so an existing Cookie header is never
        overwritten. Pairs are joined with ``"; "`` as required by
        RFC 6265 (the original used a bare ``";"``).
        """
        cookie_str = '; '.join(f"{name}={value}" for name, value in self.fixed_cookies)
        request.headers.setdefault(b'Cookie', cookie_str.encode('utf-8'))


# Route requests through rotating proxies.
class ProxyMiddleware:
    """Downloader middleware that assigns each request a proxy picked at
    random from the Proxy_LIST setting."""

    def __init__(self, proxy_url):
        # Despite the singular parameter name, this holds the whole
        # list of proxy URLs loaded from settings.
        self.proxy_url = proxy_url

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings.

        Raises:
            ValueError: if Proxy_LIST is missing or empty.
        """
        pool = crawler.settings.get('Proxy_LIST')
        if not pool:
            raise ValueError("Proxy_LIST must be set in settings")
        return cls(pool)

    def process_request(self, request, spider):
        """Set request.meta['proxy'] to a randomly chosen proxy URL."""
        if not self.proxy_url:
            return None
        request.meta['proxy'] = random.choice(self.proxy_url)
        return None
