#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2021/7/29 17:09
# @Author  : Samge
import copy

from scrapy.settings import Settings

from itkz.spider_ext import settings


# Default spider configuration: proxy + user-agent rotation for plain
# requests, plus scrapy-splash support for rendered pages.
DEFAULT_CUSTOM_SETTINGS = {
    'ITEM_PIPELINES': {},
    'DOWNLOADER_MIDDLEWARES': {
        # User-agent / proxy rotation for ordinary HTTP requests.
        'itkz.spider_ext.middlewares.m_agent.RandomUserAgentMiddleware': 542,
        'itkz.spider_ext.middlewares.m_proxy.ProxyMiddleware': 544,
        # Equivalent middlewares for Splash-rendered requests.
        'itkz.spider_ext.middlewares.m_splash.RandomUserAgentMiddleware': 700,
        'itkz.spider_ext.middlewares.m_splash.SplashProxyMiddleware': 701,
        'scrapy_splash.SplashCookiesMiddleware': 723,
        'scrapy_splash.SplashMiddleware': 725,
        'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': 810,
    },
    'SPIDER_MIDDLEWARES': {
        'scrapy_splash.SplashDeduplicateArgsMiddleware': 100,
    },
    # scrapy-splash dupefilter / cache storage replacements.
    'DUPEFILTER_CLASS': 'scrapy_splash.SplashAwareDupeFilter',
    'HTTPCACHE_STORAGE': 'scrapy_splash.SplashAwareFSCacheStorage',
    # Timing knobs. DOWNLOAD_DELAY / DOWNLOAD_TIMEOUT are standard Scrapy
    # settings (seconds); SLEEP_TIME is a project-specific pause —
    # presumably seconds as well, consumed by the spider code (verify).
    'SLEEP_TIME': 1,
    'DOWNLOAD_DELAY': 0.1,
    'DOWNLOAD_TIMEOUT': 12,
    # Toggles read by the proxy / user-agent middlewares.
    'IS_USE_PROXY': True,
    'IS_USE_AGENT': True,
}


def get_custom_settings(custom_settings=None):
    """
    Build the custom settings for a spider.

    :param custom_settings: spider-specific overrides (values here win)
    :return: merged settings dict; always a fresh copy, so callers can
             never mutate the module-level DEFAULT_CUSTOM_SETTINGS
    """
    # Deep-copy the defaults before merging: merge_settings/merge_dict
    # write into the dict they are handed, and the original code passed
    # DEFAULT_CUSTOM_SETTINGS itself, so one spider's overrides leaked
    # into every later call. deepcopy also protects the nested
    # middleware dicts from mutation by callers.
    defaults = copy.deepcopy(DEFAULT_CUSTOM_SETTINGS)
    if not custom_settings:
        return defaults
    return merge_settings(custom_settings, defaults)


def merge_settings(custom_settings, default_settings=None):
    """
    Merge the spider's default settings with a subclass's custom_settings.

    :param custom_settings: dict of overrides; values found here win
    :param default_settings: base settings; defaults to the attributes of
        the spider_ext settings module
    :return: merged dict (a new dict — neither input is modified)
    """
    # The original signature used the mutable default
    # `default_settings=settings.__dict__` (evaluated once at import
    # time) and then `del`eted dunder keys from it, permanently mutating
    # the settings module's own namespace. Use the None sentinel instead.
    if default_settings is None:
        default_settings = settings.__dict__
    # Drop module internals (__name__, __file__, ...) by filtering into
    # a fresh dict rather than deleting from the caller's dict.
    filtered = {key: value for key, value in default_settings.items()
                if not str(key).startswith('__')}
    return merge_dict(custom_settings, filtered)


def merge_crawler_settings(custom_settings, crawler_settings=None):
    """
    Merge a crawler's Settings object with a subclass's custom_settings.

    :param custom_settings: dict of overrides; values found here win
    :param crawler_settings: scrapy Settings used as a fallback for keys
        whose override value is None; may be omitted
    :return: a new scrapy Settings instance
    """
    new_crawler_settings = Settings()
    for key, value in (custom_settings or {}).items():
        # Fall back to the crawler's value only when the override is None.
        # (The original called crawler_settings.get() unconditionally and
        # raised AttributeError whenever the None default was used; it
        # also looked each key up in custom_settings twice.)
        if value is None and crawler_settings is not None:
            value = crawler_settings.get(key)
        new_crawler_settings.set(key, value)
    # Carry over the frozen flag when a source Settings object exists.
    if crawler_settings is not None:
        new_crawler_settings.frozen = crawler_settings.frozen
    return new_crawler_settings


def merge_dict(custom_settings, default_settings=None):
    """
    Merge two dicts, preferring non-None values from custom_settings.

    :param custom_settings: dict of overrides; a non-None value here wins
    :param default_settings: dict of fallback values; may be None
    :return: a NEW merged dict — neither input is modified (the original
             wrote the result into default_settings in place, corrupting
             the caller's dict, e.g. DEFAULT_CUSTOM_SETTINGS)
    """
    merged = dict(default_settings) if default_settings else {}
    if custom_settings:
        for key, value in custom_settings.items():
            # A None override falls back to the default for that key
            # (or None when the key is new) — same semantics as before.
            merged[key] = value if value is not None else merged.get(key)
    return merged