import os
from config import config

# Deliberately ignore target sites' robots.txt rules.
ROBOTSTXT_OBEY = False
# The telnet console is a debugging backdoor; keep it disabled in deployments.
TELNETCONSOLE_ENABLED = False

# Logging and scheduler flush behavior are driven by the project-level
# config object (imported at the top of this file).
LOG_LEVEL = config.LOG_LEVEL
LOG_FILE = config.LOG_FILE
# Do not redirect process stdout/stderr into the Scrapy log.
LOG_STDOUT = False
# Whether the redis-backed scheduler clears its queues when the crawl starts.
SCHEDULER_FLUSH_ON_START = config.SCHEDULER_FLUSH_ON_START

# Redis configuration: connection parameters consumed by the custom
# scrapy-redis scheduler / dupefilter configured just below.
REDIS_STARTUP_NODES = [
    # NOTE(review): only "port" is cast to str while the other values are
    # passed through as-is — presumably the redis client expects a string
    # here; confirm against crawler.scrapy_redis_custom's connection code.
    {"host": config.REDIS_HOST, "port": str(config.REDIS_PORT), 'password': config.REDIS_PASSWORD,
     'db': config.REDIS_DB, 'encoding': config.REDIS_ENCODING}
]

# Project-customized scrapy-redis scheduler and request-fingerprint
# duplicate filter.
SCHEDULER = "crawler.scrapy_redis_custom.scheduler.Scheduler"
DUPEFILTER_CLASS = "crawler.scrapy_redis_custom.dupefilter.RFPDupeFilter"
# Keep scheduled requests and dupefilter state in redis between runs.
SCHEDULER_PERSIST = True
# REDIS_CLUSTER_PASSWORD = config.REDIS_PASSWORD
# REDIS_CLUSTER_DB = config.REDIS_DB

# --- Downloader settings ---------------------------------------------------
# Maximum number of concurrent requests performed by the downloader.
CONCURRENT_REQUESTS = 8
# Seconds before an in-flight download is abandoned.
DOWNLOAD_TIMEOUT = 30
# Accept truncated/partial responses instead of failing on data loss.
DOWNLOAD_FAIL_ON_DATALOSS = False

DOWNLOADER_MIDDLEWARES = {
    # Request headers (User-Agent handling).
    "crawler.middlewares.user_agent.UserAgentMiddleware": 100,

    # Outbound request proxying.
    "crawler.middlewares.proxy.ProxyMiddleware": 200,

    # Trace/link monitoring (currently disabled).
    # 'crawler.middlewares.monitor.MonitorDownloaderMiddleware': 900,
}

# --- Retry policy ----------------------------------------------------------
RETRY_TIMES = 3
# HTTP status codes that trigger a retry.
RETRY_HTTP_CODES = [500, 502, 503, 504, 400, 403, 407, 408, 418, 429, 414, 409]

# Interval (seconds) between idle-state log messages.
CONTAINER_IDLE_INTERVAL = 300.0

# Item processing pipelines — all currently disabled.
ITEM_PIPELINES = {
    # 'crawler.pipelines.AreaEconomiesPipeline': 100,
    # 'crawler.pipelines.ReplyRocketMQPipeline': 200,
}

# Crawler extensions: container idle monitor (logs at CONTAINER_IDLE_INTERVAL).
EXTENSIONS = {
    "crawler.monitor.extensions.ContainerIdle": 20,
}

# Custom download handlers (JA3-fingerprint handler kept for reference,
# currently disabled).
DOWNLOAD_HANDLERS = {
    # 'http': 'crawler.middlewares.ja3.MyHTTPDownloadHandler',
    # 'https': 'crawler.middlewares.ja3.MyHTTPDownloadHandler',
}

# Spider middlewares.
SPIDER_MIDDLEWARES = {
    # Error status-code capture (currently disabled).
    # 'crawler.middlewares.http_error.HttpErrorCatchMiddleware': 400,
}

# PROJECT_TYPE: identifies which spider project this process runs for.
# Read from the environment at import time; a missing variable fails fast
# with KeyError so a misconfigured deployment does not start silently.
PROJECT_TYPE = os.environ['victor_spider_project_type']
