import redis

# Default settings for the custom scrapy-redis components.

# Dupefilter key for standalone use (outside a running spider),
# namespaced by creation timestamp.
DUPEFILTER_KEY = 'scrapy:dupefilter:%(timestamp)s'

# Redis key the item pipeline pushes scraped items to.
PIPELINE_KEY = 'scrapy:%(spider)s:items'

# Redis key where per-spider stats are kept.
STATS_KEY = 'scrapy:%(spider)s:stats'

# Redis client class plus sane connection defaults.
REDIS_CLS = redis.StrictRedis
REDIS_ENCODING = 'utf-8'
REDIS_PARAMS = dict(
    socket_timeout=30,
    socket_connect_timeout=30,
    retry_on_timeout=True,
    encoding=REDIS_ENCODING,
)
REDIS_CONCURRENT_REQUESTS = 16

# Scheduler: request queue and duplicate-filter configuration.
SCHEDULER_QUEUE_KEY = 'scrapy:%(spider)s:requests'
SCHEDULER_QUEUE_CLASS = 'components.component.scrapy_redis_custom.queue.PriorityQueue'
SCHEDULER_DUPEFILTER_KEY = 'scrapy:%(spider)s:dupefilter'
SCHEDULER_DUPEFILTER_CLASS = 'components.component.scrapy_redis_custom.dupefilter.RFPDupeFilter'
SCHEDULER_PERSIST = False

# Start-URL feed: key layout and container flavour (plain list when
# neither flag is set; note this key uses %(name)s, not %(spider)s).
START_URLS_KEY = 'scrapy:%(name)s:start_urls'
START_URLS_AS_SET = False
START_URLS_AS_ZSET = False

# Idle-time limit; 0 presumably disables it — confirm against the
# spider implementation that consumes this setting.
MAX_IDLE_TIME = 0

# TTL of the request-queue key in seconds; defaults to 7 days.
SCHEDULER_QUEUE_KEY_EXPIRE_TIME = 7 * 86400