# -*- coding: utf-8 -*-

# Scrapy settings for jianshuBloomfilter project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     https://docs.scrapy.org/en/latest/topics/settings.html
#     https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
#     https://docs.scrapy.org/en/latest/topics/spider-middleware.html

# Name of the bot implemented by this Scrapy project (also the default
# User-Agent prefix unless overridden).
BOT_NAME = 'jianshuBloomfilter'

# Module(s) where Scrapy looks for spiders, and where `scrapy genspider`
# creates new ones.
SPIDER_MODULES = ['jianshuBloomfilter.spiders']
NEWSPIDER_MODULE = 'jianshuBloomfilter.spiders'


# Use the project-local (vendored) scrapy_redis scheduler so requests are
# queued in Redis rather than in memory, allowing distributed crawling.
SCHEDULER = 'jianshuBloomfilter.scrapy_redis.scheduler.Scheduler'
# Keep the Redis queues when the spider closes (resume support) instead of
# flushing them.
SCHEDULER_PERSIST = True
# Request queue implementation: priority queue (default here); the commented
# alternative below is a simple FIFO queue.
SCHEDULER_QUEUE_CLASS = 'jianshuBloomfilter.scrapy_redis.queue.SpiderPriorityQueue'
# SCHEDULER_QUEUE_CLASS = 'jianshuBloomfilter.scrapy_redis.queue.SpiderSimpleQueue'

# Seed (request) queue connection settings.
# BUG FIX(review): the original file spelled this setting "REDIE_URL" — the
# standard scrapy-redis name is REDIS_URL. Since the value is None (the
# default either way), defining both names cannot change behavior; the
# typo'd name is kept in case the vendored scrapy_redis reads it.
REDIS_URL = None
REDIE_URL = None  # backward compatibility with the original misspelling
REDIS_HOST = '*.*.*.*'  # Redis server IP for the seed queue
REDIS_PORT = 6379  # Redis port
REDIS_PASS = '密码'  # Redis password (placeholder — fill in for deployment)

# Duplicate-filter (bloom filter) queue connection settings.
FILTER_URL = None
FILTER_HOST = '*.*.*.*'  # Redis server IP for the dupe filter
FILTER_PORT = 6379  # Redis port
FILTER_DB = 0  # Redis database number
FILTER_PASS = '密码'  # Redis password (placeholder — fill in for deployment)
# REDIS_QUEUE_NAME = 'OneName'  # If unset or None, each spider uses its own
# dedupe and seed queues; if set, all spiders share one of each.


# LOG_LEVEL = 'INFO'




# Enable the project's User-Agent rotating middleware at priority 543
# (replacing Scrapy's stock UserAgentMiddleware slot).
DOWNLOADER_MIDDLEWARES = {
   'jianshuBloomfilter.middlewares.UserAgentmiddleware': 543,
}



# Store scraped items in MongoDB via the project's pipeline.
ITEM_PIPELINES = {
    'jianshuBloomfilter.pipelines.MongoPipeline': 800,
}

# MongoDB connection settings (placeholders — fill in for deployment).
MONGO_URI = '*.*.*.*:27017'  # MongoDB host and port
MONGO_DATABASE = 'test'  # MongoDB database name
MONGO_USER = 'admin'  # MongoDB username
MONGO_PASSWORD = 'admin'  # MongoDB password (original comment mislabeled this as "account")