# Name of the Scrapy bot (appears in logs and the default User-Agent string).
BOT_NAME = 'sdWeatherSpider'
# Ignore robots.txt rules when crawling.
ROBOTSTXT_OBEY = False

# Allow slow responses: per-request download timeout, in seconds.
DOWNLOAD_TIMEOUT = 60
# Where Scrapy searches for spiders, and where `genspider` creates new ones.
SPIDER_MODULES = ['sdWeatherSpider.spiders']
NEWSPIDER_MODULE = 'sdWeatherSpider.spiders'

# Redis connection settings (consumed by scrapy-redis for scheduling,
# dedup and item storage).
REDIS_HOST = 'localhost'   # Redis server address
REDIS_PORT = 6379          # Redis port
REDIS_DB = 0               # Redis database number
REDIS_ITEMS_KEY = 'sdweather:items'  # Redis key holding scraped items
REDIS_START_URLS_KEY = 'sdweather:start_urls'  # Redis key holding seed URLs
# Seed entries pushed to the start-urls key are plain URL strings, not dicts.
REDIS_START_URLS_AS_DICT = False
# Target province slugs to crawl — presumably used by the spider to build
# start URLs (TODO: confirm against the spider code).
# NOTE(review): the original list contained an empty-string entry, which
# would yield a malformed/duplicate URL when interpolated — removed.
PROVINCES = ['guangdong', 'shandong', 'guangxi', 'beijing', 'shanghai', 'hunan', 'jiangxi']

# Middleware configuration.
# Disable Scrapy's builtin User-Agent middleware and substitute the
# project's own UserAgentMiddleware at the same point in the chain.
DOWNLOADER_MIDDLEWARES = {
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
    'sdWeatherSpider.middlewares.UserAgentMiddleware': 543,
    # Optional proxy support — enable when a proxy pool is configured.
    #'sdWeatherSpider.middlewares.ProxyMiddleware': 545,
}

# Spider middleware — name suggests it cleans scraped data before it
# reaches the pipeline; verify in middlewares.py.
SPIDER_MIDDLEWARES = {
    'sdWeatherSpider.middlewares.DataCleaningMiddleware': 543,
}

# Enable the project's item pipeline (priority 300) to process scraped items.
ITEM_PIPELINES = {
    'sdWeatherSpider.pipelines.SdweatherspiderPipeline': 300,
}

# Use the scrapy-redis scheduler so the request queue lives in Redis,
# enabling distributed crawling across multiple worker processes.
SCHEDULER = 'scrapy_redis.scheduler.Scheduler'
SCHEDULER_PERSIST = True  # keep queue/dedup state in Redis after the spider stops
SCHEDULER_QUEUE_CLASS = 'scrapy_redis.queue.PriorityQueue'

# Redis-backed request-fingerprint duplicate filter.
DUPEFILTER_CLASS = 'scrapy_redis.dupefilter.RFPDupeFilter'
DUPEFILTER_DEBUG = True  # log filtered duplicate requests for debugging

# Other tuning settings: politeness, retries, caching.

DOWNLOAD_DELAY = 2  # seconds to wait between consecutive requests
CONCURRENT_REQUESTS = 8  # cap on simultaneous requests
RETRY_TIMES = 3  # retry failed requests up to 3 times
RETRY_HTTP_CODES = [500, 502, 503, 504, 408]  # server errors + request timeout
HTTPCACHE_ENABLED = False  # disable local HTTP cache; Redis is used instead


