import random
BOT_NAME = 'bsws_spider'

SPIDER_MODULES = ['bsws_spider.spiders']
# Package in which `scrapy genspider` creates new spider files.
# FIX: the real Scrapy setting is NEWSPIDER_MODULE (singular, a string);
# the previous misspelled NEWSPIDER_MODULES was silently ignored by Scrapy.
NEWSPIDER_MODULE = 'bsws_spider.spiders'

# Do not fetch/respect robots.txt before crawling.
ROBOTSTXT_OBEY = False

# Item pipelines; lower value runs first: drop unwanted items early,
# then clean the survivors, then persist them to MySQL.
ITEM_PIPELINES = {
    'bsws_spider.pipelines.DataDropPipeline': 50,
    'bsws_spider.pipelines.DataCleanPipeline': 100,
    'bsws_spider.pipelines.DataMysqlSavePipeline': 200,
}
# Pool of desktop browser User-Agent strings. One entry is chosen at import
# time, so each crawler process presents a single random UA for its whole run
# (per-request rotation is handled by RandomUserAgentMiddleware, configured
# below in DOWNLOADER_MIDDLEWARES).
# NOTE(review): the double underscore in USER__AGENT_LIST looks like a typo,
# but the name is kept as-is because middleware code may look it up by this
# exact name via the settings API — confirm before renaming.
# FIX: the first entry was missing its trailing comma, so Python's implicit
# string concatenation fused the first two UA strings into one invalid value
# (the list silently had 9 entries instead of 10).
USER__AGENT_LIST = [
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36',
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.63 Safari/537.36 Qiyu/2.1.1.1',
]
# Default UA for this process, picked once at settings-load time.
USER_AGENT = random.choice(USER__AGENT_LIST)
# Enable the project's User-Agent-rotating downloader middleware.
# 400 is its priority within Scrapy's downloader middleware chain.
DOWNLOADER_MIDDLEWARES = {
    'bsws_spider.middlewares.RandomUserAgentMiddleware': 400,
}