"""Launcher script: run a Scrapy spider from Python instead of the shell.

``cmdline.execute`` parses the argument list exactly like the ``scrapy``
shell command would and starts the crawl; it does not return control to
this script afterwards.  Uncomment exactly one ``execute`` call (or edit
``main``) to choose which spider runs.
"""
from scrapy import cmdline


def main() -> None:
    """Launch the currently selected spider (``boss_zhipin``)."""
    # cmdline.execute("scrapy crawl books".split())

    cmdline.execute("scrapy crawl boss_zhipin".split())
    # cmdline.execute("scrapy crawl FujianLongyanEnvPunish".split())

    # Example kept for reference: the MultiplexEnvPunish spider takes all of
    # its per-site configuration as one comma-separated ``req_settings`` arg.
    # args = ""
    # args += "scrapy crawl MultiplexEnvPunish -a req_settings="
    # #base_url
    # args +="http://www.luan.gov.cn,"
    # #index_url
    # args += "http://www.luan.gov.cn/opennessSearch/?keywords=%E8%A1%8C%E6%94%BF%E5%A4%84%E7%BD%9A%E5%86%B3%E5%AE%9A%E4%B9%A6&way=title&branch_id=5212bc2d682e09147c7c4aa2&from_date=&to_date=&button=%E6%9F%A5%E8%AF%A2&page=,"
    # #page_position
    # args += "span.currentpage,"
    # #page_position_number
    # args +="1,"
    # #list_position
    # args +="div.is-search-list>li>a,"
    # #encryptkey_position
    # args +="p,"
    # #content_position
    # args += "div.is-newscontnet,"
    # #province
    # args += "anhui"
    # cmdline.execute(args.split())
    # cmdline.execute("scrapy crawl school_directory -a mq_args=producer".split())
    # cmdline.execute("scrapy crawl school_directory -a mq_args=consumer".split())


# Guard the launch so importing this module does not start a crawl as a
# side effect; the spider only runs when the file is executed directly.
if __name__ == "__main__":
    main()