from scrapy.cmdline import execute


# 多个爬虫运行的方式
# How to run multiple spiders in one process
def multi_crawl(spider_names=('myspd1', 'myspd2', 'myspd3')):
  """Run several spiders concurrently in a single CrawlerProcess.

  Args:
    spider_names: iterable of spider names (as registered in the project)
      to schedule. Defaults to the original hard-coded trio, so existing
      callers are unaffected.

  Note: ``process.start()`` blocks until all crawls finish; it starts the
  Twisted reactor, which can only be started once per process.
  """
  # Local imports keep scrapy out of module import time for scripts that
  # never call this helper.
  from scrapy.crawler import CrawlerProcess
  from scrapy.utils.project import get_project_settings

  process = CrawlerProcess(get_project_settings())
  for name in spider_names:
    # Each name is a spider name registered in the scrapy project.
    process.crawl(name)
  process.start()


if __name__ == '__main__':
  # Equivalent to running `scrapy crawl guazi` from the shell:
  # execute() receives the argv list directly.
  execute(['scrapy', 'crawl', 'guazi'])

