from scrapy.cmdline import execute

# Run spiders through scrapy.cmdline.execute instead of shelling out to the
# scrapy CLI — the log output is clearer and easier to debug in an IDE.
#
# Alternative spiders (uncomment exactly one to run it instead):
# execute("scrapy crawl quotes".split())
# execute("scrapy crawl quotes_next_page".split())
# execute("scrapy crawl quotes_pipline".split())
#
# Feed-export options:
# -o appends items to the output file
# execute("scrapy crawl quotes -o data.json".split())
# -O overwrites the output file
# execute("scrapy crawl quotes -O data.json".split())

if __name__ == "__main__":
    # argv[0] is only a program-name placeholder (Scrapy parses argv[1:]),
    # so "scrapy" is used consistently instead of the Windows-only
    # "scrapy.exe". The __main__ guard prevents the crawl from starting as
    # a side effect of merely importing this module.
    execute("scrapy crawl quotes_cookies".split())