# -*- coding: utf-8 -*-
"""
@author:xieyabin
@file: crawler_process.py
@time: 2019/01/28
"""
import scrapy
from scrapy.crawler import CrawlerProcess


# from KeywordSpider.spiders.baidukeyword import BaidukeywordSpider



# # Create a CrawlerProcess object
# process = CrawlerProcess()  # settings/arguments may be passed here
#
# process.crawl(BaidukeywordSpider)
# process.start()
# print("$"*50)

# from twisted.internet import reactor
# import scrapy
# from scrapy.crawler import CrawlerRunner
# from scrapy.utils.log import configure_logging
#
# # configure_logging() is required here; without it nothing is printed to the command line
# configure_logging({'LOG_FORMAT': '%(levelname)s: %(message)s'})
#
# # Create a CrawlerRunner object
# runner = CrawlerRunner()
#
# d = runner.crawl(BaidukeywordSpider)  # returns a Twisted Deferred object
# d.addBoth(lambda _: reactor.stop())  # see the Twisted Deferred docs for addBoth
# reactor.run()



from scrapy.cmdline import execute

# Each entry is a complete "scrapy crawl <spider>" command line to execute.
spiders = [
    'scrapy crawl sogoukeyword',
]

if __name__ == '__main__':
    for cmd in spiders:
        argv = cmd.split()
        print(argv)
        # scrapy.cmdline.execute() ends by calling sys.exit(), which would
        # abort this loop after the first command and skip the final print.
        # Catch SystemExit so every listed command is attempted and the
        # script finishes its own epilogue.
        # NOTE(review): Twisted's reactor cannot be restarted within one
        # process, so a second entry in `spiders` will likely still fail;
        # use scrapy.crawler.CrawlerProcess for genuine multi-spider runs
        # — confirm if this list ever grows beyond one command.
        try:
            execute(argv)
        except SystemExit:
            pass
    print("*" * 20)

