# -*- coding: utf-8 -*-
#@Time: 2020/10/20 11:47
#@Author: 卜白
#@File: start.py
#@Software: PyCharm

from twisted.internet import reactor    # event loop (terminates once all sockets have been removed)
from scrapy.crawler import CrawlerRunner
from scrapy.utils.project import get_project_settings
from scrapy.utils.log import configure_logging
import os,sys
curPath = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(curPath)[0]
sys.path.append(os.path.split(rootPath)[0])
# Import the spider classes
from conch_findroom.spiders.esf_spider import EsfSpiderSpider
from conch_findroom.spiders.xf_spider import XfSpiderSpider
from conch_findroom.spiders.zf_spider import ZfSpiderSpider
import logging


# Module-level logger for this launcher script.
logger = logging.getLogger(__name__)

# Load the Scrapy project settings and configure logging once, before the
# runner is created, so every crawler shares the same configuration.
settings = get_project_settings()
configure_logging(settings)
# CrawlerRunner (unlike CrawlerProcess) does not manage the Twisted reactor
# itself; start_spider() below starts and stops the reactor explicitly.
runner = CrawlerRunner(settings)


def start_spider():
    """Schedule every project spider on the shared runner and block until done.

    All three spiders are queued on the same CrawlerRunner and run
    concurrently inside one Twisted reactor. The reactor is stopped
    automatically once the last crawl finishes, so this call blocks
    until every spider has completed.
    """
    # Queue each spider on the runner; add further spider classes to this
    # tuple to run them in the same event loop.
    for spider_cls in (XfSpiderSpider, EsfSpiderSpider, ZfSpiderSpider):
        runner.crawl(spider_cls)

    # join() yields a Deferred that fires when every queued crawl has
    # finished (or failed); either way, shut the event loop down.
    finished = runner.join()
    finished.addBoth(lambda _: reactor.stop())

    # Start the event loop; blocks until reactor.stop() is called above.
    reactor.run()


def main():
    """Script entry point: launch all spiders and wait for them to finish."""
    start_spider()


if __name__ == '__main__':
    # Run only when executed as a script, not when imported as a module.
    main()