# -*- coding: utf-8 -*-
#@Time: 2021/2/16 15:05
#@Author: 卜白
#@File: start_spider.py
#@Software: PyCharm

from twisted.internet import reactor    # # 事件循环（终止条件，所有的socket都已经移除）
from scrapy.crawler import CrawlerRunner
from scrapy.utils.project import get_project_settings
from scrapy.utils.log import configure_logging
# import os,sys
# curPath = os.path.abspath(os.path.dirname(__file__))
# rootPath = os.path.split(curPath)[0]
# sys.path.append(os.path.split(rootPath)[0])
# 引入spider文件
from drug_data.spiders.conedos import ConedosSpider
from drug_data.spiders.healthyk import HealthykSpider
from drug_data.spiders.onemedicine import OnemedicineSpider

import logging


# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)

# Shared Scrapy plumbing: load the project's settings.py, wire Scrapy's
# logging into the stdlib logging system, and create the single
# CrawlerRunner that start_spider() queues every spider on.
settings = get_project_settings()
configure_logging(settings)
runner = CrawlerRunner(settings)


def start_spider():
    """Queue all spiders on the shared CrawlerRunner and run them to completion.

    Every spider crawls concurrently inside one Twisted reactor; the
    reactor is stopped (and this call returns) once the last crawl ends.
    """
    # Schedule each spider; add further spider classes here as needed.
    for spider_cls in (ConedosSpider, HealthykSpider, OnemedicineSpider):
        runner.crawl(spider_cls)

    # join() yields a Deferred that fires when every queued crawl is done
    # (success or failure) — use it to shut down the event loop.
    runner.join().addBoth(lambda _: reactor.stop())

    # Blocks here until reactor.stop() fires above.
    reactor.run()

# Script entry point: run all spiders when executed directly.
if __name__ == '__main__':
    start_spider()