
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

# Import the spider classes used by this runner
from spiders.Wan500LiveZucaiBqcSpider import Wan500LiveZucaiBqcSpider
from spiders.Wan500LiveZucaiSfcSpider import Wan500LiveZucaiSfcSpider
from spiders.Wan500LiveZucaiTtgSpider import Wan500LiveZucaiTtgSpider
from spiders.Wan500LiveJingcaiSpider import Wan500LiveJingcaiSpider
from spiders.Wan500JingcaiSpfSpider import Wan500JingcaiSpfSpider
from spiders.Wan500JingcaiTtgSpider import Wan500JingcaiTtgSpider
from spiders.Wan500JingcaiBqcSpider import Wan500JingcaiBqcSpider
from spiders.Wan500JingcaiCrsSpider import Wan500JingcaiCrsSpider
from spiders.SportteryFootballSpider import SportteryFootballSpider
from spiders.SportteryFootballBonusSpider import SportteryFootballBonusSpider
from spiders.SportteryBasketballSpider import SportteryBasketballSpider


def run_spiders():
    """Run all enabled spiders inside a single Scrapy CrawlerProcess.

    Loads the project settings, registers each active spider, and then
    blocks in ``process.start()`` until every crawl has finished.
    """
    crawler = CrawlerProcess(get_project_settings())

    # Disabled 500wan spiders — uncomment a line to re-enable it:
    # crawler.crawl(Wan500LiveJingcaiSpider)   # 500wan - Jingcai (live)
    # crawler.crawl(Wan500LiveZucaiSfcSpider)  # 500wan - win/draw/lose pool (SFC)
    # crawler.crawl(Wan500LiveZucaiBqcSpider)  # 500wan - 6-match half/full time (BQC)
    # crawler.crawl(Wan500LiveZucaiTtgSpider)  # 500wan - 4-match total goals (TTG)
    #
    # crawler.crawl(Wan500JingcaiSpfSpider)  # 500wan - Jingcai win/draw/lose (SPF)
    # crawler.crawl(Wan500JingcaiTtgSpider)  # 500wan - Jingcai total goals (TTG)
    # crawler.crawl(Wan500JingcaiBqcSpider)  # 500wan - Jingcai half/full time (BQC)
    # crawler.crawl(Wan500JingcaiCrsSpider)  # 500wan - Jingcai correct score (CRS)

    # Active spiders: China Sports Lottery (sporttery) football data.
    crawler.crawl(SportteryFootballSpider)       # football match data
    crawler.crawl(SportteryFootballBonusSpider)  # football prize/draw data
    # crawler.crawl(SportteryBasketballSpider)   # basketball data (disabled)

    # Start every registered crawl; blocks until all are done.
    crawler.start()


if __name__ == "__main__":
    run_spiders()