#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Entry point: run the job-board spiders through Scrapy's CrawlerProcess.

Running this script starts the selected spider(s) and blocks until the
crawl finishes. Previously the crawl started at import time; it is now
guarded by ``if __name__ == "__main__"`` so importing this module has no
side effects.
"""
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from pro_job.spiders.job51 import Job51Spider
from pro_job.spiders.lagou import LagouSpider
from pro_job.spiders.zhaopin import ZhaopinSpider
from pro_job.spiders.zhipin import ZhipinSpider

__author__ = 'Txc001'


def main():
    """Configure the crawler process, schedule spiders, and run to completion."""
    # Load the configuration from the project's settings.py module.
    settings = get_project_settings()
    process = CrawlerProcess(settings=settings)

    # Multiple spiders may be scheduled before starting the process.
    # process.crawl(Job51Spider)
    process.crawl(ZhaopinSpider)
    # process.crawl(ZhipinSpider)
    # process.crawl(LagouSpider)

    # Start crawling; this call blocks until all scheduled spiders finish.
    process.start()


if __name__ == "__main__":
    main()