import logging
import subprocess
import time

import schedule
from scrapy import cmdline

# Module-level logger named after this module (instead of the root logger),
# so records are attributable and handler config elsewhere doesn't collide.
LOG = logging.getLogger(__name__)


# 定时任务执行
# Register the recurring crawl with the scheduler.
def schedule_fun():
    """Schedule `job` to run once every hour (every 60 minutes).

    Only registers the task; the caller must drive execution with
    `schedule.run_pending()` in a loop.
    """
    schedule.every(60).minutes.do(job)


# 启动爬虫爬取内容
# Launch the spiders to crawl content.
def job():
    """Run the `weather` then `seven_days_weather` spiders, blocking until done.

    Uses `subprocess.run` (not `Popen`) so this call waits for both crawls
    to finish: the completion log below is then accurate, and hourly
    scheduled runs cannot overlap each other.
    """
    LOG.info('开始爬虫内容~~')
    # shell=True is required for the `&&` chaining of the two crawl commands;
    # the command string is a fixed literal, no untrusted input is interpolated.
    subprocess.run('scrapy crawl weather && scrapy crawl seven_days_weather', shell=True)
    LOG.info('爬取完成~~')


if __name__ == '__main__':
    # Without a configured handler the root logger silently drops INFO
    # records, so every LOG.info in this script would be invisible.
    logging.basicConfig(level=logging.INFO)
    print('scrapy启动成功~~😁')
    LOG.info('scrapy启动成功~~')
    # Run the crawl once immediately at startup, then hand off to the scheduler.
    job()
    schedule_fun()
    while True:
        schedule.run_pending()
        # Sleep between polls: the previous bare loop pegged a CPU core at 100%.
        time.sleep(1)
