from urllib.parse import quote_plus

from apscheduler.executors.pool import ProcessPoolExecutor
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.background import BackgroundScheduler

from config import setting
from utils.logger import logger

scheduler = None

def scheduler_config():
    global scheduler
    jobstores = {
        # 可以配置多个存储
        'default': SQLAlchemyJobStore(url="mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8mb4".\
                                        format(
                                        setting.DB_USER,
                                                setting.DB_PASSWORD,
                                                setting.DB_HOST,
                                                setting.DB_PORT,
                                                setting.DB_DATABASE))  # SQLAlchemyJobStore指定存储链接
        }
    executors = {
        'default': {'type': 'threadpool', 'max_workers': 20},  # 最大工作线程数20
        'processpool': ProcessPoolExecutor(max_workers=5)  # 最大工作进程数为5
    }
    job_defaults = {
        'coalesce': False,  # 关闭新job的合并，当job延误或者异常原因未执行时
        'max_instances': 3  # 并发运行新job默认最大实例多少
    }
    scheduler = BackgroundScheduler()
    scheduler.configure(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone="Asia/Shanghai")
    return scheduler


# 开启定时任务
def init_scheduler():
    logger.info("初始化定时任务")
    global scheduler
    scheduler = scheduler_config()
    scheduler.start()


# 关闭定时任务
def stop_scheduler():
    logger.info("定时任务已停止运行...")
    global scheduler
    scheduler.shutdown()


