from celery.schedules import crontab
# --- Broker / result backend -------------------------------------------------
# NOTE(review): broker uses RabbitMQ's default guest:guest account and a
# hard-coded IP — consider moving these to environment variables / secrets.
broker_url = "amqp://guest:guest@10.62.170.203:5672//"
result_backend = "redis://10.62.170.203:6379/1"

# --- Serialization -----------------------------------------------------------
task_serializer = "json"
result_serializer = "json"
accept_content = ["json"]

# --- Worker behaviour --------------------------------------------------------
timezone = "Asia/Shanghai"
worker_hijack_root_logger = False  # keep the app's own logging configuration
result_expires = 60  # seconds before task results are purged from the backend

# Task modules the worker imports on start-up.
include = [
    "tasks.crawl.lims_task",
]

# Periodic task schedule (celery beat).
beat_schedule = {
    # Crawl 50 pages of project data at 02:00.
    # NOTE(review): the original comment said "every day" (每天), but the
    # crontab fires Mon-Sat only — confirm which is intended.
    "crawl_lims": {
        "task": "tasks.crawl.lims_task.crawl_lims",
        "schedule": crontab(minute="0", hour="2", day_of_week="mon-sat"),
        "args": ("50", "false"),
    },
    # Full re-crawl of the database on the 1st and 16th of each month.
    "crawl_all_lims": {
        "task": "tasks.crawl.lims_task.crawl_lims",
        "schedule": crontab(minute="0", hour="2", day_of_month="1, 16"),
        "args": ("50", "true"),
    },
}

# python -m celery -A tasks worker -l info -P eventlet
# from tasks.crawl.lims_task import crawl_lims 