
# Celery configuration module (loaded via app.config_from_object).
# SECURITY NOTE(review): Redis and MySQL credentials are hard-coded below —
# consider moving them to environment variables or a secrets store.

# Redis broker and result backend (DB 1 on the same instance).
broker_url = 'redis://:phWOgP05ymg01GaR@electricity-service.redis.hotgrid.cn:6379/1'
result_backend = 'redis://:phWOgP05ymg01GaR@electricity-service.redis.hotgrid.cn:6379/1'

# Static beat schedule kept for reference only; live scheduling is handled
# by the database-backed CustomScheduler configured below.
# beat_schedule = {
#     'crontab_year_task': {
#         'task': 'scheduler.tasks.crontab_year_task',
#         'schedule': crontab(hour='5,14', minute=15)
#     }
# }

# Modules every worker imports at startup so task definitions get registered.
imports = ('scheduler.tasks',)

# Route all crontab_* tasks to a dedicated queue.
task_routes = {'scheduler.tasks.crontab_*': {"queue": "celery_report_crontab"}}

timezone = 'Asia/Shanghai'

# Previous JSON-based serialization settings, superseded by msgpack below.
# task_serializer: 'json'
# accept_content: ['json']  # Ignore other content
# result_serializer: 'json'

# Let the application configure the root logger instead of Celery.
worker_hijack_root_logger = False

# Task results expire after one hour (seconds).
result_expires = 60 * 60

# Use msgpack on both sides and reject any other content type.
task_serializer = 'msgpack'
result_serializer = 'msgpack'
accept_content = ["msgpack"]

# Alternative scheduler kept for reference:
# beat_scheduler = 'celery_sqlalchemy_scheduler.schedulers:DatabaseScheduler'
# beat_sync_every = 0

# The maximum number of seconds beat can sleep between checking the schedule
# (default: 0).
beat_max_loop_interval = 10

# Database-backed scheduler implementation.
beat_scheduler = 'scheduler.scheduler:CustomScheduler'

# Not a built-in celery/beat setting: database URI consumed by the custom
# DB-backed scheduler (celery_sqlalchemy_scheduler-style).
# beat_dburi = 'sqlite:///schedule.db'
beat_dburi = 'mysql+mysqlconnector://electricity_data:EF2zUl1GHss3yqay@192.168.195.202:3317/electricity_data'

# Recycle each worker child process after 20 tasks to guard against
# memory leaks in long-running workers.
worker_max_tasks_per_child = 20