import os

# ── 1. Celery async-task configuration (broker / worker) ──────────────
# NOTE(review): this module uses Celery's modern lowercase setting names
# (result_backend, broker_url, ...). The original file mixed in uppercase
# names (CELERY_WORKER_CONCURRENCY, WORKER_PREFETCH_MULTIPLIER, ...) that
# Celery silently ignores when the config is loaded as a plain module, so
# those values never took effect; they are normalized to lowercase here.

# Result store: persist task results via django-celery-results.
# (previous backend: redis://172.16.5.154:6379/1)
result_backend = 'django-db'

# Broker: Redis database 0; host:port comes from the REDIS_ADDR env var,
# falling back to the in-cluster service name.
broker_url = f'redis://{os.getenv("REDIS_ADDR", default="ops-redis:6379")}/0'

# Number of concurrent worker processes/threads.
worker_concurrency = 10

# Prefetch: each worker reserves up to N tasks at a time to reduce
# broker round-trip overhead.
worker_prefetch_multiplier = 20

# Fork a fresh child process per task (POSIX only); can prevent deadlocks
# caused by inherited state. Replaces deprecated CELERYD_FORCE_EXECV.
worker_force_execv = True

# Recycle a worker child after this many tasks (guards against memory
# leaks in long-running workers).
worker_max_tasks_per_child = 100

# Disable all task rate limits; not recommended when network resources
# are constrained.
worker_disable_rate_limits = True

# Explicitly retry broker connections at startup (required to keep the
# pre-6.0 retry behavior without a deprecation warning).
broker_connection_retry_on_startup = True

# ── Message serialization ─────────────────────────────────────────────
# JSON everywhere, on every platform. The original branched on os.name,
# but both branches assigned exactly the same values, so the platform
# check was dead code. The only real platform difference is the worker
# start command:
#   Mac/Linux: celery -A joyoo worker -l info
#   Windows:   pip install eventlet
#              celery -A joyoo worker -l info -P eventlet
accept_content = ['json']
task_serializer = 'json'
result_serializer = 'json'

# Use local (naive) times rather than UTC for task timestamps.
enable_utc = False

# ── 2. Celery beat (periodic tasks) configuration ─────────────────────
timezone = 'Asia/Shanghai'

# Read by django-celery-beat from the Django settings; it only has an
# effect if this module is merged into settings.py — TODO confirm the
# load path.
DJANGO_CELERY_BEAT_TZ_AWARE = False

# Store the beat schedule in the database via django-celery-beat.
# The lowercase name takes effect when Celery loads this module
# directly; the uppercase alias is kept for a Django-settings
# (namespace='CELERY') load path.
beat_scheduler = 'django_celery_beat.schedulers:DatabaseScheduler'
CELERY_BEAT_SCHEDULER = beat_scheduler

# Example of a statically declared schedule (unused: django-celery-beat
# keeps schedules in the database instead):
# CELERY_BEAT_SCHEDULE = {
#     'task_name': {
#         'task': 'your_app.tasks.task_name',  # dotted path to the task
#         'schedule': 10,  # run interval in seconds
#     },
# }