#!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Time    : 2021/12/15 14:56
# @Author  : zuodengbo
# @Email   : zuodb@amoydx.com
# @File    : celery_config
# @Software: PyCharm

# Broker: where task messages are queued (Redis DB 14)
CELERY_BROKER_URL = r"redis://127.0.0.1:6379/14"
# Result backend: where task results are stored (Redis DB 15)
CELERY_RESULT_BACKEND = r"redis://127.0.0.1:6379/15"

# Alternative: use RabbitMQ as the message broker
# (logs in with the default "guest" account)
# broker_url = "amqp://guest:guest@localhost:5672//"
# broker_url = "amqp://admin:admin@192.168.10.1:5672//"
# result_serializer = "json"
# Event queue expiry time
# event_queue_ttl = 5
# Do not store task results at all
CELERY_TASK_IGNORE_RESULT = True

# Workaround for: "DatabaseWrapper objects created in a thread can only be used in that same thread"
# NOTE(review): eager mode runs every task synchronously in the calling process,
# bypassing the broker and workers entirely — the Redis broker/backend settings
# above are effectively unused while this is True. Confirm this is intended
# outside of local development/testing.
CELERY_TASK_ALWAYS_EAGER = True

# import djcelery
#
# djcelery.setup_loader()
# BROKER_URL = "redis://127.0.0.1:6379/0"
# broker_pool_limit=None
# BROKER_POOL_LIMIT=None
# CELERY_RESULT_BACKEND = "amqp"  # the official tuning docs also recommend the C librabbitmq client
CELERY_TASK_RESULT_EXPIRES = 1200  # expiry (seconds) for stored task results; these tasks don't need to return results, only to run correctly
CELERYD_CONCURRENCY = 50  # worker concurrency (same as the -c command-line option); in practice more workers isn't always better — enough to avoid a backlog plus some headroom suffices
CELERYD_PREFETCH_MULTIPLIER = 4  # tasks each worker prefetches from the broker per fetch; kept small (4) because task durations vary
CELERYD_MAX_TASKS_PER_CHILD = 40  # recycle a worker process after this many tasks; a larger value (e.g. 200) is often reasonable
CELERY_IMPORTS = ("base.tasks",)  # modules to import so their tasks get registered
CELERY_TIMEZONE = "Asia/Shanghai"
# CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
CELERY_ENABLE_UTC = False
# DJANGO_CELERY_BEAT_TZ_AWARE = False
# NOTE(review): the old-style CELERYD_* / CELERYBEAT_* names above are Celery 3.x
# settings; under Celery 4+ with the Django namespace they would be
# CELERY_WORKER_* / CELERY_BEAT_SCHEDULE — verify against the installed version.
CELERYBEAT_SCHEDULE = {  # periodic-task (beat) schedule; currently empty
    # Example entry: run every 30 seconds
    # u"timer test 1": {
    #     "task": "base.tasks.hello_world",
    #     # "schedule": crontab(minute="*/2"),  # or "schedule":   timedelta(seconds=3),
    #     "schedule": timedelta(seconds=30),
    #     "args": (),
    # },
}
