from celery_tasks.beat_schedule import beat_schedule
from core.settings import REDIS_URL


# Celery configuration (lowercase, Celery 4+ setting names).
# Loaded by the app via something like app.config_from_object(...).
broker_url = f'{REDIS_URL}/7'  # Redis logical DB 7 as the message broker
broker_pool_limit = 100  # Broker connection pool size (Celery default is 10)

timezone = 'Asia/Shanghai'
# NOTE(review): accepting/serializing with pickle allows arbitrary code
# execution if the broker is reachable by untrusted parties — confirm the
# Redis instance is private before keeping 'pickle' here.
accept_content = ['pickle', 'json']
task_serializer = 'pickle'

result_backend = f'{REDIS_URL}/8'  # Redis logical DB 8 stores task results
result_serializer = 'json'
result_cache_max = 100  # Maximum number of task results to cache
result_expires = 3600  # Task results expire after 3600 seconds (1 hour)

task_time_limit = 1200  # Hard limit (seconds): the worker is killed past this
task_soft_time_limit = 80  # Soft limit (seconds): SoftTimeLimitExceeded is raised first

worker_redirect_stdouts_level = 'INFO'
# Expose the imported schedule under the Celery setting name.
# NOTE(review): this rebinding is a no-op — the import above already binds
# `beat_schedule` at module level; kept for explicitness.
beat_schedule = beat_schedule
imports = [
    'celery_tasks.tasks.beat_tasks',
]
# Define task queues.
# NOTE(review): `beat_schedule` is used both as the schedule value above and
# via attribute access below — presumably it is a module-like/custom object
# exposing `task_queues` and `task_routes`; verify in celery_tasks.beat_schedule.
task_queues = beat_schedule.task_queues
# Define task routing
task_routes = beat_schedule.task_routes
