import time

from celery import Celery, shared_task
from celery import Task

# Broker/backend connection URLs.
# NOTE(review): credentials and hosts are hard-coded here — consider moving
# them to environment variables or a config file before sharing/deploying.
broker_url_mq = 'pyamqp://vipcj:cqcoll123@192.168.31.123:5672/test_celery'
broker_url_redis = 'redis://192.168.31.36:6378/4'
result_backend = 'redis://192.168.31.36:6378/5'
# Start a worker with: celery -A test worker --loglevel=info
# This app uses the Redis broker (broker_url_mq above is unused here).
app = Celery("tasks",
             broker=broker_url_redis,
             backend=result_backend)
# Redis's default visibility timeout is 1 hour; set it explicitly so a
# task redelivery window of 3600s is guaranteed regardless of defaults.
app.conf.broker_transport_options = {'visibility_timeout': 3600}
# app.conf.result_backend = result_backend
# Retry policy for the result-backend connection (5s timeout per attempt).
app.conf.result_backend_transport_options = {
    'retry_policy': {
        'timeout': 5.0
    }
}
# Report the STARTED state for tasks (disabled by default in Celery).
app.conf.task_track_started = True
# Serialization configuration: task payloads are sent as JSON.
app.conf.task_serializer = 'json'


@app.task(track_started=True)
def add(x, y):
    """Add two numbers after a short simulated delay.

    Fix: the previous version stacked ``@shared_task`` beneath
    ``@app.task``, which made ``app.task`` wrap the shared-task proxy
    object instead of the plain function — the task ends up registered
    twice under conflicting names. A single decorator bound to ``app``
    is the correct form. (``track_started=True`` is kept for
    compatibility, although the app-level ``task_track_started`` config
    already enables it globally.)

    Args:
        x: First addend.
        y: Second addend.

    Returns:
        The sum ``x + y``.
    """
    print("***************")  # visible marker that a worker picked up the task
    time.sleep(1)  # simulate slow work
    return x + y

# "tasks" is this module's file name (tasks.py)
# Start a worker with: celery -A tasks worker --pool=eventlet --loglevel=info
