"""
Celery配置文件
定义Redis连接、序列化、时区等设置
"""
import os
from datetime import timedelta

# Redis连接配置
REDIS_HOST = os.getenv('REDIS_HOST', 'redis')
REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
REDIS_DB_BROKER = int(os.getenv('REDIS_DB_BROKER', 0))
REDIS_DB_BACKEND = int(os.getenv('REDIS_DB_BACKEND', 1))

# Broker和Backend配置
broker_url = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB_BROKER}'
result_backend = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB_BACKEND}'

# Serialization: JSON only, for both task payloads and results (no pickle).
task_serializer = 'json'
result_serializer = 'json'
accept_content = ['json']

# Timezone: schedule/display in Asia/Shanghai; keep internal timestamps in UTC.
timezone = 'Asia/Shanghai'
enable_utc = True

# Worker settings.
worker_prefetch_multiplier = 4  # messages reserved per worker process
worker_max_tasks_per_child = 100  # recycle children periodically to bound memory growth
worker_disable_rate_limits = False
worker_send_task_events = True  # emit task events for monitoring tools

# Task execution settings.
task_track_started = True
task_time_limit = 3600  # hard task timeout: 1 hour
task_soft_time_limit = 3000  # soft task timeout: 50 minutes
task_acks_late = True  # acknowledge only after the task completes
task_reject_on_worker_lost = True  # re-deliver tasks if the worker dies mid-run

# Retry settings.
# NOTE(review): 'task_default_retry_delay' and 'task_max_retries' are not
# recognized Celery configuration keys — retry behaviour is configured per
# task (Task.default_retry_delay / Task.max_retries) or via task_annotations.
# These assignments are likely ineffective as Celery settings; confirm intent
# before relying on them.
task_default_retry_delay = 60  # intended default retry delay: 60 seconds
task_max_retries = 3  # intended default maximum retry count

# Result backend settings.
result_expires = 3600  # stored results expire after 1 hour
result_persistent = True

# Beat scheduler configuration: periodic monitoring jobs, all routed to the
# dedicated health queue.
beat_schedule = {}

# Health check: runs every 5 minutes at high priority.
beat_schedule['health-check-every-5-minutes'] = {
    'task': 'src.tasks.health.health_check',
    'schedule': timedelta(minutes=5),
    'options': {'queue': 'health_queue', 'priority': 8},
}

# Data-freshness check: runs every 30 minutes.
beat_schedule['freshness-check-every-30-minutes'] = {
    'task': 'src.tasks.health.check_data_freshness',
    'schedule': timedelta(minutes=30),
    'options': {'queue': 'health_queue', 'priority': 7},
}

# Concurrency: worker process count, overridable via CELERY_CONCURRENCY.
worker_concurrency = int(os.getenv('CELERY_CONCURRENCY', 4))
# The prefork pool is used to support priority queues.
worker_pool = 'prefork'

# Task routing: each task module gets its own queue ('<module>_queue') and
# routing key ('<module>.tasks').
task_routes = {
    f'src.tasks.{module}.*': {
        'queue': f'{module}_queue',
        'routing_key': f'{module}.tasks',
    }
    for module in ('crawler', 'scheduler', 'health')
}

# Per-crawler rate limits (tasks per minute, per worker instance).
_CRAWLER_RATE_LIMITS = {
    'weibo': '10/m',
    'zhihu': '15/m',
    'toutiao': '20/m',
    'baidu': '20/m',
    'xiaohongshu': '10/m',
    'douyin': '10/m',
    'twitter': '15/m',
    'reddit': '20/m',
    'youtube': '10/m',
}

# Expand the table into Celery task annotations keyed by full task path.
task_annotations = {
    f'src.tasks.crawler.crawl_{platform}': {'rate_limit': rate}
    for platform, rate in _CRAWLER_RATE_LIMITS.items()
}

# Monitoring: publish task-sent events so external monitors can see tasks
# the moment they are dispatched. (worker_send_task_events is already
# enabled in the worker settings above; the duplicate assignment here
# was removed.)
task_send_sent_event = True

# Log formats for the worker's own log lines and per-task log lines.
worker_log_format = '[%(asctime)s: %(levelname)s/%(processName)s] %(message)s'
worker_task_log_format = '[%(asctime)s: %(levelname)s/%(processName)s][%(task_name)s(%(task_id)s)] %(message)s'

# Priority-queue support on the Redis transport: priority levels 0-10,
# sub-queue names joined with ':' and consumed in priority order.
broker_transport_options = {
    'priority_steps': list(range(11)),  # priorities 0-10
    'sep': ':',
    'queue_order_strategy': 'priority',
}