#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
# 导入 os 模块，用于与操作系统进行交互，例如获取环境变量

from celery.schedules import crontab
# 从 celery.schedules 模块导入 crontab，用于定义定时任务的调度周期

from myapp.config import REDIS_HOST, REDIS_PASSWORD, REDIS_PORT, REDIS_SENTINEL_CONF, REDIS_TYPE
# 从 myapp.config 模块导入 Redis 相关的配置信息，包括主机、密码、端口、Sentinel 配置和类型


# Configure the Celery broker connection according to the Redis deployment
# mode: 'sentinel' (HA) or standalone.
if REDIS_TYPE == 'sentinel':
    # Sentinel mode: derive the sentinel client kwargs from the shared
    # connection configuration.
    CELERY_REDIS_SENTINEL_CONF = REDIS_SENTINEL_CONF['connection_conf'].copy()
    # Strip the password so it is not passed through (and leaked) via the
    # sentinel kwargs; a default of None makes a missing key non-fatal.
    CELERY_REDIS_SENTINEL_CONF.pop('password', None)
    # The target db is encoded in the broker URL below, not in the kwargs.
    CELERY_REDIS_SENTINEL_CONF.pop('db', None)
    # Broker URL using the sentinel transport.
    # NOTE(review): sentinel mode selects db 1 while standalone uses db 0 —
    # confirm this asymmetry is intentional.
    CELERY_REDIS = f'sentinel://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/1'
    BROKER_TRANSPORT_OPTIONS = {
        # Name of the monitored master group as known to the sentinels.
        'master_name': REDIS_SENTINEL_CONF['master_group_name'],
        # Extra keyword arguments forwarded to the Sentinel client.
        'sentinel_kwargs': CELERY_REDIS_SENTINEL_CONF,
    }
else:
    # Standalone mode: plain redis:// URL, including the password segment
    # only when a password is configured.
    CELERY_REDIS = (
        f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
        if REDIS_PASSWORD
        else f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
    )
    # No extra transport options are needed for a standalone server.
    BROKER_TRANSPORT_OPTIONS = {}

DATASET_FORCE_DELETE = os.getenv('DATASET_FORCE_DELETE', 'false')
# 从环境变量获取 DATASET_FORCE_DELETE 的值，默认为 'false'
# Get the value of DATASET_FORCE_DELETE from environment variables, default to 'false'
DATASET_FORCE_DELETE = DATASET_FORCE_DELETE.lower() in ['true', '1', 't', 'y', 'yes']
# 将 DATASET_FORCE_DELETE 转换为布尔值，判断是否强制删除数据集
# Convert DATASET_FORCE_DELETE to a boolean value to determine if datasets should be force deleted
MODEL_FORCE_DELETE = os.getenv('MODEL_FORCE_DELETE', 'false')
# 从环境变量获取 MODEL_FORCE_DELETE 的值，默认为 'false'
# Get the value of MODEL_FORCE_DELETE from environment variables, default to 'false'
MODEL_FORCE_DELETE = MODEL_FORCE_DELETE.lower() in ['true', '1', 't', 'y', 'yes']
# 将 MODEL_FORCE_DELETE 转换为布尔值，判断是否强制删除模型
# Convert MODEL_FORCE_DELETE to a boolean value to determine if models should be force deleted


class CeleryConfig:
    """Celery configuration.

    Bundles the broker / result-backend connection (built at module level
    from the Redis settings), per-task annotations, and the beat schedule.
    The final ``CELERYBEAT_SCHEDULE`` depends on the ``STORAGE_MEDIA`` and
    ``GPU_TYPE`` environment variables and on the force-delete flags.
    """

    # Broker URL for the task queue (Redis, built at module level).
    BROKER_URL = CELERY_REDIS
    # Broker transport options (sentinel master name / kwargs in sentinel mode).
    BROKER_TRANSPORT_OPTIONS = BROKER_TRANSPORT_OPTIONS
    # Modules Celery imports to discover task definitions.
    CELERY_IMPORTS = ('myapp.tasks',)
    # Backend used to store task results (same Redis as the broker).
    CELERY_RESULT_BACKEND = CELERY_REDIS
    # Transport options for the result backend.
    CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = BROKER_TRANSPORT_OPTIONS
    # Worker log level.
    CELERYD_LOG_LEVEL = 'DEBUG'
    # How many tasks a worker prefetches from Redis at a time.
    CELERYD_PREFETCH_MULTIPLIER = 10
    # Number of tasks a worker child executes before being recycled;
    # a larger value is recommended.
    # CELERYD_MAX_TASKS_PER_CHILD = 200
    # Expiry (seconds) for stored task results.
    # CELERY_TASK_RESULT_EXPIRES = 1200
    # Hard per-task runtime limit; the task is killed when exceeded.
    # CELERYD_TASK_TIME_LIMIT = 60
    # Acknowledge tasks only after they complete (slight performance cost).
    CELERY_ACKS_LATE = True
    # Emit a task-sent event whenever a task is published.
    CELERY_SEND_TASK_SENT_EVENT = True
    # Worker concurrency; defaults to the number of CPU cores
    # (also settable via the -c command-line flag).
    # CELERYD_CONCURRENCY = 4
    # Use local Shanghai time rather than UTC.
    CELERY_TIMEZONE = 'Asia/Shanghai'
    CELERY_ENABLE_UTC = False

    # Per-task limits: key is the celery task name, value the limit config.
    CELERY_ANNOTATIONS = {
        # Delete historical workflows and their related resources.
        'task.delete_workflow': {
            # 'rate_limit': '1/h',  # at most once per hour
            # 'time_limit': 1200,   # hard limit, cannot be caught in-task
            'soft_time_limit': 60,  # soft runtime limit, catchable in-task
            'ignore_result': True,  # do not store the task result
        },
        # Launch scheduled (timed) pipeline runs.
        'task.run_workflow': {
            'rate_limit': '1/s',  # at most once per second
            # 'time_limit': 1,
            'soft_time_limit': 600,  # only supported in the prefork pool
            'ignore_result': True,
        },
        # Upload workflow information.
        'task.upload_workflow': {
            'rate_limit': '10/s',  # at most ten per second
            'ignore_result': True,
        },
    }

    # Beat schedule entries: key is the entry name, value holds the celery
    # task name and its cadence.
    CelerySchedule = {
        'task_delete_workflow': {
            # Periodically delete old workflows.
            'task': 'task.delete_workflow',
            # 'schedule': 10.0,  # every 10 seconds
            'schedule': crontab(minute='1'),  # minute 1 of every hour
        },
        'task_make_timerun_config': {
            # Periodically generate the YAML config for scheduled runs.
            'task': 'task.make_timerun_config',
            # 'schedule': 10.0,  # every 10 seconds
            'schedule': crontab(minute='*/5'),  # every 5 minutes
        },
        # 'task_delete_old_data': {
        #     # Periodically delete old data (deprecated).
        #     'task': 'task.delete_old_data',
        #     # 'schedule': 100.0,
        #     'schedule': crontab(minute='1', hour='1'),  # daily at 01:01
        # },
        'task_delete_notebook': {
            # Periodically stop notebooks.
            'task': 'task.delete_notebook',
            # 'schedule': 10.0,
            'schedule': crontab(minute='1', hour='4'),  # daily at 04:01
        },
        # 'task_push_workspace_size': {
        #     # Periodically push per-user workspace sizes.
        #     'task': 'task.push_workspace_size',
        #     # 'schedule': 10.0,
        #     'schedule': crontab(minute='10', hour='10'),  # daily at 10:10
        # },
        'task_check_pipeline_run': {
            # Periodically check pipeline runtime durations.
            'task': 'task.check_pipeline_run',
            'schedule': crontab(minute='10', hour='11'),  # daily at 11:10
        },
        'task_delete_debug_docker': {
            # Periodically delete debug pods.
            'task': 'task.delete_debug_docker',
            # 'schedule': 10.0,
            'schedule': crontab(minute='30', hour='22'),  # daily at 22:30
        },
        'task_watch_gpu': {
            # Periodically push GPU usage status.
            'task': 'task.watch_gpu',
            # 'schedule': 10.0,
            # Minute 10 of every 2nd hour between 08:00 and 23:00.
            'schedule': crontab(minute='10', hour='8-23/2'),
        },
        'task_adjust_node_resource': {
            # Periodically balance resources across project groups.
            'task': 'task.adjust_node_resource',
            # 'schedule': 10.0,
            'schedule': crontab(minute='*/10'),  # every 10 minutes
        },
        'task_update_aihub': {
            # Periodically update aihub.
            'task': 'task.update_aihub',
            # 'schedule': 10.0,
            'schedule': crontab(minute='30', hour='4'),  # daily at 04:30
        },
        # 'task_sync_job_status': {  # sync training-job status
        #     'task': 'task.sync_job_status',
        #     'schedule': crontab(minute='*/1')
        # },
        # 'task_sync_model_export': {  # sync inference-service status
        #     'task': 'task.sync_model_export',
        #     'schedule': crontab(minute='*/1'),
        # },
        'task_service_auto_stop': {  # auto-stop inference services
            'task': 'task.service_auto_stop',
            'schedule': crontab(minute='*/1'),  # every minute
        },
        'task_notebook_auto_stop': {  # auto-stop notebooks
            'task': 'task.notebook_auto_stop',
            'schedule': crontab(minute='*/1'),  # every minute
        },
        'task_dataset_flow_back': {  # dataset flow-back
            'task': 'task.dataset_flow_back',
            'schedule': crontab(minute='5', hour='0'),  # daily at 00:05
        },
        'task_sync_notebook_status': {  # sync jupyter notebook status
            'task': 'task.sync_notebook_status',
            'schedule': crontab(minute='*/1'),  # every minute
        },
        'task_docker_commit': {  # commit docker images
            'task': 'task.docker_commit',
            'schedule': crontab(minute='*/1'),  # every minute
        },
        'task_count_user_space': {  # measure per-user space usage
            'task': 'task.count_user_space',
            'schedule': crontab(minute='*/15'),  # every 15 minutes
        },
        'task_init_user_storage_size': {  # initialize user storage quotas
            'task': 'task.init_user_storage_size',
            'schedule': crontab(minute='*/15'),  # every 15 minutes
        },
        'task_get_out_annotation_info': {
            # Fetch dataset info from the external (37°) annotation platform.
            'task': 'task.get_out_annotation_info',
            'schedule': crontab(minute='*/10'),  # every 10 minutes
        },
        'task_download_out_annotation_file': {
            # Download dataset files from the external (37°) annotation platform.
            'task': 'task.download_out_annotation_file',
            'schedule': crontab(minute='*/5'),  # every 5 minutes
        },
    }

    # Ascend GPUs need their rank_table configmaps cleaned up periodically.
    if os.getenv('GPU_TYPE') == 'Ascend':
        CelerySchedule['task_delete_ring_cfgmap'] = {
            'task': 'task.delete_ring_cfgmap',
            'schedule': crontab(minute='1', hour='0'),  # daily at 00:01
        }
    # On non-OBS storage, clean up k8s resources of failed inference services.
    if os.getenv('STORAGE_MEDIA') != 'OBS':
        CelerySchedule['task_clear_failed_service'] = {
            'task': 'task.clear_failed_service',
            'schedule': crontab(minute='*/5'),  # every 5 minutes
        }

    # Temporary dataset files are force-deleted only when the
    # DATASET_FORCE_DELETE flag is enabled.
    if os.getenv('STORAGE_MEDIA') != 'OBS' and DATASET_FORCE_DELETE:
        CelerySchedule['task_clear_dataset_tmp_files'] = {
            'task': 'task.clear_dataset_tmp_files',
            'schedule': crontab(minute=0, hour=4),  # daily at 04:00
        }
    # Trained model weight files are force-deleted only when the
    # MODEL_FORCE_DELETE flag is enabled.
    if os.getenv('STORAGE_MEDIA') != 'OBS' and MODEL_FORCE_DELETE:
        CelerySchedule['task_clear_train_model_files'] = {
            'task': 'task.clear_train_model_files',
            'schedule': crontab(minute=0, hour=5),  # daily at 05:00
        }

    # Schedule variant for OBS storage: everything above plus the
    # ModelArts synchronization tasks.
    ObsCelerySchedule = CelerySchedule.copy()
    ObsCelerySchedule['task_sync_modelarts_model'] = {
        'task': 'task.sync_modelarts_model',
        'schedule': crontab(minute='*/1'),  # every minute
    }
    ObsCelerySchedule['task_recycle_modelarts_service'] = {
        'task': 'task.recycle_modelarts_service',
        'schedule': crontab(minute=0, hour='*/1'),  # minute 0 of every hour
    }
    ObsCelerySchedule['task_compensate_modelarts_service'] = {
        'task': 'task.compensate_modelarts_service',
        'schedule': crontab(minute=0, hour='*/4'),  # every 4 hours at minute 0
    }
    # Pick the schedule matching the configured storage media.
    CELERYBEAT_SCHEDULE = (
        ObsCelerySchedule if os.getenv('STORAGE_MEDIA') == 'OBS' else CelerySchedule
    )