import tensorflow as tf
from tensorflow.keras.optimizers.schedules import LearningRateSchedule

class OneCycleLR(LearningRateSchedule):
    """One-cycle learning-rate schedule with linear warm-up and linear decay.

    The learning rate ramps linearly from ``max_lr / div_factor`` up to
    ``max_lr`` over the first ``pct_start`` fraction of ``total_steps``,
    then decays linearly back down to ``max_lr / div_factor`` over the
    remaining steps. Steps beyond ``total_steps`` hold the final rate.
    """

    def __init__(self, max_lr, total_steps, pct_start=0.3, div_factor=25.0, name=None):
        """
        Args:
            max_lr: Peak learning rate (e.g. 3e-3).
            total_steps: Total number of training steps
                (epochs * steps_per_epoch).
            pct_start: Fraction of steps spent in the warm-up (rising)
                phase (default 30%).
            div_factor: Initial learning rate = max_lr / div_factor.
            name: Optional name, kept for Keras naming-convention
                compatibility.
        """
        super().__init__()
        self.max_lr = max_lr
        self.total_steps = total_steps
        self.pct_start = pct_start  # fraction of steps in the warm-up phase
        self.div_factor = div_factor  # initial lr = max_lr / div_factor
        self.name = name

    def __call__(self, step):
        """Return the learning rate for the given (0-based) optimizer step."""
        step = tf.cast(step, tf.float32)
        total = tf.cast(self.total_steps, tf.float32)
        # Clamp so steps past the end of the schedule hold the final LR
        # instead of extrapolating the linear decay into negative values.
        step = tf.minimum(step, total)

        up_steps = tf.cast(self.total_steps * self.pct_start, tf.float32)
        # Guard both phase lengths against zero (pct_start of 0 or 1 would
        # otherwise cause a division by zero / NaN).
        up_steps = tf.maximum(up_steps, 1.0)
        down_steps = tf.maximum(total - up_steps, 1.0)

        initial_lr = self.max_lr / self.div_factor

        return tf.cond(
            step < up_steps,
            # Warm-up phase: linear ramp initial_lr -> max_lr.
            lambda: initial_lr + (self.max_lr - initial_lr) * (step / up_steps),
            # Decay phase: linear ramp max_lr -> initial_lr.
            lambda: self.max_lr - (self.max_lr - initial_lr) * ((step - up_steps) / down_steps),
        )

    def get_config(self):
        """Return the constructor kwargs so the schedule is serializable."""
        return {
            "max_lr": self.max_lr,
            "total_steps": self.total_steps,
            "pct_start": self.pct_start,
            "div_factor": self.div_factor,
            "name": self.name,
        }

# Instantiate the OneCycleLR schedule.
# NOTE(review): if train_ds is a batched tf.data.Dataset, len(train_ds) already
# gives batches per epoch, so dividing by BATCH_SIZE again would undercount
# total_steps — confirm against how train_ds is constructed.
lr_schedule = OneCycleLR(max_lr=0.001, total_steps=(len(train_ds)//BATCH_SIZE)*base_epochs)
