import math

import paddle.fluid as fluid
from paddle.fluid.layers.learning_rate_scheduler import _decay_step_counter


def CosineLR(learning_rate, step_each_epoch, epochs, min_lr, warmup_min_lr=None, warmup_epoch=5, begin=0):
    """Build a learning-rate schedule: linear warmup followed by cosine decay.

    For the first ``warmup_epoch`` epochs the LR ramps linearly from
    ``warmup_min_lr`` up to ``learning_rate``; afterwards it follows a
    half-cosine curve from ``learning_rate`` down to ``min_lr`` over the
    remaining ``epochs - warmup_epoch`` epochs.

    Args:
        learning_rate (float): peak LR reached at the end of warmup.
        step_each_epoch (int): number of optimizer steps per epoch.
        epochs (int): total number of training epochs.
        min_lr (float): floor LR reached at the end of cosine decay.
        warmup_min_lr (float, optional): starting LR of the warmup ramp.
            Defaults to ``learning_rate`` (i.e. warmup is a flat no-op).
        warmup_epoch (int): number of warmup epochs. Defaults to 5.
        begin (int): initial value for the global step counter.

    Returns:
        Variable: a persistable float32 tensor named ``"learning_rate"``
        that is updated in-graph every step.
    """
    # Without an explicit warmup floor, warmup holds the LR at the peak value.
    if warmup_min_lr is None:
        warmup_min_lr = learning_rate

    step = _decay_step_counter(begin)
    # Persistable holder the optimizer reads; overwritten by one of the
    # Switch branches below on every step.
    lr_var = fluid.layers.tensor.create_global_var(
        shape=[1],
        value=0.0,
        dtype='float32',
        persistable=True,
        name="learning_rate")

    # Warmup boundary as a CPU-side float32 tensor so it can be compared
    # against the (tensor-valued) epoch counter inside the Switch.
    warmup_boundary = fluid.layers.fill_constant(
        shape=[1],
        value=float(warmup_epoch),
        dtype='float32',
        force_cpu=True)
    current_epoch = fluid.layers.floor(step / step_each_epoch)

    with fluid.layers.control_flow.Switch() as switch:
        with switch.case(current_epoch < warmup_boundary):
            # Linear interpolation: fraction of warmup completed so far.
            ramp = step / (step_each_epoch * warmup_boundary)
            warm_lr = (learning_rate - warmup_min_lr) * ramp + warmup_min_lr
            fluid.layers.tensor.assign(input=warm_lr, output=lr_var)
        with switch.default():
            # Steps elapsed since warmup ended, and the decay horizon.
            steps_done = step - warmup_boundary * step_each_epoch
            steps_total = (epochs - warmup_boundary) * step_each_epoch
            # Half-cosine from learning_rate (cos=1) down to min_lr (cos=-1).
            cosine_lr = (learning_rate - min_lr) * (
                fluid.layers.cos(steps_done * math.pi / steps_total) + 1) / 2 + min_lr
            fluid.layers.tensor.assign(input=cosine_lr, output=lr_var)

    return lr_var
