import math


def lr_setter(optimizer, epoch, bl=False, *,
              lr=0.01, lrbl=1.0, epochb=20, epochs=80,
              cos=True, epochs_decay=(24, 30)):
    """Set the learning rate of every param group in *optimizer* for *epoch*.

    Three schedules are supported:

    * ``bl=True``  -- balancing-branch schedule: ``lrbl`` decayed by 10x
      every ``epochb / 2`` epochs.
    * ``cos=True`` (default) -- cosine annealing of ``lr`` over ``epochs``
      total epochs, with a small floor term (the ``0.01``/``1.01`` factors
      keep the rate strictly positive at the end of training).
    * otherwise    -- step decay: ``lr`` is multiplied by 0.1 at each
      milestone in ``epochs_decay`` that has been reached.

    Args:
        optimizer: object exposing ``param_groups`` (list of dicts with an
            ``'lr'`` key), e.g. a ``torch.optim.Optimizer``.
        epoch: current epoch index (0-based).
        bl: select the balancing-branch schedule instead of the main one.
        lr: base learning rate for the main schedules.
        lrbl: base learning rate for the balancing-branch schedule.
        epochb: period parameter for the balancing-branch decay.
        epochs: total number of epochs (cosine schedule horizon).
        cos: use cosine annealing; if falsy, use step decay.
        epochs_decay: milestone epochs for the step-decay schedule.

    Returns:
        The learning rate that was written into the param groups.
    """
    if bl:
        # 10x decay every epochb/2 epochs of the balancing branch.
        lr = lrbl * (0.1 ** (epoch // (epochb * 0.5)))
    elif cos:
        # Cosine annealing with a small positive floor at epoch == epochs.
        lr *= (0.01 + math.cos(0.5 * (math.pi * epoch / epochs))) / 1.01
    else:
        # Step decay: apply 0.1 for every milestone already passed.
        for milestone in epochs_decay:
            if epoch >= milestone:
                lr *= 0.1
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return lr