import numpy as np

def get_lr(global_step, lr_max, total_epochs, steps_per_epoch, train_all):
    """
    Generate a piecewise-constant, step-decayed learning-rate schedule.

    Args:
        global_step (int): current training step; the returned schedule
            starts from this step (earlier entries are sliced off).
        lr_max (float): base (maximum) learning rate.
        total_epochs (int): total number of training epochs.
        steps_per_epoch (int): number of optimizer steps per epoch.
        train_all (bool): if True, use the fine-grained 6-stage decay
            (drop 10x every 10 epochs up to epoch 50); otherwise use the
            coarser 3-stage decay (drops at epochs 60 and 90).

    Returns:
        numpy.ndarray: float32 array of per-step learning rates, of length
        ``total_epochs * steps_per_epoch - global_step``.
    """
    total_steps = steps_per_epoch * total_epochs
    # Decay schedule as data: boundaries (in epochs) and the multiplier
    # applied to lr_max within each resulting interval.
    if train_all:
        boundary_epochs = [10, 20, 30, 40, 50]
        multipliers = [1.0, 0.1, 0.01, 0.001, 0.0001, 0.00001]
    else:
        boundary_epochs = [60, 90]
        multipliers = [1.0, 0.1, 0.01]
    boundary_steps = [epoch * steps_per_epoch for epoch in boundary_epochs]
    # Vectorized replacement for the per-step if/elif chain: for each step i,
    # searchsorted(side='right') yields the index of the interval containing i
    # (step == boundary belongs to the *next* interval, matching `i < bound`).
    steps = np.arange(total_steps)
    interval_idx = np.searchsorted(boundary_steps, steps, side='right')
    # Compute lr_max * multiplier in float64 first (as the original loop did),
    # then cast once to float32.
    lr_values = lr_max * np.array(multipliers)
    lr_each_step = lr_values[interval_idx].astype(np.float32)
    learning_rate = lr_each_step[global_step:]
    print("learning_rate is {}".format(str(learning_rate)))
    return learning_rate