# Cyclical learning-rate schedule (triangular policy)
import numpy as np
import matplotlib.pyplot as plt


def get_triangular_lr(iteration, stepsize, min_lr, max_lr):
    """
    Return the triangular cyclical learning rate for *iteration*.

    The schedule ramps linearly from ``min_lr`` up to ``max_lr`` and back
    down again, completing one full triangle every ``2 * stepsize``
    iterations (Smith's "triangular" CLR policy).
    """
    # 1-based index of the cycle this iteration belongs to.
    current_cycle = np.floor(1 + iteration / (2 * stepsize))
    # Normalised distance from the triangle's peak, in [0, 1].
    distance_from_peak = np.abs(iteration / stepsize - 2 * current_cycle + 1)
    # Linear interpolation between the two bounds; clamp keeps it >= min_lr.
    scale = np.maximum(0, 1 - distance_from_peak)
    return min_lr + (max_lr - min_lr) * scale


if __name__ == '__main__':
    # Demo: visualise how the triangular LR evolves over training iterations.
    num_iterations = 10000  # total number of iterations to simulate
    stepsize = 1000         # half the length of one full up/down cycle
    min_lr = 0.0001
    max_lr = 0.001

    lr_trend = []
    for iteration in range(num_iterations):
        # In real training you would feed this lr to your optimizer here.
        lr_trend.append(get_triangular_lr(iteration, stepsize, min_lr, max_lr))

    plt.plot(lr_trend)
    plt.show()
# Reference: https://blog.csdn.net/weixin_43896398/article/details/84762886