# Compute the learning rate for each epoch value and apply it to the optimizer





# Cyclic learning-rate schedule (SGDR: cosine annealing with warm restarts)
import numpy as np
import matplotlib.pyplot as plt


def get_triangular_SGDR(epoch ,T_mul,T_0, min_lr, max_lr):
    """Cosine-annealing learning rate with warm restarts (SGDR).

    Restart cycle i lasts ``T_0 * T_mul**i`` epochs; within a cycle the
    learning rate decays from ``max_lr`` to ``min_lr`` along a half cosine,
    then jumps back to ``max_lr`` at the start of the next cycle.

    Parameters
    ----------
    epoch : int
        Current (0-based) epoch index.
    T_mul : int
        Cycle-length multiplier, >= 1. NOTE: the previous implementation
        hard-coded ``log2`` when locating the current cycle, which was
        only correct for ``T_mul == 2``; this version supports any
        ``T_mul >= 1`` and is unchanged for ``T_mul == 2``.
    T_0 : int
        Length (in epochs) of the first cycle.
    min_lr : float
        Lower bound of the learning rate.
    max_lr : float
        Upper bound of the learning rate.

    Returns
    -------
    float
        Learning rate to apply at this epoch.
    """
    # Walk through completed cycles instead of inverting the geometric
    # series with a logarithm: this is integer-exact (no float-rounding
    # issues at cycle boundaries) and valid for any T_mul >= 1, whereas
    # the old np.log2 closed form only handled T_mul == 2.
    T_i = T_0          # length of the current cycle
    cycle_start = 0    # epoch at which the current cycle began
    while epoch >= cycle_start + T_i:
        cycle_start += T_i
        T_i *= T_mul
    T_cur = epoch - cycle_start  # position inside the current cycle

    cur_lr = min_lr + 0.5 * (max_lr - min_lr) * (1 + np.cos(np.pi * T_cur / T_i))
    return cur_lr


if __name__ == '__main__':
    # Demo: trace how the SGDR learning rate evolves over a training run
    # and visualise the warm-restart pattern.
    total_epochs = 100
    cycle_mult = 2        # each restart cycle is twice as long as the last
    first_cycle_len = 10  # epochs in the first cycle
    lr_floor = 0.0001
    lr_ceil = 0.001

    # In real training, each value would be pushed into the optimizer
    # for the corresponding epoch.
    lr_trend = [
        get_triangular_SGDR(ep, cycle_mult, first_cycle_len, lr_floor, lr_ceil)
        for ep in range(total_epochs)
    ]

    plt.plot(lr_trend)
    plt.show()
# References:
# https://blog.csdn.net/weixin_43896398/article/details/84762886
# https://blog.csdn.net/bule_sky_wait_me/article/details/119004756

