# Environment setup (reference tutorial):
# https://blog.csdn.net/junqing_wu/article/details/93248190
import torch
import torch.optim as optim
from torch.optim import lr_scheduler
from torchvision.models import AlexNet
import matplotlib.pyplot as plt

# A small AlexNet head (2 classes) — only used as a source of parameters
# for the optimizer; the demos below never train it.
model = AlexNet(num_classes=2)

# Plain SGD with initial lr = 0.05; every scheduler demo below wraps this
# optimizer and only modulates its learning rate.
optimizer = optim.SGD(model.parameters(), lr=0.05)

#  https://arxiv.org/abs/1608.03983  (SGDR: cosine annealing with warm restarts)
# T_0: number of epochs in the first restart cycle
# T_mult: factor by which the cycle length grows after each restart (T_i = T_i * T_mult)
'''
  def __init__(self, optimizer, T_0, T_mult=1, eta_min=0, last_epoch=-1):
        if T_0 <= 0 or not isinstance(T_0, int):
            raise ValueError("Expected positive integer T_0, but got {}".format(T_0))
        if T_mult < 1 or not isinstance(T_mult, int):
            raise ValueError("Expected integer T_mul >= 1, but got {}".format(T_mul))
        self.T_0 = T_0
        self.T_i = T_0
        self.T_mult = T_mult
        self.eta_min = eta_min
        super(CosineAnnealingWarmRestarts, self).__init__(optimizer, last_epoch)
        self.T_cur = last_epoch
'''

# scheduler = lr_scheduler.CosineAnnealingWarmRestarts(optimizer,T_0=10,T_mult=2,eta_min=0)
#
# plt.figure()
# x = list(range(100))
# y = []
# for epoch in range(100):
#     scheduler.step()
#     lr = scheduler.get_lr()
#     y.append(scheduler.get_lr()[0])
#
# plt.plot(x, y)

#
# scheduler = lr_scheduler.CyclicLR(optimizer,base_lr=0.05,max_lr=0.1,step_size_up=10,step_size_down=10)
#
# plt.figure()
# x = list(range(100))
# y = []
# for epoch in range(100):
#     scheduler.step()
#     lr = scheduler.get_lr()
#     y.append(scheduler.get_lr()[0])
#
# plt.plot(x, y)


# MultiStepLR: multiply the lr by `gamma` at each epoch listed in `milestones`.
# (StepLR is the fixed-interval variant, kept commented out for comparison.)

# scheduler = lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.5)
scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[40, 60, 80], gamma=0.8)
plt.figure()
x = list(range(100))
y = []
for epoch in range(100):
    scheduler.step()
    # Use get_last_lr(): calling get_lr() outside of step() is deprecated and
    # can return adjusted (not actual) values. One lr per param group; this
    # optimizer has a single group, so take index 0.
    y.append(scheduler.get_last_lr()[0])

plt.plot(x, y)
plt.show()
'''
 The lr_lambda argument is a function applied to the epoch index; the scheduler
 sets lr = scale * init_lr, so you can define your own lambda/schedule.

Args:
    optimizer (Optimizer): Wrapped optimizer.
    lr_lambda (function or list): A function which computes a multiplicative factor given an integer parameter epoch, or a list of such functions, one for each group in optimizer.param_groups.
    last_epoch (int): The index of last epoch. Default: -1.
'''

# LambdaLR: lr = init_lr (0.05) * lr_lambda(epoch).
# Step-decay example (scale = epoch // 30):
# lambda1 = lambda epoch: epoch // 30
# Exponential decay: lr = 0.05 * 0.9999 ** epoch
lambda2 = lambda epoch: 0.9999 ** epoch

scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda2)
plt.figure()
x = list(range(100000))
y = []
for epoch in range(100000):
    scheduler.step()
    # get_last_lr() returns one lr per param group; append the scalar for the
    # single group (matches the MultiStepLR demo and keeps y a list of floats
    # rather than a list of 1-element lists).
    y.append(scheduler.get_last_lr()[0])

plt.plot(x, y)
plt.show()