# -*- coding: utf-8 -*-
"""
# @file name    : lr_decay_scheduler.py
# @author       : QuZhang
# @date         : 2020-12-20 10:20
# @brief        : 学习率下降策略
"""
import torch
import torch.optim as optim
from matplotlib import pyplot as plt
import os


os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"

if __name__ == '__main__':
    LR = 0.1          # initial learning rate shared by every scheduler demo below
    iteration = 10    # inner "training" steps per epoch
    max_epoch = 200   # number of epochs simulated for each scheduler

    # ---- fake data and optimizer ---------
    # A single learnable scalar driven toward a zero target is enough to make
    # optimizer.step() legal; only the learning-rate curves matter here.
    weights = torch.randn(1, requires_grad=True)
    target = torch.zeros(1)

    optimizer = optim.SGD([weights], lr=LR, momentum=0.9)

    # ----------- 1 Step LR ----------
    # Decay the learning rate by a constant factor every fixed number of epochs.
    flag = False  # flip to True to run this demo
    if flag:
        # StepLR arguments:
        #   optimizer : the optimizer whose lr is managed
        #   step_size : decay the lr once every `step_size` epochs
        #   gamma     : multiplicative decay factor (lr <- lr * gamma)
        scheduler_lr = optim.lr_scheduler.StepLR(optimizer, step_size=50, gamma=0.1)

        lr_list, epoch_list = [], []
        for epoch in range(max_epoch):
            # record the lr in force for this epoch
            lr_list.append(scheduler_lr.get_last_lr())
            epoch_list.append(epoch)

            for _ in range(iteration):
                loss = (weights - target) ** 2
                loss.backward()
                optimizer.step()
                optimizer.zero_grad()

            # the scheduler steps once per epoch, after the inner loop
            scheduler_lr.step()

        plt.plot(epoch_list, lr_list, label="Step LR Scheduler")
        plt.xlabel('Epoch')
        plt.ylabel("Learning rate")
        plt.legend()
        plt.show()

    # -------- 2 Multi Step LR -------------
    # Decay the learning rate at a hand-picked set of epochs.
    flag = False  # flip to True to run this demo
    if flag:
        # epochs at which lr is multiplied by gamma (lr <- lr * gamma)
        milestones = [50, 125, 160]
        scheduler_lr = optim.lr_scheduler.MultiStepLR(optimizer, milestones=milestones, gamma=0.1)

        lr_list, epoch_list = [], []
        for epoch in range(max_epoch):
            lr_list.append(scheduler_lr.get_last_lr())  # lr in force this epoch
            epoch_list.append(epoch)

            for _ in range(iteration):
                loss = (weights - target) ** 2
                loss.backward()
                optimizer.step()
                optimizer.zero_grad()

            scheduler_lr.step()  # decay happens when a milestone is crossed

        plt.plot(epoch_list, lr_list, label="Multi Step LR scheduler")
        plt.xlabel("Epoch")
        plt.ylabel("Learning rate")
        plt.legend()
        plt.show()

    # ------------- 3 Exponential LR --------------
    # Decay the learning rate every epoch: lr = LR * gamma**epoch.
    # flag = True
    flag = False
    if flag:
        gamma = 0.95  # exponential decay base
        # BUG FIX: the scheduler previously hard-coded gamma=0.9, silently
        # ignoring the `gamma` variable defined just above; pass it through.
        scheduler_lr = optim.lr_scheduler.ExponentialLR(optimizer, gamma=gamma)

        lr_list, epoch_list = list(), list()
        for epoch in range(max_epoch):
            lr_list.append(scheduler_lr.get_last_lr())  # lr in force this epoch
            epoch_list.append(epoch)
            for i in range(iteration):
                loss = torch.pow((weights - target), 2)
                loss.backward()
                optimizer.step()
                optimizer.zero_grad()

            scheduler_lr.step()  # one decay step per epoch

        # BUG FIX: the label used to read "Multi Step LR scheduler"
        # (copy-pasted from demo 2); label the curve as what it is.
        plt.plot(epoch_list, lr_list, label="Exponential LR scheduler\ngamma:{}".format(gamma))
        plt.xlabel("Epoch")
        plt.ylabel("Learning rate")
        plt.legend()
        plt.show()

    # ------------- 4 Cosine Annealing LR ----------
    # The learning rate follows a cosine curve.
    flag = False  # flip to True to run this demo
    if flag:
        t_max = 50  # epochs needed to reach the minimum lr
        # The lr anneals from its initial value down to eta_min over T_max
        # epochs, then climbs back up over the next T_max epochs, so the
        # full period of the cosine is 2 * T_max.
        lr_scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=t_max, eta_min=0.)

        lr_list, epoch_list = [], []
        for epoch in range(max_epoch):
            lr_list.append(lr_scheduler.get_last_lr())  # lr in force this epoch
            epoch_list.append(epoch)

            for _ in range(iteration):
                loss = (weights - target) ** 2
                loss.backward()
                optimizer.step()
                optimizer.zero_grad()

            lr_scheduler.step()  # advance the cosine by one epoch

        plt.plot(epoch_list, lr_list, label="CosineAnnealingLR scheduler\nT_max:{}".format(t_max))
        plt.xlabel("Epoch")
        plt.ylabel("Learning rate")
        plt.legend()
        plt.show()

    # ------------ 5 Reduce LR On plateau -----------
    # Monitor a metric and shrink the lr when it stops improving.
    # (Removed an unused local `accuracy = 0.9` — it was never read; with
    # mode='max' a metric like accuracy could be monitored instead.)
    # flag = True
    flag = False
    if flag:
        loss_value = 0.5  # fake monitored metric (mode='min': lower is better)

        # scheduler hyper-parameters
        factor = 0.1    # lr <- lr * factor on plateau
        mode = 'min'    # 'min': metric should decrease ('max' for accuracy-like metrics)
        patience = 10   # epochs with no improvement before decaying the lr
        cooldown = 10   # epochs to wait after a decay before monitoring resumes
        min_lr = 1e-4   # lower bound on the lr
        verbose = True  # print a message whenever the lr is reduced
        lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, factor=factor, mode=mode, cooldown=cooldown,
                                                            patience=patience, min_lr=min_lr, verbose=verbose)

        for epoch in range(max_epoch):
            for i in range(iteration):
                # train(....)
                optimizer.step()
                optimizer.zero_grad()
            if epoch == 5:
                # a single fake improvement; afterwards the metric plateaus,
                # so patience/cooldown drive the subsequent lr decays
                loss_value = 0.4
            lr_scheduler.step(loss_value)  # pass the monitored metric on every step

    # ---------- 6 Lambda --------------
    # Custom schedule: each parameter group gets its own lr multiplier
    # function, lr = lr_init * lambda(epoch).
    flag = True
    if flag:
        lr_init = 0.1
        weights_1 = torch.randn((6, 3, 5, 5))
        weights_2 = torch.ones((5, 5))

        optimizer = optim.SGD([
           {'params': [weights_1]},
           {"params": [weights_2]}], lr=lr_init)

        # one lambda per parameter group
        lambda_1 = lambda epoch: 0.1 ** (epoch // 20)  # group 1: step decay every 20 epochs
        lambda_2 = lambda epoch: 0.95 ** epoch  # group 2: exponential decay

        lr_scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=[lambda_1, lambda_2])

        lr_list, epoch_list = list(), list()
        for epoch in range(max_epoch):
            for i in range(iteration):
                # train(...)

                optimizer.step()
                optimizer.zero_grad()

            lr_scheduler.step()  # update the learning rates

            # BUG FIX: get_lr() is only meant to be called from inside
            # step(); calling it elsewhere triggers a PyTorch warning and
            # can report incorrect values. Use get_last_lr(), consistent
            # with every other demo in this file.
            lr_list.append(lr_scheduler.get_last_lr())
            epoch_list.append(epoch)

            print('epoch:{:5d}, lr:{}'.format(epoch, lr_scheduler.get_last_lr()))

        plt.plot(epoch_list, [i[0] for i in lr_list], label="lambda 1")
        plt.plot(epoch_list, [i[1] for i in lr_list], label="lambda 2")
        plt.xlabel("Epoch")
        plt.ylabel("Learning Rate")
        plt.title("LambdaLR")
        plt.legend()
        plt.show()

