from torch.optim.lr_scheduler import LambdaLR

class OSMScheduler(LambdaLR):
    """Linearly decays the learning rate from its base value toward zero.

    The multiplicative factor applied to each base lr at scheduler step
    ``x`` is::

        1.0 - x * (4 * batch_size) / (nums * 4 * epochs)

    so the factor starts at 1.0 and reaches 0 after
    ``nums * epochs / batch_size`` steps (one ``step()`` per batch).
    NOTE(review): stepping past that point makes the factor negative —
    callers are expected not to over-step.

    Keyword Args:
        batch_size (int): samples per batch (default 16).
        nums (int): number of training samples (default 4424;
            63338 for spring, 233244 for all).
        epochs (int): total number of training epochs (default 20).
    """

    def __init__(self, optimizer, **kwargs):
        # dict.get with a default replaces the verbose
        # `kwargs["k"] if "k" in kwargs else d` pattern.
        self.batch_size = kwargs.get("batch_size", 16)
        self.nums = kwargs.get("nums", 4424)
        self.epochs = kwargs.get("epochs", 20)
        self.last_epoch = -1

        # Reads the attributes through `self` at every call, exactly as
        # the original closure did.
        fn = lambda x: 1.0 - float(x) \
            * float(4 * self.batch_size) / \
            (self.nums * 4 * float(self.epochs))

        # `verbose` is deprecated in recent torch and False is the
        # default, so it is no longer passed explicitly.
        super().__init__(optimizer, fn, last_epoch=self.last_epoch)


if __name__ == "__main__":
    import torch
    import torch.optim as optim
    import torch.nn as nn
    import torch.nn.functional as F
    import random
    from torch.nn import CrossEntropyLoss
    import matplotlib.pyplot as plt


    # Define the model
    class Net(nn.Module):
        """Tiny 2-layer MLP (linear -> sigmoid -> linear) for the demo.

        Args:
            n_feature: number of input features.
            n_hidden: hidden layer width.
            n_out: number of output classes.
        """

        def __init__(self, n_feature, n_hidden, n_out):
            super(Net, self).__init__()
            self.hidden = nn.Linear(n_feature, n_hidden)
            self.out = nn.Linear(n_hidden, n_out)
            self.init_weights()

        def init_weights(self):
            """Uniform(-0.5, 0.5) init for weights, zeros for biases."""
            initrange = 0.5
            self.hidden.weight.data.uniform_(-initrange, initrange)
            self.hidden.bias.data.zero_()
            self.out.weight.data.uniform_(-initrange, initrange)
            self.out.bias.data.zero_()

        def forward(self, x, y=None):
            """Return ``(log_probs, loss)``; ``loss`` is None when ``y`` is None.

            BUG FIX: the original passed the log-softmax output into
            CrossEntropyLoss, which applies log_softmax internally —
            a double log-softmax. The loss is now computed from the
            raw logits; the returned log-probabilities are unchanged.
            """
            h = torch.sigmoid(self.hidden(x))
            logits = self.out(h)
            out = F.log_softmax(logits, dim=1)
            loss = None
            if y is not None:
                # CrossEntropyLoss expects unnormalized logits.
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits, y)
            return out, loss


    import os

    # Build a tiny synthetic dataset: 16 identical batches of 32 samples.
    data_x = [torch.randn(32, 50)] * 16
    data_y = [[1 if random.random() > 0.5 else 0 for j in range(32)]] * 16

    # Model
    net = Net(n_feature=50, n_hidden=10, n_out=2)

    # Optimizer (lr=1 so the plotted lr equals the schedule factor directly)
    optimizer = optim.Adam(net.parameters(), lr=1)

    # Learning-rate schedule; `fn` mirrors the scheduler's internal lambda
    # (defaults: batch_size=16, nums=4424, epochs=20) for cross-checking.
    scheduler = OSMScheduler(optimizer)
    fn = lambda x: 1.0-float(x)*float(4*16)/(4424*4 * float(20))

    base_lr = scheduler.base_lrs[0]
    print(base_lr)
    # get_last_lr() replaces the deprecated get_lr(), which warns when
    # called outside of scheduler.step(); the value is identical here.
    print(scheduler.get_last_lr()[0])
    print(scheduler.last_epoch)
    print("=========================")

    # Train and record the lr once per epoch for plotting.
    x_plot = []
    y_plot = []
    for epoch in range(20):
        for step, batch in enumerate(zip(data_x, data_y)):
            x, y = batch
            y = torch.tensor(y)
            out, loss = net(x, y)
            loss.backward()
            optimizer.step()
            optimizer.zero_grad()

        x_plot.append(scheduler.last_epoch)
        y_plot.append(scheduler.get_last_lr()[0])
        # Sanity check: actual optimizer lr vs. the reference schedule
        # (should print ~0 each epoch).
        print(optimizer.param_groups[0]["lr"]-fn(epoch))
        scheduler.step()

    plt.plot(x_plot, y_plot, 'r')
    plt.title('lr value of OSMScheduler ')
    plt.xlabel('step')
    plt.ylabel('lr')
    # savefig does not create missing directories — ensure it exists.
    os.makedirs('./image', exist_ok=True)
    plt.savefig('./image/LambdaLR.jpg')






