# -*- coding: utf-8 -*-
# ===========================================
# @Time    : 2021/8/26 下午4:03
# @Author  : shutao
# @FileName: lr_scheduler.py
# @remark  : 
# 
# @Software: PyCharm
# Github 　： https://github.com/NameLacker
# ===========================================

import paddle
import math


class LRScheduler:
    """Factory wrapper that builds a ``paddle.optimizer.lr`` scheduler by name.

    Parameters
    ----------
    name : str
        Scheduler type. One of: ``PiecewiseDecay``, ``PolynomialDecay``,
        ``NoamDecay``, ``ExponentialDecay``, ``NaturalExpDecay``,
        ``ReduceOnPlateau``.
    lr : float
        Base learning rate.
    iters_per_epoch, total_epoch : int
        Their product, when positive, is used as the total iteration count.
    max_iters : int
        Fallback total iteration count when ``iters_per_epoch * total_epoch``
        is not positive. At least one of the two ways must yield a positive
        step count.
    **kwargs
        Scheduler-specific parameters (e.g. ``gamma``, ``power``,
        ``boundaries``/``values``). They are stored as instance attributes so
        ``_get_lr_func`` can read them via ``self.<param>``.

    Raises
    ------
    ValueError
        If no positive step count is given, if ``name`` is unsupported, or if
        a parameter required by the chosen scheduler is missing.
    """

    def __init__(self, name, lr, iters_per_epoch=0, total_epoch=0, max_iters=0, **kwargs):
        epoch_iters = iters_per_epoch * total_epoch
        # NOTE: this used to be an `assert`, which is stripped under
        # `python -O`; raise explicitly so invalid configs always fail loudly.
        if epoch_iters <= 0 and max_iters <= 0:
            raise ValueError(
                "Either iters_per_epoch * total_epoch or max_iters must be > 0")
        self.lr = lr
        self.total_iters = epoch_iters if epoch_iters > 0 else max_iters

        # Expose scheduler-specific kwargs as instance attributes.
        self.__dict__.update(kwargs)

        self.lr_func = self._get_lr_func(name)

    def _require(self, name, *params):
        """Raise ValueError if any scheduler parameter in *params* is missing.

        Replaces the former ``assert hasattr(...)`` checks, which would be
        stripped under ``python -O`` and later surface as an opaque
        ``AttributeError``.
        """
        for param in params:
            if not hasattr(self, param):
                raise ValueError(
                    "Scheduler {} requires parameter {!r}".format(name, param))

    def _get_lr_func(self, name):
        """Build and return the paddle LR scheduler instance for *name*."""
        if name == "PiecewiseDecay":  # piecewise-constant decay
            self._require(name, "boundaries", "values")
            return paddle.optimizer.lr.PiecewiseDecay(self.boundaries,
                                                      self.values)
        if name == "PolynomialDecay":  # polynomial decay over total_iters
            self._require(name, "power")
            return paddle.optimizer.lr.PolynomialDecay(self.lr,
                                                       self.total_iters,
                                                       power=self.power)
        if name == "NoamDecay":  # Noam (transformer warmup) decay
            self._require(name, "d_model", "warmup_steps")
            return paddle.optimizer.lr.NoamDecay(d_model=self.d_model,
                                                 warmup_steps=self.warmup_steps,
                                                 learning_rate=self.lr)
        if name == "ExponentialDecay":  # exponential decay
            self._require(name, "gamma")
            return paddle.optimizer.lr.ExponentialDecay(self.lr,
                                                        gamma=self.gamma)
        if name == "NaturalExpDecay":  # natural-exponential decay
            self._require(name, "gamma")
            return paddle.optimizer.lr.NaturalExpDecay(self.lr,
                                                       gamma=self.gamma)
        if name == "ReduceOnPlateau":  # metric-driven (loss plateau) decay
            self._require(name, "factor", "patience")
            return paddle.optimizer.lr.ReduceOnPlateau(self.lr,
                                                       factor=self.factor,
                                                       patience=self.patience)
        raise ValueError("Scheduler name {} is not supported.".format(name))
