from paddle.optimizer import Adam, SGD
from paddle.optimizer.lr import CosineAnnealingDecay


# Registries of supported classes. An explicit mapping replaces the previous
# eval() on config-supplied strings, which was a code-injection risk and could
# resolve arbitrary global names.
_LR_SCHEDULERS = {"CosineAnnealingDecay": CosineAnnealingDecay}
_OPTIMIZERS = {"Adam": Adam, "SGD": SGD}


def generate_optimizer(paramters, **kwargs):
    """Build a learning-rate scheduler and an optimizer from a config dict.

    NOTE: the parameter name ``paramters`` is a typo but is kept for
    backward compatibility with existing keyword callers.

    Args:
        paramters: Iterable of model parameters passed to the optimizer.
        **kwargs: Optimizer config. Must contain ``name`` (one of
            ``_OPTIMIZERS``) and ``lr``, a dict with a ``name`` key (one of
            ``_LR_SCHEDULERS``) plus the scheduler's constructor kwargs.
            Remaining keys are forwarded to the optimizer constructor.

    Returns:
        tuple: ``(lr_scheduler, optimizer)``.

    Raises:
        KeyError: If ``lr`` or ``name`` keys are missing from the config.
        ValueError: If a scheduler or optimizer name is not registered.
    """
    # Copy the nested scheduler config so the caller's dict is not mutated
    # (the original popped keys from it in place).
    lr_cfg = dict(kwargs.pop("lr"))
    lr_name = lr_cfg.pop("name")
    try:
        lr = _LR_SCHEDULERS[lr_name](**lr_cfg)
    except KeyError:
        raise ValueError(f"Unknown lr scheduler: {lr_name!r}") from None

    optim_name = kwargs.pop("name")
    try:
        optim = _OPTIMIZERS[optim_name](
            learning_rate=lr, parameters=paramters, **kwargs
        )
    except KeyError:
        raise ValueError(f"Unknown optimizer: {optim_name!r}") from None

    return lr, optim


def load_optimizer(optimizer, optim_path):
    """Restore optimizer state from a checkpoint path.

    Unimplemented stub: the body is a bare ``pass``, so calling this is
    currently a no-op.

    Args:
        optimizer: Optimizer instance whose state would be restored —
            presumably a paddle optimizer from ``generate_optimizer``;
            TODO confirm against callers.
        optim_path: Filesystem path to the saved optimizer state —
            NOTE(review): expected format (e.g. ``.pdopt``) not shown here;
            verify when implementing.
    """
    pass
