import copy
import torch


def build_optimizer(cfg, parameter_list=None):
    """
    Build an optimizer to optimize parameters according to the ``OPTIMIZER`` field in configuration.

    In configuration:
    OPTIMIZER:
        name: SGD
        opt_params:
            lr: 0.05
            momentum: 0.9
            weight_decay: 0.0001

    or

    OPTIMIZER:
        name: Adam
        opt_params:
            lr: 0.05
            betas: (0.9, 0.999)
            eps: 1e-08
            weight_decay: 0.0001

    or

    OPTIMIZER:
        name: RMSprop
        opt_params:
            lr: 0.05
            alpha: 0.99
            momentum: 0.9
            eps: 1e-08
            weight_decay: 0.0001

    Args:
        cfg (dict): optimizer configuration. Must contain a ``name`` key (the
            class name of an optimizer in ``torch.optim``) and an
            ``opt_params`` dict of keyword arguments for that class;
            ``opt_params`` must include ``lr``.
        parameter_list (iterable): parameters (or param-group dicts) to be
            optimized.

    Returns:
        torch.optim.Optimizer: the constructed optimizer.

    Raises:
        ValueError: if ``opt_params`` is missing or not a dict, if ``lr`` is
            absent from ``opt_params``, or if ``name`` is not an optimizer
            class in ``torch.optim``.
    """
    # Shallow copy so popping 'name' does not mutate the caller's cfg.
    cfg_copy = cfg.copy()
    opt_name = cfg_copy.pop('name')

    opt_params = cfg_copy.get('opt_params')
    # Explicit raise instead of `assert`: asserts are stripped under `-O`,
    # so they must not carry input validation.
    if not isinstance(opt_params, dict):
        raise ValueError("The opt_params dict must be defined")
    # Test key presence, not truthiness: lr=0.0 is a legal (if unusual)
    # value and must not be rejected.
    if 'lr' not in opt_params:
        raise ValueError("The learning_rate param must be defined, please check it")

    # Fail with a clear message instead of a bare AttributeError when the
    # configured name is not a torch.optim optimizer class.
    optimizer_cls = getattr(torch.optim, opt_name, None)
    if optimizer_cls is None:
        raise ValueError(f"Unknown optimizer name: {opt_name!r}")

    return optimizer_cls(params=parameter_list, **opt_params)


def get_lr(optimizer: torch.optim.Optimizer):
    """Return the learning rate of the optimizer's first param group."""
    first_group = optimizer.param_groups[0]
    return first_group['lr']
