def check_skip_list(name, skip_list):
    """Return True if any substring in *skip_list* occurs in *name*.

    Args:
        name (str): parameter name to test.
        skip_list (iterable[str]): substrings that mark a match.

    Returns:
        bool: True when at least one substring matches, False otherwise
        (including when *skip_list* is empty).
    """
    # any() short-circuits exactly like the original early-return loop.
    return any(s in name for s in skip_list)


def add_weight_decay(model, weight_decay=1e-5, skip_list=('pos_embed', 'cls_token')):
    """Split a model's trainable parameters into weight-decay groups.

    Biases, 1-D parameters (e.g. norm scales/shifts), and parameters whose
    name contains any substring in *skip_list* are exempted from weight
    decay; all remaining parameters receive *weight_decay*.

    Args:
        model: object exposing ``trainable_params()`` returning parameters
            with ``.name``, ``.shape`` and ``.requires_grad`` attributes
            (e.g. a MindSpore ``Cell`` — TODO confirm against callers).
        weight_decay (float): decay factor for the regular weight group.
        skip_list (tuple[str, ...]): name substrings exempt from decay.

    Returns:
        list[dict]: optimizer parameter groups, each with ``'params'`` and
        ``'weight_decay'`` keys. The zero-decay group is omitted when empty.
    """
    decay, no_decay = [], []
    for param in model.trainable_params():
        if not param.requires_grad:
            continue  # frozen weights
        # 1-D tensors, biases, and skip-listed names get no weight decay.
        if (len(param.shape) == 1
                or param.name.endswith(".bias")
                or any(s in param.name for s in skip_list)):
            no_decay.append(param)
        else:
            decay.append(param)
    if no_decay:
        return [
            {'params': no_decay, 'weight_decay': 0.},
            {'params': decay, 'weight_decay': weight_decay}]
    return [{'params': decay, 'weight_decay': weight_decay}]
