import random
import string
from torch.optim import Adam, AdamW


def pair(val):
    """Return *val* as a 2-tuple, duplicating a scalar into ``(val, val)``.

    Tuples are passed through untouched; anything else is doubled.
    Raises AssertionError if a passed tuple is not length 2.
    """
    if isinstance(val, tuple):
        out = val
    else:
        out = (val, val)
    assert len(out) == 2
    return out


def generate_random_string(length):
    """Return a random alphanumeric string of the given *length*.

    Draws uniformly from ASCII letters and digits via ``random.choice``
    (not cryptographically secure — fine for run ids / filenames).
    """
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))


def separate_weight_decay_params(params):
    """Partition *params* into ``(decay, no_decay)`` lists by dimensionality.

    Tensors with ``ndim >= 2`` (weight matrices, conv kernels) go in the
    weight-decay group; 0-/1-dim tensors (biases, norm scales) are exempt.
    """
    decay, no_decay = [], []
    for p in params:
        (decay if p.ndim >= 2 else no_decay).append(p)
    return decay, no_decay


def get_optimizer(params, lr=1e-4, weight_decay=1e-2, betas=(0.9, 0.99), eps=1e-8, filter_by_requires_grad=False,
                  group_weight_decay_params=True, **kwargs):
    """Build an Adam or AdamW optimizer over *params*.

    Args:
        params: iterable of tensors/parameters to optimize.
        lr: learning rate.
        weight_decay: decoupled weight decay; ``0`` selects plain Adam.
        betas: Adam momentum coefficients.
        eps: numerical-stability epsilon.
        filter_by_requires_grad: drop params with ``requires_grad == False``.
        group_weight_decay_params: when decaying, split params into two
            groups so that < 2-dim tensors (biases, norm scales) are
            exempt from weight decay.
        **kwargs: accepted and ignored, for call-site convenience.

    Returns:
        ``torch.optim.Adam`` when ``weight_decay == 0``, else
        ``torch.optim.AdamW``.
    """
    if filter_by_requires_grad:
        # Bug fix: the tensor attribute is `requires_grad`; the previous
        # `t.required_grads` raised AttributeError on every tensor.
        params = [t for t in params if t.requires_grad]

    # No decay anywhere -> plain Adam; AdamW's decoupled decay is a no-op.
    if weight_decay == 0:
        return Adam(params, lr=lr, betas=betas, eps=eps)

    if group_weight_decay_params:
        # Exempt biases/norm parameters (ndim < 2) from weight decay.
        weight_decay_params, no_weight_decay_params = separate_weight_decay_params(params)

        params = [
            {'params': weight_decay_params},
            {'params': no_weight_decay_params, 'weight_decay': 0},
        ]

    return AdamW(params, lr=lr, weight_decay=weight_decay, betas=betas, eps=eps)
