import loralib as lora
import torch
import logging

def lora_sub(model, keys, lora_alpha=1, lora_r=16, lora_dropout=0.0, merge_weights=True, use_adalora=False):
    """Replace matching ``torch.nn.Linear`` sub-modules of *model* with LoRA layers.

    Args:
        model: the module whose sub-modules are modified in place.
        keys: list of substrings matched against dotted module names, e.g.
            ``['query']`` or ``['query', 'key']``. The special entry
            ``'all'`` replaces every ``nn.Linear``.
        lora_alpha: forwarded to the loralib layer constructor.
        lora_r: LoRA rank, forwarded as ``r``.
        lora_dropout: forwarded to the loralib layer constructor.
        merge_weights: forwarded to the loralib layer constructor.
        use_adalora: if True, build ``lora.SVDLinear`` instead of ``lora.Linear``.

    Returns:
        The same *model* object, modified in place.
    """
    replace_all = 'all' in keys  # loop-invariant: hoisted out of the per-module check
    # Collect targets first: calling setattr while named_modules() is still
    # iterating the module tree mutates the dict being traversed.
    targets = [
        (name, module)
        for name, module in model.named_modules()
        if isinstance(module, torch.nn.Linear)
        and (replace_all or any(k in name for k in keys))
    ]
    for name, module in targets:
        if not name:
            # The root module itself has no parent to setattr on; skip it
            # (the original code would have crashed here with an empty name).
            logging.warning('root module matched but cannot be replaced')
            continue
        logging.info('%s will be replaced by lora Linear', name)
        # `module.bias != None` would do an elementwise tensor comparison;
        # an identity check is the correct way to test for a missing bias.
        bias = module.bias is not None
        logging.info('%s: %s', name, bias)
        layer_cls = lora.SVDLinear if use_adalora else lora.Linear
        new_layer = layer_cls(
            module.in_features,
            module.out_features,
            bias=bias,
            lora_dropout=lora_dropout,
            lora_alpha=lora_alpha,
            r=lora_r,
            merge_weights=merge_weights,
        )
        # Walk down to the immediate parent, then swap the attribute in place.
        parent_path, _, attr_name = name.rpartition('.')
        parent = model
        if parent_path:
            for part in parent_path.split('.'):
                parent = getattr(parent, part)
        setattr(parent, attr_name, new_layer)

    return model

     
