import torch
from torch import nn
import math
# internal helper
def lora_forward_hook(layer: nn.Module, fea_in, fea_out):
    """Forward hook that adds the LoRA low-rank delta to a wrapped layer's output.

    Registered on a layer by ``warp_layer_with_lora``; computes
    ``fea_out + dropout(lora_up(lora_down(x))) * scale``.

    Args:
        layer: the wrapped module, carrying ``lora_down``, ``lora_up``,
            ``dropout`` and the ``scale`` buffer.
        fea_in: the layer's positional inputs. PyTorch passes forward-hook
            inputs as a *tuple*, so the tensor must be unpacked before it can
            be fed to ``lora_down`` (the previous code passed the tuple
            straight through, which raises a TypeError).
        fea_out: the layer's original output tensor.

    Returns:
        The original output plus the scaled LoRA branch.
    """
    x = fea_in[0] if isinstance(fea_in, tuple) else fea_in
    return fea_out + layer.dropout(layer.lora_up(layer.lora_down(x))) * layer.scale

# internal helper
def unwarp_lora_layer(layer):
    """Strip every LoRA artifact that ``warp_layer_with_lora`` attached.

    Detaches the forward hook, then deletes the adapter submodules, the
    dropout, the ``scale`` buffer, and the stored hook handle, restoring
    the layer to its pre-wrap state (the merged weight, if any, is kept).
    """
    layer.lora_hook_handle.remove()
    for name in ('lora_down', 'dropout', 'lora_up', 'scale', 'lora_hook_handle'):
        delattr(layer, name)
# internal helper
def extract_lora_state_layer(layer: nn.Module):
    """Return the LoRA tensors of a single wrapped layer as a small dict.

    ``state_dict()`` flattens submodule parameters into dotted keys, so the
    down/up projections live under ``'lora_down.weight'`` / ``'lora_up.weight'``
    (the previous lookup of bare ``'lora_down'`` / ``'lora_up'`` raised a
    KeyError); ``'scale'`` is a buffer registered directly on the layer.

    Args:
        layer: a module previously wrapped by ``warp_layer_with_lora``.

    Returns:
        Dict with keys ``'lora_down'``, ``'lora_up'`` and ``'scale'`` mapping
        to the corresponding tensors.
    """
    sd_layer = layer.state_dict()
    sd_lora = {
        'lora_down': sd_layer['lora_down.weight'],
        'lora_up': sd_layer['lora_up.weight'],
        'scale': sd_layer['scale'],
    }
    return sd_lora
###############################################################################
def warp_layer_with_lora(layer: nn.Module, rank, dropout=0.1, scale=1.0):
    """Attach a LoRA adapter (down/up projection + dropout + hook) to ``layer``.

    The layer is modified in place: ``lora_down``, ``dropout``, ``lora_up``
    submodules and a ``scale`` buffer are added, and a forward hook injects
    the low-rank delta into the layer's output.

    Args:
        layer: the ``nn.Linear`` or ``nn.Conv2d`` to wrap.
        rank: bottleneck dimension of the low-rank decomposition.
        dropout: dropout probability applied to the LoRA branch.
        scale: multiplier for the LoRA delta, stored as a buffer.

    Raises:
        NotImplementedError: if ``layer`` is neither Linear nor Conv2d.
    """
    if isinstance(layer, nn.Linear):
        layer.lora_down = nn.Linear(layer.in_features, rank, bias=False)
        layer.dropout = nn.Dropout(dropout)
        layer.lora_up = nn.Linear(rank, layer.out_features, bias=False)
        layer.register_buffer('scale', torch.tensor(scale))
    elif isinstance(layer, nn.Conv2d):
        # Conv2d exposes in_channels/out_channels, not in_features/out_features;
        # the previous attribute names raised AttributeError for every conv layer.
        layer.lora_down = nn.Conv2d(layer.in_channels, rank, kernel_size=layer.kernel_size,
                    stride=layer.stride, padding=layer.padding, dilation=layer.dilation,
                    groups=layer.groups, bias=False)
        layer.dropout = nn.Dropout(dropout)
        # 1x1 up-projection keeps the spatial size produced by lora_down.
        layer.lora_up = nn.Conv2d(rank, layer.out_channels, kernel_size=1, stride=1, padding=0, bias=False)
        layer.register_buffer('scale', torch.tensor(scale))
    else:
        raise NotImplementedError('lora support only Linear and Conv2d now.')

    # Standard LoRA init: random down-projection, zeroed up-projection, so the
    # wrapped layer's output is unchanged until training updates lora_up.
    nn.init.kaiming_uniform_(layer.lora_down.weight, a=math.sqrt(5))
    nn.init.zeros_(layer.lora_up.weight)
    h = layer.register_forward_hook(lora_forward_hook)
    layer.lora_hook_handle = h

def collapse_lora_layer(layer: nn.Module, alpha=None, remove_lora=True):
    """Fold the LoRA delta into ``layer.weight`` in place.

    Args:
        layer: a previously LoRA-wrapped ``nn.Linear`` or ``nn.Conv2d``.
        alpha: merge strength; defaults to the ``scale`` buffer stored when
            the layer was wrapped.
        remove_lora: when True, strip the LoRA modules/hook afterwards via
            ``unwarp_lora_layer``.

    Raises:
        NotImplementedError: for unsupported layer types (previously they
            fell through the if/elif silently — no merge performed — and
            then crashed inside ``unwarp_lora_layer`` with AttributeError).
    """
    if isinstance(layer, nn.Linear):
        delta = layer.lora_up.weight.data @ layer.lora_down.weight.data
    elif isinstance(layer, nn.Conv2d):
        # The 1x1 lora_up composed with lora_down collapses to a single conv
        # kernel: matmul over the flattened (rank, in*kh*kw) view, then
        # reshape back to the original (out, in, kh, kw) weight shape.
        delta = (layer.lora_up.weight.data.flatten(1) @
                 layer.lora_down.weight.data.flatten(1)).reshape(layer.weight.data.shape)
    else:
        raise NotImplementedError('lora support only Linear and Conv2d now.')

    if alpha is None:
        alpha = layer.scale

    layer.weight = nn.Parameter(
        layer.weight.data +
        (alpha * delta).to(layer.weight.device, dtype=layer.weight.dtype)
    )
    if remove_lora:
        unwarp_lora_layer(layer)


def extract_lora_state(model: nn.Module):
    """Gather the LoRA tensors of every wrapped submodule of ``model``.

    Walks ``model.named_modules()`` and, for each module carrying a
    ``lora_down`` attribute (i.e. wrapped by ``warp_layer_with_lora``),
    stores its LoRA tensors under keys prefixed by the module's dotted name.
    """
    collected = {}
    for prefix, module in model.named_modules():
        if not hasattr(module, 'lora_down'):
            continue
        for key, tensor in extract_lora_state_layer(module).items():
            collected[f'{prefix}.{key}'] = tensor
    return collected