import torch
import torch.nn as nn
import torch.nn.functional as F
import  math

class LoRALayer(nn.Module):
    """Wrap a frozen layer with a trainable low-rank (LoRA) adapter.

    The original layer's weights are frozen; only the low-rank down/up
    projection pair is trainable.  Because ``lora_up`` is zero-initialized,
    the wrapped module is functionally identical to the original layer at
    initialization time.

    Args:
        original_layer: ``nn.Linear`` or ``nn.Conv2d`` to adapt.
        rank: Rank of the low-rank decomposition.
        alpha: Scaling numerator; adapter output is scaled by ``alpha / rank``.
        dropout: Dropout probability applied to the adapter input.

    Raises:
        TypeError: If ``original_layer`` is neither Linear nor Conv2d.
    """

    def __init__(self, original_layer, rank=2, alpha=1, dropout=0.3):
        super().__init__()
        self.original_layer = original_layer  # frozen base layer

        # Freeze the original weights — only the LoRA projections train.
        for param in original_layer.parameters():
            param.requires_grad = False

        if isinstance(original_layer, nn.Linear):
            self.lora_down = nn.Linear(original_layer.in_features, rank, bias=False)
            self.lora_up = nn.Linear(rank, original_layer.out_features, bias=False)
        elif isinstance(original_layer, nn.Conv2d):
            # Conv LoRA: the down-projection mirrors the original conv's
            # geometry (so spatial output sizes match), the up-projection is
            # a 1x1 conv back to the original channel count.
            # NOTE(review): grouped convs (groups > 1) are not handled.
            self.lora_down = nn.Conv2d(
                original_layer.in_channels,
                rank,
                kernel_size=original_layer.kernel_size,
                stride=original_layer.stride,
                padding=original_layer.padding,
                dilation=original_layer.dilation,
                bias=False,
            )
            self.lora_up = nn.Conv2d(
                rank, original_layer.out_channels, kernel_size=1, bias=False
            )
        else:
            # Fail fast with a clear message instead of crashing later with
            # an AttributeError in the init calls below.
            raise TypeError(
                "LoRALayer supports nn.Linear and nn.Conv2d, got "
                f"{type(original_layer).__name__}"
            )

        # Standard LoRA init: random down-projection, zeroed up-projection,
        # so the adapter contributes nothing until it has been trained.
        nn.init.kaiming_uniform_(self.lora_down.weight, a=math.sqrt(5))
        nn.init.zeros_(self.lora_up.weight)

        self.scaling = alpha / rank
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        """Return frozen base output plus scaled low-rank adapter output."""
        original_output = self.original_layer(x)
        lora_output = self.lora_up(self.lora_down(self.dropout(x)))
        return original_output + self.scaling * lora_output


def replace_layers_with_lora(model, rank=8, alpha=16, target_keywords=("q_proj", "v_proj")):
    """Recursively replace matching ``nn.Linear`` children with LoRA wrappers.

    Modifies ``model`` in place: every ``nn.Linear`` whose attribute name
    contains any of ``target_keywords`` is swapped for a ``LoRALayer``
    wrapping it; all other modules are recursed into.

    Args:
        model: Root module to modify in place.
        rank: LoRA rank forwarded to ``LoRALayer``.
        alpha: LoRA scaling numerator forwarded to ``LoRALayer``.
        target_keywords: Substrings matched against child attribute names
            (immutable tuple default — avoids the mutable-default pitfall).
    """
    for name, module in model.named_children():
        if isinstance(module, nn.Linear):
            if any(key in name for key in target_keywords):
                setattr(model, name, LoRALayer(module, rank, alpha))
        else:
            # Recurse into containers / composite modules.
            replace_layers_with_lora(module, rank, alpha, target_keywords)



def save_lora_weights(model, save_path, target_keywords):
    """Save only the trainable LoRA adapter weights to ``save_path``.

    A parameter is saved when its name contains a target keyword AND belongs
    to a LoRA projection (``"lora_"`` in the name).  The second condition
    excludes the frozen original weights nested under a matching layer
    (e.g. ``"q_proj.original_layer.weight"``), which would otherwise slip
    through a keyword-only filter.

    Args:
        model: Model whose named parameters are scanned.
        save_path: Destination path for ``torch.save``.
        target_keywords: Substrings identifying LoRA-wrapped layers.
    """
    lora_weights = {
        name: param.data
        for name, param in model.named_parameters()
        if "lora_" in name and any(key in name for key in target_keywords)
    }
    torch.save(lora_weights, save_path)

# Example usage (target_keywords is required):
# save_lora_weights(model, "lora_adapter.pth", ["q_proj", "v_proj"])


def count_lora_parameters(model, target_keywords):
    """Count the trainable LoRA adapter parameters in ``model``.

    A parameter is counted when its name contains a target keyword AND
    belongs to a LoRA projection (``"lora_"`` in the name).  The second
    condition excludes the frozen original weights nested under a matching
    layer (e.g. ``"q_proj.original_layer.weight"``), which a keyword-only
    filter would wrongly include in the count.

    Args:
        model: Model whose named parameters are scanned.
        target_keywords: Substrings identifying LoRA-wrapped layers.

    Returns:
        Total number of LoRA adapter parameter elements.
    """
    return sum(
        param.numel()
        for name, param in model.named_parameters()
        if "lora_" in name and any(key in name for key in target_keywords)
    )

