# %%
import torch
import torch.nn as nn
import math
# %%
class LinearLoRALayer(nn.Module):
    """Linear layer with a LoRA (Low-Rank Adaptation) delta.

    Computes ``x @ (W + scale * A @ B).T + bias`` where ``A @ B`` is a
    rank-``rank`` update and ``scale = lora_alpha / rank``.  With
    ``merge=True`` the low-rank product is folded into the base weight so
    the forward pass is a single matmul.

    Args:
        in_features: input feature dimension.
        out_features: output feature dimension.
        rank: LoRA rank; ``rank <= 0`` disables the LoRA path entirely.
        lora_alpha: LoRA scaling numerator (``scale = lora_alpha / rank``).
        dropout: dropout probability applied to the layer output
            (0 means no dropout).
        merge: if True, fold the LoRA delta into the linear weight at
            construction time.
    """

    def __init__(self, in_features, out_features, rank, lora_alpha, dropout, merge=False):
        super().__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.rank = rank
        self.lora_alpha = lora_alpha
        self.merge = merge

        self.linear = nn.Linear(in_features=in_features, out_features=out_features)

        if rank > 0:
            # Standard LoRA init: A gets Kaiming-uniform (NOT Gaussian, despite
            # the original comment), B gets zeros, so the delta A @ B is zero
            # at initialization and training starts from the base model.
            self.lora_a = nn.Parameter(torch.empty(out_features, rank))
            nn.init.kaiming_uniform_(self.lora_a, a=math.sqrt(5))
            self.lora_b = nn.Parameter(torch.zeros(rank, in_features))

            # LoRA scaling factor applied to the low-rank delta.
            self.scale = lora_alpha / rank

        # Note: dropout is applied to the layer OUTPUT here, not to the LoRA
        # input as in the reference LoRA implementation.
        self.dropout = nn.Dropout(dropout) if dropout > 0 else nn.Identity()

        if merge:
            self.merge_weight()

    def merge_weight(self):
        """Fold ``scale * (A @ B)`` into the base linear weight in place."""
        if self.merge and self.rank > 0:
            self.linear.weight.data += self.scale * (self.lora_a @ self.lora_b)

    def unmerge_weight(self):
        """Undo :meth:`merge_weight` by subtracting the delta from the weight."""
        if self.merge and self.rank > 0:
            self.linear.weight.data -= self.scale * (self.lora_a @ self.lora_b)

    def forward(self, x):
        """Apply the (possibly merged) LoRA linear transform, then dropout."""
        # BUG FIX: the original tested `self.rank > 0` first, which made the
        # merged branch unreachable -- a merged layer would add the LoRA delta
        # twice (once baked into the weight, once explicitly).  Apply the
        # explicit low-rank path only when the delta is NOT merged.
        if self.rank > 0 and not self.merge:
            output = self.linear(x) + self.scale * (x @ (self.lora_a @ self.lora_b).T)
        else:
            # rank == 0 (plain linear) or delta already merged into the weight.
            output = self.linear(x)

        return self.dropout(output)
# %%
# Smoke test for the LoRA linear layer.
batch_size, seq_len = 32, 128
in_features, out_features = 768, 512
rank, lora_alpha, dropout = 8, 16, 0.1

# Random activations shaped like a transformer hidden state:
# (batch, sequence, features).
x = torch.randn(batch_size, seq_len, in_features)

# Build the layer in the regular (unmerged) configuration.
lora_layer = LinearLoRALayer(
    in_features=in_features,
    out_features=out_features,
    rank=rank,
    lora_alpha=lora_alpha,
    dropout=dropout,
    merge=False,
)

# One forward pass; report the resulting shape.
output = lora_layer(x)
print(f"Output shape (no merge): {output.shape}")
# %%
