import torch.nn as nn

class Adapter(nn.Module):
    """Bottleneck adapter: down-project, nonlinearity, up-project, with a
    residual connection around the whole transformation (the common
    adapter-tuning pattern).

    Args:
        dim: hidden size of the incoming features.
        reduction_factor: ratio between ``dim`` and the bottleneck width;
            larger values give a smaller (cheaper) adapter. Default 16.
    """

    def __init__(self, dim, reduction_factor=16):
        super().__init__()
        # Clamp to at least 1: for dim < reduction_factor the floor division
        # would produce a zero-width bottleneck and a degenerate nn.Linear.
        bottleneck = max(1, dim // reduction_factor)
        self.down_proj = nn.Linear(dim, bottleneck)
        self.up_proj = nn.Linear(bottleneck, dim)
        self.act = nn.GELU()

    def forward(self, x):
        """Return ``x`` plus the adapter's learned correction to ``x``.

        The residual means a zero-initialized up-projection leaves the
        input unchanged, so the adapter starts near the identity.
        """
        return x + self.up_proj(self.act(self.down_proj(x)))


# Insert an adapter after a Transformer layer
class TransformerWithAdapter(nn.Module):
    """Wrap an existing Transformer layer and run an :class:`Adapter`
    on its output.

    Args:
        layer: the layer to wrap; assumed to expose
            ``config.hidden_size`` (NOTE(review): HF-style config —
            confirm against the caller) and to accept/return a single
            tensor in ``forward``.
    """

    def __init__(self, layer):
        super().__init__()
        self.layer = layer
        # Size the adapter to match the wrapped layer's hidden width.
        self.adapter = Adapter(layer.config.hidden_size)

    def forward(self, x):
        """Apply the wrapped layer, then the adapter, to ``x``."""
        hidden = self.layer(x)  # original layer computation
        return self.adapter(hidden)  # adapter correction on top