# Hyperparameter settings for the Transformer building blocks in this file.
d_model = 512 # embedding / model dimension (width of every token representation)
n_heads = 8   # number of attention heads
d_k = d_v = d_model //n_heads   # per-head dimension of K (= Q) and of V
# K and Q must have the same dimension (K is multiplied by Q's transpose);
# V's dimension need not match, but here it is chosen equal.
dim_feedforward = 2048  
# hidden-layer width of the position-wise feed-forward network
# (the fully connected sublayer that follows attention)
class PoswiseFeedForwardNet_Add_Norm(nn.Module):
    """Position-wise feed-forward sublayer with Add & Norm.

    Computes ``LayerNorm(x + W2 @ relu(W1 @ x))``: a two-layer MLP applied
    independently at every sequence position, followed by a residual (skip)
    connection and layer normalization, as in the Transformer encoder/decoder.
    """

    def __init__(self, d_model: int = 512, dim_feedforward: int = 2048):
        """
        Args:
            d_model: model/embedding dimension. Default matches the
                module-level ``d_model`` constant, so existing no-argument
                callers behave exactly as before.
            dim_feedforward: hidden width of the inner linear layer. Default
                matches the module-level ``dim_feedforward`` constant.
        """
        super().__init__()
        self.fc = nn.Sequential(
            nn.Linear(d_model, dim_feedforward, bias=False),
            nn.ReLU(),
            nn.Linear(dim_feedforward, d_model, bias=False),
        )
        # BUG FIX: the LayerNorm must be created once here so that its affine
        # parameters are registered with the module (trained by the optimizer,
        # saved in state_dict(), moved by .to(device)).  The original code
        # built a fresh nn.LayerNorm inside forward() on every call, leaving
        # its weights untrained, re-initialized each step, and potentially on
        # the wrong device.
        self.layer_norm = nn.LayerNorm(d_model)

    def forward(self, inputs):
        """
        Args:
            inputs: [batch_size, seq_len, d_model]

        Returns:
            Tensor of shape [batch_size, seq_len, d_model]:
            ``LayerNorm(inputs + FFN(inputs))``.
        """
        residual = inputs          # saved for the residual connection
        output = self.fc(inputs)   # position-wise two-layer MLP
        return self.layer_norm(output + residual)  # add & norm
