import torch.nn as nn
import torch.nn.functional as F
from learn_multihead_attention import mha_result

# Implement the position-wise feed-forward sublayer via the PositionwiseFeedForward class.
class PositionwiseFeedForward(nn.Module):
    """Position-wise feed-forward sublayer: FFN(x) = w2(dropout(relu(w1(x)))).

    Args:
        d_model: input feature dimension, and also the output dimension of the
            second linear layer — input and output sizes match so the sublayer
            preserves the tensor shape.
        d_ff: hidden size — output dimension of the first linear layer and
            input dimension of the second.
        dropout: probability of zeroing activations after the ReLU
            (default 0.1).
    """

    def __init__(self, d_model, d_ff, dropout=0.1):
        # Modern Python-3 super() call (the zero-argument form).
        super().__init__()

        # Two linear maps: expand d_model -> d_ff, then project back
        # d_ff -> d_model.
        self.w1 = nn.Linear(d_model, d_ff)
        self.w2 = nn.Linear(d_ff, d_model)
        # Dropout applied to the ReLU activations (active only in train mode).
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        """Apply the feed-forward transform.

        Args:
            x: tensor whose last dimension is d_model (output of the
               previous sublayer).

        Returns:
            Tensor of the same shape as ``x``: first linear layer, ReLU,
            dropout, then the second linear layer.
        """
        return self.w2(self.dropout(F.relu(self.w1(x))))


# Demo: feed the multi-head attention output through the feed-forward sublayer.
# Model (input/output) dimension of the sublayer.
d_model = 512
# Hidden dimension of the inner linear layer.
d_ff = 64
# Dropout (zeroing) probability.
dropout = 0.2

# Input comes from the multi-head attention demo; shape (2, 4, 512) per the
# sample output below — TODO confirm against learn_multihead_attention.
x = mha_result
'''
x: tensor([[[ 12.9616,  -5.9693,   4.8327,  ...,  -0.3708,   4.4671,   4.2709],
         [ 12.5559,  -9.1146,   1.8227,  ...,  -1.6848,   4.7663,   4.2422],
         [  9.1482,  -8.1453,   1.7617,  ...,  -0.8180,   6.1618,   0.5189],
         [ 12.5075,  -4.2878,   4.7038,  ...,   0.8913,   3.2719,   0.5297]],

        [[  3.8175,   2.3837,  -2.3969,  ..., -13.0653,  -3.0383,  -5.7519],
         [  3.6368,   4.8399,   0.1364,  ..., -12.0696,  -2.8033,  -4.4250],
         [  3.1304,   3.6628,  -1.3901,  ..., -10.1150,   1.2196,  -8.6856],
         [  2.5768,   4.5911,  -1.4081,  ..., -11.4767,  -5.3430,  -7.3822]]],
       grad_fn=<ViewBackward0>) size 2x4x512
'''
# Build the sublayer and run the forward pass.
pff = PositionwiseFeedForward(d_model, d_ff, dropout)
pff_result = pff(x)

# The output keeps the input's shape (last dim stays d_model = 512).
print('pff_result:', pff_result)
print(pff_result.shape)
'''
pff_result: tensor([[[ 0.7658, -0.0989,  0.4635,  ..., -1.3882, -1.0188,  1.3789],
         [ 0.9087,  0.9879, -1.2174,  ..., -2.1874, -1.8522,  1.8711],
         [ 1.2648, -0.9501,  1.2792,  ..., -1.0456,  0.0619, -0.8736],
         [ 0.0572,  0.4145,  0.4516,  ..., -0.2791, -1.1537,  0.9343]],

        [[ 1.7666, -0.1266,  1.9631,  ..., -2.3928, -0.4186,  0.5971],
         [ 2.2348,  0.9764,  1.4393,  ..., -2.5711,  0.4676,  0.4301],
         [ 0.9384, -0.1848,  1.0586,  ..., -1.1326, -0.0433,  1.1699],
         [ 1.0055, -0.8363,  0.3205,  ..., -2.4109, -0.4529,  0.6135]]],
       grad_fn=<ViewBackward0>) size 2x4x512
'''

