import torch
import torch.nn as nn
from torch.nn import Dropout
import torch.nn.functional as F
import math
from a_1_embedding import Embedding

# Select the compute device once at import time; the FeedForward layers and
# their inputs are pinned to this device below.
Device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# NOTE(review): printing at import time is a module side effect — consider
# moving this under the __main__ guard or using logging instead.
print(Device)


class FeedForward(nn.Module):
    """Position-wise feed-forward block: Linear -> ReLU -> Linear -> Dropout.

    Both linear layers are created directly on the module-level ``Device``,
    and ``forward`` moves its input there, so the block always computes on
    that device regardless of where the caller's tensor lives.

    Args:
        d_model: size of the input/output feature dimension.
        middle_dim: size of the hidden (expansion) dimension.
        drop: dropout probability applied to the final projection.
    """

    def __init__(self, d_model, middle_dim, drop=0.1):
        super().__init__()
        self.fc1 = nn.Linear(d_model, middle_dim, device=Device)
        self.fc2 = nn.Linear(middle_dim, d_model, device=Device)
        self.drop = Dropout(drop)

    def forward(self, x):
        # Move the input onto the block's device, expand, activate, project
        # back to d_model, then regularize with dropout.
        hidden = F.relu(self.fc1(x.to(Device)))
        return self.drop(self.fc2(hidden))


if __name__ == '__main__':
    # Smoke test: the block must preserve the input shape (batch, seq, d_model).
    sample = torch.randn(5, 10, 512)
    model = FeedForward(512, 128)
    output = model(sample)
    print(output.shape)


