import torch.nn as nn
import torch.nn.functional as F


class myModel(nn.Module):
    """Fully-connected classifier for 128x128 single-channel inputs.

    Architecture: flatten -> 16384 -> 2048 -> 512 -> 180 -> 60, with ReLU
    between layers. The final layer emits raw logits (no softmax), which is
    the correct form to feed into ``nn.CrossEntropyLoss``.
    """

    def __init__(self):
        super().__init__()  # modern zero-arg super (Python 3)
        self.linear1 = nn.Linear(128 * 128, 2048)
        self.linear2 = nn.Linear(2048, 512)
        self.linear3 = nn.Linear(512, 180)
        self.linear4 = nn.Linear(180, 60)

    def forward(self, x):
        """Run the forward pass.

        Args:
            x: Tensor of shape ``(batch, ...)`` whose trailing dimensions
               multiply to 128*128 (e.g. ``(batch, 1, 128, 128)``).

        Returns:
            Logits tensor of shape ``(batch, 60)``.
        """
        # Collapse everything after the batch dimension into one vector.
        x = x.flatten(1)
        x = F.relu(self.linear1(x))
        x = F.relu(self.linear2(x))
        x = F.relu(self.linear3(x))
        # No activation on the output: return raw logits.
        return self.linear4(x)

if __name__ == "__main__":
    import torch

    # Quick smoke test: push a dummy batch through the network and
    # report the resulting logits shape.
    net = myModel()
    dummy_batch = torch.randn(2, 1, 128, 128)
    logits = net(dummy_batch)
    print(logits.shape)
