# Copyright (c) OpenMMLab. All rights reserved.
import torch
import torch.nn as nn

class Classifier(nn.Module):
    """MLP classification head producing raw class logits.

    Architecture: Linear(nHidden * 2 -> 1024) -> ReLU -> Dropout ->
    Linear(1024 -> 1024) -> ReLU -> Dropout -> Linear(1024 -> output_size).

    The input width ``nHidden * 2`` suggests features from a bidirectional
    RNN upstream (a commented-out LSTM was removed from this module) —
    presumably callers feed it flattened ``[T * b, nHidden * 2]`` features.

    Args:
        nIn: unused; kept only for backward compatibility with callers.
        nHidden: half of the input feature width (input dim is nHidden * 2).
        nOut: unused; kept only for backward compatibility with callers.
        output_size: number of output classes (logit dimension).
        dropout: dropout probability applied after each hidden ReLU.
    """

    def __init__(self, nIn, nHidden, nOut, output_size, dropout=0.1):
        super().__init__()
        # NOTE(review): nIn and nOut are accepted but never used; the first
        # linear layer fixes the expected input width at nHidden * 2.
        self.embedding = nn.Linear(nHidden * 2, 1024)
        self.relu = nn.ReLU()
        self.drop = nn.Dropout(dropout)
        self.linear1 = nn.Linear(1024, 1024)
        self.linear2 = nn.Linear(1024, output_size)

    def forward(self, input):
        """Map input features to class logits.

        Args:
            input: tensor of shape ``[N, nHidden * 2]``.

        Returns:
            Logit tensor of shape ``[N, output_size]`` (no softmax applied).
        """
        output = self.embedding(input)   # [N, 1024]
        output = self.drop(self.relu(output))
        output = self.linear1(output)    # [N, 1024]
        output = self.drop(self.relu(output))
        return self.linear2(output)      # [N, output_size]
