import torch
import torch.nn as nn
import torch.nn.functional as F

class softmax(nn.Module):
    """Plain softmax (cross-entropy) classification head.

    Projects fixed-size embeddings to per-class logits with a single linear
    layer and computes the cross-entropy loss against integer class labels.

    Args:
        embedding_dim: Size of the input embedding vectors.
        num_classes: Number of target classes (speakers).
        **kwargs: Ignored; accepted for config-driven construction.
    """

    def __init__(self, embedding_dim, num_classes, **kwargs):
        super(softmax, self).__init__()
        self.embedding_dim = embedding_dim
        # Linear projection from embedding space to per-class logits.
        # Attribute name 'fc' kept for checkpoint/state_dict compatibility.
        self.fc = nn.Linear(embedding_dim, num_classes)
        # Typo fixed: was 'criertion'. CrossEntropyLoss has no learnable
        # parameters, so state_dict keys are unaffected by the rename.
        self.criterion = nn.CrossEntropyLoss()

        print('Embedding dim is {}, number of speakers is {}'.format(embedding_dim, num_classes))

    def forward(self, x, label=None):
        """Compute logits, and the cross-entropy loss when labels are given.

        Args:
            x: Embedding batch of shape (batch, embedding_dim).
            label: Optional integer class labels of shape (batch,).

        Returns:
            Scalar loss tensor when ``label`` is provided; otherwise the raw
            logits of shape (batch, num_classes).
        """
        logits = self.fc(x)
        if label is None:
            # Previously label=None crashed inside CrossEntropyLoss despite
            # the default; returning logits enables inference-time use and
            # cannot break any existing caller.
            return logits
        return self.criterion(logits, label)


if __name__ == "__main__":
    # Smoke test: instantiate the head with a 10-dim embedding and 100 classes.
    model = softmax(10, 100)
