import torch
import torch.nn as nn


class Linear_Softmax(nn.Module):
    """Classification head over contextual embeddings.

    Projects the first token's embedding (position 0, e.g. a [CLS]-style
    summary token) through a linear layer to produce per-class logits.

    NOTE(review): despite the name, no softmax is applied here — the
    module returns raw logits, presumably paired downstream with a loss
    like ``CrossEntropyLoss`` that applies log-softmax internally.
    Confirm against the training loop.
    """

    def __init__(self, embedding_module, emb_dim, tgt_size, keep_prob, use_cuda):
        """
        Args:
            embedding_module: module mapping (sentences, tokens, masks)
                to embeddings of shape (N, L, C).
            emb_dim: embedding dimension C fed to the linear layer.
            tgt_size: number of output classes (logit dimension).
            keep_prob: probability of *keeping* a unit (TF-style).
                ``nn.Dropout`` expects the probability of *dropping*, so
                we pass ``1 - keep_prob``. (The original passed
                ``keep_prob`` directly, which inverted the dropout rate:
                keep_prob=0.9 would have dropped 90% of activations.)
            use_cuda: if True, place the module on cuda:0, else CPU.
        """
        super().__init__()
        self.embeddings = embedding_module
        self.linear = nn.Linear(emb_dim, tgt_size)
        # Bug fix: nn.Dropout(p) drops with probability p; convert the
        # keep-probability the caller supplies into a drop-probability.
        self.dropout = nn.Dropout(1.0 - keep_prob)
        self.device = torch.device("cuda:0") if use_cuda else torch.device("cpu:0")
        self.to(self.device)

    def forward(self, sentences, tokens, masks):
        """Return logits of shape (N, tgt_size) for the first token.

        NOTE(review): dropout is applied to the logits *after* the
        linear projection; the more common placement is on the embedding
        before the projection — confirm this ordering is intended.
        """
        emb = self.embeddings(sentences, tokens, masks)  # (N, L, C)
        out = self.linear(emb[:, 0, :])  # take position 0 (e.g. [CLS])
        out = self.dropout(out)
        return out

