# -*- coding: utf-8 -*-
# @Time : 2021-11-17 16:02
# @Author : lwb
# @Site : 
# @File : MLP.py
import torch.nn as nn
from torch.nn import functional as F
class MLP(nn.Module):
    """Bag-of-embeddings multi-layer perceptron text classifier.

    Each input sequence's token ids are pooled into a single vector by
    ``nn.EmbeddingBag``, passed through one ReLU hidden layer, and
    projected to per-class log-probabilities.
    """

    def __init__(self, vocab_size, embedding_dim, hidden_dim, num_class):
        super(MLP, self).__init__()
        # Pools every bag of token embeddings into one fixed-size vector.
        self.embedding = nn.EmbeddingBag(vocab_size, embedding_dim)
        self.linear1 = nn.Linear(embedding_dim, hidden_dim)  # input -> hidden
        self.activate = F.relu
        self.linear2 = nn.Linear(hidden_dim, num_class)  # hidden -> output

    def forward(self, inputs, offsets):
        """Return (batch, num_class) log-probabilities.

        `inputs` is a flat 1-D tensor of token ids; `offsets` marks where
        each sequence (bag) starts, as required by ``nn.EmbeddingBag``.
        """
        pooled = self.embedding(inputs, offsets)
        projected = self.linear1(pooled)
        hidden = self.activate(projected)
        scores = self.linear2(hidden)
        # dim=1 is the class axis: normalize each row into log-probabilities.
        return F.log_softmax(scores, dim=1)

