import torch.nn as nn
import torch
import torch.nn.functional as F
from models.mish import Mish

class LSTM(nn.Module):
    """Bidirectional two-layer LSTM text classifier.

    Optionally embeds integer token ids, runs a 2-layer BiLSTM, and
    classifies from the concatenation of the final forward and backward
    hidden states of the last layer.
    """

    def __init__(self, num_class=17, need_embedding=False, embedding_dim=128,
                 hidden_size=128, vocab_size=859):
        """
        :param num_class: number of output classes.
        :param need_embedding: if True, inputs are integer token ids that are
            embedded first; otherwise inputs are already dense feature
            vectors of size ``embedding_dim``.
        :param embedding_dim: embedding size / LSTM input feature size.
        :param hidden_size: LSTM hidden size per direction (default preserves
            the previously hard-coded 128).
        :param vocab_size: embedding vocabulary size; index ``vocab_size - 1``
            is the padding id (default preserves the previously hard-coded
            859 / 858).
        """
        super().__init__()
        if need_embedding:
            self.embedding = nn.Embedding(num_embeddings=vocab_size,
                                          embedding_dim=embedding_dim,
                                          padding_idx=vocab_size - 1)

        self.lstm = nn.LSTM(input_size=embedding_dim, hidden_size=hidden_size,
                            num_layers=2, batch_first=True, bidirectional=True,
                            dropout=0.5)
        # NOTE(review): the unused fc1/fc2 layers were removed (dead code that
        # registered parameters never touched by forward). Checkpoints saved
        # from the old class need load_state_dict(..., strict=False).
        # nn.Mish (torch >= 1.9) replaces the custom models.mish.Mish; both
        # compute x * tanh(softplus(x)).
        self.fc = nn.Sequential(
            nn.Dropout(0.2),
            nn.Linear(hidden_size * 2, 256),
            nn.Dropout(0.2),
            nn.Mish(),
            nn.Linear(256, num_class),
            # nn.Sigmoid()
        )
        self.need_embedding = need_embedding

    def forward(self, input):
        """
        :param input: ``(batch, seq_len)`` int64 token ids when
            ``need_embedding`` is True, otherwise
            ``(batch, seq_len, embedding_dim)`` float features.
        :return: ``(batch, num_class)`` raw logits (no activation applied).
        """
        if self.need_embedding:
            input = self.embedding(input)  # -> (batch, seq_len, embedding_dim)

        # h_n: (num_layers * 2, batch, hidden_size); the last two entries are
        # the final layer's forward (-2) and backward (-1) hidden states.
        # Concatenate them as the sequence representation.
        output, (h_n, c_n) = self.lstm(input)
        out = torch.cat([h_n[-1, :, :], h_n[-2, :, :]], dim=-1)

        return self.fc(out)