import torch

import torch.nn as nn

import torch.nn.functional as F
import numpy as np
from config import Config

class Model(nn.Module):
    """Bidirectional-LSTM text classifier.

    Pipeline: token ids -> embedding -> BiLSTM -> last time step ->
    linear projection -> softmax over classes.

    ``config`` must provide ``n_vocab``, ``embed_size``, ``hidden_size``,
    ``num_layers`` and ``num_classes``.
    """

    def __init__(self, config):
        super(Model, self).__init__()
        # Lookup table mapping token ids to dense vectors.
        self.embedding = nn.Embedding(config.n_vocab, config.embed_size)
        # batch_first=True: tensors flow as (batch, seq, feature).
        self.lstm = nn.LSTM(config.embed_size,
                            config.hidden_size,
                            config.num_layers,
                            bidirectional=True,
                            batch_first=True)
        # Bidirectional LSTM concatenates both directions, doubling the width.
        self.fc = nn.Linear(2 * config.hidden_size, config.num_classes)
        # NOTE(review): if this model is trained with nn.CrossEntropyLoss,
        # applying Softmax here is redundant/harmful (CE applies log-softmax
        # internally) — confirm against the training loop.
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        """Map token ids ``x`` of shape (batch, seq) to class probabilities
        of shape (batch, num_classes)."""
        embedded = self.embedding(x)       # (batch, seq, embed_size)
        lstm_out, _ = self.lstm(embedded)  # (batch, seq, 2*hidden_size)
        # Take the final time step (both directions' features for position -1).
        last_step = lstm_out[:, -1, :]     # (batch, 2*hidden_size)
        return self.softmax(self.fc(last_step))
# cfg=Config()
# model=Model(cfg)
# print(model)