#!/usr/bin/env python
# coding: utf-8

# In[47]:


import torch
import torch.nn as nn
import torch.nn.functional as F

class LSTM(nn.Module):
    def __init__(self,input_size, hidden_size, layers_n, seq_len, Batch_first = True):
        super(LSTM,self).__init__()
        self.lstm = nn.LSTM(
        input_size=input_size,   #不要忘记加逗号
        hidden_size = hidden_size,
        num_layers = layers_n,
        batch_first = True,
        #bidirectional = True
        #dropout = 0.2,
        )
        
        self.fc1 = nn.Linear(seq_len,20)

    def forward(self,x):
        output,(h_n,c_n) = self.lstm(x)    
        dim0,dim1,dim2 = output.shape
        #print(output.shape)
        x = F.relu(output.reshape(dim0,dim2,dim1))
        #print(x.shape)
        x = self.fc1(x)
        #print(x.shape)
        return x.reshape(dim0,-1,dim2)

# Smoke test: input_size=25, hidden_size=50, 2 layers, seq_len=120.
net = LSTM(25, 50, 2, 120)

# Random batch shaped (batch_size, seq_len, input_size).
input_data = torch.rand(100, 120, 25)

# Notebook-style expression: display the resulting output shape.
net(input_data).shape