from torch import nn

import torch

# --- Unidirectional LSTM shape demo ---
# Define the hyperparameters once and reuse them everywhere, so the
# constructor, the input tensor, and the initial states stay in sync.
batch_size = 5
seq_len = 3
word_dimension = 10  # input feature size per token (LSTM input_size)
hidden_size = 20
num_layer = 2

# input_size=10, hidden_size=20, num_layers=2; batch_first puts batch dim first
rnn = nn.LSTM(word_dimension, hidden_size, num_layer, batch_first=True)

# "inputs" rather than "input" to avoid shadowing the builtin
inputs = torch.randn(batch_size, seq_len, word_dimension)

output, (h_n, c_n) = rnn(inputs)
# output: hidden state of the LAST layer for every batch element and time step
print(output.shape)  # torch.Size([5, 3, 20]) = [batch_size, seq_len, hidden_size]
# h_n: final (last time step) hidden state of EVERY layer
print(h_n.shape)  # [num_layer, batch_size, hidden_size]
# c_n: final cell state of EVERY layer
print(c_n.shape)  # [num_layer, batch_size, hidden_size]

# Passing explicit zero initial states is equivalent to omitting them
# (PyTorch defaults h_0 and c_0 to zeros).
h_0 = torch.zeros(num_layer, batch_size, hidden_size)
c_0 = torch.zeros(num_layer, batch_size, hidden_size)
output, (h_n, c_n) = rnn(inputs, (h_0, c_0))

# --- Bidirectional LSTM shape demo ---
# Same hyperparameters as above, defined once and reused so all shapes agree.
batch_size = 5
seq_len = 3
word_dimension = 10  # input feature size per token (LSTM input_size)
hidden_size = 20
num_layer = 2
num_directions = 2  # bidirectional doubles the direction count

# input_size=10, hidden_size=20, num_layers=2, bidirectional
rnn = nn.LSTM(word_dimension, hidden_size, num_layer,
              bidirectional=True, batch_first=True)

# "inputs" rather than "input" to avoid shadowing the builtin
inputs = torch.randn(batch_size, seq_len, word_dimension)

output, (h_n, c_n) = rnn(inputs)
# output: last-layer hidden states with forward and backward halves
# concatenated along the feature dimension
print(output.shape)  # torch.Size([5, 3, 40]) = [batch_size, seq_len, hidden_size * num_directions]
# h_n: final hidden state for every (layer, direction) pair
print(h_n.shape)  # [num_directions * num_layer, batch_size, hidden_size]
# c_n: final cell state for every (layer, direction) pair
print(c_n.shape)  # [num_directions * num_layer, batch_size, hidden_size]

# Explicit zero initial states: leading dim is num_directions * num_layer,
# NOT num_layer, because each direction of each layer has its own state.
h_0 = torch.zeros(num_directions * num_layer, batch_size, hidden_size)
c_0 = torch.zeros(num_directions * num_layer, batch_size, hidden_size)
output, (h_n, c_n) = rnn(inputs, (h_0, c_0))