import torch.nn as nn
"""
定义lstm参数含义: (input_size,hidden_size,num_layers)
定义输入张量的参数含义:(sequence_length,batch_size,input_size)
定义隐藏层初始化张量和细胞初始化状态张量参数含义:(num_layers * num_directions,batch_size,hidden_size)
"""
import torch

def dm_lstm():
    """Demonstrate a 2-layer LSTM forward pass with explicit initial states.

    Builds an LSTM with input_size=5, hidden_size=6, num_layers=2, feeds it a
    random input of shape (seq_len=1, batch=3, input_size=5) together with
    initial hidden/cell states of shape (num_layers=2, batch=3, hidden_size=6),
    prints the output and final states, and returns them.

    Returns:
        tuple: ``(output, (hn, cn))`` as produced by ``nn.LSTM.forward``;
        ``output`` has shape (1, 3, 6), ``hn`` and ``cn`` have shape (2, 3, 6).
    """
    # Create the LSTM layer: (input_size, hidden_size, num_layers).
    lstm = nn.LSTM(input_size=5, hidden_size=6, num_layers=2)
    # Input tensor: (sequence_length, batch_size, input_size).
    # Renamed from `input` to avoid shadowing the builtin.
    x = torch.randn(size=(1, 3, 5))
    # Initial hidden state: (num_layers * num_directions, batch_size, hidden_size).
    h0 = torch.randn(size=(2, 3, 6))
    # Initial cell state: same shape as the hidden state.
    c0 = torch.randn(size=(2, 3, 6))
    # hn holds the final hidden state of each layer; the top layer's final
    # hidden state equals the last time step of `output`. h0/c0 are passed
    # (and hn/cn returned) together as a tuple.
    output, (hn, cn) = lstm(x, (h0, c0))
    print('output--->', output.shape, output)
    print('hn--->', hn.shape, hn)
    print('cn--->', cn.shape, cn)
    # Return results so callers/tests can inspect them (previously implicit None).
    return output, (hn, cn)


def dm01_lstm():
    """Run a forward pass through a 2-layer LSTM (duplicate of ``dm_lstm``).

    Creates an LSTM (input_size=5, hidden_size=6, num_layers=2), random input
    of shape (1, 3, 5), and random initial states of shape (2, 3, 6), then
    prints and returns the layer's outputs.

    Returns:
        tuple: ``(output, (hn, cn))``; ``output`` is (1, 3, 6),
        ``hn``/``cn`` are (2, 3, 6).
    """
    # LSTM layer parameters: (input_size, hidden_size, num_layers).
    # (Removed a stray trailing comma from the original call.)
    lstm = nn.LSTM(input_size=5, hidden_size=6, num_layers=2)
    # Input: (sequence_length, batch_size, input_size); avoid shadowing `input`.
    x = torch.randn(size=(1, 3, 5))
    # Initial hidden state: (num_layers * num_directions, batch_size, hidden_size).
    h0 = torch.randn(size=(2, 3, 6))
    # Initial cell state, same shape as h0.
    c0 = torch.randn(size=(2, 3, 6))
    # The states travel as a (h, c) tuple; hn's top layer matches the
    # last time step of `output`.
    output, (hn, cn) = lstm(x, (h0, c0))
    print('output--->', output.shape, output)
    print('hn--->', hn.shape, hn)
    print('cn--->', cn.shape, cn)
    # Return results for programmatic use (previously implicit None).
    return output, (hn, cn)

# Script entry point: run the LSTM demo when executed directly.
if __name__ == '__main__':
    dm_lstm()

