import torch
import torch.nn as nn


# class RNN(nn.Module):
#     def __init__(self, input_size, hidden_size):
#         super(RNN, self).__init__()
#         self.input_size = input_size
#         self.hidden_size = hidden_size
#         self.GRU = nn.GRU(input_size=input_size, hidden_size=hidden_size, batch_first=True)
#
#     def forward(self, input, hidden):
#         output, hidden = self.GRU(input, hidden)
#         return output, hidden
#
#     def initHidden(self):
#         return torch.zeros(1, 1, self.hidden_size)
#
# x = torch.tensor([[1,2,1]],dtype=torch.long)  # one sentence of 3 tokens; normally produced in batches by a DataLoader
# # print(f'x-->{x}')
# embedding = nn.Embedding(num_embeddings=3, embedding_dim=4)
# # print(f'embedding.weight-->{embedding.weight}')
# output = embedding(x)
# # print(f'output-->{output}')
# print(f'output.shape-->{output.shape}') # torch.Size([1, 3, 4]) batch, seq, feature
#
#
# GRU = RNN(input_size=4, hidden_size=8)
# hidden = GRU.initHidden()
# output, hidden = GRU(input=output, hidden=hidden)
# print(f'output.shape-->{output.shape}')
# print(f'hidden.shape-->{hidden.shape}')

# Demo: batched matrix multiplication of random tensors.
# Shapes: (1, 1, 3) @ (1, 3, 4) -> (1, 1, 4); the leading batch
# dimension (1) is carried through by torch's batched matmul rules.
a = torch.randn(1, 1, 3)
print(a)

b = torch.randn(1, 3, 4)
print(b)

# torch.matmul is the functional equivalent of the `@` operator.
c = torch.matmul(a, b)
print(c)



