import torch
from torch import nn
from torch.nn import functional as F
import dltools

# Minibatch configuration for the time-machine character dataset.
batchSize = 32
num_steps = 35
train_iter, vocab = dltools.load_data_time_machine(batch_size=batchSize, num_steps=num_steps)

# Peek at the first minibatch to verify the input tensor shape.
x, y = next(iter(train_iter))
print(f"x.shape:{x.shape}")

num_hiddens = 256
# Single hidden-layer recurrent network with 256 hidden units.
# batch_first=True: input/output tensors are (batch, seq, feature)
# rather than the default (seq, batch, feature).
rnn_layer = nn.RNN(len(vocab), hidden_size=num_hiddens, batch_first=True)

# Smoke-test the layer with a zeroed initial state; the leading 1 in the
# state shape is the number of layers (a single hidden layer here).
init_state = torch.zeros((1, batchSize, num_hiddens))
probe = torch.rand(size=(batchSize, num_steps, len(vocab)))
outputs, next_state = rnn_layer(probe, init_state)

# 构建一个完整的循环神经网络
# A complete RNN language model: a recurrent backbone plus an output
# projection from hidden states to vocabulary logits.
class RNNModel(nn.Module):
    """RNN language model wrapping a recurrent layer with an output layer.

    Args:
        rnn_layer: a recurrent module (nn.RNN / nn.GRU / nn.LSTM). The layer
            used in this script is built with ``batch_first=True``.
        vocab_size: vocabulary size; inputs are token indices and outputs
            are per-step logits over the vocabulary.
    """

    def __init__(self, rnn_layer, vocab_size, **kwargs):
        super().__init__(**kwargs)
        self.rnn = rnn_layer
        self.vocab_size = vocab_size
        self.hidden_size = self.rnn.hidden_size
        # A bidirectional RNN concatenates forward and backward hidden
        # states, so the projection input width doubles.
        self.num_directions = 2 if self.rnn.bidirectional else 1
        self.linear = nn.Linear(self.hidden_size * self.num_directions, self.vocab_size)

    def forward(self, inputs, state):
        """Map token indices to vocabulary logits.

        Args:
            inputs: integer tensor of token indices; with a batch_first
                recurrent layer its shape is (batch, num_steps).
            state: initial hidden state as produced by ``begin_state``.

        Returns:
            (output, state): logits of shape (batch * num_steps, vocab_size)
            and the updated hidden state.
        """
        X = F.one_hot(inputs.long(), self.vocab_size).to(torch.float32)
        H, state = self.rnn(X, state)
        # Flatten the time dimension so the linear layer emits one logit
        # row per (batch, step) position.
        output = self.linear(H.reshape(-1, H.shape[-1]))
        return output, state

    def begin_state(self, device, batch_size=1):
        """Return a zeroed initial hidden state.

        nn.RNN/nn.GRU take a single tensor; nn.LSTM takes a
        (hidden state, memory cell) tuple, so handle both.
        """
        shape = (self.num_directions * self.rnn.num_layers, batch_size, self.hidden_size)
        if isinstance(self.rnn, nn.LSTM):
            return (torch.zeros(shape, device=device),
                    torch.zeros(shape, device=device))
        return torch.zeros(shape, device=device)

# Build the model, move it to the best available device, and sanity-check
# it by generating a few characters after a prefix (untrained, so the
# output is expected to be gibberish).
device = dltools.try_gpu()
net = RNNModel(rnn_layer, vocab_size=len(vocab)).to(device)
print(dltools.predict_ch8("time traveller", 10, net, vocab, device))

# Training (uncomment to run), e.g. num_epochs, lr = 500, 0.8 (or lr = 0.5):
# dltools.train_ch8(net, train_iter, vocab, lr, num_epochs, device)