import torch
from torch import nn


class RNN(nn.Module):
    """Multi-layer (optionally bidirectional) Elman RNN built from nn.RNNCell,
    with a final linear projection back to the vocabulary size.

    forward() consumes a sequence of shape (seq_len, batch, vocab_size) and
    produces one projected output per time step.
    """

    def __init__(self, vocab_size, hidden_size, num_layers=1, bidirectional=False, device=None):
        """
        Args:
            vocab_size: size of the input / output vocabulary (feature dim).
            hidden_size: hidden-state dimension of every RNN cell.
            num_layers: number of stacked RNN layers per direction.
            bidirectional: if True, run a second stack right-to-left and
                concatenate both directions before the FC layer.
            device: target device; defaults to CUDA when available.
        """
        super().__init__()
        self.device = device if device is not None else torch.device(
            "cuda" if torch.cuda.is_available() else "cpu")
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.bidirectional = bidirectional

        # Left-to-right stack: layer 0 consumes the input, deeper layers
        # consume the hidden state of the layer below.
        # BUG FIX: build cells on self.device (the resolved device), not on
        # the raw `device` argument, which may be None even when CUDA was
        # auto-detected above.
        self.rnn_layers_forward = nn.ModuleList([
            nn.RNNCell(vocab_size if i == 0 else hidden_size, hidden_size,
                       device=self.device)
            for i in range(num_layers)
        ])

        if bidirectional:
            # Right-to-left stack, same layout as the forward stack.
            self.rnn_layers_backward = nn.ModuleList([
                nn.RNNCell(vocab_size if i == 0 else hidden_size, hidden_size,
                           device=self.device)
                for i in range(num_layers)
            ])

        # Bidirectional outputs concatenate both directions, hence 2*hidden.
        fc_in = 2 * hidden_size if bidirectional else hidden_size
        self.fc = nn.Linear(fc_in, vocab_size, device=self.device)

    def forward(self, x, h):
        """Run the RNN over a full sequence.

        Args:
            x: input of shape (seq_len, batch, vocab_size).
            h: initial hidden state of shape (num_layers, batch, hidden_size),
               or (2*num_layers, batch, hidden_size) when bidirectional
               (forward-direction layers first, then backward-direction).

        Returns:
            (outputs, h_n): outputs has shape (seq_len, batch, vocab_size);
            h_n has the same shape as the input h. The input h is NOT
            modified in place.

        Raises:
            ValueError: if h has the wrong number of layers.
        """
        # BUG FIX: the original asserted on the undefined name `h0`, and the
        # un-parenthesized ternary made the unidirectional case reduce to
        # `assert self.num_layers` (always true). Validate `h` explicitly.
        expected = (2 * self.num_layers) if self.bidirectional else self.num_layers
        if h.shape[0] != expected:
            raise ValueError(f"expected h.shape[0] == {expected}, got {h.shape[0]}")

        batch_size = x.shape[1]
        # Keep per-layer hidden states in Python lists rather than assigning
        # into `h` by index: the original mutated the caller's tensor in
        # place, and in-place index writes break autograd.
        if self.bidirectional:
            h = h.reshape(2, self.num_layers, batch_size, self.hidden_size)
            h_fwd = list(h[0])
            h_bwd = list(h[1])

            # Forward pass: keep the top layer's state at every time step.
            fwd_states = []
            for step in x:
                for i, cell in enumerate(self.rnn_layers_forward):
                    step = cell(step, h_fwd[i])
                    h_fwd[i] = step
                fwd_states.append(step)

            # Backward pass over the time-reversed sequence.
            bwd_states = []
            for step in torch.flip(x, dims=[0]):
                for i, cell in enumerate(self.rnn_layers_backward):
                    step = cell(step, h_bwd[i])
                    h_bwd[i] = step
                bwd_states.append(step)
            # Re-align backward states with the original time order.
            bwd_states.reverse()

            # BUG FIX: the original produced a single output (final hidden
            # states only); emit one projected output per time step, matching
            # the unidirectional branch.
            outputs = [self.fc(torch.cat([f, b], dim=-1))
                       for f, b in zip(fwd_states, bwd_states)]
            h_n = torch.stack(h_fwd + h_bwd, dim=0)
        else:
            h_layers = list(h)
            outputs = []
            for step in x:  # iterate over time: x is (seq_len, batch, vocab)
                for i, cell in enumerate(self.rnn_layers_forward):
                    step = cell(step, h_layers[i])
                    h_layers[i] = step
                outputs.append(self.fc(step))
            h_n = torch.stack(h_layers, dim=0)

        return torch.stack(outputs, dim=0), h_n


if __name__ == '__main__':
    # Smoke test on CPU with small dimensions.
    seq_len, batch_size, vocab_size, hidden_size, num_layers = 5, 3, 10, 20, 2
    inputs = torch.randn(seq_len, batch_size, vocab_size)
    model = RNN(vocab_size, hidden_size, num_layers,
                bidirectional=True, device=torch.device("cpu"))
    # Bidirectional => the initial state carries 2 * num_layers layers.
    h0 = torch.zeros((2 * num_layers, batch_size, hidden_size))
    outputs, _ = model(inputs, h0)
    print(outputs.shape)
