import torch
from torch import nn


class RNN(nn.Module):
    """A from-scratch single-layer RNN (tanh cell) with a linear output head.

    Weights are stored as raw ``nn.Parameter`` tensors instead of using
    ``nn.RNN``/``nn.Linear``, presumably for pedagogical purposes.
    """

    def __init__(self, vocab_size, hidden_size, device=None):
        """Allocate the recurrent and output-projection parameters.

        Args:
            vocab_size: size of the (one-hot) input and of the output logits.
            hidden_size: dimensionality of the hidden state.
            device: optional torch.device; defaults to CUDA when available.
        """
        super().__init__()

        # Default to CUDA when available; callers may pin a device explicitly.
        self.device = device if device is not None else torch.device(
            "cuda" if torch.cuda.is_available() else "cpu"
        )

        def init_weight(in_features, out_features):
            # Plain standard-normal init. NOTE(review): unscaled N(0, 1)
            # weights can saturate tanh; consider scaling (e.g. * 0.01) or
            # Xavier init — left unchanged here to preserve behavior.
            # nn.Parameter sets requires_grad=True itself, so the redundant
            # requires_grad flag on the raw tensor has been dropped.
            return torch.randn((in_features, out_features), device=self.device)

        # Input-to-hidden, hidden-to-hidden weights and hidden bias.
        self.w_xh = nn.Parameter(init_weight(vocab_size, hidden_size))
        self.w_hh = nn.Parameter(init_weight(hidden_size, hidden_size))
        self.b_h = nn.Parameter(torch.randn((hidden_size,), device=self.device))

        # Hidden-to-output projection and output bias.
        self.w_hq = nn.Parameter(init_weight(hidden_size, vocab_size))
        self.b_q = nn.Parameter(torch.randn((vocab_size,), device=self.device))

    def forward(self, x, h):
        """Run the RNN over a sequence; return only the LAST step's logits.

        Args:
            x: iterable of time steps; each step is a (batch, vocab_size)
               tensor (i.e. x is (seq_len, batch, vocab_size) when a tensor).
            h: initial hidden state of shape (batch, hidden_size).

        Returns:
            (output, h): ``output`` is the final step's logits of shape
            (batch, vocab_size), or ``None`` when ``x`` is empty; ``h`` is
            the final hidden state.
        """
        last_h = None
        for step in x:
            h = torch.tanh(step @ self.w_xh + h @ self.w_hh + self.b_h)
            last_h = h
        # Project only the final hidden state: the original computed the
        # output at every step and discarded all but the last, wasting a
        # (hidden, vocab) matmul per time step. Result is identical.
        output = None if last_h is None else last_h @ self.w_hq + self.b_q
        return output, h
