import torch as pt
import numpy as np

# Fix RNG seeds so weight init (and any numpy randomness) is reproducible.
pt.manual_seed(1)
np.random.seed(1)

sentence = 'hihello'

# Next-character prediction: the input is the sentence minus its last
# character; the target is the sentence shifted one position to the left.
x_str_i = sentence[:-1]
y_str_i = sentence[1:]
print(x_str_i)
print(y_str_i)

# Vocabulary, sorted so the char<->index mapping is deterministic across
# runs (raw set iteration order for strings depends on PYTHONHASHSEED,
# which would defeat the seeding above).  Renamed from `dict`, which
# shadowed the builtin.
vocab = sorted(set(x_str_i) | set(y_str_i))
dict_len = len(vocab)

idx2char = list(vocab)
char2idx = {ch: i for i, ch in enumerate(idx2char)}

# Encode both strings as index sequences.
x_idx_i = [char2idx[ch] for ch in x_str_i]
y_idx_i = [char2idx[ch] for ch in y_str_i]

# Replicate the single sample into a batch of 3 identical sequences.
x_idx = np.asarray([x_idx_i] * 3, dtype=np.int32)  # (3, steps) int32 array
y_idx = [y_idx_i] * 3                              # kept as a list of lists
print(x_idx)
print(y_idx)

# One-hot encode the inputs: shape (3, steps, vocab).
x_oh = np.eye(dict_len, dtype=np.int32)[x_idx]
print(x_oh)


class MyLstmNet(pt.nn.Module):
    """Stacked LSTM followed by a linear projection back to vocab size.

    Takes one-hot inputs of shape (batch, steps, n_input) and returns
    flattened logits of shape (batch * steps, n_input), ready for a
    sparse cross-entropy loss.
    """

    def __init__(self, n_input, n_hidden, n_layers, **kwargs):
        super().__init__(**kwargs)
        self.n_hidden = n_hidden
        self.lstm = pt.nn.LSTM(n_input, n_hidden, n_layers, batch_first=True)
        self.fc = pt.nn.Linear(n_hidden, n_input)

    def forward(self, inputs):
        # LSTM output: (batch, steps, hidden); hidden/cell states discarded.
        out, _ = self.lstm(inputs)
        # Collapse batch and time so every step becomes one classification row.
        flat = out.reshape(-1, self.n_hidden)
        return self.fc(flat)


# Hyperparameters — input width is tied to the vocabulary size.
n_input = dict_len
n_hidden = 15
n_layers = 2
n_steps = len(x_idx[0])  # time steps per sequence

model = MyLstmNet(n_input, n_hidden, n_layers)

# CrossEntropyLoss consumes raw logits with integer class targets,
# so the model needs no softmax layer.
criterion = pt.nn.CrossEntropyLoss()
optim = pt.optim.Adam(model.parameters(), lr=0.01)


def accuracy(y_true, y_pred):
    """Fraction of rows in `y_pred` (logits) whose argmax equals `y_true`."""
    predicted = y_pred.argmax(dim=1)
    return (y_true == predicted).float().mean()


# Build input/target tensors directly: torch.autograd.Variable has been
# deprecated since PyTorch 0.4 (plain tensors track gradients as needed),
# and pt.tensor(...) with an explicit dtype replaces the legacy pt.Tensor
# constructor.
X = pt.tensor(x_oh, dtype=pt.float32)  # (M, steps, vocab) one-hot floats
y = pt.tensor(y_idx).reshape(-1)       # (M*steps,) int64 class targets

ITERS = 50
for i in range(ITERS):
    # --- one optimization step ---
    model.train()
    optim.zero_grad()
    h = model(X)            # logits, (M*steps, vocab)
    loss = criterion(h, y)
    loss.backward()
    optim.step()

    # --- evaluation / decoding ---
    model.eval()
    acc = accuracy(y, h)

    # Decode the per-step argmax back into one string per batch row.
    h_rows = h.detach().numpy().argmax(axis=1).reshape(-1, n_steps)
    result = [''.join(idx2char[j] for j in row) for row in h_rows]

    print(f'#{i + 1}: cost = {loss.detach().item()}, acc = {acc.detach().item()}, result = {result}')

