import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LSTM, TimeDistributed, Activation
from tensorflow.keras import utils

sample = "hihello"
# x: hihell  (input sequence)
# y: ihello  (target: the input shifted left by one character)

# sorted() makes the char -> index mapping deterministic across runs;
# plain list(set(...)) ordering varies with Python's string-hash
# randomization, so printed indices (and any saved model) would not be
# reproducible from run to run.
char_set = sorted(set(sample))
char2idx = {w: i for i, w in enumerate(char_set)}
print("char2idx:", char2idx)

x_char = sample[:-1]
y_char = sample[1:]
print(x_char, y_char)

nb_inputs = len(char_set)   # one-hot vector width (vocabulary size)
nb_outputs = len(char_set)  # one output class per character
nb_neurons = 15             # LSTM hidden units
time_steps = len(x_char)    # sequence length fed to the LSTM

x_data = [char2idx[c] for c in x_char]
y_data = [char2idx[c] for c in y_char]

# One-hot encoding: (time_steps,) -> (time_steps, nb_inputs)
x_data = utils.to_categorical(x_data, len(char_set))
y_data = utils.to_categorical(y_data, len(char_set))

# Add the batch dimension: (1, time_steps, features), as Keras RNNs expect.
x_data = np.reshape(x_data, [-1, time_steps, nb_inputs])
y_data = np.reshape(y_data, [-1, time_steps, nb_outputs])

# Many-to-many sequence model: the LSTM emits an output at every time step
# (return_sequences=True) and one shared Dense layer maps each step's hidden
# state to per-character class scores; softmax turns them into probabilities.
# (Removed commented-out dead Dense-layer experiment.)
model = Sequential([
    LSTM(nb_neurons, input_shape=(time_steps, nb_inputs), return_sequences=True),
    TimeDistributed(Dense(nb_outputs)),
    Activation('softmax')])

model.summary()

# One-hot targets -> categorical cross-entropy per time step.
model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

# Tiny dataset (a single sequence), so many epochs are needed to memorize it.
model.fit(x_data, y_data, epochs=500)

predictions = model.predict(x_data)

# Decode each predicted sequence back into characters: pick the most
# probable class at every time step, then map indices through char_set.
for sequence_probs in predictions:
    best_indices = np.argmax(sequence_probs, axis=1)
    decoded = ''.join(char_set[idx] for idx in best_indices)
    print("predictions:", best_indices, '->', decoded)
