import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import layers, optimizers, activations, losses, metrics, \
    callbacks, utils
import sys
import os
from python_ai.common.xcommon import *

# Fix RNG seeds so weight initialization and training are reproducible.
SEED = 777
np.random.seed(SEED)
tf.random.set_seed(SEED)

filename = os.path.basename(__file__)  # used below to name the log directory

# Hyper-parameters.
ver = 'v1.0'
alpha = 0.001      # learning rate
n_epochs = 40      # NOTE(review): not referenced by fit() below — epochs is hard-coded there; confirm intent
batch_size = 64    # NOTE(review): not passed to fit(); Keras' default batch size applies
n_rnn_units = 128  # LSTM hidden-state width

sample = 'hihello'

# Build the character vocabulary from the sample string.
# sorted() makes the char<->index mapping deterministic across runs:
# plain set iteration order depends on string hash randomization, which
# the numpy/tf seeds above do not control. Also avoids shadowing the
# builtin `dict`, which the original code did.
char_set = sorted(set(sample))
len_dict = len(char_set)

idx2char = list(char_set)                                # index -> character
char2idx = {ch: i for i, ch in enumerate(idx2char)}      # character -> index
print(idx2char)
print(char2idx)

# Next-character prediction: input is the sample minus its final char,
# target is the sample shifted left by one position.
x_str, y_str = sample[:-1], sample[1:]

n_steps = len(x_str)  # sequence length fed to the RNN

# Map characters to integer indices.
x_idx = [char2idx[c] for c in x_str]
y_idx = [char2idx[c] for c in y_str]

# One-hot encode, then add a leading batch dimension of 1.
x = utils.to_categorical(x_idx, len_dict)
print(type(x))
x = tf.reshape(x, [-1, n_steps, len_dict])
print('x', tf.shape(x).numpy())  # [1 6 5]
y = utils.to_categorical(y_idx, len_dict)
y = tf.reshape(y, [-1, n_steps, len_dict])
print('y', tf.shape(y).numpy())  # [1 6 5]

# Char-level sequence model: an LSTM over one-hot inputs followed by a
# per-timestep softmax over the vocabulary.
model = keras.Sequential()
model.add(layers.LSTM(n_rnn_units,
                      input_shape=(n_steps, len_dict),
                      return_sequences=True))  # emit an output at every step
model.add(layers.TimeDistributed(layers.Dense(len_dict)))
model.add(layers.Activation(activation=activations.softmax))
model.summary()

model.compile(optimizer=optimizers.RMSprop(learning_rate=alpha),
              loss=losses.categorical_crossentropy,
              metrics=[metrics.categorical_accuracy])


class MyEarlyStopping(callbacks.EarlyStopping):
    """EarlyStopping that stays dormant until the monitored metric first
    exceeds ``my_monitor_min_val``; only then does the inherited
    patience / min_delta bookkeeping run."""

    def __init__(self, my_monitor_min_val, **kwargs):
        super().__init__(**kwargs)
        # Threshold the monitored metric must exceed before early-stopping
        # logic is allowed to engage.
        self.my_monitor_min_val = my_monitor_min_val

    def on_epoch_end(self, epoch, logs=None):
        current = self.get_monitor_value(logs)
        # get_monitor_value returns None (with a warning) when the monitored
        # key is missing from `logs`; the original `current > float`
        # comparison would then raise TypeError, so guard explicitly.
        if current is not None and current > self.my_monitor_min_val:
            super().on_epoch_end(epoch, logs)


# TensorBoard logging plus gated early stopping; logs land under
# _log/<script name>/<ver>.
logdir = os.path.join('_log', filename, ver)
tb_callback = callbacks.TensorBoard(log_dir=logdir,
                                    update_freq='batch',
                                    profile_batch=0)
early_stopping = MyEarlyStopping(
    my_monitor_min_val=0.75,  # stay dormant until accuracy exceeds 0.75
    monitor='categorical_accuracy',
    min_delta=1e-3,
    patience=40,
    verbose=1,
    restore_best_weights=True,
)
# NOTE(review): epochs is hard-coded to 800 while n_epochs=40 is defined
# above but never used — confirm which value is intended.
model.fit(x, y, epochs=800,
          callbacks=[tb_callback, early_stopping])

sep('PREDICTION:')
pred = model.predict(x, verbose=1)
# Decode each predicted per-step probability sequence back into a string
# by taking the argmax character at every timestep.
for i, probs in enumerate(pred):
    best_idx = np.argmax(probs, axis=1)
    decoded = ''.join(idx2char[k] for k in best_idx)
    print(f'#{i}: {decoded}')
