import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import layers, optimizers, activations, losses, metrics, \
    callbacks, utils
import sys
import os
from python_ai.common.xcommon import *

# Fix RNG seeds so NumPy and TensorFlow runs are repeatable.
np.random.seed(777)
tf.random.set_seed(777)
filename = os.path.basename(__file__)  # used to build the TensorBoard log dir below

ver = 'v1.0'       # run/version tag appended to the log directory
alpha = 0.001      # Adam learning rate
n_epochs = 40      # NOTE(review): declared but unused -- fit() below hard-codes epochs=800
batch_size = 64    # declared mini-batch size
n_rnn_units = 128  # LSTM hidden-state width
n_steps = 10       # characters per training window

sentence = ("if you want to build a ship, don't drum up people together to "
            "collect wood and don't assign them tasks and work, but rather "
            "teach them to long for the endless immensity of the sea.")

# Character vocabulary for the one-hot encoding.
# Fix: the original iterated a raw `set(sentence)`, whose order changes
# between runs (str hashing is randomized via PYTHONHASHSEED), so the
# char<->index mapping was not reproducible despite the seeds above.
# Sorting makes the mapping deterministic.
char_dict = set(sentence)
idx2char = sorted(char_dict)  # index -> character
len_dict = len(idx2char)      # vocabulary size
print('len_dict', len_dict)
char2idx = {ch: i for i, ch in enumerate(idx2char)}  # character -> index
print(idx2char)
print(char2idx)

# Sliding windows of n_steps characters over the sentence: each input
# window x starts at position s; its target y is the same window shifted
# one character right (next-character prediction).
x_idx = [[char2idx[ch] for ch in sentence[s: s + n_steps]]
         for s in range(len(sentence) - n_steps)]
y_idx = [[char2idx[ch] for ch in sentence[s + 1: s + 1 + n_steps]]
         for s in range(len(sentence) - n_steps)]

# One-hot encode the index windows into (n_samples, n_steps, len_dict);
# the reshape restores the time dimension after to_categorical.
x = utils.to_categorical(x_idx, len_dict).reshape((-1, n_steps, len_dict))
print('x', x.shape)

y = utils.to_categorical(y_idx, len_dict).reshape((-1, n_steps, len_dict))
print('y', y.shape)

# Char-level sequence model: an LSTM over one-hot characters, then a
# per-timestep Dense layer emitting raw vocabulary logits (no softmax;
# the loss uses from_logits=True).
model = keras.Sequential()
model.add(layers.LSTM(n_rnn_units, return_sequences=True, unroll=True))
model.add(layers.TimeDistributed(layers.Dense(len_dict)))
model.build(input_shape=(None, n_steps, len_dict))
model.summary()
model.compile(
    optimizer=optimizers.Adam(learning_rate=alpha),
    loss=losses.CategoricalCrossentropy(from_logits=True),
    metrics=[metrics.categorical_accuracy],
)


class MyEarlyStopping(callbacks.EarlyStopping):
    """EarlyStopping that stays dormant until the monitored metric first
    exceeds ``my_monitor_min_val``.

    Plain EarlyStopping starts counting ``patience`` from epoch 0; this
    variant skips the parent logic while the metric is still at or below
    the threshold, so early-training noise cannot trigger a stop.
    """

    def __init__(self, my_monitor_min_val, **kwargs):
        """
        Args:
            my_monitor_min_val: value the monitored metric must exceed
                before the parent early-stopping logic is engaged.
            **kwargs: forwarded unchanged to keras.callbacks.EarlyStopping
                (monitor, patience, min_delta, ...).
        """
        super().__init__(**kwargs)
        self.my_monitor_min_val = my_monitor_min_val

    def on_epoch_end(self, epoch, logs=None):
        current = self.get_monitor_value(logs)
        # Fix: get_monitor_value returns None when the monitored key is
        # absent from logs; the original compared None > float, which
        # raises TypeError on Python 3. Skip such epochs instead.
        if current is not None and current > self.my_monitor_min_val:
            super().on_epoch_end(epoch, logs)


# Per-run TensorBoard log directory: _log/<script name>/<version tag>.
logdir = os.path.join('_log', filename, ver)
tb_callback = callbacks.TensorBoard(log_dir=logdir, update_freq='batch', profile_batch=0)
# Early stopping only becomes active once accuracy clears 0.75.
early_stopping = MyEarlyStopping(my_monitor_min_val=0.75,
                                 monitor='categorical_accuracy',
                                 min_delta=1e-3,
                                 patience=20,
                                 verbose=1,
                                 restore_best_weights=True)
# Fix: the declared batch_size hyperparameter was never passed to fit(),
# so training silently ran with Keras' default batch size of 32.
# NOTE(review): n_epochs (=40) is declared at the top but epochs=800 is
# hard-coded here; kept as-is since lowering it would change the trained
# model -- confirm which value is intended.
model.fit(x, y, batch_size=batch_size, epochs=800,
          callbacks=[tb_callback, early_stopping])

sep('PREDICTION:')
# Decode each predicted window back to text and print it next to its
# ground-truth target for a side-by-side comparison.
predictions = model.predict(x, verbose=1)
for sample_no, logits in enumerate(predictions):
    best_idx = logits.argmax(axis=1)  # most likely char index per timestep
    predicted = ''.join(idx2char[k] for k in best_idx)
    expected = ''.join(idx2char[k] for k in y_idx[sample_no])
    print(f'#{sample_no}: |{expected}| => |{predicted}|')
