import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import layers, optimizers, activations, losses, metrics, \
    callbacks, utils
import sys
import os
from python_ai.common.xcommon import *
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt

# Fix RNG seeds so weight init and any shuffling are reproducible.
np.random.seed(777)
tf.random.set_seed(777)
filename = os.path.basename(__file__)  # used below to namespace save/log dirs

ver = 'v4.4'          # experiment version tag (save/log sub-directory name)
alpha = 0.001         # Adam learning rate
n_epochs = 800        # maximum number of training epochs
n_rnn_units = 128     # hidden units in each LSTM layer
n_steps = 7           # lookback window length (days per sample)
training_rate = 0.7   # fraction of rows used for training

# Load the daily stock CSV; header=1 because the usable column names sit
# on the file's second line.
df = pd.read_csv('../teachers/data-02-stock_daily.csv', header=1)
print('df', df.shape)
print(df[:10])

# Rows are stored newest-first; flip them into chronological order.
df = df.iloc[::-1]
m, n = df.shape

# Rescale every column into [0, 1]; fit_transform returns a numpy ndarray,
# so from here on `df` is no longer a DataFrame.
scaler = MinMaxScaler(feature_range=(0, 1))
df = scaler.fit_transform(df)

# Build sliding windows: each sample x[i] is n_steps consecutive days of
# all n features; the target y[i] is the next day's (scaled) last column.
eps = 1e-8  # keeps targets strictly positive so MAPE never divides by zero
x = []
y = []
for i in range(m - n_steps):
    x.append(df[i:i + n_steps])        # window, shape (n_steps, n)
    y.append(df[i + n_steps, -1:])     # following day's last column
x = np.array(x)
y = np.array(y) + eps
print('x', x.shape)
print('y', y.shape)

# Chronological split: the first m_train windows train, the rest test.
# NOTE(review): m_train is derived from the raw row count m, not len(x);
# kept as-is because it also serves as the full-batch size in fit() below.
m_train = int(np.ceil(m * training_rate))
# BUGFIX: x/y hold only m - n_steps windows, so the previous
# `m_test = m - m_train` overstated the test count by n_steps.
m_test = len(x) - m_train
x_train, x_test = np.split(x, [m_train])
y_train, y_test = np.split(y, [m_train])

# Sanity check that no target is (near-)zero, which would blow up MAPE.
print('y_train', np.unique(np.isclose(y_train, 0.)))
print('y_test', np.unique(np.isclose(y_test, 0.)))

# Two stacked LSTMs over the n_steps-day window, then one linear unit
# regressing the next day's scaled closing value.
model = keras.Sequential()
model.add(layers.LSTM(n_rnn_units, return_sequences=True, unroll=True,
                      input_shape=(n_steps, n)))
model.add(layers.LSTM(n_rnn_units, return_sequences=False, unroll=True))
model.add(layers.Dense(1))
model.summary()

# MSE is both the loss and a logged metric; MAPE is tracked as a second,
# more interpretable metric.
model.compile(
    optimizer=optimizers.Adam(learning_rate=alpha),
    loss=losses.mean_squared_error,
    metrics=[metrics.mean_squared_error,
             metrics.mean_absolute_percentage_error],
)

# Checkpoints are namespaced by script name and version so runs of
# different versions never clobber each other.
savedir = os.path.join('_save', filename, ver)
savepath = os.path.join(savedir, 'save.tmp.dat')
os.makedirs(savedir, exist_ok=True)

# A non-empty save dir is treated as "a checkpoint exists".
# NOTE(review): this only checks that the directory contains *some* file,
# not that savepath's checkpoint files are present — confirm nothing else
# ever lands in this directory.
if len(os.listdir(savedir)) != 0:
    model.load_weights(savepath)
    print('LOADED')
else:
    class MyEarlyStopping(callbacks.EarlyStopping):
        """EarlyStopping that stays inactive until the monitored metric
        first drops below ``my_monitor_min_val``, only then delegating to
        the normal patience-based logic (useful for metrics such as MAPE
        that must reach a sane range before counting patience)."""

        def __init__(self, my_monitor_min_val, **kwargs):
            super().__init__(**kwargs)
            self.my_monitor_min_val = my_monitor_min_val

        def on_epoch_end(self, epoch, logs=None):
            current = self.get_monitor_value(logs)
            # BUGFIX: get_monitor_value returns None when the monitored
            # key is missing from logs; `None < float` raises TypeError
            # in Python 3, so skip such epochs instead of crashing.
            if current is None:
                return
            if current < self.my_monitor_min_val:  # ATTENTION It is less than for mape
                super().on_epoch_end(epoch, logs)


    logdir = os.path.join('_log', filename, ver)
    tb_callback = callbacks.TensorBoard(log_dir=logdir, update_freq='batch', profile_batch=0)
    # Alternative, currently disabled: gate stopping on MAPE instead.
    # early_stopping = MyEarlyStopping(my_monitor_min_val=5,
    #                                  monitor='val_mean_absolute_percentage_error',
    #                                  min_delta=0.1,
    #                                  patience=5,
    #                                  verbose=1,
    #                                  restore_best_weights=True)
    early_stopping = callbacks.EarlyStopping(monitor='val_mean_squared_error',
                                             min_delta=1e-4,
                                             patience=10,
                                             verbose=1,
                                             restore_best_weights=True)
    # Full-batch training: batch_size equals the training-set size.
    model.fit(x_train, y_train, batch_size=m_train, epochs=n_epochs,
              callbacks=[tb_callback, early_stopping],
              validation_data=(x_test, y_test))
    model.save_weights(savepath)
    print('SAVED')

sep('PREDICTION:')
pred = model.predict(x, verbose=1)
pred_train = model.predict(x_train, verbose=1)
pred_test = model.predict(x_test, verbose=1)

# One subplot per (target, prediction) pair on a 2x2 grid:
# blue = ground truth, red = model output.
plt.figure(figsize=(12, 12))
panels = [
    (y, pred, 'All data'),
    (y_train, pred_train, 'Training data'),
    (y_test, pred_test, 'Testing data'),
]
for idx, (truth, guess, title) in enumerate(panels, start=1):
    plt.subplot(2, 2, idx)
    plt.plot(truth.ravel(), 'b-')
    plt.plot(guess.ravel(), 'r-')
    plt.title(title)
plt.show()
