import tensorflow.compat.v1 as tf
import tensorflow as tsf
from tensorflow.examples.tutorials.mnist import input_data
import numpy as np
import os
import sys

# Fix the graph-level seed so weight init and shuffling are reproducible.
tf.set_random_seed(777)

# Location of the pre-downloaded MNIST data files (relative to this script).
path = r'../../../../../large_data/DL1/mnist'
if not os.path.exists(path):
    print('[[[ DATA DIR WRONG ! ]]]', file=sys.stderr)
    # BUG FIX: `os.exit` does not exist (AttributeError at runtime);
    # sys.exit(1) is the correct way to abort with a failure status.
    sys.exit(1)
# one_hot=False keeps labels as integer class ids (needed by the sparse loss).
mnist = input_data.read_data_sets(path, one_hot=False)
n_cls = len(np.unique(mnist.train.labels))   # number of classes (10 for MNIST)
_, n_features = mnist.test.images.shape      # flattened image length (784)

# ---- Hyperparameters -------------------------------------------------------
alpha = 0.001       # Adam learning rate
n_epoch = 2         # passes over the training set
batch_size = 100    # mini-batch size
n_neurons = 128     # LSTM hidden-state width
n_steps = 28        # each 28x28 image is fed as a 28-step sequence of rows
n_input = n_features // n_steps  # features per time step (784 // 28 = 28)

# ---- Graph inputs ----------------------------------------------------------
# Flattened images (batch, n_features) and integer class labels (batch,).
ph_x = tf.placeholder(tf.float32, [None, n_features], 'ph_x')
ph_y = tf.placeholder(tf.int32, [None], 'ph_y')

# ---- Model -----------------------------------------------------------------
# Reshape flat pixels into a (batch, n_steps, n_input) sequence: each image
# row becomes one RNN time step. (Renamed from `input`, which shadowed the
# builtin of the same name.)
rnn_input = tf.reshape(ph_x, [-1, n_steps, n_input], name='input')
cell = tf.nn.rnn_cell.BasicLSTMCell(n_neurons)
# outputs: (batch, n_steps, n_neurons); states: final LSTMStateTuple.
outputs, states = tf.nn.dynamic_rnn(cell, rnn_input, dtype=tf.float32)

# Output projection from the last time step's hidden state to class logits.
# BUG FIX: tf.contrib was removed in TF2, so tsf.contrib.layers
# .xavier_initializer() raises AttributeError; Xavier uniform is the same
# scheme as Glorot uniform, which compat.v1 still ships.
w = tf.get_variable('w', [n_neurons, n_cls], initializer=tf.glorot_uniform_initializer())
# BUG FIX: name='b' was passed to tf.random.normal (naming the random op),
# not to tf.Variable — the variable itself was left unnamed.
b = tf.Variable(tf.random.normal([1, n_cls], dtype=tf.float32), name='b')
logits = tf.matmul(outputs[:, -1], w) + b

# Sparse cross-entropy (integer labels), Adam optimizer, and top-1 accuracy.
cost = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=ph_y))
train = tf.train.AdamOptimizer(learning_rate=alpha).minimize(cost)
acc = tf.reduce_mean(tf.cast(tf.math.in_top_k(logits, ph_y, 1), dtype=tf.float32))

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Batch bookkeeping is loop-invariant — compute it once, not per epoch.
    n_batch = len(mnist.train.labels) // batch_size
    # Print ~20 progress lines per epoch. BUG FIX: guard against n_batch < 20,
    # where n_batch // 20 == 0 and `i % group` would raise ZeroDivisionError.
    group = max(1, n_batch // 20)
    for epoch in range(n_epoch):
        cost_avg = 0
        acc_avg = 0
        for i in range(n_batch):
            x_batch, y_batch = mnist.train.next_batch(batch_size)
            # One optimizer step; fetch cost/acc for the same batch in one run.
            _, costv, accv = sess.run([train, cost, acc], feed_dict={ph_x: x_batch, ph_y: y_batch})
            if i % group == 0:
                print(f'epoch#{epoch+1}, batch#{i + 1}: cost = {costv}, acc = {accv} [ Basic LSTM ]')
            cost_avg += costv
            acc_avg += accv
        cost_avg /= n_batch
        acc_avg /= n_batch
        # Report the final batch too, unless the loop above just printed it.
        if i % group != 0:
            print(f'epoch#{epoch + 1}, batch#{i + 1}: cost = {costv}, acc = {accv}')
        print(f'epoch#{epoch + 1}: cost avg = {cost_avg}, acc avg = {acc_avg}')
        print(f'Training acc = {sess.run(acc, feed_dict={ph_x: mnist.train.images, ph_y: mnist.train.labels})}')
        print(f'Testing acc = {sess.run(acc, feed_dict={ph_x: mnist.test.images, ph_y: mnist.test.labels})}')
