import os
import tensorflow as tf
import tensorflow.examples.tutorials.mnist as mnist

# Location of the gzipped MNIST files, relative to this script's directory.
data_set_path = os.path.join('..', 'DATASET', 'MNIST', 'gz')
# read_data_sets loads (and downloads if absent) MNIST into train/validation/
# test splits; one_hot=True yields 10-dimensional one-hot label vectors.
mnist_data = mnist.input_data.read_data_sets(data_set_path, one_hot=True)

# Sanity print of the dataset container type.
print(type(mnist_data))

# --- Hyperparameters ---
# Each 28x28 MNIST image is fed to the LSTM as 28 time steps of 28 pixels.
time_steps = 28        # rows per image = RNN sequence length
num_units = 128        # LSTM hidden state size
n_input = 28           # columns per image = features per time step
learning_rate = 0.001  # Adam step size
n_classes=10           # digits 0-9
batch_size=128         # examples per training step

# Output projection: maps the final LSTM output (num_units) to class scores.
out_weights = tf.Variable(tf.random_normal([num_units, n_classes]))
out_bias = tf.Variable(tf.random_normal([n_classes]))

# Graph inputs. x carries images as (batch, time_steps, n_input, 1);
# y carries one-hot labels as (batch, n_classes).
x = tf.placeholder(tf.float32, [None, time_steps, n_input, 1], name='x')
y = tf.placeholder(tf.float32, [None, n_classes])

# Drop the trailing channel dimension: (batch, 28, 28, 1) -> (batch, 28, 28),
# so each image row becomes one time step of length n_input.
# Renamed from `input`, which shadowed the Python builtin.
rnn_input = tf.reshape(x, [tf.shape(x)[0], time_steps, n_input])

# Single forward LSTM unrolled dynamically over the 28 rows of each image.
fw_cell = tf.nn.rnn_cell.LSTMCell(num_units, forget_bias=1.0)
# outputs has shape (batch, time_steps, num_units); the final state is unused.
outputs, _ = tf.nn.dynamic_rnn(fw_cell, rnn_input, dtype=tf.float32)

# Keep only the last time step's output, shape (batch, num_units).
# Replaces the previous tf.slice + tf.reshape pair, which hard-coded the
# hidden size as 128 instead of using num_units.
last_output = outputs[:, -1, :]

# Project to per-class logits; named 'logits' so the tensor can be fetched
# by name from a frozen/loaded graph.
logits_raw = tf.matmul(last_output, out_weights)
prediction = tf.nn.bias_add(logits_raw, out_bias, name='logits')

# --- Loss ---
# softmax_cross_entropy_with_logits is deprecated in TF 1.x; the _v2 variant
# is numerically identical here (labels come from a placeholder, so the fact
# that _v2 also allows gradients into `labels` has no effect).
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(logits=prediction, labels=y))

# --- Optimization ---
opt = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss)

# Checkpointing: retain only the 4 most recent checkpoints on disk.
saver = tf.train.Saver(max_to_keep=4)

# --- Model evaluation ---
# A prediction is correct when its argmax class matches the one-hot label.
correct_prediction = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

# Op that initializes all tf.Variables (weights, biases, optimizer slots).
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)

    # The serialized graph structure never changes during training, so write
    # the pbtxt once up front instead of rewriting it at every checkpoint.
    tf.train.write_graph(sess.graph_def, '.', 'lstm_mnist_dynamic.pbtxt')

    # `step` (renamed from `iter`, which shadows the builtin) runs 1..1799
    # inclusive, matching the original `while iter < 1800` loop.
    for step in range(1, 1800):
        batch_x, batch_y = mnist_data.train.next_batch(batch_size=batch_size)
        # Placeholder x expects (batch, time_steps, n_input, 1).
        batch_x = batch_x.reshape((batch_size, time_steps, n_input, 1))

        sess.run(opt, feed_dict={x: batch_x, y: batch_y})

        if step % 10 == 0:
            # Fetch accuracy and loss in ONE run so the forward pass is
            # computed once (the original ran the graph twice per report).
            acc, los = sess.run([accuracy, loss],
                                feed_dict={x: batch_x, y: batch_y})
            print("For iter ", step)
            print("Accuracy ", acc)
            print("Loss ", los)
            print("__________________")

        if step % 100 == 0:
            # Periodic checkpoint; Saver keeps only the 4 most recent.
            saver.save(sess, os.path.join('.', 'lstm_mnist'), global_step=step)