#!/usr/bin/python
#coding=utf-8

import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Download and extract the MNIST data set.
# Retrieve the labels as one-hot-encoded vectors.
# NOTE(review): `tensorflow.examples.tutorials` only exists in TF 1.x builds;
# this script assumes a TensorFlow 1.x installation — confirm before running.
mnist = input_data.read_data_sets("D:/deep/data/mnist", one_hot=True)

def variable_summaries(var):
    """Attach mean/stddev/max/min scalar summaries and a histogram of `var`.

    All summary ops are grouped under a 'summaries' name scope so they show
    up together in TensorBoard.
    """
    with tf.name_scope('summaries'):
        mu = tf.reduce_mean(var)
        tf.summary.scalar('mean', mu)
        with tf.name_scope('stddev'):
            sigma = tf.sqrt(tf.reduce_mean(tf.square(var - mu)))
        tf.summary.scalar('stddev', sigma)
        # Extremes of the tensor, each logged under its own tag.
        for tag, reduce_op in (('max', tf.reduce_max), ('min', tf.reduce_min)):
            tf.summary.scalar(tag, reduce_op(var))
        tf.summary.histogram('histogram', var)
       
graph = tf.Graph()
# Set our graph as the one to add nodes to
with graph.as_default():
    # Placeholder for input examples (None = variable batch dimension);
    # each example is a flattened 28x28 image (784 floats).
    examples = tf.placeholder(shape=[None, 784], dtype=tf.float32)
    # Placeholder for one-hot labels over the 10 digit classes
    labels = tf.placeholder(shape=[None, 10], dtype=tf.float32)

    with tf.name_scope('weights'):
        weights = tf.Variable(tf.truncated_normal(shape=[784, 10], stddev=0.1))
        variable_summaries(weights)

    with tf.name_scope('biases'):
        bias = tf.Variable(tf.constant(0.05, shape=[10]))
        variable_summaries(bias)

    # Apply an affine transformation to the input features
    logits = tf.matmul(examples, weights) + bias
    # Softmax probabilities, used only for the accuracy metric below.
    estimates = tf.nn.softmax(logits)
    # Compute the per-example cross-entropy from the raw logits.
    # The fused op is numerically stable, unlike the hand-rolled
    # -reduce_sum(labels * log(softmax(logits))), where log(0) = -inf
    # produces NaN gradients once any probability underflows to zero.
    cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=labels,
                                                            logits=logits)
    # And finally the loss: mean cross-entropy over the batch
    loss = tf.reduce_mean(cross_entropy)
    # Create a gradient-descent train_step that minimizes the loss.
    # We choose a learning rate of 0.05.
    train_step = tf.train.GradientDescentOptimizer(0.05).minimize(loss)

    # Fraction of examples whose most-likely predicted class matches the label
    # (`axis=` replaces the deprecated `dimension=` argument of tf.argmax).
    correct_predictions = tf.equal(tf.argmax(estimates, axis=1),
                                   tf.argmax(labels, axis=1))
    accuracy = tf.reduce_mean(tf.cast(correct_predictions, tf.float32))

    tf.summary.scalar('loss', loss)
    tf.summary.scalar('accuracy', accuracy)


def feed_dict(train):
    """Make a TensorFlow feed_dict: maps data onto Tensor placeholders.

    Args:
        train: If True, feed the next 100-example training mini-batch;
            otherwise feed the entire test set.

    Returns:
        A dict keyed by the module-level `examples` and `labels`
        placeholders, suitable for `sess.run(..., feed_dict=...)`.
    """
    # The original also computed a dropout keep-probability `k` here, but the
    # graph has no dropout placeholder and never fed it — removed as dead code.
    if train:
        xs, ys = mnist.train.next_batch(100)
    else:
        xs, ys = mnist.test.images, mnist.test.labels
    return {examples: xs, labels: ys}
    

log_dir = 'D:/tmp/mnist_logs'
# Start every run with a clean log directory so TensorBoard does not mix
# events from earlier runs into the same plots.
if tf.gfile.Exists(log_dir):
    tf.gfile.DeleteRecursively(log_dir)
tf.gfile.MakeDirs(log_dir)


with tf.Session(graph=graph) as sess:
    sess.run(tf.global_variables_initializer())

    # Single op that evaluates every summary defined in the graph.
    merged = tf.summary.merge_all()
    train_writer = tf.summary.FileWriter(log_dir + '/train', sess.graph)
    test_writer = tf.summary.FileWriter(log_dir + '/test')

    saver = tf.train.Saver()

    for i in range(301):
        if i % 10 == 0:
            # Evaluation step: run ONLY the metric ops on the test set.
            # The original also ran `train_step` here, which trained the
            # model on the test data — that leaks test data into training
            # and inflates the reported accuracy.
            summary, loss_value, accuracy_value = sess.run(
                [merged, loss, accuracy], feed_dict=feed_dict(False))
            test_writer.add_summary(summary, i)

            print("Loss at time {0}: {1}".format(i, loss_value))
            print("Accuracy at time {0}:{1}".format(i, accuracy_value))
            print('\n')
        elif i % 100 == 99:
            # Every 100th step, additionally trace run-time statistics
            # (compute time, memory) for TensorBoard's graph view.
            run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
            run_metadata = tf.RunMetadata()
            summary, _ = sess.run([merged, train_step],
                                  feed_dict=feed_dict(True),
                                  options=run_options,
                                  run_metadata=run_metadata)
            train_writer.add_run_metadata(run_metadata, 'step%03d' % i)
            train_writer.add_summary(summary, i)
            print('Adding run metadata for', i)
        else:
            # Ordinary training step. Capture the summary produced by THIS
            # run — the original discarded it and wrote a stale `summary`
            # left over from a previous iteration into the train log.
            summary, _ = sess.run([merged, train_step], feed_dict=feed_dict(True))
            train_writer.add_summary(summary, i)

        if i % 100 == 0:
            # Periodic checkpoint, suffixed with the global step.
            save_path = saver.save(sess, 'D:/deep/model/mnist', global_step=i)
            print("Model saved in file: %s" % save_path)

    # Final checkpoint without a step suffix.
    save_path = saver.save(sess, 'D:/deep/model/mnist')
    print("Model saved in file: %s" % save_path)

    train_writer.close()
    test_writer.close()


