import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
from matplotlib import pyplot as plt

# Download (into the current directory) and load the MNIST dataset with
# one-hot encoded labels; exposes mnist.train / mnist.test splits used below.
mnist = input_data.read_data_sets('.', one_hot=True)

# Optional sanity-check visualization: show the first 16 training digits
# with their labels (disabled by default).
# plt.figure(figsize=(8, 8))
# for idx in range(16):
#     plt.subplot(4, 4, idx + 1)
#     plt.axis('off')
#     plt.title('[{}]'.format(np.argmax(mnist.train.labels[idx])))
#     plt.imshow(mnist.train.images[idx].reshape((28, 28)))
# plt.show()

# Learning rate is a placeholder so it can be decayed over training and fed
# per-step via feed_dict in the training loop below.
learning_rate = tf.placeholder(tf.float32)

# Single-layer softmax classifier: 784 flattened pixels -> 10 class logits.
x = tf.placeholder(tf.float32, [None, 784], name='x')
W = tf.Variable(tf.truncated_normal([784, 10]), name='weight')
b = tf.Variable(tf.zeros([10]), name='bias')
logits = tf.matmul(x, W) + b

# One-hot ground-truth labels.
y = tf.placeholder(tf.float32, [None, 10], name='y')

# Cross-entropy loss averaged over the batch. softmax_cross_entropy_with_logits
# applies softmax internally, so it is fed the raw logits, not `pred`.
cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=logits))

train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy)

# Softmax probabilities, kept for inspection/visualization.
pred = tf.nn.softmax(logits)

# Per-example boolean: does the predicted class (argmax of logits) match the
# label's class?  (tf.argmax replaces the deprecated tf.arg_max alias.)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(logits, 1))
# Cast booleans to float so the mean is the accuracy in [0, 1].
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# NOTE(review): saver is constructed but never used; call
# saver.save(sess, path) inside the loop to actually checkpoint.
saver = tf.train.Saver()

# Step-wise learning-rate decay: 1.0 for the first 1000 steps, then 0.3,
# then 0.1 after step 2000.
lr = 1.0
for step in range(3000):
    if step > 1000:
        lr = 0.3
    if step > 2000:
        lr = 0.1
    batch_x, batch_y = mnist.train.next_batch(32)
    # One forward + backward pass; also fetch the cross-entropy for logging.
    _, loss = sess.run(
        [train_step, cross_entropy],
        feed_dict={
            x: batch_x,
            y: batch_y,
            learning_rate: lr
        })

    # Every 100 steps report the loss, the accuracy on the current training
    # batch, and the accuracy on the full test set.
    if (step + 1) % 100 == 0:
        print('#' * 10)
        print('step [{}], entropy loss: [{}]'.format(step + 1, loss))
        print(sess.run(accuracy, feed_dict={x: batch_x, y: batch_y}))
        print(sess.run(accuracy, feed_dict={x: mnist.test.images, y: mnist.test.labels}))

# final_pred, acc = sess.run(
#     [pred, accuracy],
#     feed_dict={
#         x: mnist.test.images[:16],
#         y: mnist.test.labels[:16],
#         learning_rate: 1.0
#         })
# orders = np.argsort(final_pred)
#
# plt.figure(figsize=(8, 8))
# for idx in range(16):
#     order = orders[idx, :][-1]
#     prob = final_pred[idx, :][order]
#     plt.subplot(4, 4, idx + 1)
#     plt.axis('off')
#     plt.title('{}: [{}]-[{:.1f}%]'.format(
#         np.argmax(mnist.test.labels[idx]), order, prob * 100))
#     plt.imshow(mnist.test.images[idx].reshape((28, 28)))
# plt.show()