import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow import keras
# NOTE(review): the original imported `initializers`/`regularizers` from the
# private `tensorflow_core` package, which does not exist in current TF
# releases; the public `tensorflow.keras` paths provide the same names.
from tensorflow.keras import initializers
from tensorflow.keras import regularizers

# Load the MNIST dataset (downloads on first use).
(train_images, train_labels), (test_images, test_labels) = keras.datasets.mnist.load_data()

# 1. Print the shapes of the training and test sets.
print("train_images.shape", train_images.shape)
print("train_labels.shape", train_labels.shape)
print("train_labels[0]", train_labels[0])
print("test_images.shape", test_images.shape)
print("test_labels.shape", test_labels.shape)

# 2. Normalize: scale the uint8 pixel values into the [0, 1] float range.
train_images = train_images / 255
test_images = test_images / 255

# 3. Show the first 16 training images with their class label underneath.
plt.figure(figsize=(10, 10))
for idx in range(16):
    axes = plt.subplot(4, 4, idx + 1)
    axes.set_xticks([])
    axes.set_yticks([])
    axes.imshow(train_images[idx])
    axes.set_xlabel(train_labels[idx])
plt.tight_layout()  # pack the 4x4 grid neatly into the figure
plt.show()

# 4. Build a simple feed-forward network for MNIST.
model = keras.Sequential([
    # Flatten each 28x28 image into a 784-element vector.
    keras.layers.Flatten(input_shape=(28, 28)),
    # Hidden layer. The initializer now comes from the public
    # `keras.initializers` API (the original referenced the private
    # `tensorflow_core` package, which is absent from current TF releases).
    # NOTE(review): stddev=0.001 is an unusually tight init — kept as-is to
    # preserve the original training behavior.
    keras.layers.Dense(256, activation="relu",
                       kernel_initializer=keras.initializers.TruncatedNormal(
                           mean=0.0, stddev=0.001, seed=None)),
    # Output layer: raw logits for the 10 digit classes (no softmax here).
    keras.layers.Dense(10)
])
# Compile: integer labels + logits, hence SparseCategoricalCrossentropy
# with from_logits=True.
model.compile(optimizer="adam",
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=["accuracy"])
# Train on the normalized training images.
model.fit(train_images, train_labels, epochs=10)

# 5. Report accuracy on the held-out test set (target: roughly 98%).
test_loss, test_accuracy = model.evaluate(test_images, test_labels, verbose=1)
print("\nTest Accuracy", test_accuracy)

# 6. The model outputs raw logits, so append a Softmax layer to turn them
# into per-class probabilities, then predict the whole test set at once.
probability_model = tf.keras.Sequential([model, tf.keras.layers.Softmax()])
predictions = probability_model.predict(test_images)

print("predictions[0]", predictions[0])


# Render one test image together with its full 10-class prediction summary.
def plot_image(i, predictions_array, true_label, img):
    """Draw image *i* from *img*, labeled with the predicted class, its
    confidence, and the true class; blue label = correct, red = wrong.

    ``predictions_array`` is the probability vector for image *i* only.
    """
    actual = true_label[i]
    picture = img[i]

    plt.xticks([])
    plt.yticks([])
    plt.imshow(picture)

    guessed = np.argmax(predictions_array)
    color = "blue" if guessed == actual else "red"

    plt.xlabel("{} {:2.0f}% ({})".format(guessed,
                                         100 * np.max(predictions_array),
                                         actual),
               color=color)


# Bar chart of the predicted probability for each of the 10 classes.
def plot_value_array(i, predictions_array, true_label):
    """Plot ``predictions_array`` (10 probabilities for sample *i*) as bars.

    The predicted class is colored red, the true class blue — so a plot
    with no red bar means the prediction was correct.
    """
    actual = true_label[i]

    plt.xticks(range(10))
    plt.yticks([])
    bars = plt.bar(range(10), predictions_array, color="#777777")
    plt.ylim([0, 1])  # probabilities live in [0, 1]

    guessed = np.argmax(predictions_array)
    # Color red first, then blue: on a correct prediction the true-class
    # bar ends up blue (blue overwrites red), matching the original.
    bars[guessed].set_color("red")
    bars[actual].set_color("blue")


# Show the first test image side by side with its probability bar chart.
i = 0
plt.figure(figsize=(6, 3))
plt.subplot(1, 2, 1)
plot_image(i, predictions[i], test_labels, test_images)
plt.subplot(1, 2, 2)
plot_value_array(i, predictions[i], test_labels)
plt.show()

# Show a grid of test images: each image on the left, its probability
# bars on the right.
num_rows = 5
num_cols = 3
num_images = num_rows * num_cols
plt.figure(figsize=(2 * 2 * num_cols, 2 * num_rows))
for idx in range(num_images):
    plt.subplot(num_rows, 2 * num_cols, 2 * idx + 1)
    plot_image(idx, predictions[idx], test_labels, test_images)
    plt.subplot(num_rows, 2 * num_cols, 2 * idx + 2)
    plot_value_array(idx, predictions[idx], test_labels)
# tight_layout auto-adjusts subplot spacing to fill the figure.
plt.tight_layout()
plt.show()


# 7. Run the trained model on one arbitrary test image (index 1) and
# print its predicted label alongside the probability plot.
img = test_images[1]
# predict() expects a batch, so prepend a batch axis of size 1.
img = np.expand_dims(img, 0)
predictions_single = probability_model.predict(img)
plot_value_array(1, predictions_single[0], test_labels)
labels = list(range(10))  # tick labels 0-9, one per digit class
_ = plt.xticks(range(10), labels, rotation=45)
plt.show()
print(np.argmax(predictions_single))