from keras.datasets import mnist
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD

# Load MNIST: 60000 training and 10000 test 28x28 grayscale images
# with integer labels 0-9.
(X_train, Y_train), (X_test, Y_test) = mnist.load_data()
print(X_train.shape)

import matplotlib.pyplot as plt

# Sanity check: show the first training label and its image.
print(Y_train[0])
plt.imshow(X_train[0], cmap="gray")
plt.show()

# Flatten each 28x28 image into a 784-vector and normalize pixel
# values from [0, 255] to [0.0, 1.0]. Using len(...) instead of the
# hard-coded 60000/10000 keeps this correct for any dataset size.
X_train = X_train.reshape(len(X_train), -1) / 255.0
X_test = X_test.reshape(len(X_test), -1) / 255.0

from keras.utils import to_categorical
# One-hot encode the integer labels into 10-dimensional vectors
# (e.g. 3 -> [0,0,0,1,0,0,0,0,0,0]), the format expected by a
# softmax output trained with categorical crossentropy.
Y_train = to_categorical(Y_train,10)
Y_test = to_categorical(Y_test,10)

# Build the classifier: three 256-unit ReLU hidden layers followed by a
# 10-way softmax output. Softmax (rather than sigmoid) is used because
# this is a multi-class problem: it produces a probability distribution
# over the 10 digit classes that sums to 1.
model = Sequential([
    Dense(256, activation='relu', input_dim=784),
    Dense(256, activation='relu'),
    Dense(256, activation='relu'),
    Dense(10, activation='softmax'),
])

# Compile with categorical crossentropy loss and plain SGD.
# BUG FIX: the loss was misspelled 'categorical_crossen', which makes
# model.compile raise ValueError (unknown loss identifier).
model.compile(loss='categorical_crossentropy',
            optimizer=SGD(learning_rate=0.05),  # learning rate
            metrics=['accuracy'])

# Start training.
# NOTE(review): 5000 epochs is far more than MNIST needs and will badly
# overfit; a few dozen epochs usually suffice. Kept as-is to preserve
# the original behavior.
model.fit(X_train, Y_train, epochs=5000, batch_size=1024)

# Evaluate loss and accuracy on the held-out test set.
loss, accuracy = model.evaluate(X_test, Y_test)

print(loss)
print(accuracy)

# Prediction example (uncomment to use):
# preds = model.predict(X_test)

# BUG FIX: Keras models have no `getweight` method (AttributeError at
# runtime); the correct API is `get_weights()`, which returns the
# layer weights as a list of numpy arrays.
print(model.get_weights())