import tensorflow
from tensorflow import keras
from sklearn.preprocessing import StandardScaler
import numpy as np
from self_mudle import plot_learning_curves

# Load Fashion-MNIST and carve the first 5000 training images off as a
# held-out validation set.
(train_x, train_y), (test_x, test_y) = keras.datasets.fashion_mnist.load_data()
valid_x, valid_y = train_x[:5000], train_y[:5000]
train_x, train_y = train_x[5000:], train_y[5000:]

# Standardize pixel intensities: fit the scaler on training pixels only,
# then apply that same transform to validation and test data. Each image
# set is flattened to a single column so all pixels share one mean/std,
# then reshaped back to (n, 28, 28).
scaler = StandardScaler()
train_x = scaler.fit_transform(
    train_x.astype(np.float32).reshape(-1, 1)
).reshape(-1, 28, 28)
valid_x = scaler.transform(
    valid_x.astype(np.float32).reshape(-1, 1)
).reshape(-1, 28, 28)
test_x = scaler.transform(
    test_x.astype(np.float32).reshape(-1, 1)
).reshape(-1, 28, 28)

# Deep fully connected classifier: 20 hidden SELU layers of 100 units each,
# followed by AlphaDropout and a 10-way softmax output.
#
# Dropout is usually placed in the last few layers, with a rate around 0.5.
# Plain dropout zeroes some activations, which shifts the distribution seen
# by later layers; AlphaDropout instead preserves the mean and variance of
# its input, keeping the self-normalizing property, so it combines well
# with SELU activations / normalization.
hidden_layers = [keras.layers.Dense(100, activation='selu') for _ in range(20)]
model = keras.Sequential(
    [keras.layers.Flatten()]
    + hidden_layers
    + [
        keras.layers.AlphaDropout(0.5),
        keras.layers.Dense(10, activation='softmax'),
    ]
)

# Build with an explicit input shape (batch dim unspecified) so summary()
# can report parameter counts.
model.build(input_shape=[None, 28, 28])
model.summary()

# sparse_categorical_crossentropy is the right loss here because the labels
# are integer class ids (0-9), not one-hot vectors.
model.compile(
    optimizer='sgd',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)

history = model.fit(train_x, train_y, epochs=3,
                    validation_data=(valid_x, valid_y))

# Fix: model.evaluate() returns [loss, metric...] — it does NOT produce
# predictions, so the original name `predict` was misleading. `predict` is
# kept bound for backward compatibility; the unpacked names say what the
# values actually are.
predict = model.evaluate(test_x, test_y)  # ~85% test accuracy
test_loss, test_accuracy = predict

plot_learning_curves.plot_learning_curves(history)