# -*- coding: utf-8 -*-

import numpy as np
np.random.seed(1337) # for reproducibility
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense, Activation,Convolution2D,MaxPooling2D, Flatten
from tensorflow.keras.optimizers import RMSprop,Adam

# Download MNIST to '~/.keras/datasets/' the first time this is called.
# X shape: (60000, 28, 28) grayscale images; y shape: (60000,) integer labels.
(X_train, y_train), (X_test, y_test) = mnist.load_data()

print('X_train.shape=', X_train.shape)  # X_train.shape= (60000, 28, 28)
print('y_train.shape=', y_train.shape)  # y_train.shape= (60000,)

# Data pre-processing.
# Reshape to (N, channels, height, width) -- channels-first layout, matching the
# model's input_shape=(1, 28, 28); grayscale images have a single channel.
# Fix: cast to float32 and scale pixel values from [0, 255] to [0, 1] -- feeding
# raw uint8 intensities makes gradient-based training markedly worse.
X_train = X_train.reshape(-1, 1, 28, 28).astype('float32') / 255.0
X_test = X_test.reshape(-1, 1, 28, 28).astype('float32') / 255.0
# One-hot encode the integer labels (0-9) for categorical_crossentropy.
y_train = np_utils.to_categorical(y_train, num_classes=10)
y_test = np_utils.to_categorical(y_test, num_classes=10)

print('X_train.shape=', X_train.shape)  # X_train.shape= (60000, 1, 28, 28)
print('y_train.shape=', y_train.shape)  # y_train.shape= (60000, 10)

# Build the CNN with the Sequential API.
# NOTE(review): the data is reshaped to (N, 1, 28, 28) (channels first), but
# tf.keras defaults to data_format='channels_last', which would silently treat
# the trailing 28 as the channel axis. Pass data_format='channels_first'
# explicitly so every layer interprets the input as (channels=1, h=28, w=28).
model = Sequential()

# Conv layer 1, output shape (32, 28, 28)
model.add(Convolution2D(
            filters=32,
            kernel_size=5,
            padding='same',                 # 'same' keeps the 28x28 spatial size
            data_format='channels_first',
            input_shape=(1,                 # channel: grayscale -> 1 layer
                         28, 28)            # height & width
        ))
model.add(Activation('relu'))

# Pooling layer 1 (max pooling), output shape (32, 14, 14)
model.add(MaxPooling2D(
            pool_size=(2, 2),
            strides=(2, 2),
            padding='same',
            data_format='channels_first',
        ))

# Conv layer 2, output shape (64, 14, 14)
model.add(Convolution2D(
            filters=64,
            kernel_size=5,
            padding='same',
            data_format='channels_first',
        ))
model.add(Activation('relu'))

# Pooling layer 2 (max pooling), output shape (64, 7, 7)
model.add(MaxPooling2D(
            pool_size=(2, 2),
            strides=(2, 2),
            padding='same',
            data_format='channels_first',
        ))

# Fully connected layer 1: input shape 64 * 7 * 7 = 3136, output shape (1024)
model.add(Flatten())
model.add(Dense(1024))
model.add(Activation('relu'))

# Fully connected layer 2: shape (10) for the 10 digit classes
model.add(Dense(10))
model.add(Activation('softmax'))

# Another way to define your optimizer: instantiate it so the learning rate
# can be tuned explicitly.
adam = Adam(learning_rate=1e-4)
# Fix: summary() prints the model itself and returns None, so wrapping it in
# print() emitted a spurious trailing 'None'.
model.summary()
# categorical_crossentropy matches the one-hot labels; track accuracy too.
model.compile(
        optimizer=adam,
        loss='categorical_crossentropy',
        metrics=['accuracy']
    )

print('Training ----------')
# Train for one epoch with mini-batches of 32.
model.fit(X_train, y_train, epochs=1, batch_size=32)

print('\nTesting ----------')
# Evaluate with the loss and metrics configured in compile().
loss, accuracy = model.evaluate(X_test, y_test)

print('test loss: ', loss)
# Fix: corrected the 'accruracy' typo in the user-facing output.
print('test accuracy: ', accuracy)