from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Lambda, Conv2D, Activation, MaxPooling2D, Flatten, Dense, Dropout, \
    BatchNormalization
import tensorflow as tf
from tensorflow.keras.optimizers import Adam


def network_cnn_normalization(input_shape, num_classes):
    """Build a CNN image classifier that uses batch normalization.

    The network is four identical conv -> ReLU -> max-pool stages with
    doubling filter counts (16, 32, 64, 128), followed by two
    Dense -> BatchNorm -> ReLU -> Dropout blocks and a softmax output.

    :param input_shape: shape of the input images, e.g. (28, 28, 1)
    :param num_classes: number of output classes
    :return: an uncompiled ``tf.keras.Model``
    """
    img_input = Input(shape=input_shape, name='data')

    # Convolutional feature extractor: 4 stages, filters double each stage.
    x = img_input
    for stage, filters in enumerate((16, 32, 64, 128), start=1):
        x = Conv2D(filters, (5, 5), strides=(1, 1), padding='same',
                   name='conv{}'.format(stage))(x)
        x = Activation('relu', name='conv{}_relu'.format(stage))(x)
        x = MaxPooling2D((2, 2), strides=(2, 2), padding='valid',
                         name='pool{}'.format(stage))(x)

    # Classifier head: normalize the flattened features, then two FC blocks.
    x = Flatten()(x)
    x = BatchNormalization()(x)
    for fc_stage, units in enumerate((1024, 256), start=1):
        x = Dense(units, name='fcl{}'.format(fc_stage))(x)
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        x = Dropout(0.2)(x)

    out = Dense(num_classes, activation='softmax', name='predictions')(x)
    return Model(inputs=img_input, outputs=out)


if __name__ == '__main__':
    # Load the MNIST digit dataset (downloads on first use).
    (train_x, train_y), (test_x, test_y) = tf.keras.datasets.mnist.load_data()

    # Scale pixel values to [0, 1] and append a trailing channel axis so the
    # images match the network's expected (28, 28, 1) input shape.
    train_x = tf.expand_dims(train_x / 255.0, axis=-1)
    test_x = tf.expand_dims(test_x / 255.0, axis=-1)
    print(train_x.shape)
    print(test_x.shape)

    # Build, compile, and train the model; labels are integer class ids,
    # hence sparse categorical cross-entropy.
    model = network_cnn_normalization(input_shape=(28, 28, 1), num_classes=10)
    model.compile(optimizer=Adam(learning_rate=1e-5),
                  loss='sparse_categorical_crossentropy',
                  metrics=['acc'])
    model.fit(train_x, train_y, epochs=5, batch_size=40)

    # Report [loss, accuracy] on the held-out test set.
    print(model.evaluate(test_x, test_y))
