import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers import  Conv2D, MaxPooling2D, Flatten
from keras.optimizers import SGD, Adam
from keras.utils import np_utils
from keras.datasets import mnist


def load_data(number=10000):
    """Load MNIST, flatten, normalize, and one-hot encode.

    Parameters
    ----------
    number : int, optional
        Number of training samples to keep (default 10000). The full
        training set is 60000; a subset keeps training fast.

    Returns
    -------
    ((x_train, y_train), (x_test, y_test))
        x arrays are float32 of shape (N, 784) scaled to [0, 1];
        y arrays are one-hot float arrays of shape (N, 10).
    """
    (x_train, y_train), (x_test, y_test) = mnist.load_data('./mnist.npz')
    # Keep only the first `number` training samples.
    x_train = x_train[0:number]
    y_train = y_train[0:number]
    # Flatten 28x28 images into 784-length vectors for the Dense network.
    x_train = x_train.reshape(number, 28*28)
    x_test = x_test.reshape(x_test.shape[0], 28*28)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    # One-hot encode the digit labels (10 classes).
    y_train = np_utils.to_categorical(y_train, 10)
    y_test = np_utils.to_categorical(y_test, 10)
    # Scale pixel intensities from [0, 255] to [0, 1].
    x_train = x_train / 255
    x_test = x_test / 255
    return (x_train, y_train), (x_test, y_test)



if __name__ == '__main__':
    # Train a small fully-connected network on an MNIST subset and
    # report test accuracy.
    (x_train, y_train), (x_test, y_test) = load_data()
    model = Sequential()
    model.add(Dense(input_dim=28*28, units=100, activation='relu'))
    model.add(Dense(units=200, activation='relu'))
    # BUG FIX: the output layer previously used 'relu', which produces
    # unbounded (often all-zero) activations and is wrong for a 10-class
    # one-hot target. 'softmax' yields a proper probability distribution.
    model.add(Dense(units=10, activation='softmax'))

    # categorical_crossentropy is the standard loss for softmax outputs
    # with one-hot labels (mse trains far more slowly here).
    # Alternatives tried: loss='mse'; optimizer=SGD(lr=0.1).
    model.compile(loss='categorical_crossentropy', optimizer='adam',
                  metrics=['accuracy'])
    model.fit(x_train, y_train, batch_size=50, epochs=40)

    # evaluate() returns [loss, accuracy] given the compiled metrics.
    result = model.evaluate(x_test, y_test)
    print('test123.py acc: {}'.format(result))

    print(123)