# -*- coding: utf-8 -*-
"""
------------------------------------------------------------------------------
    File Name:  minist_demo
    Author   :  wanwei1029
    Date     :  2018/12/14
    Desc     :
------------------------------------------------------------------------------
"""
import numpy as np
import os
from keras.models import Sequential
from keras.layers.core import Dense, Dropout
from keras.optimizers import SGD, RMSprop, Adam
from keras.utils import np_utils, plot_model
from keras.callbacks import TensorBoard
import matplotlib.pyplot as plt

# Fix the RNG seed so weight initialization and shuffling are reproducible.
np.random.seed(1671)

# --- Training hyperparameters ---
NB_EPOCH = 200          # number of passes over the training set
BATCH_SIZE = 128        # samples per gradient update
VERBOSE = 2             # Keras verbosity: one summary line per epoch
NB_CLASSES = 10         # output classes (digits 0-9)
# OPTIMIZER = SGD()
# OPTIMIZER = RMSprop()
OPTIMIZER = Adam()      # Adam converged in ~20 epochs vs ~200 for SGD (see notes in mnist_train)
N_HIDDEN = 128          # width of each hidden Dense layer
VALIDATION_SPLIT = 0.2  # fraction of training data held out for validation
RESHAPED = 784          # 28 * 28 pixels flattened to one vector
DROP_OUT = 0.3          # dropout rate after each hidden layer

def load_mnist_data(path=None):
    """Load the MNIST dataset from a local ``.npz`` archive.

    Returns image arrays ``x`` and label arrays ``y`` with shapes
    ``(60000, 28, 28) (60000,) (10000, 28, 28) (10000,)`` for the
    standard MNIST archive.

    :param path: location of the ``mnist.npz`` file. Defaults to the
        original hard-coded location for backward compatibility.
        TODO: replace the machine-specific absolute default with a
        relative path (e.g. ``./datasets/mnist.npz``).
    :return: ``(x_train, y_train), (x_test, y_test)`` tuples of ndarrays.
    """
    if path is None:
        # path = "./datasets/mnist.npz"
        path = "D:\\pycharmProjects\\python_samp\\samp\\keras_learning\\dl_action\\chapter01\\datasets\\mnist.npz"
    # Context manager guarantees the archive is closed even if a key is missing.
    with np.load(path) as f:
        x_train, y_train = f['x_train'], f['y_train']
        x_test, y_test = f['x_test'], f['y_test']
    return (x_train, y_train), (x_test, y_test)


def mnist_train():
    """Train a 3-layer MLP on MNIST, print test metrics, and plot curves.

    Pipeline: load data -> flatten/normalize -> one-hot labels -> build and
    compile the model -> fit with TensorBoard logging -> evaluate on the
    test set -> plot accuracy and loss histories.

    Experiment notes (translated from the original):
      * Two hidden Dense(128, relu) layers: test 0.075 / 0.9765.
      * One hidden layer: slightly worse (test 0.0831 / 0.9747).
      * Three hidden layers: slower and no better (test 0.1028 / 0.9736) —
        simply stacking more Dense layers does not help.
      * Adding Dropout on the two-layer net improved the test set
        (0.07831 / 0.9769).
      * RMSprop and Adam both reach SGD's 200-epoch accuracy in ~20 epochs
        at a slightly higher per-epoch cost; RMSprop test 0.16037 / 0.9792,
        Adam test 0.12164 / 0.9783.
    """
    (x_train, y_train), (x_test, y_test) = load_mnist_data()
    print("x_train type is {0}".format(type(x_train)))

    # Flatten 28x28 images to RESHAPED-dim vectors. Derive the sample count
    # from the data instead of hard-coding 60000/10000 so any subset works.
    x_train = x_train.reshape(x_train.shape[0], RESHAPED).astype('float32')
    x_test = x_test.reshape(x_test.shape[0], RESHAPED).astype('float32')

    # Normalize pixel values into [0, 1].
    x_train /= 255
    x_test /= 255

    print(x_train.shape[0], 'train samples')
    print(x_test.shape[0], 'test samples')

    # One-hot encode the integer labels for categorical_crossentropy.
    y_train = np_utils.to_categorical(y_train, NB_CLASSES)
    y_test = np_utils.to_categorical(y_test, NB_CLASSES)

    model = _build_model()
    model.summary()

    model.compile(loss='categorical_crossentropy', optimizer=OPTIMIZER, metrics=['accuracy'])
    # Fixed filename typo: was "modle.png".
    plot_model(model, to_file=os.path.join("./", "model.png"))

    tb_callback = TensorBoard(log_dir="./logs", histogram_freq=1, write_graph=True, write_images=True)
    history = model.fit(x_train, y_train, batch_size=BATCH_SIZE, epochs=NB_EPOCH, verbose=VERBOSE,
                        validation_split=VALIDATION_SPLIT, callbacks=[tb_callback])

    # score = [loss, accuracy] on the held-out test set.
    score = model.evaluate(x_test, y_test, verbose=VERBOSE)
    print("Test score:", score[0])
    print("Test accuracy:", score[1])

    # Show which metrics were recorded, then plot the training curves.
    print(history.history.keys())
    _plot_history(history)


def _build_model():
    """Build the MLP: two Dense(N_HIDDEN, relu) + Dropout blocks, softmax output.

    Uses the N_HIDDEN constant (previously the width was hard-coded to 128,
    silently ignoring the constant).
    """
    model = Sequential()
    model.add(Dense(N_HIDDEN, input_shape=(RESHAPED,), activation="relu"))
    model.add(Dropout(DROP_OUT))
    model.add(Dense(N_HIDDEN, activation="relu"))
    model.add(Dropout(DROP_OUT))
    model.add(Dense(NB_CLASSES, activation="softmax"))
    return model


def _plot_history(history):
    """Plot training/validation accuracy and loss curves from a fit history."""
    # Keras < 2.3 records 'acc'/'val_acc'; newer versions use
    # 'accuracy'/'val_accuracy'. Pick whichever key is present.
    acc_key = 'acc' if 'acc' in history.history else 'accuracy'

    plt.plot(history.history[acc_key])
    plt.plot(history.history['val_' + acc_key])
    plt.title("model accuracy")
    plt.ylabel('accuracy')
    plt.xlabel('epoch')
    plt.legend(['train', 'test'], loc='upper left')
    plt.show()

    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.title('model loss')
    plt.ylabel('loss')
    plt.xlabel('epoch')
    plt.legend(['train', 'test'], loc='upper left')
    plt.show()


def demo():
    """Run the MNIST MLP training demo end to end."""
    mnist_train()


if __name__ == '__main__':
    # Select which demo to run; dispatch through a table so new entry
    # points can be added without another if/elif branch.
    test_method = "demo"
    entry_points = {"demo": demo}
    runner = entry_points.get(test_method)
    if runner is not None:
        runner()
