import mindspore as ms
import mindspore.nn as nn
from util.datasets import load_mnist
from util.transform import data_iter
from util.callback import AccuracyMonitor
# from mindvision.classification.dataset import Mnist


def main():
    """Train a two-hidden-layer MLP with dropout on MNIST and log accuracy.

    Loads the MNIST train/test splits from local files, builds a
    784 -> 256 -> 256 -> 10 fully connected network, and trains it for
    ``num_epochs`` epochs with SGD, reporting train/validation accuracy
    per epoch via ``AccuracyMonitor``.
    """
    # Hyperparameters: minibatch size, SGD learning rate, epoch count,
    # and the dropout arguments for the first and second hidden layers.
    batch_size, lr, num_epochs = 256, 0.1, 10
    dropout1, dropout2 = 0.2, 0.5

    # Raw MNIST arrays; 't10k' is the on-disk filename prefix MNIST uses
    # for its test split.
    features_t, labels_t = load_mnist('/shareData/mindspore-dataset/Mnist/train')
    features_v, labels_v = load_mnist('/shareData/mindspore-dataset/Mnist/test', split='t10k')

    dataset_train = data_iter(features_t, labels_t, batch_size)
    dataset_valid = data_iter(features_v, labels_v, batch_size)

    # MLP over flat 28*28 inputs. There is no nn.Flatten() layer, so this
    # assumes data_iter already yields (batch, 784) features — TODO confirm.
    # NOTE(review): nn.Dropout's first positional argument changed meaning
    # between MindSpore versions (keep_prob in 1.x vs drop probability p in
    # 2.x); confirm the installed version treats 0.2/0.5 as intended.
    net = nn.SequentialCell(
        nn.Dense(28*28, 256, activation="relu"),
        nn.Dropout(dropout1),
        nn.Dense(256, 256, activation="relu"),
        nn.Dropout(dropout2),
        nn.Dense(256, 10),
    )

    # sparse=True: labels are integer class indices, not one-hot vectors.
    loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
    opti = nn.SGD(net.trainable_params(), learning_rate=lr)

    model = ms.Model(net, loss_fn=loss, optimizer=opti, metrics={'acc'})

    # AccuracyMonitor evaluates the validation iterator during training.
    model.train(num_epochs, dataset_train, callbacks=[AccuracyMonitor(dataset_valid)])


# Run training only when executed as a script, not on import.
if __name__ == '__main__':
    main()


"""
Example training output (batch_size=256, SGD lr=0.1, 10 epochs):
epoch:[1/10] Loss:2.0149279 Train Accuracy:0.3459 Valid Accuracy:0.3501
epoch:[2/10] Loss:0.8582725 Train Accuracy:0.72285 Valid Accuracy:0.7222
epoch:[3/10] Loss:0.6015272 Train Accuracy:0.81845 Valid Accuracy:0.82
epoch:[4/10] Loss:0.50120336 Train Accuracy:0.84955 Valid Accuracy:0.8575
epoch:[5/10] Loss:0.4410843 Train Accuracy:0.8710166666666667 Valid Accuracy:0.875
epoch:[6/10] Loss:0.40757343 Train Accuracy:0.8811666666666667 Valid Accuracy:0.8821
epoch:[7/10] Loss:0.3858448 Train Accuracy:0.8857666666666667 Valid Accuracy:0.8856
epoch:[8/10] Loss:0.35340407 Train Accuracy:0.8973833333333333 Valid Accuracy:0.8985
epoch:[9/10] Loss:0.33094788 Train Accuracy:0.9036333333333333 Valid Accuracy:0.8991
epoch:[10/10] Loss:0.32787356 Train Accuracy:0.9035666666666666 Valid Accuracy:0.903
"""