import math
import numpy as np
import matplotlib.pyplot as plt
import dezero
from dezero import Variable
from dezero import optimizers
import dezero.functions as F
from dezero.models import MLP
from dezero.datasets import MNIST,MNIST_sklearn
from dezero.dataloaders import DataLoader

# Hyperparameters and batch settings
max_epoch = 5       # number of full passes over the training set
batch_size = 100    # examples per mini-batch
hidden_size = 1000  # units in each hidden layer of the MLP
lr = 1.0            # SGD learning rate

# Training data
train_set = MNIST(train=True)
train_loader = DataLoader(train_set, batch_size)
# Test data
test_set = MNIST(train=False)
# BUG FIX: the test loader previously wrapped train_set, so any "test"
# evaluation would silently have measured training data.
test_loader = DataLoader(test_set, batch_size, shuffle=False)

# Model: MLP with two hidden layers and a 10-way output, plus SGD optimizer
model = MLP((hidden_size, hidden_size, 10), activation=F.relu)
optimizer = optimizers.SGD(lr).setup(model)

# GPU mode: when CuPy is available, move the data loaders and the model
# parameters to the GPU. The test loader must be moved as well — otherwise
# GPU-resident weights would receive CPU (NumPy) test batches and fail.
if dezero.cuda.gpu_enable:
    train_loader.to_gpu()
    test_loader.to_gpu()
    model.to_gpu()

for epoch in range(max_epoch):
    # --- training phase ---
    sum_loss, sum_acc = 0, 0
    for x, t in train_loader:  # mini-batches of training data
        # Forward pass, loss/accuracy, backward pass, parameter update.
        y = model(x)
        loss = F.softmax_cross_entropy(y, t)
        acc = F.accuracy(y, t)
        model.cleargrads()
        loss.backward()
        optimizer.update()

        # loss/acc are batch means; weight by len(t) to accumulate
        # per-example sums so the epoch average is exact.
        sum_loss += float(loss.data) * len(t)
        sum_acc += float(acc.data) * len(t)

    print(f'epoch: {epoch + 1}')
    print('train loss: {:.4f}, accuracy: {:.4f}'.format(
        sum_loss / len(train_set), sum_acc / len(train_set)
    ))

    # --- evaluation phase (test_loader was created above but never used) ---
    sum_loss, sum_acc = 0, 0
    with dezero.no_grad():  # no computation graph needed for evaluation
        for x, t in test_loader:
            y = model(x)
            loss = F.softmax_cross_entropy(y, t)
            acc = F.accuracy(y, t)
            sum_loss += float(loss.data) * len(t)
            sum_acc += float(acc.data) * len(t)
    print('test loss: {:.4f}, accuracy: {:.4f}'.format(
        sum_loss / len(test_set), sum_acc / len(test_set)
    ))

# Print the model's predicted label next to the ground-truth label for
# every test example, then display sample images from the test set.
# NOTE(review): if GPU mode was enabled above, x here is still a NumPy
# array while the weights live on the GPU — confirm before running on GPU.
labels = MNIST.labels()
for x, t in test_set:
    prediction = model(x)
    print(labels[prediction.data.argmax()], labels[t])
test_set.show()
