from comman.activator import *
from comman.layers import *
from comman.net import Sequential
from comman.optimizers import *
from dataset.mnist.mnist import load_mnist

# Training hyperparameters.
epochs = 50
batch_size = 1000
learning_rate = 1e-1

# Load data (MNIST; labels are one-hot encoded).
(x_train, t_train), (x_test, t_test) = load_mnist(one_hot_label=True)
# Hold out the first 1000 test samples as a validation set.
# NOTE(review): these samples are NOT removed from x_test (the exclusion
# line below is commented out), so the final evaluation overlaps the
# validation set — confirm whether this leakage is intentional.
x_validation, t_validation = x_test[:1000], t_test[:1000]
# x_test, t_test = x_test[batch_size:], t_test[batch_size:]
# x_train = x_train.reshape(x_train.shape[0], 1, 28, 28)
# x_test = x_test.reshape(x_test.shape[0], 1, 28, 28)

# Convolutional network for MNIST: three conv stages followed by a dense
# classifier emitting 10 class scores (one per digit).
net = Sequential([
    # Reshape flat 784-element inputs into single-channel 28x28 images.
    Reshape((1, 28, 28)),

    # Assumes Conv2D(num_filters, kernel_size, (stride, pad)) per
    # comman.layers — TODO confirm the positional argument meaning.
    Conv2D(10, 3, (1, 1), activation='relu'),
    BatchNormalization(),
    Pooling(pool_h=2, pool_w=2, stride=1),

    Conv2D(30, 5, (2, 2), activation='relu'),
    BatchNormalization(),
    Pooling(pool_h=3, pool_w=3, stride=2),

    Conv2D(50, 5, (1, 1), activation='relu'),
    Flatten(),

    BatchNormalization(),
    # Raw logits; softmax is applied by the SoftmaxWithLoss layer used at
    # compile time, hence activation=None here.
    Dense(10, None),
])

# Wire the network to its loss function and optimizer, train it, then
# report accuracy on the held-out test set.
loss_layer = SoftmaxWithLoss()
optimizer = SGD(learning_rate)
net.compile(loss_layer, optimizer)

net.fit(
    x_train,
    t_train,
    epochs=epochs,
    batch_size=batch_size,
    validation_data=(x_validation, t_validation),
)

accuracy = net.evaluate(x_test, t_test)
print(f"accuracy={accuracy}")

import os
import pickle

# Persist the trained network next to this script so it can be reloaded later.
# NOTE(review): "dataset_dir" is a misleading historical name — it is this
# script's directory, not the dataset directory.
dataset_dir = os.path.dirname(os.path.abspath(__file__))
# os.path.join instead of string concatenation: portable and avoids
# separator mistakes.
save_file = os.path.join(dataset_dir, "cnn_net.pkl")
with open(save_file, 'wb') as f:
    # HIGHEST_PROTOCOL (previously the magic number -1) gives the most
    # compact and fastest pickle format.
    pickle.dump(net, f, pickle.HIGHEST_PROTOCOL)
