import nn
from ops import *
import numpy as np
import optimizor
import model
from datasets.MNIST.mnist.mnist import load_mnist


class CNN(nn.net):
    """A small convolutional classifier: conv -> relu -> pool -> flatten -> dense x2.

    The loss is produced by a fused softmax + cross-entropy layer, so
    ``__call__`` returns both the network output and the scalar loss.
    """

    def __init__(self):
        super().__init__()
        # Feature extractor.
        self.Conv1 = Convolution()
        self.Relu1 = ReLU()
        self.Pool1 = Pooling(pool_w=2, pool_h=2)
        # Classifier head. 15870 is presumably the flattened size of the
        # pooled conv feature map for MNIST input — confirm against the
        # Convolution/Pooling defaults in ops.
        self.Flatten1 = Flatten()
        self.Linear1 = MetaDense(input_channel=15870, output_channel=100, init_str='he')
        self.Relu2 = ReLU()
        self.Linear2 = MetaDense(input_channel=100, output_channel=10, init_str='he')
        self.soft = Softmax_cross_entropy()
        self.init_layers()

    def __call__(self, inputs, target):
        """Run a forward pass; return (outputs, loss) for a labelled batch."""
        feat = self.Conv1.forward(inputs)
        feat = self.Relu1(feat)
        feat = self.Pool1.forward(feat)
        feat = self.Flatten1(feat)
        feat = self.Linear1(feat)
        feat = self.Relu2(feat)
        logits = self.Linear2(feat)
        return self.soft(logits, target)

    def backward(self):
        """Backpropagate through every layer in reverse forward order.

        Each MetaDense.backward returns (grad wrt input, grad wrt weights);
        only the input gradient is chained onward — weight gradients are
        presumably retained on the layer for the optimizer. TODO confirm
        against MetaDense in ops.
        """
        delta = self.soft.backward()
        delta, _w2 = self.Linear2.backward(delta)
        delta = self.Relu2.backward(delta)
        delta, _w1 = self.Linear1.backward(delta)
        delta = self.Flatten1.backward(delta)
        delta = self.Pool1.backward(delta)
        delta = self.Relu1.backward(delta)
        self.Conv1.backward(delta)

def one_hot(labels, num_classes=None):
    """Convert a vector of integer class indices to a one-hot matrix.

    Parameters
    ----------
    labels : sequence of int
        Class indices, one per sample.
    num_classes : int, optional
        Row width of the encoding. Defaults to ``max(labels) + 1``, which
        matches the original inline encoding's behavior.

    Returns
    -------
    numpy.ndarray
        Float array of shape ``(len(labels), num_classes)`` with a single
        1 per row.
    """
    if num_classes is None:
        num_classes = max(labels) + 1
    encoded = np.zeros((len(labels), num_classes))
    for row, label in enumerate(labels):
        encoded[row][label] = 1
    return encoded


if __name__ == "__main__":
    (train, train_labels), (test, test_labels) = load_mnist(flatten=False)
    # Work on 5000-sample subsets to keep training time manageable.
    train = train[:5000]
    train_labels = train_labels[:5000]
    test = test[:5000]
    test_labels = test_labels[:5000]
    # Add a leading channel axis to each image: (H, W) -> (1, H, W).
    train = [np.array([img]) for img in train]
    test = [np.array([img]) for img in test]
    train_labels = one_hot(train_labels)
    test_labels = one_hot(test_labels)

    mynet = CNN()
    # Named mnist_model (not `model`) to avoid shadowing the imported
    # `model` module with the instance.
    mnist_model = model.Model(network=mynet)

    mnist_model.compile(
        optimizer=optimizor.AdaGrad(lr=0.1)
    )

    mnist_model.train(
        x=train,
        y=train_labels,
        epochs=7,
        batch_size=100
    )

    # Evaluate on the held-out subset.
    mnist_model.fit(
        x=test,
        y=test_labels
    )

    mnist_model.drawHistory()