from tensorflow.keras import Model, layers, activations


class PlainNet(Model):
    """A plain 3-stage CNN classifier (conv -> BN -> maxpool, x3, then FC head).

    Each stage doubles the channel count (32 -> 64 -> 128) and halves the
    spatial resolution via 2x2 max-pooling. The head is a 1024-unit dense
    layer with dropout, followed by a softmax output over `num_classes`.

    Args:
        num_classes: Number of output classes (default 200, e.g. Tiny ImageNet).
    """

    def __init__(self, num_classes=200):
        super().__init__()
        # Stage 1: 32 filters, 3x3 kernel, stride 1, same padding.
        # NOTE(review): ReLU is applied inside the conv, i.e. *before* BN;
        # the more common ordering is conv -> BN -> ReLU. Kept as-is to
        # preserve the original architecture.
        self.conv1 = layers.Conv2D(32, 3, 1, "same", activation=activations.relu)
        self.bn1 = layers.BatchNormalization()
        self.maxpool1 = layers.MaxPool2D(2, strides=2)

        # Stage 2: 64 filters.
        self.conv2 = layers.Conv2D(64, 3, 1, "same", activation=activations.relu)
        self.bn2 = layers.BatchNormalization()
        self.maxpool2 = layers.MaxPool2D(2, strides=2)

        # Stage 3: 128 filters.
        self.conv3 = layers.Conv2D(128, 3, 1, "same", activation=activations.relu)
        self.bn3 = layers.BatchNormalization()
        self.maxpool3 = layers.MaxPool2D(2, strides=2)

        self.flatten = layers.Flatten()

        # Classification head: note the dense layer has no activation
        # (linear), dropout regularizes it during training only.
        self.fc = layers.Dense(1024)
        self.dropout = layers.Dropout(rate=0.25)

        self.out = layers.Dense(units=num_classes, activation=activations.softmax)

    def call(self, inputs, training=None, **kwargs):
        """Forward pass.

        Args:
            inputs: Batch of images, shape (batch, H, W, C).
            training: Whether the model is in training mode; forwarded to
                BatchNormalization (batch stats vs. moving averages) and
                Dropout (active vs. identity).

        Returns:
            Softmax class probabilities, shape (batch, num_classes).
        """
        # Fix: BatchNormalization must receive the training flag, otherwise
        # it cannot switch between batch statistics (training) and moving
        # averages (inference). Dropout below already did this correctly.
        x = self.conv1(inputs)
        x = self.bn1(x, training=training)
        x = self.maxpool1(x)

        x = self.conv2(x)
        x = self.bn2(x, training=training)
        x = self.maxpool2(x)

        x = self.conv3(x)
        x = self.bn3(x, training=training)
        x = self.maxpool3(x)

        x = self.flatten(x)
        x = self.fc(x)
        x = self.dropout(x, training=training)
        x = self.out(x)
        return x
