from tensorflow.keras import Model, layers, activations, Sequential


class VGG(Model):
    """VGG-style classifier: a conv backbone followed by an FC head.

    Args:
        features: Keras layer/model mapping input images to a 4-D
            feature map (batch, H, W, C), e.g. built by make_layers().
        num_classes: width of the final logits layer (default 10).
    """

    def __init__(self, features, num_classes=10):
        super().__init__()
        self.features = features
        # Collapses the spatial dimensions to (batch, channels).
        self.avgpool = layers.GlobalAveragePooling2D()
        self.flatten = layers.Flatten()
        # Two dropout-regularized hidden layers, then raw logits
        # (no softmax — pair with a from_logits loss).
        self.classifier = Sequential([
            layers.Dense(4096, activation=activations.relu),
            layers.Dropout(0.4),
            layers.Dense(4096, activation=activations.relu),
            layers.Dropout(0.4),
            layers.Dense(num_classes),
        ])

    def call(self, inputs, training=None, mask=None):
        """Forward pass: backbone -> global pooling -> classifier logits."""
        feature_map = self.features(inputs)
        pooled = self.flatten(self.avgpool(feature_map))
        return self.classifier(pooled)


# Layer configurations for the VGG variants. Each int is the output-channel
# count of a 3x3 convolution; 'M' marks a 2x2 max-pooling layer.
# The keys appear to follow the table labels of the original VGG paper
# ('D' -> VGG-16, 'E' -> VGG-19, per the factory functions below).
cfgs = {
    'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
    'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
}


def make_layers(cfg, batch_norm=False):
    """Build the VGG convolutional backbone from a config list.

    Args:
        cfg: sequence of ints and the marker 'M'. An int v becomes a
            Conv2D(v, kernel_size=3, strides=1, padding='same') + ReLU;
            'M' becomes a 2x2 MaxPool2D with stride 2.
        batch_norm: if True, insert BatchNormalization between each
            convolution and its ReLU.

    Returns:
        A Sequential model containing the stacked layers.
    """
    layer_list = []
    for v in cfg:
        if v == 'M':
            layer_list += [layers.MaxPool2D(2, 2)]
        else:
            conv2d = layers.Conv2D(v, 3, 1, 'same')
            if batch_norm:
                # Fix: Keras BatchNormalization's first positional arg is
                # `axis`, not a channel count (that is PyTorch's BatchNorm2d
                # signature). Passing v (e.g. 64) selected a nonexistent
                # axis; the default axis=-1 normalizes over channels.
                layer_list += [conv2d, layers.BatchNormalization(), layers.ReLU()]
            else:
                layer_list += [conv2d, layers.ReLU()]
    return Sequential(layer_list)


def _vgg(cfg, batch_norm):
    """Instantiate a VGG model from a cfgs key and a batch-norm flag."""
    backbone = make_layers(cfgs[cfg], batch_norm=batch_norm)
    return VGG(backbone)


def vgg16():
    """VGG-16 (configuration 'D') without batch normalization."""
    return _vgg('D', batch_norm=False)


def vgg16_bn():
    """VGG-16 (configuration 'D') with batch normalization."""
    return _vgg('D', batch_norm=True)


def vgg19():
    """VGG-19 (configuration 'E') without batch normalization."""
    return _vgg('E', batch_norm=False)


def vgg19_bn():
    """VGG-19 (configuration 'E') with batch normalization."""
    return _vgg('E', batch_norm=True)
