import tensorflow as tf
from tensorflow import keras as K
from tensorflow.keras import layers as L


class AlexNet(K.Model):
    """AlexNet convolutional feature extractor.

    [n, 227, 227, 1 or 3] -> [n, 4096]

    NOTE(review): there is no final classification layer here — the network
    ends after the second 4096-d fully-connected block, so the output is
    [n, 4096] features, not [n, c] logits. Attach a Dense(num_classes)
    head downstream for classification.

    ref: http://datahacker.rs/tf-alexnet/
    """
    def __init__(self):
        super().__init__()
        # Stage 1: 96 filters, 11x11 kernel, stride 4 -> LRN -> 3x3/2 max-pool.
        self.first = K.Sequential([
            _conv(96, 11, 4),
            _lrn(),
            L.MaxPool2D(3, 2),
        ])
        # Stage 2: 256 filters, 5x5 kernel -> LRN -> 3x3/2 max-pool.
        self.second = K.Sequential([
            _conv(256, 5, 1, padding="SAME"),
            _lrn(),
            L.MaxPool2D(3, 2),
        ])
        # Stage 3: three stacked 3x3 convs. The canonical AlexNet filter
        # counts are 384-384-256; the middle conv previously read 684,
        # a typo for 384.
        self.third = K.Sequential([
            _conv(384, 3, 1, padding="SAME"),
            _conv(384, 3, 1, padding="SAME"),
            _conv(256, 3, 1, padding="SAME"),
            L.MaxPool2D(3, 2),
            L.Flatten(),
        ])
        # Two 4096-d fully-connected blocks, each followed by 0.5 dropout.
        self.fc = L.Dense(4096, "relu")
        self.drop = L.Dropout(0.5)
        self.fc2 = L.Dense(4096, "relu")
        self.drop2 = L.Dropout(0.5)

    def call(self, x, training=False):
        """Forward pass. Dropout is active only when training=True."""
        x = self.first(x)
        x = self.second(x)
        x = self.third(x)
        x = self.fc(x)
        x = self.drop(x, training=training)
        x = self.fc2(x)
        x = self.drop2(x, training=training)
        return x


def _conv(filters, kernel_size, strides, padding="VALID"):
    """Build a Conv2D layer with ReLU activation and He-normal init."""
    return L.Conv2D(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        activation="relu",
        kernel_initializer="he_normal",
    )


def _lrn(depth_radius=5, bias=2, alpha=1e-4, beta=0.75):
    """Wrap tf.nn.local_response_normalization in a Lambda layer."""
    def normalize(x):
        return tf.nn.local_response_normalization(
            x, depth_radius=depth_radius, bias=bias, alpha=alpha, beta=beta)
    return L.Lambda(normalize)
