"""
# Reference

[Searching for MobileNetV3](https://arxiv.org/abs/1905.02244)

This model is based on the following implementations:
- https://github.com/calmisential/MobileNetV3_TensorFlow2

"""
import tensorflow as tf
from tensorflow.keras import Model, layers, activations, Sequential
from model.tf.mobilenet_v2 import make_divisible


class HSigmoid(layers.Layer):
    """Hard sigmoid activation: ReLU6(x + 3) / 6.

    A piecewise-linear, cheap-to-compute approximation of the sigmoid,
    used throughout MobileNetV3 in place of the exact sigmoid.
    """

    def __init__(self):
        super(HSigmoid, self).__init__()
        # ReLU capped at 6 gives the clipping part of the hard sigmoid.
        self.clip6 = layers.ReLU(max_value=6, name="ReLU6")

    def call(self, inputs, **kwargs):
        shifted = inputs + 3.
        return self.clip6(shifted) / 6.


class HSwish(layers.Layer):
    """Hard swish activation: x * HSigmoid(x).

    MobileNetV3's drop-in replacement for swish, built from the
    piecewise-linear hard sigmoid so it stays quantization-friendly.
    """

    def __init__(self):
        super(HSwish, self).__init__()
        self.gate = HSigmoid()

    def call(self, inputs, **kwargs):
        gated = self.gate(inputs)
        return inputs * gated


class SEBlock(layers.Layer):
    """Squeeze-and-Excitation block.

    Globally average-pools the feature map, passes the pooled vector
    through a two-layer bottleneck (reduce by ``reduction``, then expand
    back), and rescales the input channels by the resulting [0, 1] gates.

    Args:
        input_channels: number of channels of the incoming feature map.
        reduction: bottleneck reduction ratio for the squeeze layer.
        l2: L2 weight-decay coefficient applied to both dense layers.
    """

    def __init__(self, input_channels, reduction=4, l2=2e-4):
        super(SEBlock, self).__init__()
        self.reduction = reduction
        self.l2 = l2

        squeezed_units = input_channels // self.reduction
        regularizer = tf.keras.regularizers.l2(self.l2)

        self.avg_pool = layers.GlobalAveragePooling2D()
        self.fc1 = layers.Dense(units=squeezed_units, activation="relu",
                                use_bias=False, kernel_regularizer=regularizer,
                                name="Squeeze")
        self.fc2 = layers.Dense(units=input_channels, activation=HSigmoid(),
                                use_bias=False, kernel_regularizer=regularizer,
                                name="Excite")
        # Restore a (1, 1, C) spatial shape so the gates broadcast over H x W.
        self.reshape = layers.Reshape((1, 1, input_channels))

    def call(self, inputs, **kwargs):
        gates = self.avg_pool(inputs)
        gates = self.fc1(gates)
        gates = self.fc2(gates)
        gates = self.reshape(gates)
        return inputs * gates


class InvertedResidual(layers.Layer):
    """MobileNetV3 inverted-residual bottleneck block.

    Structure (expand -> depthwise -> [SE] -> project), with a residual
    shortcut when the block preserves both spatial size and channel count.

    Args:
        inp: input channel count.
        hidden_dim: expanded (hidden) channel count; when equal to ``inp``
            the pointwise expansion is skipped.
        oup: output channel count.
        kernel_size: depthwise convolution kernel size.
        stride: depthwise stride, must be 1 or 2.
        use_se: truthy to insert a Squeeze-and-Excitation block.
        use_hs: truthy for hard-swish activations, otherwise ReLU.
    """

    def __init__(self, inp, hidden_dim, oup, kernel_size, stride, use_se, use_hs):
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]

        # Shortcut is valid only when the block is shape-preserving.
        self.identity = stride == 1 and inp == oup

        # All convolutions are followed by BatchNormalization, so their
        # biases would be redundant: use_bias=False throughout.
        if inp == hidden_dim:
            self.conv = Sequential([
                # dw — BUGFIX: was a dense Conv2D; the depthwise stage must
                # apply one filter per channel (DepthwiseConv2D).
                layers.DepthwiseConv2D(kernel_size, stride, "same", use_bias=False),
                layers.BatchNormalization(),
                HSwish() if use_hs else layers.ReLU(),
                # Squeeze-and-Excite
                SEBlock(hidden_dim) if use_se else Sequential(),
                # pw-linear (no activation before the residual add)
                layers.Conv2D(oup, 1, 1, "same", use_bias=False),
                layers.BatchNormalization(),
            ])
        else:
            self.conv = Sequential([
                # pw expansion
                layers.Conv2D(hidden_dim, 1, 1, "same", use_bias=False),
                layers.BatchNormalization(),
                HSwish() if use_hs else layers.ReLU(),
                # dw — BUGFIX: was a dense Conv2D (see above).
                layers.DepthwiseConv2D(kernel_size, stride, "same", use_bias=False),
                layers.BatchNormalization(),
                # Squeeze-and-Excite
                SEBlock(hidden_dim) if use_se else Sequential(),
                HSwish() if use_hs else layers.ReLU(),
                # pw-linear
                layers.Conv2D(oup, 1, 1, "same", use_bias=False),
                layers.BatchNormalization(),
            ])

    def call(self, inputs, training=None, **kwargs):
        # Forward `training` explicitly so BatchNormalization switches
        # between batch statistics and moving averages correctly.
        x = self.conv(inputs, training=training)
        if self.identity:
            return inputs + x
        else:
            return x


class MobileNetV3(Model):
    """MobileNetV3 classifier.

    Args:
        cfgs: per-bottleneck settings, rows of
            [kernel_size, expansion_size, out_channels, use_se, use_hs, stride].
        mode: 'large' or 'small', selecting the network variant.
        num_classes: number of output classes.
        width_mult: channel width multiplier applied to the bottleneck stages.
    """

    def __init__(self, cfgs, mode, num_classes=200, width_mult=1.):
        super(MobileNetV3, self).__init__()
        # setting of inverted residual blocks
        self.cfgs = cfgs
        assert mode in ['large', 'small']

        # Stem: stride-2 3x3 conv to 16 channels with hard swish.
        self.conv1 = Sequential([
            layers.Conv2D(16, 3, 2, "same"),
            layers.BatchNormalization(),
            HSwish()
        ])

        # Stack of inverted residual bottlenecks driven by `cfgs`.
        input_channel = make_divisible(16 * width_mult, 8)
        layer_list = []
        for k, exp_size, c, use_se, use_hs, s in self.cfgs:
            output_channel = make_divisible(c * width_mult, 8)
            layer_list.append(InvertedResidual(input_channel, exp_size, output_channel, k, s, use_se, use_hs))
            input_channel = output_channel
        self.features = Sequential(layer_list)

        # Head.
        # NOTE(review): 576 channels is the paper's *small* head width; the
        # large variant uses 960 — confirm whether 'large' should differ here.
        self.conv2 = Sequential([
            layers.Conv2D(576, 1, 1, "same"),
            SEBlock(576) if mode == 'small' else Sequential()
        ])
        # NOTE(review): AveragePooling2D(1, 1) is a spatial no-op; the paper
        # pools globally before the classifier — verify intended pool size.
        self.avg_pool = Sequential([
            layers.AveragePooling2D(1, 1),
            HSwish(),
        ])
        self.classifier = Sequential([
            layers.Conv2D(1280, 1, 1, "same"),
            HSwish(),
            layers.Conv2D(num_classes, 1, 1, "same", activation=activations.softmax),
            layers.Flatten()
        ])

    def call(self, inputs, training=None, mask=None):
        x = self.conv1(inputs)
        x = self.features(x)
        x = self.conv2(x)
        x = self.avg_pool(x)
        # BUGFIX: removed stray debug `print(x.shape)` left in the forward pass.
        x = self.classifier(x)
        return x


def mobilenetv3_large(**kwargs):
    """
    Constructs a MobileNetV3-Large model.

    Keyword args are forwarded to ``MobileNetV3`` (e.g. ``num_classes``,
    ``width_mult``).
    """
    cfgs = [
        # k, t, c, SE, NL, s
        [3, 16, 16, 0, 0, 1],
        [3, 64, 24, 0, 0, 2],
        [3, 72, 24, 0, 0, 1],
        [5, 72, 40, 1, 0, 2],
        [5, 120, 40, 1, 0, 1],
        [5, 120, 40, 1, 0, 1],
        [3, 240, 80, 0, 1, 2],
        [3, 200, 80, 0, 1, 1],
        [3, 184, 80, 0, 1, 1],
        [3, 184, 80, 0, 1, 1],
        [3, 480, 112, 1, 1, 1],
        [3, 672, 112, 1, 1, 1],
        # BUGFIX: last stage matched neither paper order nor expansions;
        # Table 1 has the stride-2 layer first (exp 672), then two
        # stride-1 layers with exp 960.
        [5, 672, 160, 1, 1, 2],
        [5, 960, 160, 1, 1, 1],
        [5, 960, 160, 1, 1, 1]
    ]
    return MobileNetV3(cfgs, mode='large', **kwargs)


def mobilenetv3_small(**kwargs):
    """
    Constructs a MobileNetV3-Small model.

    Keyword args are forwarded to ``MobileNetV3`` (e.g. ``num_classes``,
    ``width_mult``).
    """
    # Rows are [kernel, expansion, out_channels, SE, hard-swish, stride].
    bottleneck_cfgs = [
        # k, t, c, SE, NL, s
        [3, 16, 16, 1, 0, 2],
        [3, 72, 24, 0, 0, 2],
        [3, 88, 24, 0, 0, 1],
        [5, 96, 40, 1, 1, 2],
        [5, 240, 40, 1, 1, 1],
        [5, 240, 40, 1, 1, 1],
        [5, 120, 48, 1, 1, 1],
        [5, 144, 48, 1, 1, 1],
        [5, 288, 96, 1, 1, 2],
        [5, 576, 96, 1, 1, 1],
        [5, 576, 96, 1, 1, 1],
    ]
    return MobileNetV3(bottleneck_cfgs, mode='small', **kwargs)
