from helm.static.models.layers import Conv2d, GlobalAvgPool, Linear, Layer, Sequential, LayerList

__all__ = [
    'MobileNetV2'
]


def _make_divisible(v, divisor=8, min_value=None):
    """
    This function is taken from the original tf repo.
    It ensures that all layers have a channel number that is divisible by 8
    It can be seen here:
    https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
    :param v:
    :param divisor:
    :param min_value:
    :return:
    """
    if min_value is None:
        min_value = divisor
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    # Make sure that round down does not go down by more than 10%.
    if new_v < 0.9 * v:
        new_v += divisor
    return new_v


class InvertedResidual(Layer):
    """MobileNetV2 inverted residual block.

    Pointwise expansion (skipped when ``expand_ratio == 1``) -> 3x3 depthwise
    conv -> linear pointwise projection (no activation).  A residual shortcut
    is added when the block keeps both the spatial size and the channel count.
    """

    def __init__(self, in_channels, out_channels, expand_ratio=6, stride=1):
        super().__init__()
        self.strides = stride
        # Shortcut only when shapes match: stride 1 and equal channel counts.
        self.use_res_connect = stride == 1 and in_channels == out_channels

        hidden = in_channels * expand_ratio
        blocks = []
        if expand_ratio != 1:
            # 1x1 pointwise expansion with ReLU6.
            blocks.append(Conv2d(in_channels, hidden, kernel_size=1, bn=True, act='relu6'))
        # 3x3 depthwise conv (groups == channels) with ReLU6.
        blocks.append(Conv2d(hidden, hidden, kernel_size=3, bn=True, act='relu6',
                             stride=stride, groups=hidden))
        # Linear 1x1 projection: BN but intentionally no activation.
        blocks.append(Conv2d(hidden, out_channels, kernel_size=1, bn=True))
        self.conv = Sequential(*blocks)

    def forward(self, x):
        out = self.conv(x)
        return out + x if self.use_res_connect else out


class MobileNetV2(Layer):
    """MobileNetV2 (Sandler et al., 2018), sized for small inputs.

    NOTE(review): the stem conv and two middle stages use stride 1 instead of
    the paper's stride 2, and ``num_classes`` defaults to 10 — presumably a
    CIFAR adaptation; confirm against the training setup.

    :param width_multiplier: scales every layer's channel count (rounded to a
        multiple of 8 via ``_make_divisible``).
    :param num_classes: size of the final classification layer.
    """

    def __init__(self, width_multiplier=1.0, num_classes=10):
        super(MobileNetV2, self).__init__()

        # Per-stage config: [expand_ratio t, out channels c, repeats n, stride s].
        param = [
            [1, 16, 1, 1],
            [6, 24, 2, 1],
            [6, 32, 3, 1],
            [6, 64, 4, 2],
            [6, 96, 3, 1],
            [6, 160, 3, 2],
            [6, 320, 1, 1],
        ]

        # Fix: scale the stem width by the multiplier too (previously fixed
        # at 32 regardless of width_multiplier, inconsistent with the
        # reference implementation).  Identical result at the default 1.0.
        in_channels = _make_divisible(32 * width_multiplier, 8)
        # Fix: the last conv never shrinks below 1280 for width_multiplier
        # < 1.0, matching the reference implementation's max(1.0, width).
        last_channels = _make_divisible(1280 * max(1.0, width_multiplier), 8)

        layers = [Conv2d(3, in_channels, kernel_size=3, bn=True, act='relu6')]
        for t, c, n, s in param:
            out_channels = _make_divisible(c * width_multiplier, 8)
            # First block of a stage applies the stride; the rest use stride 1.
            layers.append(InvertedResidual(in_channels, out_channels, t, s))
            for _ in range(1, n):
                layers.append(InvertedResidual(out_channels, out_channels, t, 1))
            in_channels = out_channels
        layers.append(Conv2d(in_channels, last_channels, kernel_size=1, bn=True, act='relu6'))

        self.features = Sequential(*layers)
        self.final_pool = GlobalAvgPool()
        self.fc = Linear(last_channels, num_classes)

    def forward(self, x):
        x = self.features(x)
        x = self.final_pool(x)  # collapse spatial dims to a feature vector
        x = self.fc(x)
        return x
