import paddle.fluid.layers as L
from paddle.fluid.dygraph import Dropout

from helm.static.models.layers import Conv2d, GlobalAvgPool, Linear, Layer, Sequential
from helm.static.models.modules import SELayer

__all__ = [
    'ShuffleNetV2'
]


def channel_shuffle(x, groups):
    """Interleave channels across *groups* (the ShuffleNetV2 channel shuffle).

    Reshapes a (b, c, h, w) tensor to (b, groups, c // groups, h, w), swaps
    the two channel axes, and flattens back, so that information mixes
    between the grouped-convolution branches.
    """
    batch, channels = x.shape[0], x.shape[1]
    height, width = x.shape[2], x.shape[3]
    per_group = channels // groups

    grouped = L.reshape(x=x, shape=[batch, groups, per_group, height, width])
    # Swap the group axis with the per-group channel axis.
    shuffled = L.transpose(x=grouped, perm=[0, 2, 1, 3, 4])
    return L.reshape(x=shuffled, shape=[batch, channels, height, width])


class NormalCell(Layer):
    """Stride-1 ShuffleNetV2 unit.

    The input is split channel-wise into two halves: the left half passes
    through unchanged, the right half goes through a 1x1 -> depthwise 3x3 ->
    1x1 conv stack (optionally followed by an SE block). The halves are then
    concatenated and channel-shuffled.
    """

    def __init__(self, in_channels, use_se):
        super().__init__()
        half = in_channels // 2
        ops = [
            Conv2d(half, half, kernel_size=1, bn=True, act='default'),
            Conv2d(half, half, kernel_size=3, groups=half, bn=True),
            Conv2d(half, half, kernel_size=1, bn=True, act='default'),
        ]
        if use_se:
            ops.append(SELayer(half, reduction=2))
        self.branch2 = Sequential(*ops)

    def forward(self, x):
        half = x.shape[1] // 2
        left, right = L.split(x, num_or_sections=[half, half], dim=1)
        right = self.branch2(right)
        merged = L.concat([left, right], axis=1)
        return channel_shuffle(merged, 2)


class ReduceCell(Layer):
    """Stride-2 (downsampling) ShuffleNetV2 unit.

    Unlike NormalCell there is no channel split: the full input feeds two
    parallel branches (each with a stride-2 depthwise conv), whose outputs
    are concatenated and channel-shuffled, doubling the spatial reduction
    while changing the channel count to *out_channels*.
    """

    def __init__(self, in_channels, out_channels, use_se):
        super().__init__()
        half = out_channels // 2
        # Shortcut branch: depthwise stride-2 conv then pointwise projection.
        self.branch1 = Sequential(
            Conv2d(in_channels, in_channels, kernel_size=3, stride=2, groups=in_channels, bn=True),
            Conv2d(in_channels, half, kernel_size=1, bn=True, act='default'),
        )
        # Main branch: 1x1 -> depthwise stride-2 3x3 -> 1x1, optional SE.
        ops = [
            Conv2d(in_channels, half, kernel_size=1, bn=True, act='default'),
            Conv2d(half, half, kernel_size=3, stride=2, groups=half, bn=True),
            Conv2d(half, half, kernel_size=1, bn=True, act='default'),
        ]
        if use_se:
            ops.append(SELayer(half, reduction=2))
        self.branch2 = Sequential(*ops)

    def forward(self, x):
        out = L.concat([self.branch1(x), self.branch2(x)], axis=1)
        return channel_shuffle(out, 2)


def _make_layer(in_channels, out_channels, num_units, stride, use_se):
    """Build one ShuffleNetV2 stage.

    The stage opens with either a downsampling ReduceCell (stride == 2) or a
    plain 3x3 conv (stride == 1) that maps in_channels -> out_channels, and
    is followed by ``num_units - 1`` stride-1 NormalCells.
    """
    if stride == 2:
        head = ReduceCell(in_channels, out_channels, use_se)
    else:
        head = Conv2d(in_channels, out_channels, 3, bn=True, act='default')
    tail = [NormalCell(out_channels, use_se) for _ in range(num_units - 1)]
    return Sequential(head, *tail)


class ShuffleNetV2(Layer):
    """ShuffleNetV2 image classifier (stem conv, three shuffle stages,
    1x1 expansion conv, global average pooling, optional dropout, linear head).

    Args:
        stem_channels: output channels of the initial 3x3 conv.
        channels_per_stage: output channels of the three shuffle stages.
        units_per_stage: number of units in each of the three stages.
        final_channels: channels of the 1x1 conv before pooling.
        use_se: append an SE block to every unit's main branch.
        dropout: dropout rate before the classifier; falsy (0/None) disables it.
        num_classes: size of the final linear layer's output.
    """

    def __init__(self, stem_channels=32, channels_per_stage=(128, 256, 512), units_per_stage=(4, 8, 4),
                 final_channels=1024, use_se=True, dropout=0.2, num_classes=10):
        super().__init__()

        cs = [stem_channels] + list(channels_per_stage)
        ns = units_per_stage

        self.conv = Conv2d(3, stem_channels, 3, bn=True, act='default')

        # Stage 1 keeps spatial resolution; stages 2 and 3 downsample by 2.
        self.stage1 = _make_layer(cs[0], cs[1], ns[0], stride=1, use_se=use_se)
        self.stage2 = _make_layer(cs[1], cs[2], ns[1], stride=2, use_se=use_se)
        self.stage3 = _make_layer(cs[2], cs[3], ns[2], stride=2, use_se=use_se)

        self.final_conv = Conv2d(cs[-1], final_channels, 1, bn=True, act='default')
        self.pool = GlobalAvgPool()
        # FIX: previously self.dropout first held the float rate and was then
        # conditionally overwritten with a Dropout layer, so forward() relied
        # on the truthiness of either a float or a Layer object. Keep one
        # explicit contract: a Dropout layer, or None when disabled.
        if dropout:
            self.dropout = Dropout(dropout, dropout_implementation='upscale_in_train')
        else:
            self.dropout = None
        self.classifier = Linear(final_channels, num_classes)

    def forward(self, x):
        x = self.conv(x)

        x = self.stage1(x)
        x = self.stage2(x)
        x = self.stage3(x)

        x = self.final_conv(x)

        x = self.pool(x)
        if self.dropout is not None:
            x = self.dropout(x)
        x = self.classifier(x)
        return x
