import math

import paddle.fluid.layers as L
from helm.static.models.modules import DropPath
from paddle.fluid.dygraph import Pool2D, Dropout, Linear
from helm.pretrained.layers import Conv2d, BN, Act, GlobalAvgPool
from helm.static.models.layers import Layer, Sequential

# Public API: the base EfficientNet class plus the B0-B7 factory functions.
# (The lowercase `efficientnet` factory and `transpile` are intentionally
# excluded from star-imports.)
__all__ = [
    'EfficientNet', 'EfficientNetB0', 'EfficientNetB1', 'EfficientNetB2',
    'EfficientNetB3', 'EfficientNetB4', 'EfficientNetB5', 'EfficientNetB6',
    'EfficientNetB7'
]


def round_channels(channels, multiplier=None, divisor=8, min_depth=None):
    """Scale ``channels`` by ``multiplier`` and round to a multiple of ``divisor``.

    The result is clamped below by ``min_depth`` (defaulting to ``divisor``)
    and is guaranteed never to fall more than 10% under the scaled value.
    A falsy ``multiplier`` returns ``channels`` unchanged.
    """
    if not multiplier:
        return channels

    scaled = channels * multiplier
    floor = min_depth or divisor
    rounded = max(floor, int(scaled + divisor / 2) // divisor * divisor)
    # Rounding down by more than 10% would cost too much capacity: bump up.
    return int(rounded + divisor) if rounded < 0.9 * scaled else int(rounded)


def round_repeats(repeats, multiplier=None):
    """Round the number of block repeats based on the depth multiplier.

    Fix: the original docstring claimed this rounds "number of filters"
    (copy-pasted from ``round_channels``); it actually scales block repeats.
    Rounds *up* so deeper variants never lose blocks to truncation.
    Returns ``repeats`` unchanged for a falsy ``multiplier``.
    """
    if not multiplier:
        return repeats
    return int(math.ceil(multiplier * repeats))


class Crop(Layer):
    """Drop the first row and column of an NCHW feature map.

    Paired with an over-padded stride-2 convolution to emulate TensorFlow
    'SAME' padding behavior.
    """

    def forward(self, x):
        return x[:, :, 1:, 1:]


class SEModule(Layer):
    """Squeeze-and-excitation: rescale channels by globally pooled gates."""

    def __init__(self, in_channels, channels):
        super().__init__()
        # Squeeze: global average pool down to a 1x1 spatial map.
        self.pool = Pool2D(global_pooling=True, pool_type='avg', use_cudnn=False)
        # Excite: bottleneck implemented as two 1x1 convolutions,
        # swish in the middle, sigmoid producing per-channel gates.
        self.reduce = Conv2d(in_channels, channels, 1, bias=True, act='swish')
        self.expand = Conv2d(channels, in_channels, 1, bias=True, act='sigmoid')

    def forward(self, x):
        gates = self.expand(self.reduce(self.pool(x)))
        return x * gates


class MBConv(Sequential):
    """Mobile inverted bottleneck block with optional squeeze-and-excitation.

    Sublayers are registered in execution order (expand -> dwconv -> se ->
    project -> drop_connect); ``Sequential.forward`` then runs them in that
    order, and a residual shortcut is added when shapes allow it.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride,
                 expand_ratio, se_ratio=0.25, drop_connect=0.2, is_test=False):
        super().__init__()

        mid_channels = in_channels * expand_ratio
        # SE only for a ratio strictly inside (0, 1).
        self.use_se = se_ratio is not None and 0 < se_ratio < 1
        # Shortcut requires stride 1 and matching channel counts.
        self.use_res_connect = stride == 1 and in_channels == out_channels

        if expand_ratio != 1:
            # 1x1 pointwise expansion.
            self.expand = Sequential(
                    ("conv", Conv2d(in_channels, mid_channels, kernel_size=1, bias=False)),
                    ("bn", BN(mid_channels, momentum=0.99, epsilon=1e-3)),
                    ("act", Act('swish')),
                )

        # Depthwise convolution; for stride 2 emulate TF 'SAME' padding by
        # over-padding and cropping the leading row/column afterwards.
        stack = []
        if stride == 2:
            stack.append(("conv", Conv2d(mid_channels, mid_channels, kernel_size, stride,
                                         padding=(kernel_size + 1) // 2,
                                         groups=mid_channels, bias=False)))
            stack.append(("crop", Crop()))
        else:
            stack.append(("conv", Conv2d(mid_channels, mid_channels, kernel_size, stride,
                                         groups=mid_channels, bias=False)))
        stack.append(("bn", BN(mid_channels, momentum=0.99, epsilon=1e-3)))
        stack.append(("act", Act('swish')))
        self.dwconv = Sequential(*stack)

        if self.use_se:
            # SE bottleneck width is derived from the block *input* channels.
            self.se = SEModule(mid_channels, int(in_channels * se_ratio))

        # 1x1 linear projection (no activation).
        self.project = Sequential(
                ("conv", Conv2d(mid_channels, out_channels, kernel_size=1, bias=False)),
                ("bn", BN(out_channels, momentum=0.99, epsilon=1e-3)),
            )

        if self.use_res_connect:
            # Stochastic depth on the residual branch.
            self.drop_connect = DropPath(drop_connect, is_test=is_test)

    def forward(self, x):
        out = super().forward(x)
        return x + out if self.use_res_connect else out


class EfficientNet(Layer):
    """EfficientNet classifier (Tan & Le, 2019) built from MBConv blocks.

    Blocks are grouped into five stages — a new stage opens at every
    stride-2 row of the setting table — so that parameter paths take the
    form ``features.stage{i}.unit{j}...``, matching the names produced by
    ``transpile`` below.

    Args:
        num_classes: output size of the final fully connected layer.
        width_mult: channel-width multiplier (rounded via ``round_channels``).
        depth_coef: per-row repeat multiplier (rounded via ``round_repeats``).
        dropout: dropout rate applied before the classifier FC.
        drop_connect: maximum DropPath rate, scaled per table row.
        is_test: when True, disables dropout / drop-path (inference mode).
    """

    def __init__(self, num_classes=1000, width_mult=1.0, depth_coef=1.0, dropout=0.2, drop_connect=0.2, is_test=False):
        super().__init__()
        in_channels = 32
        last_channels = 1280
        # Base (B0) architecture table:
        # r = repeats, k = kernel size, s = stride, e = expand ratio,
        # i = input channels, o = output channels, se = squeeze-excite ratio.
        setting = [
            # r, k, s, e, i, o, se,
            [1, 3, 1, 1, 32, 16, 0.25],
            [2, 3, 2, 6, 16, 24, 0.25],
            [2, 5, 2, 6, 24, 40, 0.25],
            [3, 3, 2, 6, 40, 80, 0.25],
            [3, 5, 1, 6, 80, 112, 0.25],
            [4, 5, 2, 6, 112, 192, 0.25],
            [1, 3, 1, 6, 192, 320, 0.25],
        ]

        in_channels = round_channels(in_channels, width_mult)
        last_channels = round_channels(last_channels, width_mult)

        # building stem: over-pad the stride-2 conv and crop the leading
        # row/column to emulate TF 'SAME' padding.
        self.features = Sequential()
        self.features.add_sublayer(
            "init_block", Sequential(
                ("conv", Conv2d(3, in_channels, 3, stride=2, padding=2, bias=False)),
                ("crop", Crop()),
                ("bn", BN(in_channels, momentum=0.99, epsilon=1e-3)),
                ("act", Act('swish')),
            ))

        si = 1  # stage counter
        j = 1   # unit counter within the current stage
        stage = Sequential()
        # building inverted residual blocks
        for idx, (r, k, s, e, i, o, se) in enumerate(setting):
            # DropPath rate grows linearly with the table-row index.
            # NOTE(review): the reference implementation scales per *block*
            # rather than per row — confirm this approximation is intended.
            drop_rate = drop_connect * (float(idx) / len(setting))
            if s == 2:
                # A stride-2 row closes the previous stage and opens a new one.
                self.features.add_sublayer("stage%d" % si, stage)
                si += 1
                j = 1
                stage = Sequential()
            in_channels = round_channels(i, width_mult)
            out_channels = round_channels(o, width_mult)
            stage.add_sublayer("unit%d" % j, MBConv(
                in_channels, out_channels, k, s, e, se,
                drop_connect=drop_rate, is_test=is_test))
            j += 1
            # Repeated units within a row use stride 1 and equal in/out channels.
            for _ in range(round_repeats(r, depth_coef) - 1):
                stage.add_sublayer("unit%d" % j, MBConv(
                    out_channels, out_channels, k, 1, e, se,
                    drop_connect=drop_rate, is_test=is_test))
                j += 1
        self.features.add_sublayer("stage%d" % si, stage)
        # 1x1 conv head expanding to last_channels before pooling.
        self.features.add_sublayer(
            "final_block", Sequential(
                ("conv", Conv2d(out_channels, last_channels, 1, bias=False)),
                ("bn", BN(last_channels, momentum=0.99, epsilon=1e-3)),
                ("act", Act('swish')),
            ))
        self.classifier = Sequential(
            ("avgpool", GlobalAvgPool()),
            ("dropout", Dropout(dropout, is_test=is_test, dropout_implementation='upscale_in_train')),
            ("fc", Linear(last_channels, num_classes))
        )

    def forward(self, x):
        # Stages are invoked explicitly rather than via self.features(x);
        # the setting table always yields exactly five stages.
        x = self.features.init_block(x)
        x = self.features.stage1(x)
        x = self.features.stage2(x)
        x = self.features.stage3(x)
        x = self.features.stage4(x)
        x = self.features.stage5(x)
        x = self.features.final_block(x)
        # x = self.features(x)
        x = self.classifier(x)
        return x


def transpile(sd, model):
    """Translate a pretrained Paddle EfficientNet state dict to this module's names.

    The source dict uses flat names such as ``_blocks.12._se_reduce_weights``;
    the result uses hierarchical names such as
    ``features.stage4.unit2.se.reduce.weight``. Keys that match no known
    pattern are printed and skipped.
    """
    import re
    from bisect import bisect_left
    from itertools import accumulate

    # Cumulative MBConv counts per stage: used to convert a flat block
    # index into a (stage, unit) pair.
    stage_sizes = [len(getattr(model.features, "stage%d" % (i + 1))) for i in range(5)]
    cum_sizes = list(accumulate(stage_sizes))

    def _bn_param(p):
        # Running stats get a leading underscore; scale/offset -> weight/bias.
        return {'mean': '_mean', 'variance': '_variance',
                'scale': 'weight', 'offset': 'bias'}.get(p, p)

    def _locate(idx):
        # Map a 1-based flat block index to 1-based (stage, unit).
        stage = bisect_left(cum_sizes, idx) + 1
        unit = idx if stage == 1 else idx - cum_sizes[stage - 2]
        return stage, unit

    new_sd = {}
    for k, v in sd.items():
        m = re.search(r"^_conv_(stem|head)_weights", k)
        if m:
            block = 'init_block' if m.group(1) == 'stem' else 'final_block'
            new_sd[f"features.{block}.conv.weight"] = v
            continue

        m = re.search(r"^_bn([01])_(scale|offset|mean|variance)", k)
        if m:
            block = 'init_block' if m.group(1) == '0' else 'final_block'
            new_sd[f"features.{block}.bn.{_bn_param(m.group(2))}"] = v
            continue

        m = re.search(r"^_fc_(weights|offset)", k)
        if m:
            tail = 'weight' if m.group(1) == 'weights' else 'bias'
            new_sd[f"classifier.fc.{tail}"] = v
            continue

        m = re.search(r"^_blocks.(\d+)._([a-z_]+)_(weights|offset)", k)
        if m:
            stage, unit = _locate(int(m.group(1)) + 1)
            tail = 'weight' if m.group(3) == 'weights' else 'bias'
            part = m.group(2)
            if part == 'depthwise_conv':
                new_k = f"features.stage{stage}.unit{unit}.dwconv.conv.{tail}"
            elif part == 'project_conv':
                new_k = f"features.stage{stage}.unit{unit}.project.conv.{tail}"
            elif part == 'expand_conv':
                new_k = f"features.stage{stage}.unit{unit}.expand.conv.{tail}"
            elif part == 'se_reduce':
                new_k = f"features.stage{stage}.unit{unit}.se.reduce.{tail}"
            elif part == 'se_expand':
                new_k = f"features.stage{stage}.unit{unit}.se.expand.{tail}"
            else:
                print(part)
            new_sd[new_k] = v
            continue

        m = re.search(r"^_blocks.(\d+)._bn([012])_(scale|offset|mean|variance)", k)
        if m:
            stage, unit = _locate(int(m.group(1)) + 1)
            # bn0/bn1/bn2 follow the expand/dwconv/project convolutions.
            sub = {'0': 'expand', '1': 'dwconv', '2': 'project'}[m.group(2)]
            new_sd[f"features.stage{stage}.unit{unit}.{sub}.bn.{_bn_param(m.group(3))}"] = v
            continue

        print(k)
    return new_sd



# Compound-scaling coefficients per EfficientNet variant, keyed by model name.
# Each tuple is (width_coef, depth_coef, resolution, dropout_rate).
# NOTE(review): `resolution` is informational only — the factories below do
# not enforce an input size.
efficient_params = {
    # width_coef, depth_coef, resolution, dropout_rate
    'efficientnet-b0': (1.0, 1.0, 224, 0.2),
    'efficientnet-b1': (1.0, 1.1, 240, 0.2),
    'efficientnet-b2': (1.1, 1.2, 260, 0.3),
    'efficientnet-b3': (1.2, 1.4, 300, 0.3),
    'efficientnet-b4': (1.4, 1.8, 380, 0.4),
    'efficientnet-b5': (1.6, 2.2, 456, 0.4),
    'efficientnet-b6': (1.8, 2.6, 528, 0.5),
    'efficientnet-b7': (2.0, 3.1, 600, 0.5),
}


def efficientnet(version='b0', num_classes=1000, **kwargs):
    """Build an EfficientNet variant ('b0'..'b7') from its scaling params."""
    name = 'efficientnet-%s' % version
    assert name in efficient_params, "%s is invalid." % name
    # Resolution is part of the published config but not enforced here.
    width_coef, depth_coef, _resolution, dropout_rate = efficient_params[name]
    return EfficientNet(
        num_classes=num_classes,
        width_mult=width_coef,
        depth_coef=depth_coef,
        dropout=dropout_rate,
        **kwargs,
    )


def EfficientNetB0(num_classes=1000, **kwargs):
    """EfficientNet-B0 (width 1.0, depth 1.0, nominal input 224)."""
    return efficientnet('b0', num_classes=num_classes, **kwargs)


def EfficientNetB1(num_classes=1000, **kwargs):
    """EfficientNet-B1 (width 1.0, depth 1.1, nominal input 240)."""
    return efficientnet('b1', num_classes=num_classes, **kwargs)


def EfficientNetB2(num_classes=1000, **kwargs):
    """EfficientNet-B2 (width 1.1, depth 1.2, nominal input 260)."""
    return efficientnet('b2', num_classes=num_classes, **kwargs)


def EfficientNetB3(num_classes=1000, **kwargs):
    """EfficientNet-B3 (width 1.2, depth 1.4, nominal input 300)."""
    return efficientnet('b3', num_classes=num_classes, **kwargs)


def EfficientNetB4(num_classes=1000, **kwargs):
    """EfficientNet-B4 (width 1.4, depth 1.8, nominal input 380)."""
    return efficientnet('b4', num_classes=num_classes, **kwargs)


def EfficientNetB5(num_classes=1000, **kwargs):
    """EfficientNet-B5 (width 1.6, depth 2.2, nominal input 456)."""
    return efficientnet('b5', num_classes=num_classes, **kwargs)


def EfficientNetB6(num_classes=1000, **kwargs):
    """EfficientNet-B6 (width 1.8, depth 2.6, nominal input 528)."""
    return efficientnet('b6', num_classes=num_classes, **kwargs)


def EfficientNetB7(num_classes=1000, **kwargs):
    """EfficientNet-B7 (width 2.0, depth 3.1, nominal input 600)."""
    return efficientnet('b7', num_classes=num_classes, **kwargs)
