import paddle.fluid.layers as L
from helm.static.models.modules import DropPath
from paddle.fluid.dygraph import Layer, LayerList, Sequential, Linear

from helm.static.models.layers import BN, GlobalAvgPool, Conv2d, Act, BNAct, Pool2d, flatten
from helm.dynamic.models.nas.darts.operations import OPS, ReLUConvBN, FactorizedReduce


class AuxiliaryHead(Layer):
    """Auxiliary classifier tapped from an intermediate feature map.

    Shrinks the input with a 5x5/3 average pool, lifts it to 768 channels
    through two BN+Act convolutions, then classifies the flattened result.
    Used only to provide an extra training-time loss signal.
    """

    def __init__(self, C, num_classes):
        super().__init__()
        # Act -> avg-pool -> 1x1 conv (C->128) -> 2x2 conv (128->768),
        # both convs with fused BN + default activation.
        self.features = Sequential(
            Act(),
            Pool2d(5, stride=3, padding=0, type='avg'),
            Conv2d(C, 128, 1, bn=True, act='default'),
            Conv2d(128, 768, 2, bn=True, act='default'),
        )
        self.classifier = Linear(768, num_classes)

    def forward(self, x):
        feats = self.features(x)
        return self.classifier(flatten(feats))


class ReductionCell(Layer):
    """Fixed (non-searched) stride-2 reduction cell.

    Both input states are first projected to C channels, then each is fed
    through a factorized-conv branch and a max-pool branch; the four branch
    outputs are concatenated along the channel axis (output: 4*C channels,
    half spatial resolution).
    """

    def __init__(self, C_prev_prev, C_prev, C, drop_prob):
        super().__init__()
        self.preprocess0 = ReLUConvBN(C_prev_prev, C, 1)
        self.preprocess1 = ReLUConvBN(C_prev, C, 1)

        # Two identical conv branches and two identical pool branches;
        # built in the same order as before so parameter naming is stable.
        self.branch_a1 = self._conv_branch(C)
        self.branch_a2 = self._pool_branch(C)
        self.branch_b1 = self._conv_branch(C)
        self.branch_b2 = self._pool_branch(C)
        self.drop_path = DropPath(drop_prob)

    @staticmethod
    def _conv_branch(C):
        # Factorized 3x3 stride-2 conv: a (1,3) conv striding horizontally
        # followed by a (3,1) conv striding vertically, grouped by 8,
        # then a 1x1 mixing conv.
        return Sequential(
            Act(),
            Conv2d(C, C, (1, 3), stride=(1, 2), groups=8, bias=False),
            Conv2d(C, C, (3, 1), stride=(2, 1), groups=8, bias=False),
            BNAct(C, act='default', affine=True),
            Conv2d(C, C, 1),
            BN(C, affine=True),
        )

    @staticmethod
    def _pool_branch(C):
        # 3x3 stride-2 max pool followed by BN.
        return Sequential(
            Pool2d(3, stride=2, type='max'),
            BN(C, affine=True),
        )

    def forward(self, s0, s1, *args):
        s0 = self.preprocess0(s0)
        s1 = self.preprocess1(s1)

        branches = [
            self.branch_a1(s0),
            self.branch_a2(s1),
            self.branch_b1(s0),
            self.branch_b2(s1),
        ]
        # Stochastic depth on every branch before the channel concat.
        return L.concat([self.drop_path(b) for b in branches], axis=1)


class NormalCell(Layer):
    """Stride-1 DARTS cell built from a discrete genotype.

    Args:
        operations: one entry per intermediate node; each entry is a pair of
            ``(op_name, input_index)`` edges (indices refer to the running
            list of states, where 0 and 1 are the two preprocessed inputs).
        concat: state indices whose outputs are channel-concatenated to form
            the cell output.
        C_prev_prev, C_prev: channel counts of the two input states.
        C: channel count of every intermediate node.
        reduction_prev: whether the previous cell halved spatial size, in
            which case s0 must be downsampled to align with s1.
        drop_prob: DropPath (stochastic depth) probability for non-identity
            edges.
    """

    def __init__(self, operations, concat, C_prev_prev, C_prev, C, reduction_prev, drop_prob):
        super().__init__()
        if reduction_prev:
            self.preprocess0 = FactorizedReduce(C_prev_prev, C)
        else:
            self.preprocess0 = ReLUConvBN(C_prev_prev, C, 1)
        self.preprocess1 = ReLUConvBN(C_prev, C, 1)
        self.steps = len(operations)
        self.concat = concat
        self.drop_prob = drop_prob

        self.ops = LayerList()
        self.indices = []
        for g in operations:
            (op1, op2), indices = zip(*g)
            # Identity (skip) edges are not wrapped in DropPath; every other
            # op gets its own DropPath, matching standard DARTS evaluation.
            op1 = OPS[op1](C, 1) if op1 == 'identity' else Sequential(OPS[op1](C, 1), DropPath(drop_prob))
            op2 = OPS[op2](C, 1) if op2 == 'identity' else Sequential(OPS[op2](C, 1), DropPath(drop_prob))
            ops = LayerList([op1, op2])
            self.ops.append(ops)
            self.indices.append(indices)
        # BUG FIX: removed `self.drop_prob = DropPath(drop_prob)`, which
        # clobbered the float attribute set above with a layer object and
        # registered an unused sublayer (per-edge DropPaths already exist).

    def forward(self, s0, s1):
        s0 = self.preprocess0(s0)
        s1 = self.preprocess1(s1)
        # states[0] and states[1] are the preprocessed inputs; each step
        # appends one new intermediate node.
        states = [s0, s1]
        for (op1, op2), (i1, i2) in zip(self.ops, self.indices):
            h1 = op1(states[i1])
            h2 = op2(states[i2])
            s = h1 + h2
            states.append(s)

        return L.concat([states[i] for i in self.concat], axis=1)


class Network(Layer):
    """DARTS evaluation network: conv stem, a chain of normal/reduction
    cells, a BN+Act post-activation, global average pooling, and a linear
    classifier. During training an auxiliary head taps the output of the
    cell at 2/3 depth.

    Args:
        operations, concat: discrete genotype for the normal cells
            (see NormalCell).
        C: base channel count, doubled at each reduction.
        layers: total number of cells.
        stem_multiplier: channel multiplier for the stem conv.
        num_classes: classifier output size.
        drop_prob: DropPath probability passed to the cells.

    Returns from forward: ``logits`` at eval time, ``(logits, logits_aux)``
    in training mode.
    """

    def __init__(self, operations, concat, C=36, layers=20, stem_multiplier=3, num_classes=10, drop_prob=0.2):
        super().__init__()
        self.C = C
        self.num_classes = num_classes
        self.layers = layers

        # A normal cell outputs len(concat) * C_curr channels.
        # NOTE(review): ReductionCell always concatenates 4 branches of
        # C_curr channels, so the bookkeeping below is only consistent
        # when len(concat) == 4 — confirm for other genotypes.
        multiplier = len(concat)

        C_curr = stem_multiplier * C
        self.stem = Sequential(
            Conv2d(3, C_curr, kernel_size=3, bias=False),
            BN(C_curr),
        )

        C_prev_prev, C_prev, C_curr = C_curr, C_curr, C
        self.cells = LayerList()
        reduction_prev = False
        for i in range(layers):
            # Halve resolution and double channels at 1/3 and 2/3 depth.
            if i in [layers // 3, 2 * layers // 3]:
                C_curr *= 2
                reduction = True
            else:
                reduction = False
            if reduction:
                cell = ReductionCell(C_prev_prev, C_prev, C_curr, drop_prob)
            else:
                cell = NormalCell(operations, concat, C_prev_prev, C_prev, C_curr, reduction_prev, drop_prob)
            self.cells.append(cell)
            reduction_prev = reduction
            C_prev_prev, C_prev = C_prev, multiplier * C_curr
            if i == 2 * layers // 3:
                # Channel count fed to the auxiliary head (second reduction).
                C_to_auxiliary = C_prev

        self.post_activ = BNAct(C_prev)
        self.avg_pool = GlobalAvgPool()
        self.classifier = Linear(C_prev, num_classes)

        self.aux_head = AuxiliaryHead(C_to_auxiliary, num_classes)

    def forward(self, input):
        logits_aux = None
        s0 = s1 = self.stem(input)
        for i, cell in enumerate(self.cells):
            s0, s1 = s1, cell(s0, s1)
            # Auxiliary logits only exist (and only matter) during training.
            if (i == 2 * self.layers // 3) and self.training:
                logits_aux = self.aux_head(s1)
        # BUG FIX: apply the channel-wise BN+Act to the 4D feature map
        # BEFORE global pooling (the original pooled first and then
        # batch-normalized the already-pooled features, contrary to the
        # standard NASNet/DARTS head ordering).
        out = self.post_activ(s1)
        out = self.avg_pool(out)
        logits = self.classifier(out)
        if logits_aux is not None:
            return logits, logits_aux
        else:
            return logits


def test_net():
    """Smoke test: build a Network from a minimal all-identity genotype.

    BUG FIX: the original called ``Network()`` with no arguments, but
    ``operations`` and ``concat`` are required positional parameters, so the
    test always raised TypeError.
    """
    # Each entry describes one intermediate node as two (op, input_index)
    # edges; 'identity' is guaranteed to exist in OPS (NormalCell special-
    # cases it). concat must have length 4 so normal-cell output width
    # matches ReductionCell's fixed 4-branch concatenation.
    operations = [
        [('identity', 0), ('identity', 1)],
        [('identity', 0), ('identity', 1)],
        [('identity', 0), ('identity', 2)],
        [('identity', 1), ('identity', 3)],
    ]
    concat = [2, 3, 4, 5]
    net = Network(operations, concat)
    assert len(net.cells) == 20
