import torch
import functools


class BasicBlock(torch.nn.Module):
    """Two-convolution residual block (ResNet-18/34 style).

    Computes ``act(bn2(conv2(act(bn1(conv1(x))))) + shortcut(x))``.
    The shortcut is the identity unless the stride or the channel count
    changes, in which case a 1x1 conv + BatchNorm projection is used.
    """

    # Output channel multiplier: this block emits ``expansion * planes`` channels.
    expansion = 1

    def __init__(self, in_planes, planes, stride=1, activator=None):
        """
        Args:
            in_planes: number of input channels.
            planes: number of channels produced by each 3x3 convolution.
            stride: stride of the first convolution (downsamples when > 1).
            activator: activation module. Defaults to a fresh
                ``ReLU(inplace=True)`` per block. A ``None`` sentinel is used
                instead of a module default so that each block gets its own
                activation instance rather than one shared module created at
                class-definition time (mutable-default pitfall).
        """
        super().__init__()
        if activator is None:
            activator = torch.nn.ReLU(inplace=True)
        self.conv1 = torch.nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = torch.nn.BatchNorm2d(planes)
        self.conv2 = torch.nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = torch.nn.BatchNorm2d(planes)
        self.activator = activator

        # Identity shortcut unless shape changes; then project with 1x1 conv + BN.
        self.shortcut = torch.nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = torch.nn.Sequential(
                torch.nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False),
                torch.nn.BatchNorm2d(self.expansion * planes)
            )
        self.desc = f'{type(self).__name__}'

    def forward(self, x):
        """Apply the residual block to a (N, C, H, W) tensor."""
        out = self.activator(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        out = self.activator(out)
        return out


class Bottleneck(torch.nn.Module):
    """Three-convolution bottleneck residual block (ResNet-50/101/152 style).

    1x1 reduce -> 3x3 (strided) -> 1x1 expand to ``expansion * planes``
    channels, plus a (projected if needed) shortcut, followed by activation.
    """

    # Output channel multiplier: this block emits ``expansion * planes`` channels.
    expansion = 4

    def __init__(self, in_planes, planes, stride=1, activator=None):
        """
        Args:
            in_planes: number of input channels.
            planes: bottleneck width; output has ``expansion * planes`` channels.
            stride: stride of the middle 3x3 convolution (downsamples when > 1).
            activator: activation module. Defaults to a fresh
                ``ReLU(inplace=True)`` per block. A ``None`` sentinel is used
                instead of a module default so that each block gets its own
                activation instance rather than one shared module created at
                class-definition time (mutable-default pitfall).
        """
        super().__init__()
        if activator is None:
            activator = torch.nn.ReLU(inplace=True)
        self.conv1 = torch.nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = torch.nn.BatchNorm2d(planes)
        self.conv2 = torch.nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = torch.nn.BatchNorm2d(planes)
        self.conv3 = torch.nn.Conv2d(planes, self.expansion * planes, kernel_size=1, bias=False)
        self.bn3 = torch.nn.BatchNorm2d(self.expansion * planes)
        self.activator = activator

        # Identity shortcut unless shape changes; then project with 1x1 conv + BN.
        self.shortcut = torch.nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = torch.nn.Sequential(
                torch.nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False),
                torch.nn.BatchNorm2d(self.expansion * planes)
            )
        self.desc = f'{type(self).__name__}'

    def forward(self, x):
        """Apply the bottleneck block to a (N, C, H, W) tensor."""
        out = self.activator(self.bn1(self.conv1(x)))
        out = self.activator(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += self.shortcut(x)
        out = self.activator(out)
        return out


class ResNet(torch.nn.Module):
    """Configurable ResNet for small images (3x3 stem, no initial max-pool).

    Four stages of residual blocks (64/128/256/512 base widths), global
    adaptive average pooling, then a linear classifier.
    """

    def __init__(self,
                 block,
                 num_blocks=None,
                 input_shape=None,
                 num_levels=None,
                 one_level_ks=3,
                 parainit=2.0,
                 num_classes=10,
                 inchannel=3,
                 actvator=None):
        """
        Args:
            block: residual block class (e.g. ``BasicBlock`` or ``Bottleneck``);
                must expose an ``expansion`` class attribute and accept
                ``(in_planes, planes, stride, activator)``.
            num_blocks: list of four block counts, one per stage. Required in
                practice; the ``None`` default mirrors the original empty-list
                default and will fail when indexed.
            input_shape: stored input shape hint; defaults to ``[3, 32, 32]``.
            num_levels: per-stage level counts forwarded to ``_make_layer``;
                defaults to ``[4, 4, 3, 3]``. Only stored/forwarded here —
                presumably consumed by custom block types; not used by
                ``BasicBlock``/``Bottleneck``.
            one_level_ks: stored kernel-size hint (not used in this class).
            parainit: stored init hint (not used in this class).
            num_classes: size of the final linear layer's output.
            inchannel: number of input image channels.
            actvator: activation module shared by the stem and all blocks;
                defaults to a fresh ``ReLU(inplace=True)``. (Parameter name
                kept — including its typo — for caller compatibility.)

        Note: ``None`` sentinels replace the original mutable list/module
        defaults so each instance gets its own objects.
        """
        super().__init__()
        if num_blocks is None:
            num_blocks = []  # preserves original default; a real 4-element list is required
        if input_shape is None:
            input_shape = [3, 32, 32]
        if num_levels is None:
            num_levels = [4, 4, 3, 3]
        if actvator is None:
            actvator = torch.nn.ReLU(inplace=True)
        self.in_planes = 64
        self.block = block
        self.input_shape = input_shape
        self.num_levels = num_levels
        self.one_level_ks = one_level_ks
        self.parainit = parainit
        self.actvator = actvator
        # CIFAR-style stem: 3x3 stride-1 conv, no max-pool.
        self.conv1 = torch.nn.Conv2d(inchannel, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = torch.nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(64, num_blocks[0], stride=1, num_level=num_levels[0])
        self.layer2 = self._make_layer(128, num_blocks[1], stride=2, num_level=num_levels[1])
        self.layer3 = self._make_layer(256, num_blocks[2], stride=2, num_level=num_levels[2])
        self.layer4 = self._make_layer(512, num_blocks[3], stride=2, num_level=num_levels[3])
        self.linear = torch.nn.Linear(512 * block.expansion, num_classes)

    def rm_linear(self):
        """Remove the classification head, e.g. to use the net as a feature extractor."""
        if hasattr(self, 'linear'):
            del self.linear

    def _make_layer(self, planes, num_blocks, stride, num_level):
        """Build one stage: the first block may downsample, the rest keep stride 1.

        Note: ``num_level`` is accepted for interface compatibility but is not
        forwarded to the standard blocks.
        """
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for s in strides:
            layers.append(self.block(self.in_planes, planes, s, self.actvator))
            # Next block consumes this block's (expanded) output channels.
            self.in_planes = planes * self.block.expansion
        return torch.nn.Sequential(*layers)

    def forward(self, x):
        """Map a (N, C, H, W) batch to (N, num_classes) logits."""
        out = self.actvator(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        # Global average pool to 1x1 so any input resolution yields a 512*expansion vector.
        out = torch.nn.functional.adaptive_avg_pool2d(out, 1)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out


def resnet18(num_classes=10, inchannel=3):
    """Build a ResNet-18 (BasicBlock, two blocks per stage)."""
    net = ResNet(BasicBlock, num_blocks=[2, 2, 2, 2],
                 num_classes=num_classes, inchannel=inchannel)
    net.desc = 'ResNet18'
    return net


def resnet34(num_classes=10, inchannel=3):
    """Build a ResNet-34 (BasicBlock, 3-4-6-3 blocks per stage)."""
    net = ResNet(BasicBlock, num_blocks=[3, 4, 6, 3],
                 num_classes=num_classes, inchannel=inchannel)
    net.desc = 'ResNet34'
    return net


def resnet50(num_classes=10, inchannel=3):
    """Build a ResNet-50 (Bottleneck, 3-4-6-3 blocks per stage)."""
    net = ResNet(Bottleneck, num_blocks=[3, 4, 6, 3],
                 num_classes=num_classes, inchannel=inchannel)
    net.desc = 'ResNet50'
    return net


def resnet101(num_classes=10, inchannel=3):
    """Build a ResNet-101 (Bottleneck, 3-4-23-3 blocks per stage)."""
    net = ResNet(Bottleneck, num_blocks=[3, 4, 23, 3],
                 num_classes=num_classes, inchannel=inchannel)
    net.desc = 'ResNet101'
    return net


def resnet152(num_classes=10, inchannel=3):
    """Build a ResNet-152 (Bottleneck, 3-8-36-3 blocks per stage)."""
    net = ResNet(Bottleneck, num_blocks=[3, 8, 36, 3],
                 num_classes=num_classes, inchannel=inchannel)
    net.desc = 'ResNet152'
    return net
