from torch import nn
from utils.units import ConvRelu, ConvUnit, Dense


class VGGBlock(nn.Module):
    """One VGG stage: ``conv_num`` ConvRelu layers then a 2x2 max pool.

    The first convolution maps ``conv_in`` -> ``conv_out`` channels; the
    remaining ``conv_num - 1`` convolutions keep ``conv_out`` channels.
    The trailing ``MaxPool2d(2, 2)`` halves the spatial resolution.
    """

    def __init__(self, conv_in, conv_out, conv_num):
        super().__init__()
        # First conv changes the channel count; the rest preserve it.
        layers = [ConvRelu(conv_in, conv_out)]
        layers.extend(ConvRelu(conv_out, conv_out) for _ in range(conv_num - 1))
        layers.append(nn.MaxPool2d(kernel_size=2, stride=2))
        self.block = nn.Sequential(*layers)

    def forward(self, x):
        """Apply the convolution stack and pooling to ``x``."""
        return self.block(x)


class VGG16(nn.Module):
    """VGG-16-style network producing 10 class logits (e.g. for CIFAR-10).

    Five VGGBlock stages (13 conv layers total), each halving the spatial
    size; the pooled features are flattened and fed through two Dense
    layers and a final ``Linear(4096, 10)`` head.
    """

    def __init__(self):
        super().__init__()
        # (in_channels, out_channels, number of conv layers) per stage.
        self.conv_arch = ((3, 64, 2), (64, 128, 2), (128, 256, 3), (256, 512, 3), (512, 512, 3))
        self.features = self._build_sequence()
        # Assumes the flattened feature map has 512 elements per sample
        # (i.e. 32x32 input collapses to 512x1x1) — TODO confirm input size.
        self.classifier = nn.Sequential(
            Dense(512, 4096),
            Dense(4096, 4096),
            nn.Linear(4096, 10),
        )

    def forward(self, x):
        """Return class logits for a batch of images ``x``."""
        x = self.features(x)
        # Flatten all dims after batch (same as nn.Flatten's default),
        # without constructing a new nn.Flatten module on every call.
        x = x.flatten(1)
        return self.classifier(x)

    def _build_sequence(self):
        """Build the convolutional feature extractor from ``conv_arch``."""
        return nn.Sequential(
            *(VGGBlock(conv_in, conv_out, conv_num)
              for conv_in, conv_out, conv_num in self.conv_arch)
        )


class ResUnit(nn.Module):
    """Basic residual unit: two 3x3 convs plus an identity or projection shortcut.

    When ``stride != 1`` or the channel count changes, the shortcut is a
    1x1 ConvUnit projection so its output shape matches the residual
    branch; otherwise it is the identity.
    """

    def __init__(self, in_channels, out_channels, stride=1):
        super().__init__()
        self.conv1 = ConvRelu(in_channels, out_channels, conv_step=stride, conv_pad=1)
        self.conv2 = ConvUnit(out_channels, out_channels, conv_pad=1)
        # Identity by default; replaced by a 1x1 projection when the
        # residual branch changes shape.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_channels != out_channels:
            self.shortcut = ConvUnit(in_channels, out_channels, conv_size=1, conv_step=stride)

    def forward(self, x):
        """Return ``relu(conv2(conv1(x)) + shortcut(x))``."""
        out = self.conv1(x)
        out = self.conv2(out)
        # Out-of-place add keeps autograd safe; Tensor.relu() avoids
        # instantiating a fresh nn.ReLU module on every forward pass.
        out = out + self.shortcut(x)
        return out.relu()


class ResBlock(nn.Module):
    """A stage of ``num_residuals`` ResUnits; only the first may downsample."""

    def __init__(self, in_channels, out_channels, num_residuals, stride=2):
        super().__init__()
        # The first unit absorbs any stride/channel change; the remaining
        # units are shape-preserving.
        units = [ResUnit(in_channels, out_channels, stride)]
        units += [ResUnit(out_channels, out_channels) for _ in range(num_residuals - 1)]
        self.block = nn.Sequential(*units)

    def forward(self, x):
        """Run the input through every residual unit in order."""
        return self.block(x)


class ResNet18(nn.Module):
    """ResNet-18-style network producing 10 class logits.

    Single 3x3 ConvRelu stem (no initial max pool), four residual stages
    of two units each, global average pooling to 1x1, and a Dense head
    mapping 512 features to 10 logits.
    """

    def __init__(self):
        super().__init__()
        self.conv_pool1 = ConvRelu(3, 64)
        # Stage 1 keeps resolution (stride=1); stages 2-4 use the default
        # stride of 2 and halve the spatial size each.
        self.res1 = ResBlock(64, 64, 2, 1)
        self.res2 = ResBlock(64, 128, 2)
        self.res3 = ResBlock(128, 256, 2)
        self.res4 = ResBlock(256, 512, 2)

        self.avg = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = Dense(512, 10)

    def forward(self, x):
        """Return class logits for a batch of images ``x``."""
        x = self.conv_pool1(x)
        x = self.res1(x)
        x = self.res2(x)
        x = self.res3(x)
        x = self.res4(x)
        x = self.avg(x)
        # Flatten (N, 512, 1, 1) -> (N, 512) without allocating a new
        # nn.Flatten module on every forward pass.
        x = x.flatten(1)
        return self.fc(x)


