from copy import deepcopy

import torch
import torchvision
from torch import nn
from torch.nn import Module
from torch.nn import functional as F


class ResidualConnection(Module):
    """Wrap a sublayer with a skip connection: ``out = relu(bn(f(x))) + shortcut(x)``.

    The sublayer output is batch-normalized and ReLU-activated before the
    shortcut is added.  When the sublayer widens the channel dimension, the
    shortcut is zero-padded along channels to match.  When ``downsample`` is
    set, the shortcut is spatially halved (bicubic) to line up with a
    stride-2 sublayer.
    """

    def __init__(self, sublayer: Module, out_channels: int, downsample: bool = False, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.sublayer = sublayer
        self.bn = nn.BatchNorm2d(out_channels)
        # Halve H and W of the shortcut so it matches a strided sublayer.
        self.downsample_ = nn.Upsample(
            scale_factor=0.5, mode='bicubic', recompute_scale_factor=False) if downsample else nn.Identity()

    def forward(self, x):
        residual = self.downsample_(x)
        o = F.relu(self.bn(self.sublayer(x)))

        # Zero-pad the shortcut's channel dim when the sublayer widened it.
        # F.pad keeps device/dtype of `residual` (the old torch.zeros+concat
        # built a CPU/float32 tensor and broke on CUDA or half inputs).
        diff = o.size(1) - residual.size(1)
        if diff > 0:
            # F.pad pads trailing dims first: (W_l, W_r, H_t, H_b, C_front, C_back).
            residual = F.pad(residual, (0, 0, 0, 0, 0, diff))
        elif diff < 0:
            raise ValueError(
                f"sublayer produced fewer channels ({o.size(1)}) than the "
                f"shortcut ({residual.size(1)}); cannot form residual sum")

        return residual + o


class ResidualLayer(Module):
    """A stack of bottleneck residual blocks.

    Each block is a 1x1 -> 3x3 -> 1x1 convolution sequence (bottleneck width
    is a quarter of ``out_channels``) wrapped in a ``ResidualConnection``.
    Every block keeps ``in_channels`` except the last, which emits
    ``out_channels``.  With ``downsample`` set, each block's leading 1x1 conv
    uses stride 2 and the block's shortcut is downsampled to match.
    """

    def __init__(self, in_channels: int, out_channels: int, layers: int = 1, downsample=False, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

        bottleneck = int(out_channels / 4)
        lead_stride = 2 if downsample else 1

        blocks = []
        for block_idx in range(layers):
            # Only the final block changes the channel count.
            block_out = out_channels if block_idx == layers - 1 else in_channels
            body = nn.Sequential(
                nn.Conv2d(in_channels=in_channels, out_channels=bottleneck,
                          kernel_size=1, padding=0, stride=lead_stride),
                nn.Conv2d(in_channels=bottleneck, out_channels=bottleneck,
                          kernel_size=3, padding=1, stride=1),
                nn.Conv2d(in_channels=bottleneck, out_channels=block_out,
                          kernel_size=1, padding=0, stride=1),
            )
            blocks.append(ResidualConnection(
                body, out_channels=block_out, downsample=downsample))

        self.model = nn.ModuleList(blocks)

    def forward(self, x):
        out = x
        for block in self.model:
            out = block(out)
        return out


class Resnet50(Module):
    """ResNet-50-style network for 3x256x256 inputs producing 1024 outputs.

    Pipeline (spatial sizes for a 256x256 input noted inline): a 7x7/2 stem
    conv, a 2x2 max-pool, four residual stages (only the first strided), a
    3x3 average pool, then a flatten + linear head.

    NOTE(review): ``forward`` applies softmax to the head's output; if this
    model is trained with a cross-entropy loss that expects raw logits, the
    softmax should be dropped — confirm against the training code.
    """

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

        stem = [
            nn.Conv2d(3, 64, kernel_size=7, padding=3, stride=2),  # 256 -> 128
            nn.MaxPool2d(2, 2),                                    # 128 -> 64
        ]
        stages = [
            ResidualLayer(in_channels=64, out_channels=512,
                          layers=3, downsample=True),      # 64 -> 8
            ResidualLayer(in_channels=512, out_channels=1024,
                          layers=4, downsample=False),     # 8
            ResidualLayer(in_channels=1024, out_channels=2048,
                          layers=6, downsample=False),     # 8
            ResidualLayer(in_channels=2048, out_channels=2048,
                          layers=3, downsample=False),     # 8
        ]
        head = [
            nn.AvgPool2d(3, 1),       # 8 -> 6
            nn.Flatten(),             # 2048 * 6 * 6 = 73728
            nn.Linear(73728, 1024),
        ]
        self.model = nn.Sequential(*stem, *stages, *head)

    def forward(self, x):
        return F.softmax(self.model(x), -1)


if __name__ == '__main__':
    # Smoke test: push a random batch through the network and report the
    # output shape (expected: torch.Size([4, 1024]) for a 4x3x256x256 input).
    batch = torch.randn([4, 3, 256, 256])
    model = Resnet50()
    with torch.no_grad():  # inference only; skip autograd bookkeeping
        out = model(batch)
    print(out.shape)
