import torch.nn as nn
import torch

class Resnet50(nn.Module):
    """ResNet-50-style classifier: bottleneck Blocks in the standard
    [3, 4, 6, 3] stage layout, 257 output classes.

    Returns class probabilities (softmax is applied in ``forward``).
    """

    def __init__(self):
        super().__init__()
        self.layers = nn.ModuleList([
            # Stem: conv -> BN -> ReLU -> maxpool. The BN+ReLU were
            # previously missing, leaving the stem conv with no
            # nonlinearity before pooling; bias dropped since BN follows.
            nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
            # Stage 1: 3 bottleneck blocks, 64 -> 256 channels, stride 1.
            Block(in_channel=64, inter_channel=64, out_channel=256, mod='start', s=1),
            *make_blocks(256, 64, 256, 2),
            # Stage 2: 4 blocks, 256 -> 512 channels, spatial /2.
            Block(in_channel=256, inter_channel=128, out_channel=512, mod='downsample', s=2),
            *make_blocks(512, 128, 512, 3),
            # Stage 3: 6 blocks, 512 -> 1024 channels, spatial /2.
            Block(in_channel=512, inter_channel=256, out_channel=1024, mod='downsample', s=2),
            *make_blocks(1024, 256, 1024, 5),
            # Stage 4: 3 blocks, 1024 -> 2048 channels, spatial /2.
            Block(in_channel=1024, inter_channel=512, out_channel=2048, mod='downsample', s=2),
            *make_blocks(2048, 512, 2048, 2),
            # Replaces AvgPool2d(kernel_size=7), which only produced a 1x1
            # map for 224x224 inputs and broke the linear layer for every
            # other input size. On a 7x7 map the two are numerically equal.
            nn.AdaptiveAvgPool2d(1),
        ])
        self.linear1 = nn.Linear(2048, 1024)
        self.linear2 = nn.Linear(1024, 257)

    def forward(self, x):
        """Return per-class probabilities of shape (batch, 257).

        NOTE(review): softmax is applied here; if this model is trained
        with nn.CrossEntropyLoss the log-softmax is applied twice —
        confirm the training loss expects probabilities.
        """
        for layer in self.layers:
            x = layer(x)
        x = x.flatten(1)
        x = torch.relu(self.linear1(x))
        x = self.linear2(x)
        return torch.softmax(x, dim=-1)


class Block(nn.Module):
    """Bottleneck residual block (1x1 -> 3x3 -> 1x1 convs) for Resnet50.

    Args:
        in_channel: channels of the block input.
        inter_channel: channels of the bottleneck (middle) convolutions.
        out_channel: channels of the block output.
        mod: shortcut type — 'identity' (no projection; requires
            in_channel == out_channel and s == 1), 'downsample' (3x3
            stride-2 projection to in_channel * 2), or 'start' (1x1
            stride-1 projection to in_channel * 4).
        s: stride of the middle 3x3 convolution.

    Raises:
        ValueError: if ``mod`` is not one of the three recognised modes
            (previously an unknown mode silently left ``self.shortcut``
            unset and only failed later, inside ``forward``).
    """

    def __init__(self, in_channel=256, inter_channel=128, out_channel=512, mod='downsample', s=2):
        super().__init__()
        self.layers = nn.ModuleList([nn.Conv2d(in_channel, inter_channel, kernel_size=1, stride=1, bias=False),
                                     nn.BatchNorm2d(num_features=inter_channel),
                                     nn.ReLU(),
                                     nn.Conv2d(inter_channel, inter_channel, kernel_size=3, stride=s, padding=1, bias=False),
                                     nn.BatchNorm2d(num_features=inter_channel),
                                     nn.ReLU(),
                                     nn.Conv2d(inter_channel, out_channel, kernel_size=1, stride=1, bias=False),
                                     nn.BatchNorm2d(num_features=out_channel)])
        # Projection shortcuts are now bias-free conv + BN, matching the
        # BN'd main path (and Block18's shortcut). Previously they used a
        # biased conv with no BN, so the shortcut branch was unnormalised
        # relative to the residual branch. NOTE(review): the reference
        # ResNet uses a 1x1 conv for the downsample projection; the 3x3
        # kernel here is kept to preserve the original parameter shapes.
        if mod == 'downsample':
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channel, in_channel * 2, kernel_size=3, stride=2, padding=1, bias=False),
                nn.BatchNorm2d(in_channel * 2),
            )
        elif mod == 'identity':
            self.shortcut = nn.Identity()
        elif mod == 'start':
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channel, in_channel * 4, kernel_size=1, stride=1, bias=False),
                nn.BatchNorm2d(in_channel * 4),
            )
        else:
            raise ValueError(f"unknown mod {mod!r}; expected 'downsample', 'identity' or 'start'")

    def forward(self, x):
        """Apply the bottleneck path, add the shortcut, ReLU the sum."""
        shortcut = self.shortcut(x)
        for layer in self.layers:
            x = layer(x)
        return torch.relu(x + shortcut)


def make_blocks(in_channel, inter_channel, out_channel, num_block):
    """Build ``num_block`` identity-shortcut bottleneck Blocks (stride 1)."""
    return nn.ModuleList(
        Block(in_channel=in_channel, inter_channel=inter_channel,
              out_channel=out_channel, mod='identity', s=1)
        for _ in range(num_block)
    )




class Resnet18(nn.Module):
    """ResNet-18-style classifier: basic Block18s, two per stage,
    257 output classes.

    Returns class probabilities (softmax is applied in ``forward``).
    """

    def __init__(self):
        super().__init__()
        self.layers = nn.ModuleList([
            # Stem: conv -> BN -> ReLU -> maxpool. The BN+ReLU were
            # previously missing (no nonlinearity before pooling); conv
            # bias dropped since BN follows.
            nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
            # Stage 1: 64 channels, stride 1.
            Block18(64, 64, mod='start'),
            Block18(64, 64, mod='identity'),
            # Stage 2: 64 -> 128 channels, spatial /2.
            Block18(64, 128, mod='downsample', s=2),
            Block18(128, 128, mod='identity'),
            # Stage 3: 128 -> 256 channels, spatial /2.
            Block18(128, 256, mod='downsample', s=2),
            Block18(256, 256, mod='identity'),
            # Stage 4: 256 -> 512 channels, spatial /2.
            Block18(256, 512, mod='downsample', s=2),
            Block18(512, 512, mod='identity'),
            # Replaces AvgPool2d(kernel_size=7): the fixed 7x7 kernel only
            # yields a 1x1 map for 224x224 inputs. The 640x640 input used
            # in __main__ leaves a 20x20 map -> 2x2 after pooling -> 2048
            # flattened features vs. the 512 the linear layer expects, a
            # runtime shape error. On a 7x7 map both are numerically equal.
            nn.AdaptiveAvgPool2d(1),
        ])
        self.linear = nn.Linear(512, 257)

    def forward(self, x):
        """Return per-class probabilities of shape (batch, 257).

        NOTE(review): softmax is applied here; if trained with
        nn.CrossEntropyLoss the log-softmax is applied twice — confirm
        the training loss expects probabilities.
        """
        for layer in self.layers:
            x = layer(x)
        x = x.flatten(1)
        return torch.softmax(self.linear(x), dim=-1)


class Block18(nn.Module):
    """Basic residual block (two 3x3 convs) for Resnet18.

    Args:
        in_channel: input channel count.
        out_channel: output channel count (must be in_channel * 2 when
            mod='downsample', otherwise equal to in_channel).
        mod: shortcut type — 'identity' (no projection), 'downsample'
            (3x3 stride-2 projection to in_channel * 2), or 'start'
            (1x1 stride-1 projection at the same width).
        s: stride of the first 3x3 convolution.

    Raises:
        ValueError: if ``mod`` is not one of the three recognised modes
            (previously an unknown mode silently left ``self.shortcut``
            unset and only failed later, inside ``forward``).
    """

    def __init__(self, in_channel=64, out_channel=64, mod='start', s=1):
        super().__init__()
        self.layers = nn.ModuleList([
                        nn.Conv2d(in_channels=in_channel, out_channels=in_channel, kernel_size=3, stride=s, padding=1, bias=False),
                        nn.BatchNorm2d(in_channel),
                        nn.ReLU(),
                        nn.Conv2d(in_channels=in_channel, out_channels=out_channel, kernel_size=3, stride=1, padding=1, bias=False),
                        nn.BatchNorm2d(out_channel),
        ])
        # nn.Sequential replaces the ModuleList-plus-loop shortcut; the
        # parameter names/shapes (shortcut.0.*, shortcut.1.*) are unchanged.
        if mod == 'downsample':
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channel, in_channel * 2, kernel_size=3, stride=2, padding=1, bias=False),
                nn.BatchNorm2d(in_channel * 2),
            )
        elif mod == 'identity':
            self.shortcut = nn.Identity()
        elif mod == 'start':
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channel, in_channel, kernel_size=1, stride=1, bias=False),
                nn.BatchNorm2d(in_channel),
            )
        else:
            raise ValueError(f"unknown mod {mod!r}; expected 'downsample', 'identity' or 'start'")

    def forward(self, x):
        """Apply the residual path, add the shortcut, ReLU the sum."""
        shortcut = self.shortcut(x)
        out = x
        for layer in self.layers:
            out = layer(out)
        return torch.relu(out + shortcut)
    

# def make_blocks18(in_channel, out_channel, num_block):
#     blocks = nn.ModuleList()
#     for _ in range(num_block):
#         blocks.append(
#             Block18(in_channel, out_channel, mod='identity')
#         )
#     return blocks

    


if __name__ == '__main__':
    # Smoke test: push one random batch through Resnet18 and print the
    # output shape. Fall back to CPU when CUDA is unavailable — the
    # previous hard-coded .to('cuda') crashed on CPU-only machines.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    resnet = Resnet18().to(device)
    x = torch.randn([8, 3, 640, 640], device=device)
    out = resnet(x)
    print(out.shape)