import torch
import torch.nn as nn
from model.BasicBlock import BasicBlock
from model.Bottleneck import Bottleneck

class ResNet(nn.Module):
    """Generic ResNet backbone (He et al., "Deep Residual Learning", 2015).

    Args:
        img_channels: number of channels in the input image (e.g. 3 for RGB).
        block: residual block class (e.g. BasicBlock or Bottleneck); must
            expose a class attribute ``expansion`` and accept
            ``(in_channels, channels, stride=, down_sample=)``.
        block_nums: four ints — number of residual blocks in each stage.
        num_classes: output size of the classifier head (only used when
            ``include_top`` is True).
        include_top: when True, append global average pooling and a fully
            connected classifier and return logits; when False, ``forward``
            returns the stage-4 feature map.
    """

    def __init__(self, img_channels, block, block_nums, num_classes, include_top=False):
        super().__init__()
        self.include_top = include_top
        self.in_channels = 64  # channel count entering the first residual stage

        # Stem: 7x7/2 conv + BN + ReLU + 3x3/2 max-pool (overall 4x downsampling).
        self.conv1 = nn.Conv2d(img_channels, self.in_channels, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(self.in_channels)
        self.relu = nn.ReLU(inplace=True)
        self.max_pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        # Four residual stages; stages 2-4 halve the spatial resolution.
        self.layer1 = self._make_layer(block, 64, block_nums[0], stride=1)
        self.layer2 = self._make_layer(block, 128, block_nums[1], stride=2)
        self.layer3 = self._make_layer(block, 256, block_nums[2], stride=2)
        self.layer4 = self._make_layer(block, 512, block_nums[3], stride=2)

        if self.include_top:
            self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
            self.fc = nn.Linear(512 * block.expansion, num_classes)

        # He (Kaiming) initialization matched to the ReLU activations used
        # throughout. FIX: the original passed nonlinearity="leaky_relu",
        # which computes a slightly-off gain for a plain-ReLU network;
        # "relu" matches torchvision's reference ResNet initialization.
        for layer in self.modules():
            if isinstance(layer, nn.Conv2d):
                nn.init.kaiming_normal_(layer.weight, mode="fan_out", nonlinearity="relu")

    def _make_layer(self, block, channels, residual_num, stride=1):
        """Build one residual stage of ``residual_num`` blocks.

        A 1x1 conv + BN projection is attached to the skip path of the first
        block whenever it changes resolution (stride != 1) or channel count,
        so the shortcut tensor matches the block output for the addition.
        """
        down_sample = None
        if stride != 1 or self.in_channels != channels * block.expansion:
            down_sample = nn.Sequential(
                nn.Conv2d(self.in_channels, channels * block.expansion, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(channels * block.expansion)
            )

        # Only the first block downsamples; the rest keep stride 1.
        layers = [block(self.in_channels, channels, stride=stride, down_sample=down_sample)]
        self.in_channels = channels * block.expansion  # later blocks see the expanded width
        for _ in range(1, residual_num):
            layers.append(block(self.in_channels, channels))

        return nn.Sequential(*layers)

    def forward(self, X):
        """Return class logits when ``include_top`` else the stage-4 feature map."""
        X = self.max_pool(self.relu(self.bn1(self.conv1(X))))
        X = self.layer1(X)
        X = self.layer2(X)
        X = self.layer3(X)
        X = self.layer4(X)

        if self.include_top:
            X = self.avg_pool(X)
            X = torch.flatten(X, 1)  # (N, C, 1, 1) -> (N, C) for the linear head
            X = self.fc(X)

        return X


def ResNet18(num_classes=10):
    """Construct an 18-layer ResNet (BasicBlock, 2-2-2-2) with a classifier head."""
    stage_sizes = [2, 2, 2, 2]
    return ResNet(3, BasicBlock, stage_sizes, num_classes=num_classes, include_top=True)

def ResNet34(num_classes=10):
    """Construct a 34-layer ResNet (BasicBlock, 3-4-6-3) with a classifier head."""
    stage_sizes = [3, 4, 6, 3]
    return ResNet(3, BasicBlock, stage_sizes, num_classes=num_classes, include_top=True)

def ResNet50(num_classes=10):
    """Construct a 50-layer ResNet (Bottleneck, 3-4-6-3) with a classifier head."""
    stage_sizes = [3, 4, 6, 3]
    return ResNet(3, Bottleneck, stage_sizes, num_classes=num_classes, include_top=True)

def ResNet101(num_classes=10):
    """Construct a 101-layer ResNet (Bottleneck, 3-4-23-3) with a classifier head."""
    stage_sizes = [3, 4, 23, 3]
    return ResNet(3, Bottleneck, stage_sizes, num_classes=num_classes, include_top=True)

def ResNet152(num_classes=10):
    """Construct a 152-layer ResNet (Bottleneck, 3-8-36-3) with a classifier head."""
    stage_sizes = [3, 8, 36, 3]
    return ResNet(3, Bottleneck, stage_sizes, num_classes=num_classes, include_top=True)




