from collections import OrderedDict

import torch
import torch.nn as nn
from torchvision import models

from detection.layers import FrozenBatchNorm2d


class ResNet(nn.Module):
    """Backbone wrapper around a torchvision ResNet.

    Runs the stem plus residual stages and returns the feature maps of the
    stages selected by ``cfg.MODEL.BACKBONE.out_indices`` (a list of four
    booleans, one per stage).
    """

    def __init__(self, resnet, cfg):
        super().__init__()
        self.has_layer4 = cfg.MODEL.BACKBONE.HAS_LAYER4
        self.out_indices = cfg.MODEL.BACKBONE.out_indices
        # Stem: conv1 -> bn1 -> relu -> maxpool, reused from the torchvision model.
        self.stem = nn.Sequential(
            resnet.conv1,
            resnet.bn1,
            resnet.relu,
            resnet.maxpool,
        )
        self.layer1 = resnet.layer1
        self.layer2 = resnet.layer2
        self.layer3 = resnet.layer3
        if self.has_layer4:
            self.layer4 = resnet.layer4
        else:
            # Without layer4, the only emitted feature map is layer3's,
            # regardless of what the config requested.
            self.out_indices = [False, False, True, False]

    def forward(self, x):
        """Return the list of feature maps flagged by ``out_indices``."""
        x = self.stem(x)
        stages = [self.layer1, self.layer2, self.layer3]
        if self.has_layer4:
            stages.append(self.layer4)
        outputs = []
        for stage_idx, stage in enumerate(stages):
            x = stage(x)
            if self.out_indices[stage_idx]:
                outputs.append(x)
        return outputs


def resnet(cfg, pretrained=True):
    """Build a frozen-BN ResNet backbone as described by ``cfg.MODEL.BACKBONE``.

    Looks up the torchvision constructor named by ``NAME``, wraps it in the
    file's ``ResNet`` feature extractor, records ``out_channels`` for
    downstream heads, and freezes the stem plus the stages listed in
    ``freeze_layer``.
    """
    backbone_cfg = cfg.MODEL.BACKBONE
    ctor = getattr(models.resnet, backbone_cfg.NAME)
    base = ctor(pretrained=pretrained, norm_layer=FrozenBatchNorm2d)
    backbone = ResNet(base, cfg)
    # Channel count of the deepest emitted stage (layer4 vs layer3).
    backbone.out_channels = 2048 if backbone_cfg.HAS_LAYER4 else 1024

    # The stem is always frozen.
    for param in backbone.stem.parameters():
        param.requires_grad = False
    # Freeze every bottleneck of the configured stages and convert their BN.
    for layer_name in backbone_cfg.freeze_layer:
        for bottleneck in getattr(backbone, layer_name):
            for param in bottleneck.parameters():
                param.requires_grad = False
            FrozenBatchNorm2d.convert_frozen_batchnorm(bottleneck)

    return backbone

