# coding=utf-8
# Copyright 2022 The IDEA Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------------------------
# Copyright (c) Facebook, Inc. and its affiliates.
# ------------------------------------------------------------------------------------------------
# Modified from:
# https://github.com/facebookresearch/detr/blob/main/models/backbone.py
# ------------------------------------------------------------------------------------------------
from collections import OrderedDict

import torch
import torch.nn as nn
import torchvision
from torchvision.models._utils import IntermediateLayerGetter

from detectron2.utils.comm import is_main_process


class FrozenBatchNorm2d(torch.nn.Module):
    """
    BatchNorm2d where the batch statistics and the affine parameters are fixed.

    Copy-paste from torchvision.misc.ops with added eps before rsqrt,
    without which any model other than torchvision.models.resnet[18,34,50,101]
    produces NaNs.
    """

    def __init__(self, n):
        super(FrozenBatchNorm2d, self).__init__()
        self.register_buffer("weight", torch.ones(n))
        self.register_buffer("bias", torch.zeros(n))
        self.register_buffer("running_mean", torch.zeros(n))
        self.register_buffer("running_var", torch.ones(n))

    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        # Drop the num_batches_tracked entry that nn.BatchNorm2d checkpoints carry,
        # since this frozen variant has no matching buffer.
        num_batches_tracked_key = prefix + 'num_batches_tracked'
        if num_batches_tracked_key in state_dict:
            del state_dict[num_batches_tracked_key]
        super(FrozenBatchNorm2d, self)._load_from_state_dict(
            state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs)

    def forward(self, x):
        # Move the reshapes to the beginning to make the op fuser-friendly.
        w = self.weight.reshape(1, -1, 1, 1)
        b = self.bias.reshape(1, -1, 1, 1)
        rv = self.running_var.reshape(1, -1, 1, 1)
        rm = self.running_mean.reshape(1, -1, 1, 1)
        eps = 1e-5
        scale = w * (rv + eps).rsqrt()
        bias = b - rm * scale
        return x * scale + bias
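

# Hedged sketch, not part of the original file: a FrozenBatchNorm2d layer should
# behave like nn.BatchNorm2d in eval mode for the same (frozen) statistics, since
# both reduce to the affine map x * scale + bias with
#     scale = weight / sqrt(running_var + eps),  bias = bias - running_mean * scale.
# A minimal equivalence check, assuming the default eps of 1e-5 on both sides:
#
#     frozen = FrozenBatchNorm2d(8)
#     reference = torch.nn.BatchNorm2d(8).eval()
#     reference.load_state_dict(frozen.state_dict(), strict=False)  # no num_batches_tracked
#     x = torch.randn(2, 8, 16, 16)
#     assert torch.allclose(frozen(x), reference(x), atol=1e-5)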


class BackboneBase(nn.Module):

    def __init__(
        self,
        backbone: nn.Module,
        train_backbone: bool,
        num_channels: int,
        return_layers: dict,
    ):
        super().__init__()
        # Always freeze the stem and layer1; additionally freeze layer2-4
        # when the backbone is not being trained.
        for name, parameter in backbone.named_parameters():
            if not train_backbone or (
                'layer2' not in name and 'layer3' not in name and 'layer4' not in name
            ):
                parameter.requires_grad_(False)
        self.body = IntermediateLayerGetter(backbone, return_layers=return_layers)
        self.num_channels = num_channels

    def forward(self, x):
        xs = self.body(x)
        out = {}
        for name, x in xs.items():
            out[name] = x
        return out
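

# Hedged sketch, not part of the original file: IntermediateLayerGetter takes the
# return_layers mapping {module_name: output_key} and returns an OrderedDict of
# feature maps keyed by output_key. A multi-scale configuration (assumed names,
# following the detectron2-style "res3"/"res4"/"res5" convention) could look like:
#
#     return_layers = {"layer2": "res3", "layer3": "res4", "layer4": "res5"}
#     body = IntermediateLayerGetter(torchvision.models.resnet50(), return_layers)
#     feats = body(torch.randn(1, 3, 224, 224))
#     # feats["res3"]: (1, 512, 28, 28), feats["res4"]: (1, 1024, 14, 14),
#     # feats["res5"]: (1, 2048, 7, 7)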


class TorchvisionResNet(BackboneBase):
    """ResNet backbone with frozen BatchNorm."""

    def __init__(self,
                 name: str,
                 train_backbone: bool,
                 return_layers: dict = {"layer4": "res5"},
                 dilation: bool = False,
                 ):
        backbone = getattr(torchvision.models, name)(
            replace_stride_with_dilation=[False, False, dilation],
            pretrained=False, norm_layer=FrozenBatchNorm2d)
        num_channels = 512 if name in ('resnet18', 'resnet34') else 2048
        super().__init__(backbone, train_backbone, num_channels, return_layers)
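

if __name__ == "__main__":
    # Hedged usage sketch, not part of the original file: build a frozen,
    # randomly initialized ResNet-50 backbone (pretrained=False above) and
    # inspect the single feature map returned under the default return_layers.
    backbone = TorchvisionResNet("resnet50", train_backbone=False)
    images = torch.randn(2, 3, 224, 224)
    features = backbone(images)
    for key, feature in features.items():
        # Expected: res5 torch.Size([2, 2048, 7, 7])
        print(key, feature.shape)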