import collections
from typing import Callable

import torch
from timm import create_model
from torch import nn


def get_model() -> nn.Sequential:
    """Build a fresh ViT-Tiny backbone with a 200-class classification head."""
    net = create_model(
        "vit_tiny_patch16_224", pretrained=False, num_classes=0, in_chans=3
    )
    head = nn.Sequential(
        nn.BatchNorm1d(192),
        nn.Dropout(0.25),
        nn.Linear(192, 512, bias=False),
        nn.ReLU(inplace=True),
        nn.BatchNorm1d(512),
        nn.Dropout(0.5),
        nn.Linear(512, 200, bias=False),
    )
    return nn.Sequential(net, head)
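
# A minimal shape sketch (assumption: a standard timm ViT-Tiny, whose pooled
# features with `num_classes=0` are 192-dimensional, matching the head above):
#     model = get_model()
#     model(torch.randn(2, 3, 224, 224)).shape  # -> torch.Size([2, 200])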


def copy_weight(name, parameter, state_dict):
    """
    Given a layer `name`, a model `parameter`, and a `state_dict`, copies the
    matching weights from `state_dict` into `parameter` if they exist and the
    shapes agree; otherwise prints a message and skips the layer.
    """
    # Layers in the body (backbone): the saved checkpoint prefixes them with
    # "0.model." rather than "0.", so rewrite the name before the lookup.
    if name[0] == "0":
        name = name[:2] + "model." + name[2:]
    if name in state_dict:
        input_parameter = state_dict[name]
        if input_parameter.shape == parameter.shape:
            parameter.copy_(input_parameter)
        else:
            print(f"Shape mismatch at layer: {name}, skipping")
    else:
        print(f"{name} is not in the state_dict, skipping.")


def apply_weights(
    input_model: nn.Module,
    input_weights: collections.OrderedDict,
    application_function: Callable,
):
    """
    Applies the weights in `input_weights` (a trained model's `state_dict()`)
    to `input_model`, using `application_function` to decide how each
    parameter is copied.

    Args:
        input_model (`nn.Module`):
            The model the weights should be applied to.
        input_weights (`collections.OrderedDict`):
            A dictionary of weights, the trained model's `state_dict()`.
        application_function (`Callable`):
            A function that takes a layer name and parameter from
            `input_model` along with the `input_weights`, and copies the
            matching weights from the state dict into `input_model`.
    """
    model_dict = input_model.state_dict()
    for name, parameter in model_dict.items():
        application_function(name, parameter, input_weights)
    input_model.load_state_dict(model_dict)
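

if __name__ == "__main__":
    # A minimal usage sketch. "checkpoint.pth" is a hypothetical filename:
    # point it at a saved `state_dict()` whose backbone keys carry the
    # "0.model." prefix that `copy_weight` rewrites for.
    model = get_model()
    state_dict = torch.load("checkpoint.pth", map_location="cpu")
    apply_weights(model, state_dict, copy_weight)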