Upload 12 files
Browse files- efficientnet_model.pth +3 -0
- examples/OIP-U-NRJLJpckkAXN0iKPSp1AHaHa.jpeg +0 -0
- examples/OIP-W78XBYQ0JibkWl4at4_kOAHaIJ.jpeg +0 -0
- examples/OIP-XoARhzu6qXLSgfkXPIRhywHaEA.jpeg +0 -0
- examples/OIP-Y1MB2tBaRG5-ePPidka4BwHaGG.jpeg +0 -0
- examples/OIP-Y6_rxGV23EeP8KPdNR4A0gHaHa.jpeg +0 -0
- examples/OIP-ueOp0-GC1GoLKaGBSnfX5gHaFj.jpeg +0 -0
- examples/OIP-x-T0cc_QnQMYehCbCJF4QgHaFj.jpeg +0 -0
- examples/OIP-xJuqSpaD-Dmm7S7eH4KrtQHaE3.jpeg +0 -0
- examples/OIP-y7PQi9MNrhQFZuDIA3O_1wHaFj.jpeg +0 -0
- examples/OIP-z0UEPlOllV3uEYUIOS_W9gHaFp.jpeg +0 -0
- model.py +38 -0
efficientnet_model.pth
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:9223524b7fe0151348f5fd8472beec765c8b57855113b4c4c1a001e53160f6e1
|
3 |
+
size 31318643
|
examples/OIP-U-NRJLJpckkAXN0iKPSp1AHaHa.jpeg
ADDED
examples/OIP-W78XBYQ0JibkWl4at4_kOAHaIJ.jpeg
ADDED
examples/OIP-XoARhzu6qXLSgfkXPIRhywHaEA.jpeg
ADDED
examples/OIP-Y1MB2tBaRG5-ePPidka4BwHaGG.jpeg
ADDED
examples/OIP-Y6_rxGV23EeP8KPdNR4A0gHaHa.jpeg
ADDED
examples/OIP-ueOp0-GC1GoLKaGBSnfX5gHaFj.jpeg
ADDED
examples/OIP-x-T0cc_QnQMYehCbCJF4QgHaFj.jpeg
ADDED
examples/OIP-xJuqSpaD-Dmm7S7eH4KrtQHaE3.jpeg
ADDED
examples/OIP-y7PQi9MNrhQFZuDIA3O_1wHaFj.jpeg
ADDED
examples/OIP-z0UEPlOllV3uEYUIOS_W9gHaFp.jpeg
ADDED
model.py
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import torchvision
|
3 |
+
from torch import nn
|
4 |
+
|
5 |
+
def create_effnetb2_model(num_classes: int = 10, seed: int = 42):
    """Creates a frozen EfficientNet-B2 feature extractor and its image transforms.

    All base-model parameters are frozen; only the newly created classifier
    head (Dropout + Linear) remains trainable.

    Args:
        num_classes (int, optional): number of classes in the classifier head.
            Defaults to 10.
        seed (int, optional): random seed used when initializing the new
            classifier head, for reproducibility. Defaults to 42.

    Returns:
        model (torch.nn.Module): EffNetB2 feature extractor model.
        transform (torchvision.transforms.Compose): image preprocessing
            pipeline (resize/center-crop to 224, ToTensor, ImageNet
            mean/std normalization).
    """
    # Pretrained ImageNet weights for EfficientNet-B2.
    weights = torchvision.models.EfficientNet_B2_Weights.DEFAULT

    # Manual 224x224 preprocessing pipeline. NOTE(review): weights.transforms()
    # would instead use EffNet-B2's native 288x288 resize/crop — the manual
    # pipeline is kept to preserve the original behavior.
    transform = torchvision.transforms.Compose([
        torchvision.transforms.Resize(224),
        torchvision.transforms.CenterCrop(224),
        torchvision.transforms.ToTensor(),
        torchvision.transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                         std=[0.229, 0.224, 0.225]),
    ])

    model = torchvision.models.efficientnet_b2(weights=weights)

    # Freeze every base-model parameter so only the new head trains.
    for param in model.parameters():
        param.requires_grad = False

    # Seed the RNG so the new Linear layer's random initialization is
    # reproducible, then replace the classifier head. 1408 is the feature
    # dimension produced by the EffNet-B2 backbone.
    torch.manual_seed(seed)
    model.classifier = nn.Sequential(
        nn.Dropout(p=0.3, inplace=True),
        nn.Linear(in_features=1408, out_features=num_classes),
    )

    return model, transform