dawn17 committed on
Commit
fe8ce56
1 Parent(s): 6ade604

Upload model.py

Files changed (1)
  1. model.py +87 -0
model.py ADDED
@@ -0,0 +1,87 @@

"""
CIFAR 10
INPUT - [3, 32, 32]
"""
import torch.nn as nn


class BasicBlock(nn.Module):
    """Residual block: two conv-BN-ReLU-dropout layers with an identity skip connection."""

    def __init__(self, in_channel, out_channel, dropout):
        super(BasicBlock, self).__init__()
        self.cblock = nn.Sequential(
            *[
                self._get_base_layer(
                    in_channel if i == 0 else out_channel, out_channel, dropout
                )
                for i in range(2)
            ]
        )

    def _get_base_layer(self, in_channel, out_channel, dropout):
        return nn.Sequential(
            nn.Conv2d(
                in_channel,
                out_channel,
                kernel_size=3,
                padding=1,
                padding_mode="replicate",
                bias=False,
            ),
            nn.BatchNorm2d(out_channel),
            nn.ReLU(),
            nn.Dropout(dropout),
        )

    def forward(self, x):
        # Identity skip connection; assumes in_channel == out_channel.
        return self.cblock(x) + x


class DavidPageNet(nn.Module):
    """ResNet-style CIFAR-10 classifier: conv stem, three downsampling stages, linear head."""

    def __init__(self, channels=[64, 128, 256, 512], dropout=0.01):
        super(DavidPageNet, self).__init__()
        self.block0 = self._get_base_layer(3, channels[0], pool=False)  # 32x32
        self.block1 = nn.Sequential(
            *[
                self._get_base_layer(channels[0], channels[1]),  # pool -> 16x16
                BasicBlock(channels[1], channels[1], dropout),
            ]
        )

        self.block2 = self._get_base_layer(channels[1], channels[2])  # pool -> 8x8
        self.block3 = nn.Sequential(
            *[
                self._get_base_layer(channels[2], channels[3]),  # pool -> 4x4
                BasicBlock(channels[3], channels[3], dropout),
            ]
        )

        self.logit = nn.Sequential(
            nn.MaxPool2d(4),  # 4x4 -> 1x1
            nn.Flatten(),
            nn.Linear(512, 10),
        )

    def _get_base_layer(self, in_channel, out_channel, pool=True):
        return nn.Sequential(
            nn.Conv2d(
                in_channel,
                out_channel,
                stride=1,
                padding=1,
                kernel_size=3,
                bias=False,
                padding_mode="replicate",
            ),
            nn.MaxPool2d(2) if pool else nn.Identity(),
            nn.BatchNorm2d(out_channel),
            nn.ReLU(),
        )

    def forward(self, x):
        x = self.block0(x)

        x = self.block1(x)
        x = self.block2(x)
        x = self.block3(x)

        return self.logit(x)
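
A minimal usage sketch for the uploaded model, assuming the file above is importable as model.py; the torch import, the batch size of 8, and the random input tensor are illustrative and not part of the committed file:

import torch

from model import DavidPageNet

# Hypothetical smoke test: the input matches the [3, 32, 32] CIFAR-10 shape
# noted in the module docstring, with a batch dimension added in front.
model = DavidPageNet()
x = torch.randn(8, 3, 32, 32)
logits = model(x)
print(logits.shape)  # torch.Size([8, 10]) -- one logit per CIFAR-10 class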