Paresh committed on
Commit
b55968c
1 Parent(s): 1d97932

Upload 7 files

Browse files
models/face_age/age_deploy.prototxt ADDED
@@ -0,0 +1,175 @@
+ name: "CaffeNet"
+ input: "data"
+ input_dim: 1
+ input_dim: 3
+ input_dim: 227
+ input_dim: 227
+ layers {
+ name: "conv1"
+ type: CONVOLUTION
+ bottom: "data"
+ top: "conv1"
+ convolution_param {
+ num_output: 96
+ kernel_size: 7
+ stride: 4
+ }
+ }
+ layers {
+ name: "relu1"
+ type: RELU
+ bottom: "conv1"
+ top: "conv1"
+ }
+ layers {
+ name: "pool1"
+ type: POOLING
+ bottom: "conv1"
+ top: "pool1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ stride: 2
+ }
+ }
+ layers {
+ name: "norm1"
+ type: LRN
+ bottom: "pool1"
+ top: "norm1"
+ lrn_param {
+ local_size: 5
+ alpha: 0.0001
+ beta: 0.75
+ }
+ }
+ layers {
+ name: "conv2"
+ type: CONVOLUTION
+ bottom: "norm1"
+ top: "conv2"
+ convolution_param {
+ num_output: 256
+ pad: 2
+ kernel_size: 5
+ }
+ }
+ layers {
+ name: "relu2"
+ type: RELU
+ bottom: "conv2"
+ top: "conv2"
+ }
+ layers {
+ name: "pool2"
+ type: POOLING
+ bottom: "conv2"
+ top: "pool2"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ stride: 2
+ }
+ }
+ layers {
+ name: "norm2"
+ type: LRN
+ bottom: "pool2"
+ top: "norm2"
+ lrn_param {
+ local_size: 5
+ alpha: 0.0001
+ beta: 0.75
+ }
+ }
+ layers {
+ name: "conv3"
+ type: CONVOLUTION
+ bottom: "norm2"
+ top: "conv3"
+ convolution_param {
+ num_output: 384
+ pad: 1
+ kernel_size: 3
+ }
+ }
+ layers {
+ name: "relu3"
+ type: RELU
+ bottom: "conv3"
+ top: "conv3"
+ }
+ layers {
+ name: "pool5"
+ type: POOLING
+ bottom: "conv3"
+ top: "pool5"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ stride: 2
+ }
+ }
+ layers {
+ name: "fc6"
+ type: INNER_PRODUCT
+ bottom: "pool5"
+ top: "fc6"
+ inner_product_param {
+ num_output: 512
+ }
+ }
+ layers {
+ name: "relu6"
+ type: RELU
+ bottom: "fc6"
+ top: "fc6"
+ }
+ layers {
+ name: "drop6"
+ type: DROPOUT
+ bottom: "fc6"
+ top: "fc6"
+ dropout_param {
+ dropout_ratio: 0.5
+ }
+ }
+ layers {
+ name: "fc7"
+ type: INNER_PRODUCT
+ bottom: "fc6"
+ top: "fc7"
+ inner_product_param {
+ num_output: 512
+ }
+ }
+ layers {
+ name: "relu7"
+ type: RELU
+ bottom: "fc7"
+ top: "fc7"
+ }
+ layers {
+ name: "drop7"
+ type: DROPOUT
+ bottom: "fc7"
+ top: "fc7"
+ dropout_param {
+ dropout_ratio: 0.5
+ }
+ }
+ layers {
+ name: "fc8"
+ type: INNER_PRODUCT
+ bottom: "fc7"
+ top: "fc8"
+ inner_product_param {
+ num_output: 8
+ }
+ }
+ layers {
+ name: "prob"
+ type: SOFTMAX
+ bottom: "fc8"
+ top: "prob"
+ }
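
The prototxt above declares a single 3x227x227 "data" input and ends in an 8-way softmax ("prob"). Below is a minimal sketch of running it with OpenCV's DNN module, using the file paths added in this commit; the BGR mean values and the eight age-bucket labels are the ones commonly used with this model family, not values taken from this repo, and the input image path is hypothetical.

import cv2

# Eight class labels, matching num_output: 8 in fc8 above; assumed, not from this repo.
AGE_BUCKETS = ["(0-2)", "(4-6)", "(8-12)", "(15-20)",
               "(25-32)", "(38-43)", "(48-53)", "(60-100)"]

# Load the network defined by age_deploy.prototxt together with its LFS weights.
net = cv2.dnn.readNetFromCaffe(
    "models/face_age/age_deploy.prototxt",
    "models/face_age/age_net.caffemodel",
)

face = cv2.imread("face_crop.jpg")  # hypothetical pre-cropped face image
blob = cv2.dnn.blobFromImage(
    face,
    scalefactor=1.0,
    size=(227, 227),  # matches input_dim 3 x 227 x 227 above
    mean=(78.4263377603, 87.7689143744, 114.895847746),  # assumed BGR mean for this model family
    swapRB=False,
)
net.setInput(blob)
probs = net.forward()  # shape (1, 8), output of the "prob" softmax layer
idx = int(probs[0].argmax())
print(AGE_BUCKETS[idx], float(probs[0][idx]))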
models/face_age/age_net.caffemodel ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6dde5d07df5ca1d66ff39e525693f05ccfb9d2c437e188fdd1a10d42e57fabd6
+ size 45661480
models/face_alignment/shape_predictor_68_face_landmarks.dat ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbdc2cb80eb9aa7a758672cbfdda32ba6300efe9b6e6c7a299ff7e736b11b92f
+ size 99693937
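
Both weight files above (age_net.caffemodel and shape_predictor_68_face_landmarks.dat) are stored through Git LFS, so the repository itself only tracks the three-line version/oid/size pointer shown in the diff; the real binaries are fetched separately (for example with `git lfs pull`). A small hypothetical helper, not part of this upload, that checks whether a local copy is still an un-fetched pointer:

from pathlib import Path

def is_lfs_pointer(path: str) -> bool:
    """True if `path` still holds a Git LFS pointer stub instead of the real weights."""
    p = Path(path)
    if not p.exists() or p.stat().st_size > 1024:  # the real weights are tens of MB
        return False
    return p.read_bytes().startswith(b"version https://git-lfs.github.com/spec/v1")

for f in ("models/face_age/age_net.caffemodel",
          "models/face_alignment/shape_predictor_68_face_landmarks.dat"):
    if is_lfs_pointer(f):
        print(f"{f}: still an LFS pointer, run `git lfs pull` or download the real file")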
models/face_detection/deploy.prototxt.txt ADDED
@@ -0,0 +1,1789 @@
+ input: "data"
+ input_shape {
+ dim: 1
+ dim: 3
+ dim: 300
+ dim: 300
+ }
+
+ layer {
+ name: "data_bn"
+ type: "BatchNorm"
+ bottom: "data"
+ top: "data_bn"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "data_scale"
+ type: "Scale"
+ bottom: "data_bn"
+ top: "data_bn"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "conv1_h"
+ type: "Convolution"
+ bottom: "data_bn"
+ top: "conv1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 32
+ pad: 3
+ kernel_size: 7
+ stride: 2
+ weight_filler {
+ type: "msra"
+ variance_norm: FAN_OUT
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "conv1_bn_h"
+ type: "BatchNorm"
+ bottom: "conv1_h"
+ top: "conv1_h"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "conv1_scale_h"
+ type: "Scale"
+ bottom: "conv1_h"
+ top: "conv1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "conv1_relu"
+ type: "ReLU"
+ bottom: "conv1_h"
+ top: "conv1_h"
+ }
+ layer {
+ name: "conv1_pool"
+ type: "Pooling"
+ bottom: "conv1_h"
+ top: "conv1_pool"
+ pooling_param {
+ kernel_size: 3
+ stride: 2
+ }
+ }
+ layer {
+ name: "layer_64_1_conv1_h"
+ type: "Convolution"
+ bottom: "conv1_pool"
+ top: "layer_64_1_conv1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 32
+ bias_term: false
+ pad: 1
+ kernel_size: 3
+ stride: 1
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_64_1_bn2_h"
+ type: "BatchNorm"
+ bottom: "layer_64_1_conv1_h"
+ top: "layer_64_1_conv1_h"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "layer_64_1_scale2_h"
+ type: "Scale"
+ bottom: "layer_64_1_conv1_h"
+ top: "layer_64_1_conv1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "layer_64_1_relu2"
+ type: "ReLU"
+ bottom: "layer_64_1_conv1_h"
+ top: "layer_64_1_conv1_h"
+ }
+ layer {
+ name: "layer_64_1_conv2_h"
+ type: "Convolution"
+ bottom: "layer_64_1_conv1_h"
+ top: "layer_64_1_conv2_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 32
+ bias_term: false
+ pad: 1
+ kernel_size: 3
+ stride: 1
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_64_1_sum"
+ type: "Eltwise"
+ bottom: "layer_64_1_conv2_h"
+ bottom: "conv1_pool"
+ top: "layer_64_1_sum"
+ }
+ layer {
+ name: "layer_128_1_bn1_h"
+ type: "BatchNorm"
+ bottom: "layer_64_1_sum"
+ top: "layer_128_1_bn1_h"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "layer_128_1_scale1_h"
+ type: "Scale"
+ bottom: "layer_128_1_bn1_h"
+ top: "layer_128_1_bn1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "layer_128_1_relu1"
+ type: "ReLU"
+ bottom: "layer_128_1_bn1_h"
+ top: "layer_128_1_bn1_h"
+ }
+ layer {
+ name: "layer_128_1_conv1_h"
+ type: "Convolution"
+ bottom: "layer_128_1_bn1_h"
+ top: "layer_128_1_conv1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 128
+ bias_term: false
+ pad: 1
+ kernel_size: 3
+ stride: 2
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_128_1_bn2"
+ type: "BatchNorm"
+ bottom: "layer_128_1_conv1_h"
+ top: "layer_128_1_conv1_h"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "layer_128_1_scale2"
+ type: "Scale"
+ bottom: "layer_128_1_conv1_h"
+ top: "layer_128_1_conv1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "layer_128_1_relu2"
+ type: "ReLU"
+ bottom: "layer_128_1_conv1_h"
+ top: "layer_128_1_conv1_h"
+ }
+ layer {
+ name: "layer_128_1_conv2"
+ type: "Convolution"
+ bottom: "layer_128_1_conv1_h"
+ top: "layer_128_1_conv2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 128
+ bias_term: false
+ pad: 1
+ kernel_size: 3
+ stride: 1
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_128_1_conv_expand_h"
+ type: "Convolution"
+ bottom: "layer_128_1_bn1_h"
+ top: "layer_128_1_conv_expand_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 128
+ bias_term: false
+ pad: 0
+ kernel_size: 1
+ stride: 2
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_128_1_sum"
+ type: "Eltwise"
+ bottom: "layer_128_1_conv2"
+ bottom: "layer_128_1_conv_expand_h"
+ top: "layer_128_1_sum"
+ }
+ layer {
+ name: "layer_256_1_bn1"
+ type: "BatchNorm"
+ bottom: "layer_128_1_sum"
+ top: "layer_256_1_bn1"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "layer_256_1_scale1"
+ type: "Scale"
+ bottom: "layer_256_1_bn1"
+ top: "layer_256_1_bn1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "layer_256_1_relu1"
+ type: "ReLU"
+ bottom: "layer_256_1_bn1"
+ top: "layer_256_1_bn1"
+ }
+ layer {
+ name: "layer_256_1_conv1"
+ type: "Convolution"
+ bottom: "layer_256_1_bn1"
+ top: "layer_256_1_conv1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 256
+ bias_term: false
+ pad: 1
+ kernel_size: 3
+ stride: 2
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_256_1_bn2"
+ type: "BatchNorm"
+ bottom: "layer_256_1_conv1"
+ top: "layer_256_1_conv1"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "layer_256_1_scale2"
+ type: "Scale"
+ bottom: "layer_256_1_conv1"
+ top: "layer_256_1_conv1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "layer_256_1_relu2"
+ type: "ReLU"
+ bottom: "layer_256_1_conv1"
+ top: "layer_256_1_conv1"
+ }
+ layer {
+ name: "layer_256_1_conv2"
+ type: "Convolution"
+ bottom: "layer_256_1_conv1"
+ top: "layer_256_1_conv2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 256
+ bias_term: false
+ pad: 1
+ kernel_size: 3
+ stride: 1
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_256_1_conv_expand"
+ type: "Convolution"
+ bottom: "layer_256_1_bn1"
+ top: "layer_256_1_conv_expand"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 256
+ bias_term: false
+ pad: 0
+ kernel_size: 1
+ stride: 2
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_256_1_sum"
+ type: "Eltwise"
+ bottom: "layer_256_1_conv2"
+ bottom: "layer_256_1_conv_expand"
+ top: "layer_256_1_sum"
+ }
+ layer {
+ name: "layer_512_1_bn1"
+ type: "BatchNorm"
+ bottom: "layer_256_1_sum"
+ top: "layer_512_1_bn1"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "layer_512_1_scale1"
+ type: "Scale"
+ bottom: "layer_512_1_bn1"
+ top: "layer_512_1_bn1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 1.0
+ }
+ scale_param {
+ bias_term: true
+ }
+ }
+ layer {
+ name: "layer_512_1_relu1"
+ type: "ReLU"
+ bottom: "layer_512_1_bn1"
+ top: "layer_512_1_bn1"
+ }
+ layer {
+ name: "layer_512_1_conv1_h"
+ type: "Convolution"
+ bottom: "layer_512_1_bn1"
+ top: "layer_512_1_conv1_h"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1.0
+ }
+ convolution_param {
+ num_output: 128
+ bias_term: false
+ pad: 1
+ kernel_size: 3
+ stride: 1 # 2
+ weight_filler {
+ type: "msra"
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.0
+ }
+ }
+ }
+ layer {
+ name: "layer_512_1_bn2_h"
+ type: "BatchNorm"
+ bottom: "layer_512_1_conv1_h"
+ top: "layer_512_1_conv1_h"
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ param {
+ lr_mult: 0.0
+ }
+ }
+ layer {
+ name: "layer_512_1_scale2_h"