ksmcg commited on
Commit
9985a81
·
1 Parent(s): 86d4644

Upload config

Browse files
Files changed (1) hide show
  1. config.json +93 -0
config.json ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "act_layer": null,
  "attn_drop_rate": 0.0,
  "backbone": "hybrid",
  "c_head_num": null,
  "channel_dims": null,
  "channels": 256,
  "cls_attn_layers": 2,
  "decoder_hidden_size": 768,
  "depth": 22,
  "depths": [
    3,
    5
  ],
  "dims": [
    128,
    256,
    512,
    1024
  ],
  "drop_path_rate": 0.0,
  "drop_rate": 0.0,
  "dropout_ratio": 0.1,
  "embed_dim": 480,
  "eta": 1.0,
  "feat_downsample": false,
  "feature_strides": [
    4,
    8,
    16,
    32
  ],
  "hybrid_patch_size": 2,
  "img_size": [
    224,
    224
  ],
  "in_channels": [
    128,
    256,
    480,
    480
  ],
  "in_chans": 3,
  "in_index": [
    0,
    1,
    2,
    3
  ],
  "initializer_range": 1.0,
  "mlp_ratio": 4.0,
  "model_type": "fan",
  "norm_layer": null,
  "num_classes": 1000,
  "num_heads": 10,
  "out_index": -1,
  "patch_size": 16,
  "qkv_bias": true,
  "reshape_last_stage": false,
  "rounding_mode": "floor",
  "se_mlp": false,
  "sharpen_attn": false,
  "sr_ratio": [
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1
  ],
  "tokens_norm": true,
  "transformers_version": "4.22.0.dev0",
  "use_checkpoint": false,
  "use_head": false,
  "use_pos_embed": true
}