nielsr (HF staff) committed
Commit: 760b167
Parent: 871282c

Upload config.json

Files changed (1): config.json (+18 -8)
--- a/config.json
+++ b/config.json
@@ -3,16 +3,23 @@
     "DPTForDepthEstimation"
   ],
   "attention_probs_dropout_prob": 0.0,
+  "auxiliary_loss_weight": 0.4,
   "channels": 256,
-  "expand_channels": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 1024,
   "image_size": 384,
+  "in_index": -1,
   "initializer_range": 0.02,
   "intermediate_size": 4096,
   "layer_norm_eps": 1e-12,
   "model_type": "dpt",
+  "neck_hidden_sizes": [
+    256,
+    512,
+    1024,
+    1024
+  ],
   "num_attention_heads": 16,
   "num_channels": 3,
   "num_hidden_layers": 24,
@@ -23,15 +30,18 @@
     23
   ],
   "patch_size": 16,
-  "post_process_channels": [
-    256,
-    512,
-    1024,
-    1024
-  ],
   "qkv_bias": true,
   "readout_type": "project",
+  "reassemble_factors": [
+    4,
+    2,
+    1,
+    0.5
+  ],
+  "semantic_classifier_dropout": 0.1,
+  "semantic_loss_ignore_index": 255,
   "torch_dtype": "float32",
-  "transformers_version": "4.17.0.dev0",
+  "transformers_version": "4.18.0.dev0",
+  "use_auxiliary_head": true,
   "use_batch_norm": false
 }
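
For context: the diff renames post_process_channels to neck_hidden_sizes (the values [256, 512, 1024, 1024] are unchanged, so this reads as a pure key rename), drops expand_channels, adds the reassemble_factors list plus the auxiliary/semantic-head settings, and bumps transformers_version from 4.17.0.dev0 to 4.18.0.dev0. Below is a minimal sketch, not part of this commit, of how the new keys map onto DPTConfig in Transformers; it assumes a transformers build that already includes the DPT model.

# Minimal sketch: constructing a DPTConfig with the keys introduced by this commit.
# Assumes a transformers version that ships the DPT model (the new config records
# 4.18.0.dev0). Values are copied from the diff above.
from transformers import DPTConfig

config = DPTConfig(
    hidden_size=1024,
    num_hidden_layers=24,
    num_attention_heads=16,
    neck_hidden_sizes=[256, 512, 1024, 1024],  # replaces the old post_process_channels
    reassemble_factors=[4, 2, 1, 0.5],         # per-stage resampling of the reassembled features
    use_auxiliary_head=True,
    auxiliary_loss_weight=0.4,
    semantic_loss_ignore_index=255,
    semantic_classifier_dropout=0.1,
)
print(config.neck_hidden_sizes)  # [256, 512, 1024, 1024]

Since the renamed keys carry the same values, the checkpoint weights themselves are untouched; only the config vocabulary changes to match the newer DPTConfig argument names.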