nielsr (HF staff) committed
Commit 16030b5
1 Parent(s): 82af3b7

Upload DepthAnythingForDepthEstimation

Files changed (2):
  1. config.json +6 -28
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,11 +1,8 @@
 {
   "_commit_hash": null,
-  "add_projection": false,
   "architectures": [
-    "DPTForDepthEstimation"
+    "DepthAnythingForDepthEstimation"
   ],
-  "attention_probs_dropout_prob": null,
-  "auxiliary_loss_weight": 0.4,
   "backbone_config": {
     "_name_or_path": "",
     "add_cross_attention": false,
@@ -117,44 +114,25 @@
     "use_bfloat16": false,
     "use_swiglu_ffn": false
   },
-  "backbone_featmap_shape": null,
-  "backbone_out_indices": null,
   "fusion_hidden_size": 64,
+  "head_hidden_size": 32,
   "head_in_index": -1,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": null,
-  "hidden_size": 768,
-  "image_size": null,
   "initializer_range": 0.02,
-  "intermediate_size": null,
-  "is_hybrid": false,
-  "layer_norm_eps": null,
-  "model_type": "dpt",
+  "model_type": "depth_anything",
   "neck_hidden_sizes": [
     48,
     96,
     192,
     384
   ],
-  "neck_ignore_stages": [],
-  "num_attention_heads": null,
-  "num_channels": null,
-  "num_hidden_layers": null,
-  "patch_size": null,
-  "qkv_bias": null,
-  "readout_type": "ignore",
+  "patch_size": 14,
   "reassemble_factors": [
     4,
     2,
     1,
     0.5
   ],
-  "semantic_classifier_dropout": 0.1,
-  "semantic_loss_ignore_index": 255,
+  "reassemble_hidden_size": 384,
   "torch_dtype": "float32",
-  "transformers_version": null,
-  "use_auxiliary_head": true,
-  "use_batch_norm_in_fusion_residual": false,
-  "use_bias_in_fusion_residual": true,
-  "use_size": true
+  "transformers_version": null
 }
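
For reference, a minimal sketch (not part of this repository) of what the updated config.json corresponds to in transformers: the architecture is now DepthAnythingForDepthEstimation with model_type "depth_anything", and the commit adds head_hidden_size, patch_size and reassemble_hidden_size at the top level. The Dinov2 backbone settings below are assumptions, since the nested "backbone_config" is collapsed in the diff above; in practice the checkpoint in this repo should be loaded with from_pretrained rather than rebuilt by hand.

from transformers import (
    DepthAnythingConfig,
    DepthAnythingForDepthEstimation,
    Dinov2Config,
)

# Assumed backbone settings: the nested "backbone_config" is truncated in the
# diff above, so a default Dinov2Config stands in here.
backbone_config = Dinov2Config()

# Top-level values taken from the new config.json shown above.
config = DepthAnythingConfig(
    backbone_config=backbone_config,
    fusion_hidden_size=64,
    head_hidden_size=32,          # added in this commit
    head_in_index=-1,
    initializer_range=0.02,
    neck_hidden_sizes=[48, 96, 192, 384],
    patch_size=14,                # added in this commit
    reassemble_factors=[4, 2, 1, 0.5],
    reassemble_hidden_size=384,   # added in this commit
)

# Randomly initialised model using the new architecture class; for real use,
# load the weights uploaded in this commit with from_pretrained on this repo.
model = DepthAnythingForDepthEstimation(config)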
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5f926b37bf7e7aaec4bd8ba788d53d7961943ceb8e25885955cae42b163103db
-size 99173668
+oid sha256:7997c812d8964a741eec21e6816ec2db1e442b5109ea2e7db26dcb03c9060ef0
+size 99173660
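
The model.safetensors entry above is a Git LFS pointer; a quick sketch for checking a locally downloaded copy against the new pointer (the local path is an assumption):

import hashlib
import os

# Values from the new LFS pointer in this commit.
EXPECTED_SHA256 = "7997c812d8964a741eec21e6816ec2db1e442b5109ea2e7db26dcb03c9060ef0"
EXPECTED_SIZE = 99173660

path = "model.safetensors"  # assumed path to the downloaded file

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"
assert sha.hexdigest() == EXPECTED_SHA256, "sha256 does not match the LFS pointer"
print("model.safetensors matches the pointer in commit 16030b5")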