Commit 7431696 (1 parent: e20640b)
nielsr (HF staff) committed

Upload DepthAnythingForDepthEstimation

Files changed (2):
  1. config.json +6 -28
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,11 +1,8 @@
 {
   "_commit_hash": null,
-  "add_projection": false,
   "architectures": [
-    "DPTForDepthEstimation"
+    "DepthAnythingForDepthEstimation"
   ],
-  "attention_probs_dropout_prob": null,
-  "auxiliary_loss_weight": 0.4,
   "backbone_config": {
     "_name_or_path": "",
     "add_cross_attention": false,
@@ -117,44 +114,25 @@
     "use_bfloat16": false,
     "use_swiglu_ffn": false
   },
-  "backbone_featmap_shape": null,
-  "backbone_out_indices": null,
   "fusion_hidden_size": 128,
+  "head_hidden_size": 32,
   "head_in_index": -1,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": null,
-  "hidden_size": 768,
-  "image_size": null,
   "initializer_range": 0.02,
-  "intermediate_size": null,
-  "is_hybrid": false,
-  "layer_norm_eps": null,
-  "model_type": "dpt",
+  "model_type": "depth_anything",
   "neck_hidden_sizes": [
     96,
     192,
     384,
     768
   ],
-  "neck_ignore_stages": [],
-  "num_attention_heads": null,
-  "num_channels": null,
-  "num_hidden_layers": null,
-  "patch_size": null,
-  "qkv_bias": null,
-  "readout_type": "ignore",
+  "patch_size": 14,
   "reassemble_factors": [
     4,
     2,
     1,
     0.5
   ],
-  "semantic_classifier_dropout": 0.1,
-  "semantic_loss_ignore_index": 255,
+  "reassemble_hidden_size": 768,
   "torch_dtype": "float32",
-  "transformers_version": null,
-  "use_auxiliary_head": true,
-  "use_batch_norm_in_fusion_residual": false,
-  "use_bias_in_fusion_residual": true,
-  "use_size": true
+  "transformers_version": null
 }
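
The updated keys map directly onto DepthAnythingConfig in transformers. Below is a minimal sketch that rebuilds the values visible in the new config.json; the backbone_config block is truncated in the diff above, so the class default backbone settings stand in as a placeholder here.

from transformers import DepthAnythingConfig

# Rebuild the top-level values shown in the new config.json.
# backbone_config is omitted (truncated in the diff), so the
# default backbone settings are used as a placeholder.
config = DepthAnythingConfig(
    fusion_hidden_size=128,
    head_hidden_size=32,
    head_in_index=-1,
    initializer_range=0.02,
    neck_hidden_sizes=[96, 192, 384, 768],
    patch_size=14,
    reassemble_factors=[4, 2, 1, 0.5],
    reassemble_hidden_size=768,
)

print(config.model_type)  # "depth_anything"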
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6313430460a18987c764770e1ac08c40be9ed68c0f6aa8ad2b66e0569eb84f5c
-size 389916988
+oid sha256:91a6561491e7cca06cec2522f2353037f59a8d8ec8e3436a7520883d1cf99d65
+size 389916980
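
With the architecture renamed, the checkpoint loads through the DepthAnything classes instead of DPTForDepthEstimation. A rough usage sketch follows; it assumes the repository also ships a preprocessor_config.json, and the repo id below is a placeholder, not this repository's actual name.

import numpy as np
import torch
from transformers import AutoImageProcessor, DepthAnythingForDepthEstimation

repo_id = "your-username/depth-anything-model"  # placeholder repo id

image_processor = AutoImageProcessor.from_pretrained(repo_id)
model = DepthAnythingForDepthEstimation.from_pretrained(repo_id)

# Random RGB image just to confirm the checkpoint loads and runs end to end;
# replace with a real PIL image for actual depth estimation.
image = np.random.randint(0, 256, (518, 518, 3), dtype=np.uint8)
inputs = image_processor(images=image, return_tensors="pt")

with torch.no_grad():
    outputs = model(**inputs)

print(outputs.predicted_depth.shape)  # (batch_size, height, width)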