DepthPro / config.json
{
  "apply_layernorm": true,
  "architectures": [
    "DepthProForDepthEstimation"
  ],
  "attention_probs_dropout_prob": 0.0,
  "drop_path_rate": 0.0,
  "fusion_hidden_size": 256,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "image_size": 1536,
  "initializer_range": 0.02,
  "intermediate_feature_dims": [
    256,
    256
  ],
  "intermediate_hook_ids": [
    11,
    5
  ],
  "layer_norm_eps": 1e-06,
  "layerscale_value": 1.0,
  "mlp_ratio": 4,
  "model_type": "depth_pro",
  "num_attention_heads": 16,
  "num_channels": 3,
  "num_fov_head_layers": 2,
  "num_hidden_layers": 24,
  "patch_embeddings_size": 16,
  "patch_size": 384,
  "qkv_bias": true,
  "reshape_hidden_states": true,
  "scaled_images_feature_dims": [
    1024,
    1024,
    512
  ],
  "scaled_images_overlap_ratios": [
    0.0,
    0.5,
    0.25
  ],
  "scaled_images_ratios": [
    0.25,
    0.5,
    1
  ],
  "torch_dtype": "float16",
  "transformers_version": "4.47.0.dev0",
  "use_batch_norm_in_fusion": false,
  "use_fov_model": true,
  "use_swiglu_ffn": false
}
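
For reference, a minimal sketch of loading this configuration with the transformers DepthPro classes. It assumes a transformers release that includes DepthPro support (the file was produced with 4.47.0.dev0) and that config.json has been downloaded locally; the class names DepthProConfig and DepthProForDepthEstimation match the "architectures" and "model_type" fields above.

# Minimal sketch: build a DepthPro model from this config.json.
# Assumes a transformers version with DepthPro support.
from transformers import DepthProConfig, DepthProForDepthEstimation

# Parse the config.json shown above into a configuration object.
config = DepthProConfig.from_json_file("config.json")

# Instantiate the depth-estimation model with randomly initialised
# weights; use DepthProForDepthEstimation.from_pretrained(repo_id)
# instead to also download the checkpoint weights from the hub.
model = DepthProForDepthEstimation(config)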