{
  "_name_or_path": "HuggingFaceM4/siglip-so400m-14-384-flash-attn",
  "architectures": [
    "SiglipModel"
  ],
  "auto_map": {
    "AutoConfig": "HuggingFaceM4/siglip-so400m-14-384-flash-attn--configuration_siglip.SiglipConfig",
    "AutoModel": "HuggingFaceM4/siglip-so400m-14-384-flash-attn--modeling_siglip.SiglipModel"
  },
  "initializer_factor": 1.0,
  "logit_scale_init_value": 2.6592,
  "model_type": "siglip",
  "projection_dim": 512,
  "text_config": {
    "hidden_size": 1152,
    "intermediate_size": 4304,
    "model_type": "siglip_text_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "vocab_size": 32000
  },
  "torch_dtype": "float32",
  "transformers_version": "4.35.2",
  "vision_config": {
    "hidden_size": 1152,
    "image_size": 384,
    "intermediate_size": 4304,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "patch_size": 14
  }
}
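
Because "auto_map" routes AutoConfig and AutoModel to the configuration_siglip.py and modeling_siglip.py files bundled with this checkpoint, loading it through the Auto classes requires trust_remote_code=True. A minimal loading sketch, assuming Hub access and a transformers version compatible with the checkpoint's custom code:

# Minimal loading sketch for this config (assumes network access to the Hub).
from transformers import AutoConfig, AutoModel

repo = "HuggingFaceM4/siglip-so400m-14-384-flash-attn"

# trust_remote_code=True is required because auto_map points at custom
# configuration/modeling code shipped with the checkpoint.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
model = AutoModel.from_pretrained(repo, trust_remote_code=True)

print(config.vision_config.image_size)  # 384, per vision_config above
print(config.vision_config.patch_size)  # 14, so 384/14 ≈ 27x27 patches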