ijepa_vith16_1k / config.json
{
  "_attn_implementation_autoset": true,
  "_name_or_path": "jmtzt/ijepa_vith16_1k",
  "architectures": [
    "IJepaModel"
  ],
  "attention_probs_dropout_prob": 0.0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1280,
  "image_size": 448,
  "initializer_range": 0.02,
  "intermediate_size": 5120,
  "layer_norm_eps": 1e-06,
  "mlp_ratio": 4,
  "model_type": "ijepa",
  "num_attention_heads": 16,
  "num_channels": 3,
  "num_hidden_layers": 32,
  "patch_size": 16,
  "qkv_bias": true,
  "transformers_version": "4.47.0.dev0"
}
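
For reference, a minimal sketch of how a checkpoint using this config could be loaded with the transformers Python library (4.47 or later, matching the `transformers_version` above). The repo id is taken from the `_name_or_path` field, and the image path and the expected output shape are assumptions derived from the config values (`image_size`, `patch_size`, `hidden_size`), not something confirmed by this page.

```python
# Sketch: loading an I-JEPA ViT-H/16 checkpoint described by this config.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModel

# Assumed repo id, copied from "_name_or_path" in the config above.
repo_id = "jmtzt/ijepa_vith16_1k"

processor = AutoImageProcessor.from_pretrained(repo_id)
model = AutoModel.from_pretrained(repo_id)  # "model_type": "ijepa" resolves to IJepaModel

# Hypothetical input image; the processor is expected to resize it to image_size=448.
image = Image.open("example.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    outputs = model(**inputs)

# With image_size=448 and patch_size=16 there are (448/16)**2 = 784 patches,
# each embedded as a hidden_size=1280 vector, so the expected shape is
# roughly [1, 784, 1280] (I-JEPA uses no CLS token).
print(outputs.last_hidden_state.shape)
```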