brainlm/old_13M/config.json
{
  "_name_or_path": "/home/ahf38/palmer_scratch/brainlm/training-runs/2023-07-17-19_00_00/checkpoint-1750",
  "architectures": [
    "BrainLMForPretraining"
  ],
  "attention_probs_dropout_prob": 0.1,
  "conv_kernel_size": 65,
  "decoder_hidden_size": 512,
  "decoder_intermediate_size": 1024,
  "decoder_num_attention_heads": 4,
  "decoder_num_hidden_layers": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 512,
  "image_size": 224,
  "initializer_range": 0.02,
  "intermediate_size": 1024,
  "inv_coeff_init_option": false,
  "layer_norm_eps": 1e-12,
  "loss_fn": "mse",
  "mask_ratio": 0.2,
  "max_eval_samples": 800,
  "model_type": "brainlm_mae",
  "norm_pix_loss": false,
  "num_attention_heads": 4,
  "num_brain_voxels": 424,
  "num_channels": 3,
  "num_hidden_layers": 4,
  "num_landmarks": 64,
  "num_timepoints_per_voxel": 200,
  "patch_size": 16,
  "qkv_bias": true,
  "segment_means_seq_len": 64,
  "timepoint_patching_size": 20,
  "torch_dtype": "float32",
  "transformers_version": "4.30.0.dev0",
  "use_tanh_decoder": false,
  "weight_decay": 1e-05
}
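For reference, a minimal Python sketch of how a few of these hyperparameters relate. Note that "model_type": "brainlm_mae" names a custom model class (BrainLMForPretraining) that is not part of the transformers library, so AutoConfig/AutoModel will not resolve it without the BrainLM codebase; the sketch therefore reads the raw JSON instead. It also assumes the usual MAE-style interpretation that timepoint_patching_size splits each voxel's num_timepoints_per_voxel series into non-overlapping temporal patches; the actual patching logic lives in the BrainLM code, not in this file.

import json

# Assumes the JSON above is saved locally as config.json.
with open("config.json") as f:
    cfg = json.load(f)

# Each voxel's time series of 200 timepoints is split into
# non-overlapping patches of 20 timepoints (assumed interpretation).
patches_per_voxel = cfg["num_timepoints_per_voxel"] // cfg["timepoint_patching_size"]  # 200 // 20 = 10

# Total patch tokens across all 424 parcels/voxels.
total_tokens = cfg["num_brain_voxels"] * patches_per_voxel  # 424 * 10 = 4240

# With mask_ratio 0.2, roughly 20% of patch tokens are masked
# during MAE-style pretraining.
masked_tokens = round(cfg["mask_ratio"] * total_tokens)  # 0.2 * 4240 = 848

print(f"{patches_per_voxel} patches per voxel, "
      f"{total_tokens} tokens total, ~{masked_tokens} masked")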