jxie committed
Commit: 02db9d7
1 Parent(s): 2de1a09

Upload SMAForSSL

Files changed (2):
  1. config.json +2 -2
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/iris/u/jwxie/workspace/releases/domain-agnostic-pretraining/examples/saved_models/physics_pretrained/higgs_guided_self_rand_select_masking_recon_small_noise_mask_self_random_mix-normalized-adamw_torch-lr1e-4-wd0.01-ws10000-masking_schedule_length0.25-mr0.2",
+  "_name_or_path": "/iris/u/jwxie/workspace/releases/domain-agnostic-pretraining/examples/saved_models/physics_pretrained/higgs_guided_self_rand_select_masking_recon_small_noise_mask_self_random_mix-68k_normalized-adamw_torch-lr1e-4-wd0.01-ws10000-masking_schedule_length0.25-mr0.2",
   "architectures": [
     "SMAForSSL"
   ],
@@ -40,7 +40,7 @@
   "layernorm_eps": 1e-12,
   "loss_fn": "mse",
   "max_position_embeddings": 28,
-  "model_type": "perceiver_sma",
+  "model_type": "sma",
   "num_blocks": 1,
   "num_cross_attention_heads": 8,
   "num_discrete_tokens": 262,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c96639bcf32c123c23acbff5e7bf1b91415d95d2e7c6117c3975bdd6978e65f0
+oid sha256:61d47ef2a9eaa33a5548e5c047d838df84894017bdc71efec5cbe9f28c7a77d6
 size 2458681
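The weights themselves live in Git LFS, so only the pointer changes: the size stays 2458681 bytes while the sha256 oid is updated. A minimal sketch, assuming a locally resolved pytorch_model.bin (the path is an assumption), that checks the downloaded file against the new pointer:

import hashlib

EXPECTED_OID = "61d47ef2a9eaa33a5548e5c047d838df84894017bdc71efec5cbe9f28c7a77d6"
EXPECTED_SIZE = 2458681

sha = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:
    # Hash in 1 MiB chunks to keep memory use flat.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"unexpected size: {size}"
assert sha.hexdigest() == EXPECTED_OID, "sha256 does not match the new LFS pointer"
print("pytorch_model.bin matches the oid uploaded in this commit")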