upload the version of phenom-beta that was actually trained on JUMP+RxRx3 #1
by kiankaydee · opened

Files changed:
- config.json: +4 -3
- model.safetensors: +2 -2
config.json
CHANGED
@@ -1,4 +1,5 @@
 {
+  "_attn_implementation_autoset": true,
   "apply_loss_unmasked": false,
   "architectures": [
     "MAEModel"
@@ -6,8 +7,8 @@
   "crop_size": -1,
   "decoder": {
     "_target_": "mae_modules.CAMAEDecoder",
-    "depth":
-    "embed_dim":
+    "depth": 8,
+    "embed_dim": 512,
     "mlp_ratio": 4,
     "norm_layer": {
       "_partial_": true,
@@ -77,7 +78,7 @@
     "weight_decay": 0.05
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.46.1",
   "trim_encoder_blocks": null,
   "use_MAE_weight_init": false
 }
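For reference, a minimal sketch of how the updated config.json could be fetched and inspected from this pull request before it is merged. The repo id below is a placeholder assumption (this page does not show the full repository name); refs/pr/1 is the Hub's revision name for pull request #1.

# Minimal sketch, assuming a placeholder repo id; adjust to the actual repository.
import json
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="recursionpharma/phenom-beta",  # assumption, not stated on this page
    filename="config.json",
    revision="refs/pr/1",  # Hub convention for pull request #1
)
with open(path) as f:
    cfg = json.load(f)

print(cfg["decoder"]["depth"])      # expected: 8
print(cfg["decoder"]["embed_dim"])  # expected: 512
print(cfg["transformers_version"])  # expected: "4.46.1"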
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e1f1ad069f4478524c55525ed4c19f9eb82b0ea9a44995c87b312991f9ad9473
+size 712434294
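A small sketch for checking that a locally downloaded model.safetensors matches the new LFS pointer above (same size and sha256); the local path is an assumption.

# Minimal sketch: verify the downloaded weights against the LFS pointer in this PR.
import hashlib
import os

path = "model.safetensors"  # assumed local copy of the file from this PR

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == 712434294
assert h.hexdigest() == "e1f1ad069f4478524c55525ed4c19f9eb82b0ea9a44995c87b312991f9ad9473"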