hdallatorre committed
Commit • e5571ba
1 Parent(s): b3abd7b
Update config.json
config.json +6 -2
config.json CHANGED
@@ -1,12 +1,16 @@
 {
   "add_bias_fnn": false,
   "architectures": [
-    "EsmForMaskedLM"
+    "EsmForMaskedLM",
+    "EsmForTokenClassification",
+    "EsmForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.0,
   "auto_map": {
     "AutoConfig": "esm_config.EsmConfig",
-    "AutoModelForMaskedLM": "modeling_esm.EsmForMaskedLM"
+    "AutoModelForMaskedLM": "modeling_esm.EsmForMaskedLM",
+    "AutoModelForTokenClassification": "modeling_esm.EsmForTokenClassification",
+    "AutoModelForSequenceClassification": "modeling_esm.EsmForSequenceClassification"
   },
   "emb_layer_norm_before": false,
   "esmfold_config": null,
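
With these `auto_map` entries in place, the custom `EsmForTokenClassification` and `EsmForSequenceClassification` classes defined in this repository's `modeling_esm.py` can be instantiated through the matching `transformers` auto classes. A minimal sketch of what the change enables, assuming a placeholder repo id (the actual repository name is not part of this diff) and `trust_remote_code=True` so the hub-hosted code is used:

```python
from transformers import AutoModelForSequenceClassification

# "<repo_id>" is a hypothetical placeholder for this model repository;
# substitute the actual hub id that this config.json belongs to.
repo_id = "<repo_id>"

# trust_remote_code=True is required: the auto_map added in this commit routes
# AutoModelForSequenceClassification to modeling_esm.EsmForSequenceClassification,
# a custom class shipped inside the repository rather than in transformers itself.
model = AutoModelForSequenceClassification.from_pretrained(
    repo_id,
    trust_remote_code=True,
)
```

Before this commit, only `AutoModelForMaskedLM` was routed to the custom implementation; the two new `auto_map` entries route the token- and sequence-classification auto classes to it as well, so fine-tuning heads load with the same `from_pretrained` call.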