pchambon committed
Commit
a4a7a18
1 Parent(s): 25d662f

Update config.json

Files changed (1): config.json  +12 -2
config.json CHANGED
@@ -1,14 +1,24 @@
 {
   "architectures": [
-    "BertForMaskedLM"
+    "AutoModel"
   ],
   "attention_probs_dropout_prob": 0.1,
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
+  "id2label": {
+    "0": "no COVID-19",
+    "1": "uncertain COVID-19",
+    "2": "COVID-19"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "no COVID-19": 0,
+    "uncertain COVID-19": 1,
+    "COVID-19": 2
+  },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",
@@ -17,4 +27,4 @@
   "pad_token_id": 0,
   "type_vocab_size": 2,
   "vocab_size": 28996
-}
+}
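
The id2label and label2id entries added in this commit describe a 3-class COVID-19 labeling scheme on top of the BERT config. A minimal sketch of inspecting these fields with the transformers AutoConfig API follows; the local path "." is a placeholder for a checkout containing this config.json, not something defined by the commit itself.

# Minimal sketch: load the updated config.json and inspect the label mappings.
# Assumes the transformers library and a local directory holding config.json.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")  # placeholder path, not from this repo

print(config.model_type)   # "bert"
print(config.id2label)     # {0: "no COVID-19", 1: "uncertain COVID-19", 2: "COVID-19"}
print(config.label2id)     # {"no COVID-19": 0, "uncertain COVID-19": 1, "COVID-19": 2}
print(config.num_labels)   # 3, derived by transformers from id2label

transformers normalizes id2label keys to integers when the config is loaded, and num_labels is computed from the mapping, so a classification head built from this config would expose three output classes.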