system (HF staff) committed
Commit e8df676 • 1 Parent(s): 1d79623

Update config.json

Files changed (1): config.json (+4, -8)
config.json CHANGED
@@ -3,30 +3,26 @@
     "RobertaForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
   "directionality": "bidi",
-  "finetuning_task": null,
+  "eos_token_id": 2,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "initializer_range": 0.02,
   "intermediate_size": 3072,
-  "is_decoder": false,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
+  "model_type": "roberta",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
-  "num_labels": 2,
-  "output_attentions": false,
-  "output_hidden_states": false,
   "output_past": true,
+  "pad_token_id": 1,
   "pooler_fc_size": 768,
   "pooler_num_attention_heads": 12,
   "pooler_num_fc_layers": 3,
   "pooler_size_per_head": 128,
   "pooler_type": "first_token_transform",
-  "pruned_heads": {},
-  "torchscript": false,
   "type_vocab_size": 2,
-  "use_bfloat16": false,
   "vocab_size": 21128
 }
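
For context, a minimal sketch (not part of the commit) of how transformers consumes the fields this change touches: the added "model_type", "bos_token_id", "pad_token_id", and "eos_token_id" entries are read directly from config.json, while the removed keys (runtime flags such as "output_attentions" and "torchscript") now fall back to the library's defaults. The local path "./" is an assumption standing in for wherever this config.json is saved.

# A minimal sketch, assuming the config.json from this commit sits in the
# current directory; the path is illustrative, not part of the commit.
from transformers import RobertaConfig, RobertaForMaskedLM

config = RobertaConfig.from_pretrained("./")  # reads ./config.json

# The special-token ids added by this commit are plain config fields now.
assert (config.bos_token_id, config.pad_token_id, config.eos_token_id) == (0, 1, 2)

# Keys removed by this commit are no longer pinned in the file and take
# the library's defaults instead.
print(config.output_attentions)  # False, from the default, not the file

# Instantiate the architecture named in "architectures" (random weights
# here; pair with the matching checkpoint for real use).
model = RobertaForMaskedLM(config)
print(model.config.vocab_size)  # 21128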