tobiaslee committed
Commit
82f115e
1 Parent(s): 602f66e

Update config.json

Files changed (1)
  1. config.json +1 -19
config.json CHANGED
@@ -1,19 +1 @@
- {
-   "architectures": [
-     "BertForMaskedLM"
-   ],
-   "attention_probs_dropout_prob": 0.1,
-   "hidden_act": "gelu",
-   "hidden_dropout_prob": 0.1,
-   "hidden_size": 768,
-   "initializer_range": 0.02,
-   "intermediate_size": 3072,
-   "layer_norm_eps": 1e-12,
-   "max_position_embeddings": 512,
-   "model_type": "bert",
-   "num_attention_heads": 12,
-   "num_hidden_layers": 2,
-   "pad_token_id": 0,
-   "type_vocab_size": 2,
-   "vocab_size": 30522
- }
+ {"hidden_size": 768, "hidden_act": "gelu", "initializer_range": 0.02, "vocab_size": 30522, "hidden_dropout_prob": 0.1, "num_attention_heads": 12, "type_vocab_size": 2, "max_position_embeddings": 512, "num_hidden_layers": 2, "intermediate_size": 3072, "attention_probs_dropout_prob": 0.1, "model_type": "bert"}
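The updated config drops the `architectures`, `layer_norm_eps`, and `pad_token_id` keys and collapses the file to a single line; `transformers` fills omitted keys with BERT defaults. A minimal sketch of how this config would be consumed, assuming the Hugging Face `transformers` library is installed; the repository id is not shown in this commit view, so a local file path stands in:

```python
from transformers import BertConfig, BertForMaskedLM

# Parse the committed config.json; missing keys (e.g. layer_norm_eps,
# pad_token_id) fall back to BertConfig's built-in defaults.
config = BertConfig.from_json_file("config.json")
print(config.num_hidden_layers)  # 2 -- a shallow, 2-layer BERT variant

# Instantiate the architecture from the config. Weights here are randomly
# initialized; the trained checkpoint would be loaded separately.
model = BertForMaskedLM(config)
```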