system (HF staff) committed on
Commit
0cde2d1
1 Parent(s): 6e2149f

Update config.json

Files changed (1)
  1. config.json +30 -17
config.json CHANGED
@@ -2,21 +2,34 @@
   "architectures": [
     "BertForMaskedLM"
   ],
-  "vocab_size":30522,
-  "temp_dir":".",
-  "finetune_bert":false,
-  "large":false,
-  "share_emb":true,
-  "encoder":"bert",
-  "max_pos":512,
-  "enc_layers":6,
-  "enc_hidden_size":512,
-  "enc_heads":8,
-  "enc_ff_size":512,
-  "enc_dropout":0.2,
-  "dec_layers":6,
-  "dec_hidden_size":768,
-  "dec_heads":8,
-  "dec_ff_size":2048,
-  "dec_dropout":0.2
+  "attention_probs_dropout_prob": 0.1,
+  "dec_dropout": 0.2,
+  "dec_ff_size": 2048,
+  "dec_heads": 8,
+  "dec_hidden_size": 768,
+  "dec_layers": 6,
+  "enc_dropout": 0.2,
+  "enc_ff_size": 512,
+  "enc_heads": 8,
+  "enc_hidden_size": 512,
+  "enc_layers": 6,
+  "encoder": "bert",
+  "finetune_bert": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "large": false,
+  "layer_norm_eps": 1e-12,
+  "max_pos": 512,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "share_emb": true,
+  "temp_dir": ".",
+  "type_vocab_size": 2,
+  "vocab_size": 30522
 }
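The added keys (model_type, hidden_size, num_hidden_layers, etc.) are the standard BertConfig fields that the transformers library reads when resolving and instantiating the model, which is what lets this checkpoint load through AutoConfig/AutoModel; the original task-specific keys (enc_*, dec_*, share_emb, ...) are kept alongside them and end up as extra attributes on the config object. A minimal sketch of loading the updated config, assuming a local checkout of this repo in the current directory:

# Sketch: load the updated config.json with transformers.
# Assumes this repo is checked out locally at "." (path is a placeholder).
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")

print(config.model_type)         # "bert" -- added by this commit; AutoConfig needs it to pick BertConfig
print(config.hidden_size)        # 768
print(config.num_hidden_layers)  # 12
print(config.enc_hidden_size)    # 512 -- unknown keys are preserved as extra config attributes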