system
Update config.json 6f47849
{
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "max_position_embeddings": 512,
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "type_vocab_size": 2,
  "vocab_size": 28996
}