Update config.json (commit b9dcecb, author: system)
{
  "attention_probs_dropout_prob": 0.1,
  "directionality": "bidi",
  "embedding_size": 768,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "electra",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "summary_activation": "gelu",
  "summary_last_dropout": 0.1,
  "summary_type": "first",
  "summary_use_proj": true,
  "type_vocab_size": 2,
  "vocab_size": 21128
}
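For context, this is a standard Hugging Face `transformers` ELECTRA base configuration (12 layers, 12 heads, hidden size 768); the 21128-entry vocabulary matches Chinese BERT-style tokenizers. Below is a minimal sketch of consuming this file, assuming it is saved locally as config.json; since this commit only touches the config, the model here has freshly initialized weights rather than the checkpoint's trained ones.

```python
# Minimal sketch: build an ELECTRA model from the config above.
# Assumes the JSON is saved locally as "config.json".
from transformers import ElectraConfig, ElectraModel

config = ElectraConfig.from_json_file("config.json")
print(config.model_type)  # "electra"
print(config.vocab_size)  # 21128 (Chinese BERT-style vocabulary)

model = ElectraModel(config)   # randomly initialized; no trained weights loaded
print(model.num_parameters())  # parameter count implied by the config
```

To use the actual trained weights, you would instead call `ElectraModel.from_pretrained(...)` on the repository this config belongs to, which resolves config.json automatically.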