{
  "architectures": [
    "BertForMaskedLM"
  ],
  "model_type": "bert",
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "max_position_embeddings": 512,
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "type_vocab_size": 2,
  "vocab_size": 30522
}
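A minimal sketch of consuming this configuration with Hugging Face Transformers, assuming the JSON above is saved as "config.json" (the filename and the randomly initialized model are illustrative assumptions, not part of the original file):

# Sketch: load the BERT configuration above and build a matching model.
from transformers import BertConfig, BertForMaskedLM

# Assumed path; adjust to wherever the JSON file actually lives.
config = BertConfig.from_json_file("config.json")

# Instantiating from a config alone yields randomly initialized weights;
# use BertForMaskedLM.from_pretrained(...) to load trained parameters instead.
model = BertForMaskedLM(config)

print(config.hidden_size, config.num_hidden_layers)  # 768 12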