{
  "_name_or_path": "random-bert-base-cased",
  "adapters": {
    "adapters": {},
    "config_map": {}
  },
  "architectures": [
    "BertModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2",
    "3": "LABEL_3",
    "4": "LABEL_4"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_2": 2,
    "LABEL_3": 3,
    "LABEL_4": 4
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "transformers_version": "2.0.1",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 28996
}