Text Classification
Transformers
PyTorch
English
bert
QA
Inference Endpoints
haritzpuerto committed on
Commit
d61262a
1 Parent(s): 61ce4c0

added names to the labels

Browse files
Files changed (1) hide show
  1. config.json +32 -32
config.json CHANGED
@@ -10,42 +10,42 @@
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
- "0": "LABEL_0",
14
- "1": "LABEL_1",
15
- "2": "LABEL_2",
16
- "3": "LABEL_3",
17
- "4": "LABEL_4",
18
- "5": "LABEL_5",
19
- "6": "LABEL_6",
20
- "7": "LABEL_7",
21
- "8": "LABEL_8",
22
- "9": "LABEL_9",
23
- "10": "LABEL_10",
24
- "11": "LABEL_11",
25
- "12": "LABEL_12",
26
- "13": "LABEL_13",
27
- "14": "LABEL_14",
28
- "15": "LABEL_15"
29
  },
30
  "initializer_range": 0.02,
31
  "intermediate_size": 3072,
32
  "label2id": {
33
- "LABEL_0": 0,
34
- "LABEL_1": 1,
35
- "LABEL_10": 10,
36
- "LABEL_11": 11,
37
- "LABEL_12": 12,
38
- "LABEL_13": 13,
39
- "LABEL_14": 14,
40
- "LABEL_15": 15,
41
- "LABEL_2": 2,
42
- "LABEL_3": 3,
43
- "LABEL_4": 4,
44
- "LABEL_5": 5,
45
- "LABEL_6": 6,
46
- "LABEL_7": 7,
47
- "LABEL_8": 8,
48
- "LABEL_9": 9
49
  },
50
  "layer_norm_eps": 1e-12,
51
  "max_position_embeddings": 512,
 
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
+ "0": "boolq",
14
+ "1": "commonsense_qa",
15
+ "2": "drop",
16
+ "3": "duorc",
17
+ "4": "hellaswag",
18
+ "5": "hotpot_qa",
19
+ "6": "hybrid_qa",
20
+ "7": "narrativeqa",
21
+ "8": "natural_questions",
22
+ "9": "newsqa",
23
+ "10": "biu-nlp/qamr",
24
+ "11": "race",
25
+ "12": "search_qa",
26
+ "13": "social_i_qa",
27
+ "14": "squad",
28
+ "15": "trivia_qa"
29
  },
30
  "initializer_range": 0.02,
31
  "intermediate_size": 3072,
32
  "label2id": {
33
+ "boolq": 0,
34
+ "commonsense_qa": 1,
35
+ "biu-nlp/qamr": 10,
36
+ "race": 11,
37
+ "search_qa": 12,
38
+ "social_i_qa": 13,
39
+ "squad": 14,
40
+ "trivia_qa": 15,
41
+ "drop": 2,
42
+ "duorc": 3,
43
+ "hellaswag": 4,
44
+ "hotpot_qa": 5,
45
+ "hybrid_qa": 6,
46
+ "narrativeqa": 7,
47
+ "natural_questions": 8,
48
+ "newsqa": 9
49
  },
50
  "layer_norm_eps": 1e-12,
51
  "max_position_embeddings": 512,