Anwaarma committed on
Commit 3cbee5f
1 Parent(s): 795d6ed

Upload config

Files changed (1)
  1. config.json +8 -0
config.json CHANGED
@@ -8,8 +8,16 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 128,
+  "id2label": {
+    "0": "Taken",
+    "1": "Not Taken"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 512,
+  "label2id": {
+    "Not Taken": 1,
+    "Taken": 0
+  },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",