noahjadallah committed on
Commit
392a781
1 Parent(s): 2ef9416

Change label names

Browse files
Files changed (1) hide show
  1. config.json +7 -9
config.json CHANGED
@@ -1,24 +1,22 @@
1
  {
2
  "_name_or_path": "bert-base-uncased",
3
- "architectures": [
4
- "BertForTokenClassification"
5
- ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "gradient_checkpointing": false,
8
  "hidden_act": "gelu",
9
  "hidden_dropout_prob": 0.1,
10
  "hidden_size": 768,
11
  "id2label": {
12
- "0": "LABEL_0",
13
- "1": "LABEL_1",
14
- "2": "LABEL_2"
15
  },
16
  "initializer_range": 0.02,
17
  "intermediate_size": 3072,
18
  "label2id": {
19
- "LABEL_0": 0,
20
- "LABEL_1": 1,
21
- "LABEL_2": 2
22
  },
23
  "layer_norm_eps": 1e-12,
24
  "max_position_embeddings": 512,
 
1
  {
2
  "_name_or_path": "bert-base-uncased",
3
+ "architectures": ["BertForTokenClassification"],
 
 
4
  "attention_probs_dropout_prob": 0.1,
5
  "gradient_checkpointing": false,
6
  "hidden_act": "gelu",
7
  "hidden_dropout_prob": 0.1,
8
  "hidden_size": 768,
9
  "id2label": {
10
+ "0": "OTHER",
11
+ "1": "CAUSE",
12
+ "2": "EFFECT"
13
  },
14
  "initializer_range": 0.02,
15
  "intermediate_size": 3072,
16
  "label2id": {
17
+ "OTHER": 0,
18
+ "CAUSE": 1,
19
+ "EFFECT": 2
20
  },
21
  "layer_norm_eps": 1e-12,
22
  "max_position_embeddings": 512,