noahjadallah committed on
Commit
942847b
1 Parent(s): 7a1543f

updated model

Browse files
Files changed (2) hide show
  1. config.json +10 -4
  2. training_args.bin +2 -2
config.json CHANGED
@@ -8,15 +8,19 @@
8
  "hidden_size": 768,
9
  "id2label": {
10
  "0": "OTHER",
11
- "1": "CAUSE",
12
- "2": "EFFECT"
 
 
13
  },
14
  "initializer_range": 0.02,
15
  "intermediate_size": 3072,
16
  "label2id": {
17
  "OTHER": 0,
18
- "CAUSE": 1,
19
- "EFFECT": 2
 
 
20
  },
21
  "layer_norm_eps": 1e-12,
22
  "max_position_embeddings": 512,
@@ -25,6 +29,8 @@
25
  "num_hidden_layers": 12,
26
  "pad_token_id": 0,
27
  "position_embedding_type": "absolute",
 
28
  "type_vocab_size": 2,
 
29
  "vocab_size": 30522
30
  }
 
8
  "hidden_size": 768,
9
  "id2label": {
10
  "0": "OTHER",
11
+ "1": "B-CAUSE",
12
+ "2": "I-CAUSE",
13
+ "3": "B-EFFECT",
14
+ "4": "I-EFFECT"
15
  },
16
  "initializer_range": 0.02,
17
  "intermediate_size": 3072,
18
  "label2id": {
19
  "OTHER": 0,
20
+ "B-CAUSE": 1,
21
+ "I-CAUSE": 2,
22
+ "B-EFFECT": 3,
23
+ "I-EFFECT": 4
24
  },
25
  "layer_norm_eps": 1e-12,
26
  "max_position_embeddings": 512,
 
29
  "num_hidden_layers": 12,
30
  "pad_token_id": 0,
31
  "position_embedding_type": "absolute",
32
+ "transformers_version": "4.3.2",
33
  "type_vocab_size": 2,
34
+ "use_cache": true,
35
  "vocab_size": 30522
36
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d8c2f0c19de5b84ea7f5ac791c3128588950bc578d4f5714caf0e6c2d1df466d
3
- size 1839
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8940abe19d6f5a3849b095d57e937784f174f81fa251e45a24642a7fc3d14b8a
3
+ size 2159