maximuspowers committed on
Commit
f656fcc
·
verified ·
1 Parent(s): 0786f1e

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +10 -10
config.json CHANGED
@@ -25,17 +25,17 @@
25
  "initializer_range": 0.02,
26
  "intermediate_size": 3072,
27
  "label2id": {
28
- "B-GEN": 7,
29
- "B-GROUP": 3,
30
  "B-IND": 1,
31
- "B-STEREO": 5,
32
- "B-UNFAIR": 9,
33
- "I-GEN": 8,
34
- "I-GROUP": 4,
35
  "I-IND": 2,
 
 
 
36
  "I-STEREO": 6,
37
- "I-UNFAIR": 10,
38
- "O": 0
 
 
39
  },
40
  "layer_norm_eps": 1e-12,
41
  "max_position_embeddings": 512,
@@ -44,8 +44,8 @@
44
  "num_hidden_layers": 12,
45
  "pad_token_id": 0,
46
  "position_embedding_type": "absolute",
47
- "problem_type": "multi_label_classification",
48
- "transformers_version": "4.42.4",
49
  "type_vocab_size": 2,
50
  "use_cache": true,
51
  "vocab_size": 30522
 
25
  "initializer_range": 0.02,
26
  "intermediate_size": 3072,
27
  "label2id": {
28
+ "O": 0,
 
29
  "B-IND": 1,
 
 
 
 
30
  "I-IND": 2,
31
+ "B-GROUP": 3,
32
+ "I-GROUP": 4,
33
+ "B-STEREO": 5,
34
  "I-STEREO": 6,
35
+ "B-GEN": 7,
36
+ "I-GEN": 8,
37
+ "B-UNFAIR": 9,
38
+ "I-UNFAIR": 10
39
  },
40
  "layer_norm_eps": 1e-12,
41
  "max_position_embeddings": 512,
 
44
  "num_hidden_layers": 12,
45
  "pad_token_id": 0,
46
  "position_embedding_type": "absolute",
47
+ "torch_dtype": "float32",
48
+ "transformers_version": "4.41.2",
49
  "type_vocab_size": 2,
50
  "use_cache": true,
51
  "vocab_size": 30522