system HF staff committed on
Commit
aa631bb
1 Parent(s): 1aeb516

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -7
config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "architectures": [
3
  "BertForSequenceClassification"
4
  ],
@@ -17,7 +18,6 @@
17
  },
18
  "initializer_range": 0.02,
19
  "intermediate_size": 3072,
20
- "is_decoder": false,
21
  "label2id": {
22
  "1 star": 0,
23
  "2 stars": 1,
@@ -27,20 +27,16 @@
27
  },
28
  "layer_norm_eps": 1e-12,
29
  "max_position_embeddings": 512,
 
30
  "num_attention_heads": 12,
31
  "num_hidden_layers": 12,
32
- "num_labels": 5,
33
- "output_attentions": false,
34
- "output_hidden_states": false,
35
  "output_past": true,
 
36
  "pooler_fc_size": 768,
37
  "pooler_num_attention_heads": 12,
38
  "pooler_num_fc_layers": 3,
39
  "pooler_size_per_head": 128,
40
  "pooler_type": "first_token_transform",
41
- "pruned_heads": {},
42
- "torchscript": false,
43
  "type_vocab_size": 2,
44
- "use_bfloat16": false,
45
  "vocab_size": 105879
46
  }
1
  {
2
+ "_num_labels": 5,
3
  "architectures": [
4
  "BertForSequenceClassification"
5
  ],
18
  },
19
  "initializer_range": 0.02,
20
  "intermediate_size": 3072,
 
21
  "label2id": {
22
  "1 star": 0,
23
  "2 stars": 1,
27
  },
28
  "layer_norm_eps": 1e-12,
29
  "max_position_embeddings": 512,
30
+ "model_type": "bert",
31
  "num_attention_heads": 12,
32
  "num_hidden_layers": 12,
 
 
 
33
  "output_past": true,
34
+ "pad_token_id": 0,
35
  "pooler_fc_size": 768,
36
  "pooler_num_attention_heads": 12,
37
  "pooler_num_fc_layers": 3,
38
  "pooler_size_per_head": 128,
39
  "pooler_type": "first_token_transform",
 
 
40
  "type_vocab_size": 2,
 
41
  "vocab_size": 105879
42
  }