system (HF staff) committed
Commit ef60286
1 Parent(s): 13da4a6

Update config.json

Files changed (1):
  1. config.json +2 -5
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_num_labels": 3,
   "architectures": [
-    "MaskedBertForSequenceClassification"
+    "BertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bad_words_ids": null,
@@ -30,12 +30,10 @@
   },
   "layer_norm_eps": 1e-12,
   "length_penalty": 1.0,
-  "mask_init": "constant",
-  "mask_scale": 0.0,
   "max_length": 20,
   "max_position_embeddings": 512,
   "min_length": 0,
-  "model_type": "masked_bert",
+  "model_type": "bert",
   "no_repeat_ngram_size": 0,
   "num_attention_heads": 12,
   "num_beams": 1,
@@ -47,7 +45,6 @@
   "pad_token_id": 0,
   "prefix": null,
   "pruned_heads": {},
-  "pruning_method": "sigmoied_threshold",
   "repetition_penalty": 1.0,
   "task_specific_params": null,
   "temperature": 1.0,