system HF staff committed on
Commit
2a1ac76
1 Parent(s): 842aaaf

Update config.json

Browse files
Files changed (1)
  1. config.json +7 -1
config.json CHANGED
@@ -2,7 +2,11 @@
2
  "_num_labels": 3,
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
 
5
  "add_final_layer_norm": true,
 
 
 
6
  "attention_dropout": 0.0,
7
  "bos_token_id": 0,
8
  "classif_dropout": 0.0,
@@ -30,12 +34,14 @@
30
  "LABEL_2": 2
31
  },
32
  "max_position_embeddings": 1024,
33
- "model_type": "bart",
34
  "normalize_before": true,
 
35
  "num_beams": 5,
36
  "num_hidden_layers": 12,
37
  "output_past": true,
38
  "pad_token_id": 1,
39
  "scale_embedding": true,
 
40
  "vocab_size": 250027
41
  }
 
2
  "_num_labels": 3,
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
5
+ "add_bias_logits": false,
6
  "add_final_layer_norm": true,
7
+ "architectures": [
8
+ "BartForConditionalGeneration"
9
+ ],
10
  "attention_dropout": 0.0,
11
  "bos_token_id": 0,
12
  "classif_dropout": 0.0,
 
34
  "LABEL_2": 2
35
  },
36
  "max_position_embeddings": 1024,
37
+ "model_type": "mbart",
38
  "normalize_before": true,
39
+ "normalize_embedding": true,
40
  "num_beams": 5,
41
  "num_hidden_layers": 12,
42
  "output_past": true,
43
  "pad_token_id": 1,
44
  "scale_embedding": true,
45
+ "static_position_embeddings": false,
46
  "vocab_size": 250027
47
  }