system HF staff committed on
Commit
0b46f5c
1 Parent(s): 6c3821c

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +7 -20
config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
- "_num_labels": 3,
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
 
5
  "add_final_layer_norm": false,
6
  "architectures": [
7
  "BartForConditionalGeneration"
@@ -9,20 +9,22 @@
9
  "attention_dropout": 0.0,
10
  "bos_token_id": 0,
11
  "classif_dropout": 0.0,
 
12
  "d_model": 768,
13
  "decoder_attention_heads": 12,
14
  "decoder_ffn_dim": 3072,
15
  "decoder_layerdrop": 0.0,
16
  "decoder_layers": 6,
17
  "decoder_start_token_id": 2,
 
18
  "dropout": 0.1,
19
- "early_stopping": true,
20
  "encoder_attention_heads": 12,
21
  "encoder_ffn_dim": 3072,
22
  "encoder_layerdrop": 0.0,
23
  "encoder_layers": 6,
24
  "eos_token_id": 2,
25
- "force_bos_token_to_be_generated": true,
 
26
  "id2label": {
27
  "0": "LABEL_0",
28
  "1": "LABEL_1",
@@ -35,29 +37,14 @@
35
  "LABEL_1": 1,
36
  "LABEL_2": 2
37
  },
38
- "length_penalty": 2.0,
39
- "max_length": 142,
40
  "max_position_embeddings": 1024,
41
- "min_length": 56,
42
  "model_type": "bart",
43
- "no_repeat_ngram_size": 3,
44
  "normalize_before": false,
45
  "normalize_embedding": true,
46
- "num_beams": 4,
47
  "num_hidden_layers": 6,
48
- "output_past": true,
49
  "pad_token_id": 1,
50
- "prefix": " ",
51
  "scale_embedding": false,
52
- "task_specific_params": {
53
- "summarization": {
54
- "early_stopping": true,
55
- "length_penalty": 2.0,
56
- "max_length": 142,
57
- "min_length": 56,
58
- "no_repeat_ngram_size": 3,
59
- "num_beams": 4
60
- }
61
- },
62
  "vocab_size": 50265
63
  }
 
1
  {
 
2
  "activation_dropout": 0.0,
3
  "activation_function": "gelu",
4
+ "add_bias_logits": false,
5
  "add_final_layer_norm": false,
6
  "architectures": [
7
  "BartForConditionalGeneration"
 
9
  "attention_dropout": 0.0,
10
  "bos_token_id": 0,
11
  "classif_dropout": 0.0,
12
+ "classifier_dropout": 0.0,
13
  "d_model": 768,
14
  "decoder_attention_heads": 12,
15
  "decoder_ffn_dim": 3072,
16
  "decoder_layerdrop": 0.0,
17
  "decoder_layers": 6,
18
  "decoder_start_token_id": 2,
19
+ "do_blenderbot_90_layernorm": false,
20
  "dropout": 0.1,
 
21
  "encoder_attention_heads": 12,
22
  "encoder_ffn_dim": 3072,
23
  "encoder_layerdrop": 0.0,
24
  "encoder_layers": 6,
25
  "eos_token_id": 2,
26
+ "extra_pos_embeddings": 2,
27
+ "force_bos_token_to_be_generated": false,
28
  "id2label": {
29
  "0": "LABEL_0",
30
  "1": "LABEL_1",
 
37
  "LABEL_1": 1,
38
  "LABEL_2": 2
39
  },
 
 
40
  "max_position_embeddings": 1024,
 
41
  "model_type": "bart",
 
42
  "normalize_before": false,
43
  "normalize_embedding": true,
 
44
  "num_hidden_layers": 6,
 
45
  "pad_token_id": 1,
46
+ "save_step": 3,
47
  "scale_embedding": false,
48
+ "static_position_embeddings": false,
 
 
 
 
 
 
 
 
 
49
  "vocab_size": 50265
50
  }