gangyeolkim committed on
Commit
1d31880
1 Parent(s): dcf9eb4

Upload BartForConditionalGeneration

Browse files
Files changed (1) hide show
  1. config.json +6 -3
config.json CHANGED
@@ -20,11 +20,15 @@
20
  "decoder_start_token_id": 1,
21
  "do_blenderbot_90_layernorm": false,
22
  "dropout": 0.1,
 
23
  "encoder_attention_heads": 16,
24
  "encoder_ffn_dim": 3072,
25
  "encoder_layerdrop": 0.0,
26
  "encoder_layers": 6,
27
- "eos_token_id": 1,
 
 
 
28
  "extra_pos_embeddings": 2,
29
  "force_bos_token_to_be_generated": false,
30
  "forced_eos_token_id": 1,
@@ -41,13 +45,12 @@
41
  "POSITIVE": 1
42
  },
43
  "length_penalty": 2.0,
44
- "max_length": 130,
45
  "max_position_embeddings": 1026,
46
  "min_length": 30,
47
  "model_type": "bart",
48
  "normalize_before": false,
49
  "normalize_embedding": true,
50
- "num_beams": 4,
51
  "num_hidden_layers": 6,
52
  "pad_token_id": 3,
53
  "scale_embedding": false,
 
20
  "decoder_start_token_id": 1,
21
  "do_blenderbot_90_layernorm": false,
22
  "dropout": 0.1,
23
+ "early_stopping": true,
24
  "encoder_attention_heads": 16,
25
  "encoder_ffn_dim": 3072,
26
  "encoder_layerdrop": 0.0,
27
  "encoder_layers": 6,
28
+ "eos_token_id": [
29
+ 1700,
30
+ 230
31
+ ],
32
  "extra_pos_embeddings": 2,
33
  "force_bos_token_to_be_generated": false,
34
  "forced_eos_token_id": 1,
 
45
  "POSITIVE": 1
46
  },
47
  "length_penalty": 2.0,
48
+ "max_length": 1024,
49
  "max_position_embeddings": 1026,
50
  "min_length": 30,
51
  "model_type": "bart",
52
  "normalize_before": false,
53
  "normalize_embedding": true,
 
54
  "num_hidden_layers": 6,
55
  "pad_token_id": 3,
56
  "scale_embedding": false,