root committed
Commit 1502314
1 Parent(s): 4d73f12

Update config.json

Files changed (1)
  1. config.json +10 -0
config.json CHANGED
@@ -20,6 +20,16 @@
  "pad_token_id": 0,
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
+ "task_specific_params": {
+   "summarization": {
+     "early_stopping": true,
+     "length_penalty": 2.0,
+     "max_length": 256,
+     "no_repeat_ngram_size": 3,
+     "num_beams": 4,
+     "prefix": "summarization: "
+   }
+ },
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.18.0",