Joemgu committed
Commit e199f62
1 Parent(s): a71cba0

Update config.json

Files changed (1)
  1. config.json +12 -0
config.json CHANGED
@@ -28,6 +28,18 @@
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
+  "task_specific_params": {
+    "summarization": {
+      "early_stopping": true,
+      "length_penalty": 1.0,
+      "max_length": 256,
+      "min_length": 16,
+      "no_repeat_ngram_size": 5,
+      "num_beams": 4,
+      "prefix": "Write a title and summarize: ",
+      "repetition_penalty": 3.5
+    }
+  },
   "torch_dtype": "float32",
   "transformers_version": "4.30.2",
   "use_cache": false,