ThatGuyVanquish committed
Commit: 4458516
Parent: e714d1b

Update config.json

Files changed (1)
  1. config.json +5 -0
config.json CHANGED
@@ -24,6 +24,11 @@
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
   "tokenizer_class": "T5Tokenizer",
+  "output_past": true,
+  "max_length": 200,
+  "min_length": 5,
+  "num_beams": 4,
+  "no_repeat_ngram_size": 3,
   "torch_dtype": "float32",
   "transformers_version": "4.27.0.dev0",
   "use_cache": true,