pszemraj commited on
Commit
b32ab0d
1 Parent(s): 395c120

Set use_cache to true for better inference

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -64,6 +64,6 @@
64
  "repetition_penalty": 3.5,
65
  "torch_dtype": "float32",
66
  "transformers_version": "4.27.4",
67
- "use_cache": false,
68
  "vocab_size": 50265
69
  }
 
64
  "repetition_penalty": 3.5,
65
  "torch_dtype": "float32",
66
  "transformers_version": "4.27.4",
67
+ "use_cache": true,
68
  "vocab_size": 50265
69
  }