mmoskal committed on
Commit 81a9a92
Parent: 30e77f1

Set torch_dtype to bf16

Files changed (1):
config.json +1 -1
config.json CHANGED
@@ -19,7 +19,7 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.33.1",
   "use_cache": true,
   "vocab_size": 32003