hanbin committed
Commit b8e1daf
Parent: 5bd3edf

update torch_dtype to float16

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -21,7 +21,7 @@
   "rope_scaling": null,
   "rope_theta": 10000,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float16",
   "transformers_version": "4.35.0",
   "use_cache": true,
   "vocab_size": 32016