abhinavkulkarni committed
Commit 8fde6d2
1 Parent(s): e0ad64e

Upload config

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -20,7 +20,7 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "float16",
   "transformers_version": "4.33.1",
   "use_cache": true,
   "vocab_size": 51200