Muennighoff committed on
Commit
5b2ea9e
1 Parent(s): 56dd468

Update config.json

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -21,7 +21,7 @@
  "seq_length": 20,
  "skip_bias_add": true,
  "slow_but_exact": true,
- "torch_dtype": "torch.bfloat16",
+ "torch_dtype": "float16",
  "transformers_version": "4.21.0.dev0",
  "use_cache": false,
  "vocab_size": 250880