pere committed on
Commit
2d4285d
1 Parent(s): 310ee8e

with bfloat16

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. run_step1.sh +1 -0
config.json CHANGED
@@ -20,7 +20,7 @@
20
  "num_hidden_layers": 12,
21
  "pad_token_id": 1,
22
  "position_embedding_type": "absolute",
23
- "torch_dtype": "float32",
24
  "transformers_version": "4.15.0.dev0",
25
  "type_vocab_size": 1,
26
  "use_cache": true,
 
20
  "num_hidden_layers": 12,
21
  "pad_token_id": 1,
22
  "position_embedding_type": "absolute",
23
+ "torch_dtype": "bfloat16",
24
  "transformers_version": "4.15.0.dev0",
25
  "type_vocab_size": 1,
26
  "use_cache": true,
run_step1.sh CHANGED
@@ -22,4 +22,5 @@
22
  --preprocessing_num_workers="64" \
23
  --auth_token="True" \
24
  --static_learning_rate="True" \
 
25
  --push_to_hub
 
22
  --preprocessing_num_workers="64" \
23
  --auth_token="True" \
24
  --static_learning_rate="True" \
25
+ --dtype="bfloat16" \
26
  --push_to_hub