pere committed on
Commit
6e6e937
1 Parent(s): 361e672

Saving weights and logs of step 1000

Browse files
events.out.tfevents.1644752234.t1v-n-79f0077b-w-0.16365.0.v2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ba7a603585bf059429e046e56fe9e436fc8ec750fb0c8f249638ed575babf108
3
+ size 147136
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e87f2bfbc92fd8b3d74253235448f92a50dd2654de6698969fcab07d0afb1018
3
  size 498796983
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c8de9d66d0e8008a170089cefec2ee882d37cbb8f55284e50a01b61216ac3c12
3
  size 498796983
run_128_scandinavian_recover100k.sh ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/usr/bin/env bash
# Launch Flax MLM pre-training of a RoBERTa-base model on the
# NbAiLab/scandinavian dataset (seq len 128), resuming from the
# checkpoint in the current directory and pushing results to the Hub.
#
# Requires: run_mlm_flax.py in the working directory, a cached/accessible
# dataset under /mnt/disks/flaxdisk/cache/, and Hub auth configured.

set -euo pipefail

# Previous per-device batch sizes, kept for reference:
# --per_device_train_batch_size="232" \
# --per_device_eval_batch_size="232" \

python run_mlm_flax.py \
    --output_dir="./" \
    --model_type="roberta" \
    --model_name_or_path="./" \
    --config_name="roberta-base" \
    --tokenizer_name="NbAiLab/nb-roberta-base" \
    --dataset_name="NbAiLab/scandinavian" \
    --cache_dir="/mnt/disks/flaxdisk/cache/" \
    --max_seq_length="128" \
    --weight_decay="0.01" \
    --per_device_train_batch_size="116" \
    --per_device_eval_batch_size="116" \
    --pad_to_max_length \
    --learning_rate="2e-4" \
    --warmup_steps="10000" \
    --overwrite_output_dir \
    --num_train_epochs="9" \
    --adam_beta1="0.9" \
    --adam_beta2="0.98" \
    --adam_epsilon="1e-6" \
    --logging_steps="1000" \
    --save_steps="1000" \
    --eval_steps="1000" \
    --auth_token="True" \
    --do_train \
    --do_eval \
    --dtype="bfloat16" \
    --push_to_hub