pere committed on
Commit 25ccad9
Parent: f9f9b50

created backup

events.out.tfevents.1641299828.t1v-n-358ff5d1-w-0.355877.3.v2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c479c58a311b3c3bef4df1082214c3f83ba33424f1553351cf618d2d7d2c4ad1
+ size 40
flax_model_backup_final.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5255008970a84f6e4d55177ca133107efb4c186c01834c6dac31da4bc0e34e91
+ size 498796983
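
Both added binaries are tracked with Git LFS, so the diff records only the pointer files (spec version, sha256 oid, and byte size) rather than the payloads; the ~499 MB msgpack file is the Flax weight backup itself. As a minimal sketch (an assumption for illustration, not part of this commit), the backup could be restored locally by copying it over the standard flax_model.msgpack name that the Transformers Flax loader expects:

```python
# Sketch (assumption): restore the backed-up Flax weights from this repo checkout.
# The copy step and local path are illustrative, not something the commit specifies.
import shutil
from transformers import FlaxRobertaForMaskedLM

# from_pretrained looks for flax_model.msgpack in the model directory,
# so expose the backup under that filename first.
shutil.copy("flax_model_backup_final.msgpack", "flax_model.msgpack")
model = FlaxRobertaForMaskedLM.from_pretrained(".")
```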
run_step2.sh ADDED
@@ -0,0 +1,26 @@
+ ./run_mlm_flax.py \
+ --output_dir="./" \
+ --model_type="roberta" \
+ --model_name_or_path="./" \
+ --config_name="./" \
+ --tokenizer_name="./" \
+ --dataset_name="NbAiLab/NCC" \
+ --cache_dir="/mnt/disks/flaxdisk/cache/" \
+ --max_seq_length="128" \
+ --weight_decay="0.01" \
+ --per_device_train_batch_size="200" \
+ --per_device_eval_batch_size="200" \
+ --learning_rate="4e-4" \
+ --warmup_steps="10000" \
+ --overwrite_output_dir \
+ --num_train_epochs="10" \
+ --adam_beta1="0.9" \
+ --adam_beta2="0.98" \
+ --adam_epsilon="1e-6" \
+ --logging_steps="10000" \
+ --save_steps="10000" \
+ --eval_steps="10000" \
+ --preprocessing_num_workers="64" \
+ --auth_token="True" \
+ --dtype="bfloat16" \
+ --push_to_hub
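
For context, the AdamW and warmup flags above correspond roughly to the following Optax setup. This is a hedged sketch of the equivalent optimizer configuration, not code from run_mlm_flax.py, and total_steps is an illustrative placeholder that in practice follows from the dataset size, per-device batch size, device count, and epoch count:

```python
# Rough Optax equivalent of the training flags above (sketch only).
import optax

total_steps = 500_000  # placeholder; not specified by the commit

# Linear warmup to the peak learning rate over 10k steps, then linear decay.
warmup = optax.linear_schedule(init_value=0.0, end_value=4e-4,
                               transition_steps=10_000)
decay = optax.linear_schedule(init_value=4e-4, end_value=0.0,
                              transition_steps=total_steps - 10_000)
schedule = optax.join_schedules([warmup, decay], boundaries=[10_000])

# AdamW with the beta/epsilon/weight-decay values passed on the command line.
optimizer = optax.adamw(learning_rate=schedule, b1=0.9, b2=0.98,
                        eps=1e-6, weight_decay=0.01)
```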