dat
Saving weights and logs of step 165000
cf2304e
raw
history blame
828 Bytes
#!/usr/bin/env bash
# Launch BigBird masked-LM pre-training with Flax (variant without gradient
# accumulation). Expects the model config, tokenizer files, and any previous
# checkpoint to live in the current directory; outputs (checkpoints, logs)
# are written back to ./ and pushed to the Hub.
#export TOKENIZERS_PARALLELISM=0

set -euo pipefail

# NOTE(review): the original script was missing spaces before the line
# continuations on --num_train_epochs, --save_total_limit, --max_eval_samples
# and --resume_from_checkpoint, which made bash fuse adjacent flags into one
# malformed argument (e.g. `--num_train_epochs=3--preprocessing_num_workers=96`).
# Spaces added so every flag is passed as its own word.
python ./run_mlm_flax_no_accum.py \
  --push_to_hub \
  --output_dir="./" \
  --model_type="big_bird" \
  --config_name="./" \
  --tokenizer_name="./" \
  --max_seq_length="4096" \
  --weight_decay="0.0095" \
  --warmup_steps="10000" \
  --overwrite_output_dir \
  --adam_beta1="0.9" \
  --adam_beta2="0.98" \
  --logging_steps="50" \
  --eval_steps="6000" \
  --num_train_epochs="3" \
  --preprocessing_num_workers="96" \
  --save_steps="15000" \
  --learning_rate="3e-5" \
  --per_device_train_batch_size="1" \
  --per_device_eval_batch_size="1" \
  --save_total_limit="20" \
  --max_eval_samples="4000" \
  --resume_from_checkpoint="./"

# Disabled options — to re-enable, move the flag above the final argument
# and append ' \' for line continuation:
#   --gradient_accumulation_steps="4" \
#   --adafactor \
#   --dtype="bfloat16" \