#!/usr/bin/env bash
# nb-t5-base-v3 / run.sh
# Author: pere — "readme" commit 0efb42b
# Launch streaming T5 masked-language-model pre-training with Flax.
# Assumes config.json and the tokenizer files live in the current directory,
# which is also the output dir (checkpoints + logs are written here and
# pushed to the Hugging Face Hub via --push_to_hub).
# NOTE(review): dataset is streamed, so --num_train_steps (not epochs)
# bounds the run; eval/save/log all fire every 5000 steps.
./run_t5_mlm_flax_streaming.py \
  --output_dir="." \
  --model_type="t5" \
  --config_name="./config.json" \
  --tokenizer_name="." \
  --dataset_name="NbAiLab/nbailab_extended" \
  --max_seq_length="512" \
  --weight_decay="0.01" \
  --per_device_train_batch_size="32" \
  --per_device_eval_batch_size="32" \
  --learning_rate="8e-3" \
  --warmup_steps="2000" \
  --overwrite_output_dir \
  --cache_dir="/mnt/disks/flaxdisk/cache/" \
  --num_train_steps="1000000" \
  --adam_beta1="0.9" \
  --adam_beta2="0.98" \
  --logging_steps="5000" \
  --save_steps="5000" \
  --eval_steps="5000" \
  --preprocessing_num_workers="64" \
  --push_to_hub