export model_dir=arabic-t5-small
export train_batch_size=48
export eval_batch_size=96
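# Pre-trains T5 from scratch with the span-corruption (MLM) objective, reading
# the model config and tokenizer from ${model_dir} and writing checkpoints back
# into the same directory. Note: run_t5_mlm_flax.py also needs a corpus via
# --dataset_name or --train_file; that argument is assumed to be supplied
# elsewhere in this setup.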
python ./run_t5_mlm_flax.py \
--model_type t5 \
--config_name ${model_dir} \
--tokenizer_name ${model_dir} \
--use_fast_tokenizer True \
--dtype float32 \
--max_seq_length 512 \
--preprocessing_num_workers 96 \
--output_dir ${model_dir} \
--overwrite_output_dir True \
--do_train \
--per_device_train_batch_size ${train_batch_size} \
--per_device_eval_batch_size ${eval_batch_size} \
--learning_rate 1e-2 \
--num_train_epochs 1 \
--logging_steps 100 \
--eval_steps 1000 \
--save_steps 1000 \
--seed 12 \
--adafactor True \
--push_to_hub \
--cache_dir ./training_cache
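Note that per_device_train_batch_size counts sequences per accelerator core, so the global batch per step is 48 times the number of cores (e.g. 384 on an 8-core TPU v3-8). Once training finishes, the checkpoint can be loaded straight back through transformers. Below is a minimal sanity-check sketch, assuming training completed and saved both the Flax weights and tokenizer files into ./arabic-t5-small; the Arabic probe sentence is an illustrative assumption, not part of the original setup. It asks the model to fill a masked span, which is exactly the objective run_t5_mlm_flax.py optimizes.

# A minimal sketch, assuming training wrote Flax weights and tokenizer files
# to ./arabic-t5-small and that transformers is installed with Flax support.
from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration

model_dir = "arabic-t5-small"  # matches ${model_dir} above
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = FlaxT5ForConditionalGeneration.from_pretrained(model_dir)

# T5 MLM pre-training masks spans with sentinel tokens such as <extra_id_0>;
# a usable checkpoint should propose a plausible filler for the masked span.
text = "عاصمة فرنسا هي <extra_id_0>."  # "The capital of France is <extra_id_0>." (hypothetical probe)
inputs = tokenizer(text, return_tensors="np")
outputs = model.generate(inputs["input_ids"], max_length=8)
print(tokenizer.decode(outputs.sequences[0], skip_special_tokens=False))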