#!/usr/bin/env bash
# Launch model-parallel causal-LM training (run_clm_mp.py) on the social
# corpus. Uses the tokenizer in the current directory and writes checkpoints
# back to it, pushing results to the Hub when training finishes.
#
# NOTE(review): the original lines ended in " | |" — markdown-table extraction
# residue that broke every backslash line-continuation. Removed here; every
# flag and value is otherwise unchanged.
set -euo pipefail

python run_clm_mp.py \
  --model_name_or_path "gpt-neo-1.3B" \
  --tokenizer_name "./" \
  --train_file /mnt/disks/flaxdisk/corpus/social_train.json \
  --validation_file /mnt/disks/flaxdisk/corpus/social_validation.json \
  --do_train \
  --do_eval \
  --block_size 1024 \
  --num_train_epochs 5 \
  --learning_rate 1e-5 \
  --per_device_train_batch_size 2 \
  --per_device_eval_batch_size 2 \
  --overwrite_output_dir \
  --output_dir "./" \
  --cache_dir /mnt/disks/flaxdisk/cache/ \
  --preprocessing_num_workers 96 \
  --dtype bfloat16 \
  --logging_steps 50000 \
  --eval_steps 50000 \
  --warmup_steps 3000 \
  --push_to_hub