#!/usr/bin/env bash
# Launch model-parallel causal-LM training (run_clm_mp.py) on the
# WikiText-2 raw dataset using the GPT-2 tokenizer and a model loaded
# from the current directory. Training output and the datasets cache
# are written to the mounted flax disk.
#
# NOTE(review): assumes run_clm_mp.py lives in the working directory and
# /mnt/disks/flaxdisk/ is mounted and writable — confirm before running.
set -euo pipefail

python run_clm_mp.py \
  --model_name_or_path "." \
  --tokenizer_name gpt2 \
  --dataset_name wikitext --dataset_config_name wikitext-2-raw-v1 \
  --do_train \
  --do_eval \
  --block_size 1024 \
  --num_train_epochs 5 \
  --learning_rate 4e-6 \
  --per_device_train_batch_size 2 \
  --per_device_eval_batch_size 2 \
  --overwrite_output_dir \
  --output_dir /mnt/disks/flaxdisk/output/ \
  --cache_dir /mnt/disks/flaxdisk/cache/ \
  --dtype bfloat16 \
  --logging_steps 97 \
  --eval_steps 96