#!/bin/bash
# gpt2-finnish / start_train.sh
# set up the environment and training hyperparameters
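# unset LD_PRELOAD to avoid conflicts with preloaded allocators (e.g. tcmalloc on Cloud TPU VMs)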
unset LD_PRELOAD
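# cache Hugging Face datasets on the research disk instead of the default home directory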
export HF_DATASETS_CACHE="/researchdisk/datasets_cache"
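# tell transformers not to use PyTorch, so the Flax/JAX code path is used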
export USE_TORCH=0
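# launch causal language model (GPT-2) training with Flax; --dataset_filepath,
# --distributed_shampoo and --cosine_decay are assumed to be flags of this repo's
# modified run_clm_flax.py, not of the stock transformers Flax example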
python3 run_clm_flax.py \
--output_dir="./" \
--model_type="gpt2" \
--config_name="./" \
--tokenizer_name="./" \
--dataset_filepath="/researchdisk/training_dataset_full_deduplicated" \
--do_train --do_eval \
--block_size="512" \
--per_device_train_batch_size="32" \
--per_device_eval_batch_size="32" \
--preprocessing_num_workers="1" \
--distributed_shampoo \
--learning_rate="1e-4" \
--warmup_steps="4000" \
--cosine_decay \
--overwrite_output_dir \
--logging_steps="500" \
--eval_steps="10000" \
--save_steps="10000" \
--num_train_epochs="10" \
--dtype="bfloat16" \
--push_to_hub \
--hub_model_id="Finnish-NLP/gpt2-finnish"
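# with --push_to_hub, checkpoints and logs are uploaded to Finnish-NLP/gpt2-finnish on the Hub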