roberta-base-danish / continue_run_mlm_flax_stream.sh
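# Continue masked-language-model pretraining of roberta-base-danish with the Flax streaming script.

# Checkpoint and tokenizer to resume from; new checkpoints are written back to the same directory.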
export MODEL_DIR=/home/Z6HJB/train-roberta-base-danish/roberta-base-danish/
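# Activate the Python virtual environment used for training.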
source /home/Z6HJB/test/bin/activate
python3 ./src/danish_run_mlm_flax_stream.py \
--model_name_or_path="${MODEL_DIR}" \
--output_dir="${MODEL_DIR}" \
--tokenizer_name="${MODEL_DIR}" \
--dataset_name="mc4" \
--dataset_config_name="da" \
--max_seq_length="128" \
--per_device_train_batch_size="128" \
--per_device_eval_batch_size="128" \
--learning_rate="1.087e-6" \
--warmup_steps="1000" \
--overwrite_output_dir \
--adam_beta1="0.9" \
--adam_beta2="0.98" \
--num_train_steps="200000" \
--num_eval_samples="5000" \
--save_steps="1000" \
--logging_steps="250" \
--eval_steps="1000"
#--push_to_hub \
#--config_name="${MODEL_DIR}" \
#--model_type="roberta" \
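
# Optional sanity check, not part of the original script: verify that the checkpoint in
# MODEL_DIR still loads before relaunching training (assumes a Flax-enabled transformers
# install in the activated environment). Uncomment to use:
# python3 -c "from transformers import FlaxRobertaForMaskedLM; FlaxRobertaForMaskedLM.from_pretrained('${MODEL_DIR}')"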