dalle-mini / seq2seq /do_big_run.sh
boris's picture
feat: split script for small and big runs
5e244d0
raw
history blame
522 Bytes
#!/usr/bin/env bash
# Launch the "big" seq2seq Flax training run for dalle-mini.
#
# NOTE: --train_file / --validation_file are currently ignored by
# run_seq2seq_flax.py; they are kept here for when the script honors them.
# (They must NOT be commented inline after a trailing backslash: `\ #`
# escapes the space instead of the newline, which breaks the line
# continuation and splits the command in two.)
set -euo pipefail

python run_seq2seq_flax.py \
  --max_source_length 128 \
  --train_file /data/CC12M/encoded-small-train.tsv \
  --validation_file /data/CC12M/encoded-small-valid.tsv \
  --output_dir output \
  --per_device_train_batch_size 56 \
  --per_device_eval_batch_size 56 \
  --preprocessing_num_workers 80 \
  --warmup_steps 125 \
  --gradient_accumulation_steps 8 \
  --do_train \
  --do_eval \
  --adafactor \
  --num_train_epochs 10 \
  --log_model \
  --learning_rate 0.001