python3 ./xla_spawn.py \
    --num_cores 8 \
    ./run_mlm.py \
    --output_dir="./" \
    --model_type="roberta" \
    --config_name="./" \
    --tokenizer_name="./" \
    --dataset_name="NbAiLab/NCC_small" \
    --max_seq_length="128" \
    --weight_decay="0.01" \
    --per_device_train_batch_size="232" \
    --per_device_eval_batch_size="232" \
    --learning_rate="6e-4" \
    --warmup_steps="10000" \
    --overwrite_output_dir \
    --num_train_epochs="10" \
    --adam_beta1="0.9" \
    --adam_beta2="0.98" \
    --logging_steps="10000" \
    --save_steps="50000" \
    --eval_steps="50000" \
    --adafactor \
    --push_to_hub
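
With `--num_cores 8` and `--per_device_train_batch_size="232"`, the effective global batch size is 8 × 232 = 1856 sequences per step. Note that because `--adafactor` is set, the Trainer trains with the Adafactor optimizer, so the `--adam_beta1`/`--adam_beta2` values should not affect training.

Before launching on all eight cores, it can help to confirm that the TPU runtime is visible to PyTorch/XLA. A minimal sanity check, assuming `torch_xla` is installed in the same environment that runs `xla_spawn.py`:

```python
# Minimal TPU sanity check (a sketch; assumes torch_xla is installed).
import torch
import torch_xla.core.xla_model as xm

device = xm.xla_device()             # acquire an XLA device, e.g. xla:0
t = torch.ones(2, 2, device=device)  # allocate a small tensor on the TPU
print(device, t.sum().item())        # forces execution; should print 4.0
```

If this prints an `xla` device and `4.0`, the launcher should be able to spawn one training process per TPU core.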
|
|