Namespace(align_suffix=None, alignfile=None, all_gather_list_size=16384, azureml_logging=False, bf16=False, bpe=None, cpu=False, criterion='cross_entropy', dataset_impl='mmap', destdir='data/glue-bin/stsb/input0', empty_cache_freq=0, fp16=False, fp16_init_scale=128, fp16_no_flatten_grads=False, fp16_scale_tolerance=0.0, fp16_scale_window=None, joined_dictionary=False, log_format=None, log_interval=100, lr_scheduler='fixed', memory_efficient_bf16=False, memory_efficient_fp16=False, min_loss_scale=0.0001, model_parallel_size=1, no_progress_bar=False, nwordssrc=-1, nwordstgt=-1, only_source=True, optimizer=None, padding_factor=8, plasma_path='/tmp/plasma', profile=False, quantization_config_path=None, reset_logging=False, scoring='bleu', seed=1, simul_type=None, slurm_job_id=None, slurm_job_name=None, source_lang=None, srcdict='data/gpt2_bpe/dict.txt', suppress_crashes=False, tag=None, target_lang=None, task='translation', tensorboard_logdir=None, testpref='data/glue/stsb/input0.test.bpe', tgtdict=None, threshold_loss_scale=None, thresholdsrc=0, thresholdtgt=0, tokenizer=None, tpu=False, trainpref='data/glue/stsb/input0.train.bpe', use_plasma_view=False, user_dir=None, validpref='data/glue/stsb/input0.val.bpe', wandb_project=None, workers=3)
[None] Dictionary: 50264 types
[None] data/glue/stsb/input0.train.bpe: 5749 sents, 77781 tokens, 0.0% replaced by <unk>
[None] Dictionary: 50264 types
[None] data/glue/stsb/input0.val.bpe: 1500 sents, 22719 tokens, 0.0% replaced by <unk>
[None] Dictionary: 50264 types
[None] data/glue/stsb/input0.test.bpe: 1379 sents, 18131 tokens, 0.0% replaced by <unk>
Wrote preprocessed data to data/glue-bin/stsb/input0
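For reference, the Namespace printed above corresponds to a fairseq-preprocess invocation roughly like the following, reconstructed from the logged arguments (options left at their defaults are omitted):

fairseq-preprocess \
    --only-source \
    --srcdict data/gpt2_bpe/dict.txt \
    --trainpref data/glue/stsb/input0.train.bpe \
    --validpref data/glue/stsb/input0.val.bpe \
    --testpref data/glue/stsb/input0.test.bpe \
    --destdir data/glue-bin/stsb/input0 \
    --workers 3

Because --srcdict points at the GPT-2 BPE dictionary, all splits are binarized against the same 50,264-type vocabulary, which is why the log reports 0.0% of tokens replaced by <unk>.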