{
"adam_epsilon": 1e-08,
"cache_dir": "",
"config_name": "",
"data_dir": "/content/data/glue/MNLI",
"dev": null,
"device": "cuda",
"do_eval": true,
"do_lower_case": true,
"do_test": false,
"do_train": true,
"eval_all_checkpoints": false,
"evaluate_during_training": true,
"evaluate_during_training_epoch": false,
"features_cache_dir": "/content/data/glue/MNLI/cache_36891",
"fp16": false,
"fp16_opt_level": "O1",
"gradient_accumulation_steps": 1,
"learning_rate": 1.0993071205018916e-05,
"local_rank": -1,
"logging_steps": 1000,
"max_grad_norm": 1.0,
"max_seq_length": 128,
"max_steps": -1,
"model_name_or_path": "roberta-base",
"model_type": "roberta",
"n_gpu": 1,
"no_cuda": false,
"num_train_epochs": 6,
"output_dir": "/content/results",
"output_mode": "classification",
"overwrite_cache": false,
"overwrite_output_dir": false,
"patience": 3,
"per_gpu_eval_batch_size": 96,
"per_gpu_train_batch_size": 64,
"save_steps": 0,
"seed": 36891,
"server_ip": "",
"server_port": "",
"task_name": "mnli",
"test": null,
"tokenizer_name": "",
"train": null,
"warmup_steps": 0,
"weight_decay": 0.0
}