{
"checkpoint_path": "/data_2to/devel_data/nn_pruning/output/squadv2_test_2/hp_mnop-bert-large-uncased-whole-word-masking_dn-squad_v2_v2wn1_od-__data_2to__devel_data__nn_pruning__output__squadv2_test_2___es-steps_pdtbs8_pdebs128_nte20_ls250_stl50_est5000_rn---58ebffa395d47d66/checkpoint-325000",
"config": {
"_name_or_path": "/tmp/tmpitf3rdr5",
"architectures": ["BertForQuestionAnswering"],
"attention_probs_dropout_prob": 0.1,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"initializer_range": 0.02,
"intermediate_size": 4096,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"pruned_heads": {
"0": [0, 1, 3, 4, 5, 8, 9, 13, 15],
"1": [0, 1, 3, 5, 7, 9, 10, 13, 14],
"10": [1, 2, 4, 5, 6, 8, 11, 13],
"11": [0, 2, 5, 6, 7, 8, 10, 12, 15],
"12": [0, 2, 6, 8, 9, 11, 13],
"13": [2, 6, 10, 12, 15],
"14": [1, 5, 6, 10, 11, 15],
"15": [0, 9],
"16": [5, 7],
"17": [1, 4, 8, 12, 14],
"18": [3, 4, 11],
"19": [0, 5, 12],
"2": [0, 1, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
"20": [0, 4, 10, 12],
"21": [0, 2, 3, 4, 8, 11, 12, 15],
"22": [0, 1, 3, 4, 7, 9, 10, 11, 13, 15],
"23": [2, 4, 8, 9, 10, 13, 14, 15],
"3": [0, 3, 5, 6, 7, 8, 9, 11, 13, 14, 15],
"4": [0, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 14],
"5": [1, 3, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15],
"6": [0, 1, 2, 3, 4, 7, 8, 9, 10, 11, 12, 15],
"7": [0, 3, 4, 5, 8, 9, 10, 11, 12, 13, 14],
"8": [3, 4, 5, 7, 8, 9, 10, 11, 12],
"9": [0, 1, 2, 3, 5, 6, 7, 9, 10, 13, 14, 15]
},
"transformers_version": "4.5.1",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 30522
},
"eval_metrics": {
"HasAns_exact": 74.8144399460189,
"HasAns_f1": 80.555306012496,
"HasAns_total": 5928,
"NoAns_exact": 84.57527333894029,
"NoAns_f1": 84.57527333894029,
"NoAns_total": 5945,
"best_exact": 79.70184452118251,
"best_exact_thresh": 0.0,
"best_f1": 82.56816761071966,
"best_f1_thresh": 0.0,
"exact": 79.70184452118251,
"f1": 82.56816761071981,
"main_metric": 82.56816761071981,
"total": 11873
},
"model_args": {
"cache_dir": null,
"config_name": null,
"model_name_or_path": "bert-large-uncased-whole-word-masking",
"tokenizer_name": null,
"use_fast_tokenizer": true
},
"sparse_args": {
"ampere_pruning_method": "disabled",
"attention_block_cols": 32,
"attention_block_rows": 32,
"attention_lambda": 1.0,
"attention_output_with_dense": 0,
"attention_pruning_method": "sigmoied_threshold",
"bias_mask": true,
"dense_block_cols": 1,
"dense_block_rows": 1,
"dense_lambda": 1.0,
"dense_pruning_method": "sigmoied_threshold:1d_alt",
"distil_alpha_ce": 0.1,
"distil_alpha_teacher": 0.9,
"distil_teacher_name_or_path": "madlag/bert-large-uncased-whole-word-masking-finetuned-squadv2",
"distil_temperature": 2.0,
"eval_with_current_patch_params": false,
"final_ampere_temperature": 20.0,
"final_finetune": false,
"final_threshold": 0.1,
"final_warmup": 10,
"gelu_patch": 0,
"gelu_patch_steps": 50000,
"initial_ampere_temperature": 0.0,
"initial_threshold": 0,
"initial_warmup": 1,
"layer_norm_patch": 0,
"layer_norm_patch_start_delta": 0.99,
"layer_norm_patch_steps": 50000,
"linear_min_parameters": 0.005,
"mask_init": "constant",
"mask_scale": 0.0,
"mask_scores_learning_rate": 0.01,
"qat": false,
"qconfig": "default",
"regularization": "l1",
"regularization_final_lambda": 5,
"rewind_model_name_or_path": null
},
"speed": {
"cuda_eval_elapsed_time": 52.126098907470706,
"eval_elapsed_time": 56.62768604200005
},
"speedup": 0.8466287123910415,
"stats": {
"layers": {
"0": {
"linear_attention_nnz": 998400,
"linear_attention_total": 4194304,
"linear_dense_nnz": 587776,
"linear_dense_total": 8388608,
"linear_nnz": 1586176,
"linear_total": 12582912,
"nnz": 1593727,
"total": 12594496
},
"1": {
"linear_attention_nnz": 1025024,
"linear_attention_total": 4194304,
"linear_dense_nnz": 624640,
"linear_dense_total": 8388608,
"linear_nnz": 1649664,
"linear_total": 12582912,
"nnz": 1657297,
"total": 12594496
},
"10": {
"linear_attention_nnz": 1531904,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1267712,
"linear_dense_total": 8388608,
"linear_nnz": 2799616,
"linear_total": 12582912,
"nnz": 2807915,
"total": 12594688
},
"11": {
"linear_attention_nnz": 1508352,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1351680,
"linear_dense_total": 8388608,
"linear_nnz": 2860032,
"linear_total": 12582912,
"nnz": 2868180,
"total": 12594496
},
"12": {
"linear_attention_nnz": 1525760,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1236992,
"linear_dense_total": 8388608,
"linear_nnz": 2762752,
"linear_total": 12582912,
"nnz": 2771132,
"total": 12594880
},
"13": {
"linear_attention_nnz": 1813504,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1423360,
"linear_dense_total": 8388608,
"linear_nnz": 3236864,
"linear_total": 12582912,
"nnz": 3245559,
"total": 12595264
},
"14": {
"linear_attention_nnz": 1774592,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1153024,
"linear_dense_total": 8388608,
"linear_nnz": 2927616,
"linear_total": 12582912,
"nnz": 2936051,
"total": 12595072
},
"15": {
"linear_attention_nnz": 1909760,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1077248,
"linear_dense_total": 8388608,
"linear_nnz": 2987008,
"linear_total": 12582912,
"nnz": 2996110,
"total": 12595840
},
"16": {
"linear_attention_nnz": 2169856,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1091584,
"linear_dense_total": 8388608,
"linear_nnz": 3261440,
"linear_total": 12582912,
"nnz": 3270645,
"total": 12595840
},
"17": {
"linear_attention_nnz": 1823744,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1071104,
"linear_dense_total": 8388608,
"linear_nnz": 2894848,
"linear_total": 12582912,
"nnz": 2903531,
"total": 12595264
},
"18": {
"linear_attention_nnz": 1886208,
"linear_attention_total": 4194304,
"linear_dense_nnz": 774144,
"linear_dense_total": 8388608,
"linear_nnz": 2660352,
"linear_total": 12582912,
"nnz": 2669146,
"total": 12595648
},
"19": {
"linear_attention_nnz": 1472512,
"linear_attention_total": 4194304,
"linear_dense_nnz": 446464,
"linear_dense_total": 8388608,
"linear_nnz": 1918976,
"linear_total": 12582912,
"nnz": 1927354,
"total": 12595648
},
"2": {
"linear_attention_nnz": 595968,
"linear_attention_total": 4194304,
"linear_dense_nnz": 876544,
"linear_dense_total": 8388608,
"linear_nnz": 1472512,
"linear_total": 12582912,
"nnz": 1479660,
"total": 12593728
},
"20": {
"linear_attention_nnz": 1079296,
"linear_attention_total": 4194304,
"linear_dense_nnz": 299008,
"linear_dense_total": 8388608,
"linear_nnz": 1378304,
"linear_total": 12582912,
"nnz": 1386290,
"total": 12595456
},
"21": {
"linear_attention_nnz": 512000,
"linear_attention_total": 4194304,
"linear_dense_nnz": 155648,
"linear_dense_total": 8388608,
"linear_nnz": 667648,
"linear_total": 12582912,
"nnz": 674764,
"total": 12594688
},
"22": {
"linear_attention_nnz": 375808,
"linear_attention_total": 4194304,
"linear_dense_nnz": 90112,
"linear_dense_total": 8388608,
"linear_nnz": 465920,
"linear_total": 12582912,
"nnz": 472716,
"total": 12594304
},
"23": {
"linear_attention_nnz": 343040,
"linear_attention_total": 4194304,
"linear_dense_nnz": 221184,
"linear_dense_total": 8388608,
"linear_nnz": 564224,
"linear_total": 12582912,
"nnz": 571244,
"total": 12594688
},
"3": {
"linear_attention_nnz": 916480,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1085440,
"linear_dense_total": 8388608,
"linear_nnz": 2001920,
"linear_total": 12582912,
"nnz": 2009554,
"total": 12594112
},
"4": {
"linear_attention_nnz": 678912,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1146880,
"linear_dense_total": 8388608,
"linear_nnz": 1825792,
"linear_total": 12582912,
"nnz": 1833264,
"total": 12593920
},
"5": {
"linear_attention_nnz": 509952,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1308672,
"linear_dense_total": 8388608,
"linear_nnz": 1818624,
"linear_total": 12582912,
"nnz": 1825983,
"total": 12593920
},
"6": {
"linear_attention_nnz": 717824,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1441792,
"linear_dense_total": 8388608,
"linear_nnz": 2159616,
"linear_total": 12582912,
"nnz": 2167168,
"total": 12593920
},
"7": {
"linear_attention_nnz": 1009664,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1468416,
"linear_dense_total": 8388608,
"linear_nnz": 2478080,
"linear_total": 12582912,
"nnz": 2485901,
"total": 12594112
},
"8": {
"linear_attention_nnz": 1327104,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1468416,
"linear_dense_total": 8388608,
"linear_nnz": 2795520,
"linear_total": 12582912,
"nnz": 2803661,
"total": 12594496
},
"9": {
"linear_attention_nnz": 631808,
"linear_attention_total": 4194304,
"linear_dense_nnz": 1531904,
"linear_dense_total": 8388608,
"linear_nnz": 2163712,
"linear_total": 12582912,
"nnz": 2171276,
"total": 12593920
}
},
"linear_nnz": 51337216,
"linear_sparsity": 83.00035264756944,
"linear_total": 301989888,
"nnz": 83313090,
"pruned_heads": {
"0": [0, 1, 3, 4, 5, 8, 9, 13, 15],
"1": [0, 1, 3, 5, 7, 9, 10, 13, 14],
"10": [1, 2, 4, 5, 6, 8, 11, 13],
"11": [0, 2, 5, 6, 7, 8, 10, 12, 15],
"12": [0, 2, 6, 8, 9, 11, 13],
"13": [2, 6, 10, 12, 15],
"14": [1, 5, 6, 10, 11, 15],
"15": [0, 9],
"16": [5, 7],
"17": [1, 4, 8, 12, 14],
"18": [3, 4, 11],
"19": [0, 5, 12],
"2": [0, 1, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
"20": [0, 4, 10, 12],
"21": [0, 2, 3, 4, 8, 11, 12, 15],
"22": [0, 1, 3, 4, 7, 9, 10, 11, 13, 15],
"23": [2, 4, 8, 9, 10, 13, 14, 15],
"3": [0, 3, 5, 6, 7, 8, 9, 11, 13, 14, 15],
"4": [0, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 14],
"5": [1, 3, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15],
"6": [0, 1, 2, 3, 4, 7, 8, 9, 10, 11, 12, 15],
"7": [0, 3, 4, 5, 8, 9, 10, 11, 12, 13, 14],
"8": [3, 4, 5, 7, 8, 9, 10, 11, 12],
"9": [0, 1, 2, 3, 5, 6, 7, 9, 10, 13, 14, 15]
},
"total": 334057858,
"total_sparsity": 75.06028132408129
},
"training_args": {
"_n_gpu": 1,
"adafactor": false,
"adam_beta1": 0.9,
"adam_beta2": 0.999,
"adam_epsilon": 1e-08,
"dataloader_drop_last": false,
"dataloader_num_workers": 0,
"dataloader_pin_memory": true,
"ddp_find_unused_parameters": null,
"debug": false,
"deepspeed": null,
"disable_tqdm": false,
"do_eval": true,
"do_predict": false,
"do_train": true,
"eval_accumulation_steps": null,
"eval_steps": 5000,
"evaluation_strategy": "steps",
"fp16": false,
"fp16_backend": "auto",
"fp16_full_eval": false,
"fp16_opt_level": "O1",
"gradient_accumulation_steps": 1,
"greater_is_better": null,
"group_by_length": false,
"ignore_data_skip": false,
"label_names": null,
"label_smoothing_factor": 0.0,
"learning_rate": 3e-05,
"length_column_name": "length",
"load_best_model_at_end": false,
"local_rank": -1,
"logging_dir": "/data_2to/devel_data/nn_pruning/output/squadv2_test_2/",
"logging_first_step": false,
"logging_steps": 250,
"logging_strategy": "steps",
"lr_scheduler_type": "linear",
"max_grad_norm": 1.0,
"max_steps": -1,
"metric_for_best_model": null,
"mp_parameters": "",
"no_cuda": false,
"num_train_epochs": 20,
"optimize_model_before_eval": "disabled",
"output_dir": "/data_2to/devel_data/nn_pruning/output/squadv2_test_2/",
"overwrite_output_dir": true,
"past_index": -1,
"per_device_eval_batch_size": 128,
"per_device_train_batch_size": 8,
"per_gpu_eval_batch_size": null,
"per_gpu_train_batch_size": null,
"prediction_loss_only": false,
"remove_unused_columns": true,
"report_to": ["tensorboard", "wandb"],
"run_name": "/data_2to/devel_data/nn_pruning/output/squadv2_test_2/",
"save_steps": 5000,
"save_strategy": "steps",
"save_total_limit": 50,
"seed": 17,
"sharded_ddp": [],
"skip_memory_metrics": false,
"tpu_metrics_debug": false,
"tpu_num_cores": null,
"warmup_ratio": 0.0,
"warmup_steps": 5400,
"weight_decay": 0.0
}
}