{
    "checkpoint_path": "/data_2to/devel_data/nn_pruning/output/squad_test_8_mvp_lt/hp_od-__data_2to__devel_data__nn_pruning__output__squad_test_8_mvp_lt___es-steps_nte20_ls250_stl50_est5000_rn-__data_2to__devel_data__nn_pruning__output__squad_test_8_mvp_lt___dpm-si--7fe43555f854fbb6/checkpoint-110000", 
    "config": {
        "_name_or_path": "/tmp/tmpdczbpf2s", 
        "architectures": ["BertForQuestionAnswering"], 
        "attention_probs_dropout_prob": 0.1, 
        "gradient_checkpointing": false, 
        "hidden_act": "gelu", 
        "hidden_dropout_prob": 0.1, 
        "hidden_size": 768, 
        "initializer_range": 0.02, 
        "intermediate_size": 3072, 
        "layer_norm_eps": 1e-12, 
        "max_position_embeddings": 512, 
        "model_type": "bert", 
        "num_attention_heads": 12, 
        "num_hidden_layers": 12, 
        "pad_token_id": 0, 
        "position_embedding_type": "absolute", 
        "pruned_heads": {
            "0": [9], 
            "1": [], 
            "10": [1, 2, 4], 
            "11": [5, 7, 8, 11], 
            "2": [8], 
            "3": [2, 4], 
            "4": [], 
            "5": [1], 
            "6": [2, 3], 
            "7": [1, 7], 
            "8": [0], 
            "9": [1, 4, 5, 7, 10]
        }, 
        "transformers_version": "4.4.2", 
        "type_vocab_size": 2, 
        "use_cache": true, 
        "vocab_size": 30522
    }, 
    "eval_metrics": {
        "exact_match": 80.93661305581836, 
        "f1": 88.11360890595924, 
        "main_metric": 88.11360890595924
    }, 
    "model_args": {
        "cache_dir": null, 
        "config_name": null, 
        "model_name_or_path": "bert-base-uncased", 
        "tokenizer_name": null, 
        "use_fast_tokenizer": true
    }, 
    "sparse_args": {
        "ampere_pruning_method": "disabled", 
        "attention_block_cols": 1, 
        "attention_block_rows": 1, 
        "attention_lambda": 1.0, 
        "attention_output_with_dense": 0, 
        "attention_pruning_method": "sigmoied_threshold", 
        "bias_mask": true, 
        "dense_block_cols": 1, 
        "dense_block_rows": 1, 
        "dense_lambda": 1.0, 
        "dense_pruning_method": "sigmoied_threshold", 
        "distil_alpha_ce": 0.1, 
        "distil_alpha_teacher": 0.9, 
        "distil_teacher_name_or_path": "bert-large-uncased-whole-word-masking-finetuned-squad", 
        "distil_temperature": 2.0, 
        "final_ampere_temperature": 20.0, 
        "final_finetune": false, 
        "final_threshold": 0.1, 
        "final_warmup": 10, 
        "gelu_patch": 0, 
        "initial_ampere_temperature": 0.0, 
        "initial_threshold": 0, 
        "initial_warmup": 1, 
        "layer_norm_patch": 0, 
        "mask_init": "constant", 
        "mask_scale": 0.0, 
        "mask_scores_learning_rate": 0.01, 
        "regularization": "l1", 
        "regularization_final_lambda": 120
    }, 
    "speed": {
        "cuda_eval_elapsed_time": 33.17768461608887, 
        "eval_elapsed_time": 40.18306041043252
    }, 
    "speedup": 1.1632636047982534, 
    "stats": {
        "layers": {
            "0": {
                "linear_attention_nnz": 108638, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 743508, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 852146, 
                "linear_total": 7077888, 
                "nnz": 860873, 
                "total": 7087872
            }, 
            "1": {
                "linear_attention_nnz": 171340, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 780119, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 951459, 
                "linear_total": 7077888, 
                "nnz": 960245, 
                "total": 7087872
            }, 
            "10": {
                "linear_attention_nnz": 67316, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 69353, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 136669, 
                "linear_total": 7077888, 
                "nnz": 143162, 
                "total": 7087872
            }, 
            "11": {
                "linear_attention_nnz": 41497, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 55855, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 97352, 
                "linear_total": 7077888, 
                "nnz": 103286, 
                "total": 7087872
            }, 
            "2": {
                "linear_attention_nnz": 221074, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 756625, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 977699, 
                "linear_total": 7077888, 
                "nnz": 986901, 
                "total": 7087872
            }, 
            "3": {
                "linear_attention_nnz": 258229, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 705908, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 964137, 
                "linear_total": 7077888, 
                "nnz": 973427, 
                "total": 7087872
            }, 
            "4": {
                "linear_attention_nnz": 255136, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 593338, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 848474, 
                "linear_total": 7077888, 
                "nnz": 858037, 
                "total": 7087872
            }, 
            "5": {
                "linear_attention_nnz": 179994, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 531061, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 711055, 
                "linear_total": 7077888, 
                "nnz": 720395, 
                "total": 7087872
            }, 
            "6": {
                "linear_attention_nnz": 165167, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 406391, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 571558, 
                "linear_total": 7077888, 
                "nnz": 580963, 
                "total": 7087872
            }, 
            "7": {
                "linear_attention_nnz": 139907, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 272514, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 412421, 
                "linear_total": 7077888, 
                "nnz": 421032, 
                "total": 7087872
            }, 
            "8": {
                "linear_attention_nnz": 113253, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 163778, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 277031, 
                "linear_total": 7077888, 
                "nnz": 285536, 
                "total": 7087872
            }, 
            "9": {
                "linear_attention_nnz": 84915, 
                "linear_attention_total": 2359296, 
                "linear_dense_nnz": 71190, 
                "linear_dense_total": 4718592, 
                "linear_nnz": 156105, 
                "linear_total": 7077888, 
                "nnz": 162775, 
                "total": 7087872
            }
        }, 
        "linear_nnz": 6956106, 
        "linear_sparsity": 91.81004983407479, 
        "linear_total": 84934656, 
        "nnz": 30895354, 
        "pruned_heads": {
            "0": [9], 
            "1": [], 
            "10": [1, 2, 4], 
            "11": [8, 11, 5, 7], 
            "2": [8], 
            "3": [2, 4], 
            "4": [], 
            "5": [1], 
            "6": [2, 3], 
            "7": [1, 7], 
            "8": [0], 
            "9": [1, 4, 5, 7, 10]
        }, 
        "total": 108893186, 
        "total_sparsity": 71.62783537254572
    }, 
    "training_args": {
        "_n_gpu": -1, 
        "adafactor": false, 
        "adam_beta1": 0.9, 
        "adam_beta2": 0.999, 
        "adam_epsilon": 1e-08, 
        "dataloader_drop_last": false, 
        "dataloader_num_workers": 0, 
        "dataloader_pin_memory": true, 
        "ddp_find_unused_parameters": null, 
        "debug": false, 
        "deepspeed": null, 
        "disable_tqdm": false, 
        "do_eval": 1, 
        "do_predict": false, 
        "do_train": 1, 
        "eval_accumulation_steps": null, 
        "eval_steps": 5000, 
        "evaluation_strategy": "steps", 
        "fp16": false, 
        "fp16_backend": "auto", 
        "fp16_full_eval": false, 
        "fp16_opt_level": "O1", 
        "gradient_accumulation_steps": 1, 
        "greater_is_better": null, 
        "group_by_length": false, 
        "ignore_data_skip": false, 
        "label_names": null, 
        "label_smoothing_factor": 0.0, 
        "learning_rate": 3e-05, 
        "length_column_name": "length", 
        "load_best_model_at_end": false, 
        "local_rank": -1, 
        "logging_dir": "/data_2to/devel_data/nn_pruning/output/squad_test_8_mvp_lt/", 
        "logging_first_step": false, 
        "logging_steps": 250, 
        "logging_strategy": "steps", 
        "lr_scheduler_type": "linear", 
        "max_grad_norm": 1.0, 
        "max_steps": -1, 
        "metric_for_best_model": null, 
        "mp_parameters": "", 
        "no_cuda": false, 
        "num_train_epochs": 20, 
        "optimize_model_before_eval": "disabled", 
        "output_dir": "/data_2to/devel_data/nn_pruning/output/squad_test_8_mvp_lt/", 
        "overwrite_output_dir": 1, 
        "past_index": -1, 
        "per_device_eval_batch_size": 8, 
        "per_device_train_batch_size": 16, 
        "per_gpu_eval_batch_size": null, 
        "per_gpu_train_batch_size": null, 
        "prediction_loss_only": false, 
        "remove_unused_columns": true, 
        "report_to": null, 
        "run_name": "/data_2to/devel_data/nn_pruning/output/squad_test_8_mvp_lt/", 
        "save_steps": 5000, 
        "save_strategy": "steps", 
        "save_total_limit": 50, 
        "seed": 17, 
        "sharded_ddp": "", 
        "skip_memory_metrics": false, 
        "tpu_metrics_debug": false, 
        "tpu_num_cores": null, 
        "warmup_ratio": 0.0, 
        "warmup_steps": 5400, 
        "weight_decay": 0.0
    }
}