Vui Seng Chua
committed on
Commit
•
17658f8
1
Parent(s):
a9425ed
Add collaterals
Browse files- 8bit_ref_bert_squad_nncf_mvmt.json +67 -0
- all_results.json +11 -0
- checkpoint-106000/config.json +25 -0
- checkpoint-106000/nncf-mvmt-p3.json +1 -0
- checkpoint-106000/pytorch_model.bin +3 -0
- checkpoint-106000/rng_state.pth +3 -0
- checkpoint-106000/scheduler.pt +3 -0
- checkpoint-106000/special_tokens_map.json +1 -0
- checkpoint-106000/tokenizer.json +0 -0
- checkpoint-106000/tokenizer_config.json +1 -0
- checkpoint-106000/training_args.bin +3 -0
- checkpoint-106000/vocab.txt +0 -0
- compressed_graph.dot +0 -0
- config.json +25 -0
- eval_predictions.json +0 -0
- eval_results.json +6 -0
- ir/sparsity_structures.csv +73 -0
- ir/sparsity_structures.md +74 -0
- ir/sparsity_structures.pkl +3 -0
- ir/squad-BertForQuestionAnswering.crop_cfg.sd.8bit.pkl +3 -0
- ir/squad-BertForQuestionAnswering.cropped.8bit.bin +3 -0
- ir/squad-BertForQuestionAnswering.cropped.8bit.mapping +0 -0
- ir/squad-BertForQuestionAnswering.cropped.8bit.onnx +3 -0
- ir/squad-BertForQuestionAnswering.cropped.8bit.xml +0 -0
- original_graph.dot +0 -0
- special_tokens_map.json +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
- train_results.json +8 -0
- training_args.bin +3 -0
- vocab.txt +0 -0
8bit_ref_bert_squad_nncf_mvmt.json
ADDED
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"input_info": [
|
3 |
+
{
|
4 |
+
"sample_size": [1, 384],
|
5 |
+
"type": "long"
|
6 |
+
},
|
7 |
+
{
|
8 |
+
"sample_size": [1, 384],
|
9 |
+
"type": "long"
|
10 |
+
},
|
11 |
+
{
|
12 |
+
"sample_size": [1, 384],
|
13 |
+
"type": "long"
|
14 |
+
}
|
15 |
+
],
|
16 |
+
"compression":
|
17 |
+
[
|
18 |
+
{
|
19 |
+
"algorithm": "movement_sparsity",
|
20 |
+
"params": {
|
21 |
+
"schedule": "threshold_polynomial_decay",
|
22 |
+
"power": 3,
|
23 |
+
"init_importance_threshold": 0.0,
|
24 |
+
"final_importance_threshold": 0.1,
|
25 |
+
"warmup_start_epoch": 1,
|
26 |
+
"warmup_end_epoch": 10,
|
27 |
+
"steps_per_epoch": 5533,
|
28 |
+
"importance_regularization_factor": 0.0095,
|
29 |
+
"update_per_optimizer_step": true
|
30 |
+
},
|
31 |
+
"sparse_structure_by_scopes": [
|
32 |
+
["block", [32, 32], "{re}.*BertAttention*"],
|
33 |
+
["per_dim", [0], "{re}.*BertIntermediate*"],
|
34 |
+
["per_dim", [1], "{re}.*BertOutput*"]
|
35 |
+
],
|
36 |
+
"ignored_scopes": ["{re}.*NNCFEmbedding", "{re}.*qa_outputs*"]
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"algorithm": "quantization",
|
40 |
+
"initializer": {
|
41 |
+
"range": {
|
42 |
+
"num_init_samples": 32,
|
43 |
+
"type": "percentile",
|
44 |
+
"params":
|
45 |
+
{
|
46 |
+
"min_percentile": 0.01,
|
47 |
+
"max_percentile": 99.99
|
48 |
+
}
|
49 |
+
},
|
50 |
+
|
51 |
+
"batchnorm_adaptation": {
|
52 |
+
"num_bn_adaptation_samples": 200
|
53 |
+
}
|
54 |
+
},
|
55 |
+
"activations":
|
56 |
+
{
|
57 |
+
"mode": "symmetric"
|
58 |
+
},
|
59 |
+
"weights":
|
60 |
+
{
|
61 |
+
"mode": "symmetric",
|
62 |
+
"signed": true,
|
63 |
+
"per_channel": false
|
64 |
+
}
|
65 |
+
}
|
66 |
+
]
|
67 |
+
}
|
all_results.json
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"epoch": 20.0,
|
3 |
+
"eval_exact_match": 83.75591296121098,
|
4 |
+
"eval_f1": 90.22612551043655,
|
5 |
+
"eval_samples": 10784,
|
6 |
+
"train_loss": 0.9409083591845138,
|
7 |
+
"train_runtime": 242213.996,
|
8 |
+
"train_samples": 88524,
|
9 |
+
"train_samples_per_second": 7.31,
|
10 |
+
"train_steps_per_second": 0.457
|
11 |
+
}
|
checkpoint-106000/config.json
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"_name_or_path": "bert-base-uncased",
|
3 |
+
"architectures": [
|
4 |
+
"NNCFNetwork"
|
5 |
+
],
|
6 |
+
"attention_probs_dropout_prob": 0.1,
|
7 |
+
"gradient_checkpointing": false,
|
8 |
+
"hidden_act": "gelu",
|
9 |
+
"hidden_dropout_prob": 0.1,
|
10 |
+
"hidden_size": 768,
|
11 |
+
"initializer_range": 0.02,
|
12 |
+
"intermediate_size": 3072,
|
13 |
+
"layer_norm_eps": 1e-12,
|
14 |
+
"max_position_embeddings": 512,
|
15 |
+
"model_type": "bert",
|
16 |
+
"num_attention_heads": 12,
|
17 |
+
"num_hidden_layers": 12,
|
18 |
+
"pad_token_id": 0,
|
19 |
+
"position_embedding_type": "absolute",
|
20 |
+
"torch_dtype": "float32",
|
21 |
+
"transformers_version": "4.9.1",
|
22 |
+
"type_vocab_size": 2,
|
23 |
+
"use_cache": true,
|
24 |
+
"vocab_size": 30522
|
25 |
+
}
|
checkpoint-106000/nncf-mvmt-p3.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
../8bit_ref_bert_squad_nncf_mvmt.json
|
checkpoint-106000/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:a01a23b33edaacc520a30c7914bfc69f0810c1feabda327a7bf6a43f914ea809
|
3 |
+
size 776898425
|
checkpoint-106000/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c27ef9cb81f4613af54a49c73970686cdf4cd7ce1f96f4d332e6e18c10c3aa0f
|
3 |
+
size 14503
|
checkpoint-106000/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:a6968194d3271d247aa67eeadc91e3074c6bfbc51675956a4377dfda7369c781
|
3 |
+
size 623
|
checkpoint-106000/special_tokens_map.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
|
checkpoint-106000/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
checkpoint-106000/tokenizer_config.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-uncased", "tokenizer_class": "BertTokenizer"}
|
checkpoint-106000/training_args.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:bab812d2aa2fc47334c1d9a14b071f3f4ec6150a0bcf197f927db3b0246d0645
|
3 |
+
size 3055
|
checkpoint-106000/vocab.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
compressed_graph.dot
ADDED
The diff for this file is too large to render.
See raw diff
|
|
config.json
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"_name_or_path": "bert-base-uncased",
|
3 |
+
"architectures": [
|
4 |
+
"NNCFNetwork"
|
5 |
+
],
|
6 |
+
"attention_probs_dropout_prob": 0.1,
|
7 |
+
"gradient_checkpointing": false,
|
8 |
+
"hidden_act": "gelu",
|
9 |
+
"hidden_dropout_prob": 0.1,
|
10 |
+
"hidden_size": 768,
|
11 |
+
"initializer_range": 0.02,
|
12 |
+
"intermediate_size": 3072,
|
13 |
+
"layer_norm_eps": 1e-12,
|
14 |
+
"max_position_embeddings": 512,
|
15 |
+
"model_type": "bert",
|
16 |
+
"num_attention_heads": 12,
|
17 |
+
"num_hidden_layers": 12,
|
18 |
+
"pad_token_id": 0,
|
19 |
+
"position_embedding_type": "absolute",
|
20 |
+
"torch_dtype": "float32",
|
21 |
+
"transformers_version": "4.9.1",
|
22 |
+
"type_vocab_size": 2,
|
23 |
+
"use_cache": true,
|
24 |
+
"vocab_size": 30522
|
25 |
+
}
|
eval_predictions.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
eval_results.json
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"epoch": 20.0,
|
3 |
+
"eval_exact_match": 83.75591296121098,
|
4 |
+
"eval_f1": 90.22612551043655,
|
5 |
+
"eval_samples": 10784
|
6 |
+
}
|
ir/sparsity_structures.csv
ADDED
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
pt_module_name,block_id,orig_w_shape,final_w_shape,orig_b_shape,final_b_shape,prune_by,id_to_keep,head_id_to_keep,nncf_graph_node
|
2 |
+
nncf_module.bert.encoder.layer.0.attention.self.query,0,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[3, 7, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
3 |
+
nncf_module.bert.encoder.layer.0.attention.self.key,0,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[3, 7, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
4 |
+
nncf_module.bert.encoder.layer.0.attention.output.dense,0,"(768, 768)","(768, 256)","(768,)","(768,)",group of 64 cols,See pkl,"[3, 7, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
5 |
+
nncf_module.bert.encoder.layer.0.attention.self.value,0,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[3, 7, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
6 |
+
nncf_module.bert.encoder.layer.0.intermediate.dense,1,"(3072, 768)","(2199, 768)","(3072,)","(2199,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
7 |
+
nncf_module.bert.encoder.layer.0.output.dense,1,"(768, 3072)","(768, 2199)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
8 |
+
nncf_module.bert.encoder.layer.1.attention.output.dense,2,"(768, 768)","(768, 320)","(768,)","(768,)",group of 64 cols,See pkl,"[1, 4, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
9 |
+
nncf_module.bert.encoder.layer.1.attention.self.key,2,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[1, 4, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
10 |
+
nncf_module.bert.encoder.layer.1.attention.self.query,2,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[1, 4, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
11 |
+
nncf_module.bert.encoder.layer.1.attention.self.value,2,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[1, 4, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
12 |
+
nncf_module.bert.encoder.layer.1.intermediate.dense,3,"(3072, 768)","(2102, 768)","(3072,)","(2102,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
13 |
+
nncf_module.bert.encoder.layer.1.output.dense,3,"(768, 3072)","(768, 2102)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
14 |
+
nncf_module.bert.encoder.layer.2.attention.self.query,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
15 |
+
nncf_module.bert.encoder.layer.2.attention.output.dense,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
16 |
+
nncf_module.bert.encoder.layer.2.attention.self.key,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
17 |
+
nncf_module.bert.encoder.layer.2.attention.self.value,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
18 |
+
nncf_module.bert.encoder.layer.2.output.dense,5,"(768, 3072)","(768, 2304)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
19 |
+
nncf_module.bert.encoder.layer.2.intermediate.dense,5,"(3072, 768)","(2304, 768)","(3072,)","(2304,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
20 |
+
nncf_module.bert.encoder.layer.3.attention.self.value,6,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
21 |
+
nncf_module.bert.encoder.layer.3.attention.self.query,6,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
22 |
+
nncf_module.bert.encoder.layer.3.attention.output.dense,6,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
23 |
+
nncf_module.bert.encoder.layer.3.attention.self.key,6,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
24 |
+
nncf_module.bert.encoder.layer.3.intermediate.dense,7,"(3072, 768)","(2243, 768)","(3072,)","(2243,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
25 |
+
nncf_module.bert.encoder.layer.3.output.dense,7,"(768, 3072)","(768, 2243)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
26 |
+
nncf_module.bert.encoder.layer.4.attention.self.query,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
27 |
+
nncf_module.bert.encoder.layer.4.attention.output.dense,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
28 |
+
nncf_module.bert.encoder.layer.4.attention.self.key,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
29 |
+
nncf_module.bert.encoder.layer.4.attention.self.value,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
30 |
+
nncf_module.bert.encoder.layer.4.intermediate.dense,9,"(3072, 768)","(2042, 768)","(3072,)","(2042,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
31 |
+
nncf_module.bert.encoder.layer.4.output.dense,9,"(768, 3072)","(768, 2042)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
32 |
+
nncf_module.bert.encoder.layer.5.attention.self.key,10,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
33 |
+
nncf_module.bert.encoder.layer.5.attention.self.query,10,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
34 |
+
nncf_module.bert.encoder.layer.5.attention.self.value,10,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
35 |
+
nncf_module.bert.encoder.layer.5.attention.output.dense,10,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
36 |
+
nncf_module.bert.encoder.layer.5.intermediate.dense,11,"(3072, 768)","(2007, 768)","(3072,)","(2007,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
37 |
+
nncf_module.bert.encoder.layer.5.output.dense,11,"(768, 3072)","(768, 2007)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
38 |
+
nncf_module.bert.encoder.layer.6.attention.self.value,12,"(768, 768)","(384, 768)","(768,)","(384,)",group of 64 rows,See pkl,"[0, 1, 7, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
39 |
+
nncf_module.bert.encoder.layer.6.attention.self.query,12,"(768, 768)","(384, 768)","(768,)","(384,)",group of 64 rows,See pkl,"[0, 1, 7, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
40 |
+
nncf_module.bert.encoder.layer.6.attention.self.key,12,"(768, 768)","(384, 768)","(768,)","(384,)",group of 64 rows,See pkl,"[0, 1, 7, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
41 |
+
nncf_module.bert.encoder.layer.6.attention.output.dense,12,"(768, 768)","(768, 384)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 7, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
42 |
+
nncf_module.bert.encoder.layer.6.intermediate.dense,13,"(3072, 768)","(1610, 768)","(3072,)","(1610,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
43 |
+
nncf_module.bert.encoder.layer.6.output.dense,13,"(768, 3072)","(768, 1610)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
44 |
+
nncf_module.bert.encoder.layer.7.attention.output.dense,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
45 |
+
nncf_module.bert.encoder.layer.7.attention.self.query,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
46 |
+
nncf_module.bert.encoder.layer.7.attention.self.value,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
47 |
+
nncf_module.bert.encoder.layer.7.attention.self.key,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
48 |
+
nncf_module.bert.encoder.layer.7.intermediate.dense,15,"(3072, 768)","(1262, 768)","(3072,)","(1262,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
49 |
+
nncf_module.bert.encoder.layer.7.output.dense,15,"(768, 3072)","(768, 1262)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
50 |
+
nncf_module.bert.encoder.layer.8.attention.self.value,16,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
51 |
+
nncf_module.bert.encoder.layer.8.attention.self.query,16,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
52 |
+
nncf_module.bert.encoder.layer.8.attention.self.key,16,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
53 |
+
nncf_module.bert.encoder.layer.8.attention.output.dense,16,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
54 |
+
nncf_module.bert.encoder.layer.8.output.dense,17,"(768, 3072)","(768, 794)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
55 |
+
nncf_module.bert.encoder.layer.8.intermediate.dense,17,"(3072, 768)","(794, 768)","(3072,)","(794,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
56 |
+
nncf_module.bert.encoder.layer.9.attention.self.query,18,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
57 |
+
nncf_module.bert.encoder.layer.9.attention.self.key,18,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
58 |
+
nncf_module.bert.encoder.layer.9.attention.self.value,18,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
59 |
+
nncf_module.bert.encoder.layer.9.attention.output.dense,18,"(768, 768)","(768, 320)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
60 |
+
nncf_module.bert.encoder.layer.9.intermediate.dense,19,"(3072, 768)","(305, 768)","(3072,)","(305,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
61 |
+
nncf_module.bert.encoder.layer.9.output.dense,19,"(768, 3072)","(768, 305)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
62 |
+
nncf_module.bert.encoder.layer.10.attention.self.value,20,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[3, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
63 |
+
nncf_module.bert.encoder.layer.10.attention.self.query,20,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[3, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
64 |
+
nncf_module.bert.encoder.layer.10.attention.output.dense,20,"(768, 768)","(768, 256)","(768,)","(768,)",group of 64 cols,See pkl,"[3, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
65 |
+
nncf_module.bert.encoder.layer.10.attention.self.key,20,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[3, 7, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
66 |
+
nncf_module.bert.encoder.layer.10.intermediate.dense,21,"(3072, 768)","(305, 768)","(3072,)","(305,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
67 |
+
nncf_module.bert.encoder.layer.10.output.dense,21,"(768, 3072)","(768, 305)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
68 |
+
nncf_module.bert.encoder.layer.11.attention.self.query,22,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[1, 2, 3, 4, 8]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
69 |
+
nncf_module.bert.encoder.layer.11.attention.output.dense,22,"(768, 768)","(768, 320)","(768,)","(768,)",group of 64 cols,See pkl,"[1, 2, 3, 4, 8]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
70 |
+
nncf_module.bert.encoder.layer.11.attention.self.value,22,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[1, 2, 3, 4, 8]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
71 |
+
nncf_module.bert.encoder.layer.11.attention.self.key,22,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[1, 2, 3, 4, 8]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
72 |
+
nncf_module.bert.encoder.layer.11.intermediate.dense,23,"(3072, 768)","(364, 768)","(3072,)","(364,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
73 |
+
nncf_module.bert.encoder.layer.11.output.dense,23,"(768, 3072)","(768, 364)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
ir/sparsity_structures.md
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
| | pt_module_name | block_id | orig_w_shape | final_w_shape | orig_b_shape | final_b_shape | prune_by | id_to_keep | head_id_to_keep | nncf_graph_node |
|
2 |
+
|---:|:---------------------------------------------------------|-----------:|:---------------|:----------------|:---------------|:----------------|:-----------------|:-------------|:---------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
3 |
+
| 0 | nncf_module.bert.encoder.layer.0.attention.self.query | 0 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [3, 7, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
4 |
+
| 1 | nncf_module.bert.encoder.layer.0.attention.self.key | 0 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [3, 7, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
5 |
+
| 2 | nncf_module.bert.encoder.layer.0.attention.output.dense | 0 | (768, 768) | (768, 256) | (768,) | (768,) | group of 64 cols | See pkl | [3, 7, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
6 |
+
| 3 | nncf_module.bert.encoder.layer.0.attention.self.value | 0 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [3, 7, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
7 |
+
| 4 | nncf_module.bert.encoder.layer.0.intermediate.dense | 1 | (3072, 768) | (2199, 768) | (3072,) | (2199,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
8 |
+
| 5 | nncf_module.bert.encoder.layer.0.output.dense | 1 | (768, 3072) | (768, 2199) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
9 |
+
| 6 | nncf_module.bert.encoder.layer.1.attention.output.dense | 2 | (768, 768) | (768, 320) | (768,) | (768,) | group of 64 cols | See pkl | [1, 4, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
10 |
+
| 7 | nncf_module.bert.encoder.layer.1.attention.self.key | 2 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [1, 4, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
11 |
+
| 8 | nncf_module.bert.encoder.layer.1.attention.self.query | 2 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [1, 4, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
12 |
+
| 9 | nncf_module.bert.encoder.layer.1.attention.self.value | 2 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [1, 4, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
13 |
+
| 10 | nncf_module.bert.encoder.layer.1.intermediate.dense | 3 | (3072, 768) | (2102, 768) | (3072,) | (2102,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
14 |
+
| 11 | nncf_module.bert.encoder.layer.1.output.dense | 3 | (768, 3072) | (768, 2102) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
15 |
+
| 12 | nncf_module.bert.encoder.layer.2.attention.self.query | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
16 |
+
| 13 | nncf_module.bert.encoder.layer.2.attention.output.dense | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
17 |
+
| 14 | nncf_module.bert.encoder.layer.2.attention.self.key | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
18 |
+
| 15 | nncf_module.bert.encoder.layer.2.attention.self.value | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
19 |
+
| 16 | nncf_module.bert.encoder.layer.2.output.dense | 5 | (768, 3072) | (768, 2304) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
20 |
+
| 17 | nncf_module.bert.encoder.layer.2.intermediate.dense | 5 | (3072, 768) | (2304, 768) | (3072,) | (2304,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
21 |
+
| 18 | nncf_module.bert.encoder.layer.3.attention.self.value | 6 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
22 |
+
| 19 | nncf_module.bert.encoder.layer.3.attention.self.query | 6 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
23 |
+
| 20 | nncf_module.bert.encoder.layer.3.attention.output.dense | 6 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
24 |
+
| 21 | nncf_module.bert.encoder.layer.3.attention.self.key | 6 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
25 |
+
| 22 | nncf_module.bert.encoder.layer.3.intermediate.dense | 7 | (3072, 768) | (2243, 768) | (3072,) | (2243,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
26 |
+
| 23 | nncf_module.bert.encoder.layer.3.output.dense | 7 | (768, 3072) | (768, 2243) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
27 |
+
| 24 | nncf_module.bert.encoder.layer.4.attention.self.query | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
28 |
+
| 25 | nncf_module.bert.encoder.layer.4.attention.output.dense | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
29 |
+
| 26 | nncf_module.bert.encoder.layer.4.attention.self.key | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
30 |
+
| 27 | nncf_module.bert.encoder.layer.4.attention.self.value | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
31 |
+
| 28 | nncf_module.bert.encoder.layer.4.intermediate.dense | 9 | (3072, 768) | (2042, 768) | (3072,) | (2042,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
32 |
+
| 29 | nncf_module.bert.encoder.layer.4.output.dense | 9 | (768, 3072) | (768, 2042) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
33 |
+
| 30 | nncf_module.bert.encoder.layer.5.attention.self.key | 10 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
34 |
+
| 31 | nncf_module.bert.encoder.layer.5.attention.self.query | 10 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
35 |
+
| 32 | nncf_module.bert.encoder.layer.5.attention.self.value | 10 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
36 |
+
| 33 | nncf_module.bert.encoder.layer.5.attention.output.dense | 10 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
37 |
+
| 34 | nncf_module.bert.encoder.layer.5.intermediate.dense | 11 | (3072, 768) | (2007, 768) | (3072,) | (2007,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
38 |
+
| 35 | nncf_module.bert.encoder.layer.5.output.dense | 11 | (768, 3072) | (768, 2007) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
39 |
+
| 36 | nncf_module.bert.encoder.layer.6.attention.self.value | 12 | (768, 768) | (384, 768) | (768,) | (384,) | group of 64 rows | See pkl | [0, 1, 7, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
40 |
+
| 37 | nncf_module.bert.encoder.layer.6.attention.self.query | 12 | (768, 768) | (384, 768) | (768,) | (384,) | group of 64 rows | See pkl | [0, 1, 7, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
41 |
+
| 38 | nncf_module.bert.encoder.layer.6.attention.self.key | 12 | (768, 768) | (384, 768) | (768,) | (384,) | group of 64 rows | See pkl | [0, 1, 7, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
42 |
+
| 39 | nncf_module.bert.encoder.layer.6.attention.output.dense | 12 | (768, 768) | (768, 384) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 7, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
43 |
+
| 40 | nncf_module.bert.encoder.layer.6.intermediate.dense | 13 | (3072, 768) | (1610, 768) | (3072,) | (1610,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
44 |
+
| 41 | nncf_module.bert.encoder.layer.6.output.dense | 13 | (768, 3072) | (768, 1610) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
45 |
+
| 42 | nncf_module.bert.encoder.layer.7.attention.output.dense | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
46 |
+
| 43 | nncf_module.bert.encoder.layer.7.attention.self.query | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
47 |
+
| 44 | nncf_module.bert.encoder.layer.7.attention.self.value | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
48 |
+
| 45 | nncf_module.bert.encoder.layer.7.attention.self.key | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
49 |
+
| 46 | nncf_module.bert.encoder.layer.7.intermediate.dense | 15 | (3072, 768) | (1262, 768) | (3072,) | (1262,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
50 |
+
| 47 | nncf_module.bert.encoder.layer.7.output.dense | 15 | (768, 3072) | (768, 1262) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
51 |
+
| 48 | nncf_module.bert.encoder.layer.8.attention.self.value | 16 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
52 |
+
| 49 | nncf_module.bert.encoder.layer.8.attention.self.query | 16 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
53 |
+
| 50 | nncf_module.bert.encoder.layer.8.attention.self.key | 16 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
54 |
+
| 51 | nncf_module.bert.encoder.layer.8.attention.output.dense | 16 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
55 |
+
| 52 | nncf_module.bert.encoder.layer.8.output.dense | 17 | (768, 3072) | (768, 794) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
56 |
+
| 53 | nncf_module.bert.encoder.layer.8.intermediate.dense | 17 | (3072, 768) | (794, 768) | (3072,) | (794,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
57 |
+
| 54 | nncf_module.bert.encoder.layer.9.attention.self.query | 18 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
58 |
+
| 55 | nncf_module.bert.encoder.layer.9.attention.self.key | 18 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
59 |
+
| 56 | nncf_module.bert.encoder.layer.9.attention.self.value | 18 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
60 |
+
| 57 | nncf_module.bert.encoder.layer.9.attention.output.dense | 18 | (768, 768) | (768, 320) | (768,) | (768,) | group of 64 cols | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
61 |
+
| 58 | nncf_module.bert.encoder.layer.9.intermediate.dense | 19 | (3072, 768) | (305, 768) | (3072,) | (305,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
62 |
+
| 59 | nncf_module.bert.encoder.layer.9.output.dense | 19 | (768, 3072) | (768, 305) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
63 |
+
| 60 | nncf_module.bert.encoder.layer.10.attention.self.value | 20 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [3, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
64 |
+
| 61 | nncf_module.bert.encoder.layer.10.attention.self.query | 20 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [3, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
65 |
+
| 62 | nncf_module.bert.encoder.layer.10.attention.output.dense | 20 | (768, 768) | (768, 256) | (768,) | (768,) | group of 64 cols | See pkl | [3, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
66 |
+
| 63 | nncf_module.bert.encoder.layer.10.attention.self.key | 20 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [3, 7, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
67 |
+
| 64 | nncf_module.bert.encoder.layer.10.intermediate.dense | 21 | (3072, 768) | (305, 768) | (3072,) | (305,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
68 |
+
| 65 | nncf_module.bert.encoder.layer.10.output.dense | 21 | (768, 3072) | (768, 305) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
69 |
+
| 66 | nncf_module.bert.encoder.layer.11.attention.self.query | 22 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [1, 2, 3, 4, 8] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
70 |
+
| 67 | nncf_module.bert.encoder.layer.11.attention.output.dense | 22 | (768, 768) | (768, 320) | (768,) | (768,) | group of 64 cols | See pkl | [1, 2, 3, 4, 8] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
71 |
+
| 68 | nncf_module.bert.encoder.layer.11.attention.self.value | 22 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [1, 2, 3, 4, 8] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
72 |
+
| 69 | nncf_module.bert.encoder.layer.11.attention.self.key | 22 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [1, 2, 3, 4, 8] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
73 |
+
| 70 | nncf_module.bert.encoder.layer.11.intermediate.dense | 23 | (3072, 768) | (364, 768) | (3072,) | (364,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
74 |
+
| 71 | nncf_module.bert.encoder.layer.11.output.dense | 23 | (768, 3072) | (768, 364) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
ir/sparsity_structures.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:52dbb205b9a91a3a0c5d7d6c249dc2f0bf5c40db9cd32131329e0b4196313839
|
3 |
+
size 194991
|
ir/squad-BertForQuestionAnswering.crop_cfg.sd.8bit.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5292cfc4dcbf261ee9f0bd1fbfaf4e51122151f2d136f01f8bf472f5c5f36c18
|
3 |
+
size 283454425
|
ir/squad-BertForQuestionAnswering.cropped.8bit.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:27101393a4bfce89e7fbfc1bd9dcfabc91649de134020fe211cf94126125f93e
|
3 |
+
size 71012444
|
ir/squad-BertForQuestionAnswering.cropped.8bit.mapping
ADDED
The diff for this file is too large to render.
See raw diff
|
|
ir/squad-BertForQuestionAnswering.cropped.8bit.onnx
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:66e9b1edc71bb07a4bc679b02814f86e68a187ab7199bb195c7ed388a347e449
|
3 |
+
size 283044900
|
ir/squad-BertForQuestionAnswering.cropped.8bit.xml
ADDED
The diff for this file is too large to render.
See raw diff
|
|
original_graph.dot
ADDED
The diff for this file is too large to render.
See raw diff
|
|
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
|
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-uncased", "tokenizer_class": "BertTokenizer"}
|
train_results.json
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"epoch": 20.0,
|
3 |
+
"train_loss": 0.9409083591845138,
|
4 |
+
"train_runtime": 242213.996,
|
5 |
+
"train_samples": 88524,
|
6 |
+
"train_samples_per_second": 7.31,
|
7 |
+
"train_steps_per_second": 0.457
|
8 |
+
}
|
training_args.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:bab812d2aa2fc47334c1d9a14b071f3f4ec6150a0bcf197f927db3b0246d0645
|
3 |
+
size 3055
|
vocab.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|