| # | Module | Type | Param | Shape | Total params | Non-zero params | Sparsity |
|---|--------|------|-------|-------|--------------|-----------------|----------|
| 0 | nncf_module.bert.embeddings.word_embeddings | NNCFEmbedding | weight | [30522, 768] | 23440896 | 23440896 | 0 |
| 1 | nncf_module.bert.embeddings.position_embeddings | NNCFEmbedding | weight | [512, 768] | 393216 | 393216 | 0 |
| 2 | nncf_module.bert.embeddings.token_type_embeddings | NNCFEmbedding | weight | [2, 768] | 1536 | 1536 | 0 |
| 3 | nncf_module.bert.embeddings.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 4 | nncf_module.bert.embeddings.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 5 | nncf_module.bert.encoder.layer.0.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 6 | nncf_module.bert.encoder.layer.0.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 7 | nncf_module.bert.encoder.layer.0.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 8 | nncf_module.bert.encoder.layer.0.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 9 | nncf_module.bert.encoder.layer.0.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 10 | nncf_module.bert.encoder.layer.0.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 11 | nncf_module.bert.encoder.layer.0.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 12 | nncf_module.bert.encoder.layer.0.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 13 | nncf_module.bert.encoder.layer.0.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 14 | nncf_module.bert.encoder.layer.0.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 15 | nncf_module.bert.encoder.layer.0.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 16 | nncf_module.bert.encoder.layer.0.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 17 | nncf_module.bert.encoder.layer.0.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 18 | nncf_module.bert.encoder.layer.0.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 19 | nncf_module.bert.encoder.layer.0.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 20 | nncf_module.bert.encoder.layer.0.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 21 | nncf_module.bert.encoder.layer.1.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 22 | nncf_module.bert.encoder.layer.1.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 23 | nncf_module.bert.encoder.layer.1.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 24 | nncf_module.bert.encoder.layer.1.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 25 | nncf_module.bert.encoder.layer.1.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 26 | nncf_module.bert.encoder.layer.1.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 27 | nncf_module.bert.encoder.layer.1.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 28 | nncf_module.bert.encoder.layer.1.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 29 | nncf_module.bert.encoder.layer.1.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 30 | nncf_module.bert.encoder.layer.1.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 31 | nncf_module.bert.encoder.layer.1.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 32 | nncf_module.bert.encoder.layer.1.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 33 | nncf_module.bert.encoder.layer.1.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 34 | nncf_module.bert.encoder.layer.1.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 35 | nncf_module.bert.encoder.layer.1.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 36 | nncf_module.bert.encoder.layer.1.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 37 | nncf_module.bert.encoder.layer.2.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 38 | nncf_module.bert.encoder.layer.2.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 39 | nncf_module.bert.encoder.layer.2.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 40 | nncf_module.bert.encoder.layer.2.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 41 | nncf_module.bert.encoder.layer.2.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 42 | nncf_module.bert.encoder.layer.2.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 43 | nncf_module.bert.encoder.layer.2.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 44 | nncf_module.bert.encoder.layer.2.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 45 | nncf_module.bert.encoder.layer.2.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 46 | nncf_module.bert.encoder.layer.2.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 47 | nncf_module.bert.encoder.layer.2.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 48 | nncf_module.bert.encoder.layer.2.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 49 | nncf_module.bert.encoder.layer.2.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 50 | nncf_module.bert.encoder.layer.2.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 51 | nncf_module.bert.encoder.layer.2.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 52 | nncf_module.bert.encoder.layer.2.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 53 | nncf_module.bert.encoder.layer.3.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 54 | nncf_module.bert.encoder.layer.3.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 55 | nncf_module.bert.encoder.layer.3.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 56 | nncf_module.bert.encoder.layer.3.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 57 | nncf_module.bert.encoder.layer.3.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 58 | nncf_module.bert.encoder.layer.3.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 59 | nncf_module.bert.encoder.layer.3.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 60 | nncf_module.bert.encoder.layer.3.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 61 | nncf_module.bert.encoder.layer.3.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 62 | nncf_module.bert.encoder.layer.3.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 63 | nncf_module.bert.encoder.layer.3.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 64 | nncf_module.bert.encoder.layer.3.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 65 | nncf_module.bert.encoder.layer.3.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 66 | nncf_module.bert.encoder.layer.3.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 67 | nncf_module.bert.encoder.layer.3.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 68 | nncf_module.bert.encoder.layer.3.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 69 | nncf_module.bert.encoder.layer.4.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 70 | nncf_module.bert.encoder.layer.4.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 71 | nncf_module.bert.encoder.layer.4.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 72 | nncf_module.bert.encoder.layer.4.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 73 | nncf_module.bert.encoder.layer.4.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 74 | nncf_module.bert.encoder.layer.4.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 75 | nncf_module.bert.encoder.layer.4.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 76 | nncf_module.bert.encoder.layer.4.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 77 | nncf_module.bert.encoder.layer.4.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 78 | nncf_module.bert.encoder.layer.4.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 79 | nncf_module.bert.encoder.layer.4.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 80 | nncf_module.bert.encoder.layer.4.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 81 | nncf_module.bert.encoder.layer.4.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 82 | nncf_module.bert.encoder.layer.4.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 83 | nncf_module.bert.encoder.layer.4.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 84 | nncf_module.bert.encoder.layer.4.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 85 | nncf_module.bert.encoder.layer.5.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 86 | nncf_module.bert.encoder.layer.5.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 87 | nncf_module.bert.encoder.layer.5.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 88 | nncf_module.bert.encoder.layer.5.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 89 | nncf_module.bert.encoder.layer.5.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 90 | nncf_module.bert.encoder.layer.5.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 91 | nncf_module.bert.encoder.layer.5.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 92 | nncf_module.bert.encoder.layer.5.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 93 | nncf_module.bert.encoder.layer.5.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 94 | nncf_module.bert.encoder.layer.5.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 95 | nncf_module.bert.encoder.layer.5.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 96 | nncf_module.bert.encoder.layer.5.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 97 | nncf_module.bert.encoder.layer.5.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 98 | nncf_module.bert.encoder.layer.5.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 99 | nncf_module.bert.encoder.layer.5.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 100 | nncf_module.bert.encoder.layer.5.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 101 | nncf_module.bert.encoder.layer.6.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 102 | nncf_module.bert.encoder.layer.6.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 103 | nncf_module.bert.encoder.layer.6.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 104 | nncf_module.bert.encoder.layer.6.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 105 | nncf_module.bert.encoder.layer.6.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 106 | nncf_module.bert.encoder.layer.6.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 107 | nncf_module.bert.encoder.layer.6.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 108 | nncf_module.bert.encoder.layer.6.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 109 | nncf_module.bert.encoder.layer.6.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 110 | nncf_module.bert.encoder.layer.6.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 111 | nncf_module.bert.encoder.layer.6.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 112 | nncf_module.bert.encoder.layer.6.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 113 | nncf_module.bert.encoder.layer.6.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 114 | nncf_module.bert.encoder.layer.6.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 115 | nncf_module.bert.encoder.layer.6.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 116 | nncf_module.bert.encoder.layer.6.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 117 | nncf_module.bert.encoder.layer.7.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 118 | nncf_module.bert.encoder.layer.7.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 119 | nncf_module.bert.encoder.layer.7.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 120 | nncf_module.bert.encoder.layer.7.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 121 | nncf_module.bert.encoder.layer.7.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 122 | nncf_module.bert.encoder.layer.7.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 123 | nncf_module.bert.encoder.layer.7.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 124 | nncf_module.bert.encoder.layer.7.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 125 | nncf_module.bert.encoder.layer.7.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 126 | nncf_module.bert.encoder.layer.7.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 127 | nncf_module.bert.encoder.layer.7.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 128 | nncf_module.bert.encoder.layer.7.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 129 | nncf_module.bert.encoder.layer.7.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235929 | 0.9 |
| 130 | nncf_module.bert.encoder.layer.7.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 131 | nncf_module.bert.encoder.layer.7.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 132 | nncf_module.bert.encoder.layer.7.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 133 | nncf_module.bert.encoder.layer.8.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 134 | nncf_module.bert.encoder.layer.8.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 135 | nncf_module.bert.encoder.layer.8.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 136 | nncf_module.bert.encoder.layer.8.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 137 | nncf_module.bert.encoder.layer.8.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 138 | nncf_module.bert.encoder.layer.8.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 139 | nncf_module.bert.encoder.layer.8.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 140 | nncf_module.bert.encoder.layer.8.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 141 | nncf_module.bert.encoder.layer.8.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 142 | nncf_module.bert.encoder.layer.8.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 143 | nncf_module.bert.encoder.layer.8.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 144 | nncf_module.bert.encoder.layer.8.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 145 | nncf_module.bert.encoder.layer.8.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 146 | nncf_module.bert.encoder.layer.8.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 147 | nncf_module.bert.encoder.layer.8.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 148 | nncf_module.bert.encoder.layer.8.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 149 | nncf_module.bert.encoder.layer.9.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 150 | nncf_module.bert.encoder.layer.9.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 151 | nncf_module.bert.encoder.layer.9.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 152 | nncf_module.bert.encoder.layer.9.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 153 | nncf_module.bert.encoder.layer.9.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 154 | nncf_module.bert.encoder.layer.9.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 155 | nncf_module.bert.encoder.layer.9.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 156 | nncf_module.bert.encoder.layer.9.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 157 | nncf_module.bert.encoder.layer.9.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 158 | nncf_module.bert.encoder.layer.9.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 159 | nncf_module.bert.encoder.layer.9.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 160 | nncf_module.bert.encoder.layer.9.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 161 | nncf_module.bert.encoder.layer.9.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 162 | nncf_module.bert.encoder.layer.9.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 163 | nncf_module.bert.encoder.layer.9.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 164 | nncf_module.bert.encoder.layer.9.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 165 | nncf_module.bert.encoder.layer.10.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 166 | nncf_module.bert.encoder.layer.10.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 167 | nncf_module.bert.encoder.layer.10.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 168 | nncf_module.bert.encoder.layer.10.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 169 | nncf_module.bert.encoder.layer.10.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 170 | nncf_module.bert.encoder.layer.10.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 171 | nncf_module.bert.encoder.layer.10.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 172 | nncf_module.bert.encoder.layer.10.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 173 | nncf_module.bert.encoder.layer.10.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 174 | nncf_module.bert.encoder.layer.10.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 175 | nncf_module.bert.encoder.layer.10.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 176 | nncf_module.bert.encoder.layer.10.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 177 | nncf_module.bert.encoder.layer.10.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235929 | 0.9 |
| 178 | nncf_module.bert.encoder.layer.10.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 179 | nncf_module.bert.encoder.layer.10.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 180 | nncf_module.bert.encoder.layer.10.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 181 | nncf_module.bert.encoder.layer.11.attention.self.query | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 182 | nncf_module.bert.encoder.layer.11.attention.self.query | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 183 | nncf_module.bert.encoder.layer.11.attention.self.key | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 184 | nncf_module.bert.encoder.layer.11.attention.self.key | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 185 | nncf_module.bert.encoder.layer.11.attention.self.value | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 186 | nncf_module.bert.encoder.layer.11.attention.self.value | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 187 | nncf_module.bert.encoder.layer.11.attention.output.dense | NNCFLinear | weight | [768, 768] | 589824 | 58983 | 0.899999 |
| 188 | nncf_module.bert.encoder.layer.11.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 189 | nncf_module.bert.encoder.layer.11.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 190 | nncf_module.bert.encoder.layer.11.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 191 | nncf_module.bert.encoder.layer.11.intermediate.dense | NNCFLinear | weight | [3072, 768] | 2359296 | 235930 | 0.9 |
| 192 | nncf_module.bert.encoder.layer.11.intermediate.dense | NNCFLinear | bias | [3072] | 3072 | 3072 | 0 |
| 193 | nncf_module.bert.encoder.layer.11.output.dense | NNCFLinear | weight | [768, 3072] | 2359296 | 235930 | 0.9 |
| 194 | nncf_module.bert.encoder.layer.11.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 195 | nncf_module.bert.encoder.layer.11.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 196 | nncf_module.bert.encoder.layer.11.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 197 | nncf_module.qa_outputs | NNCFLinear | weight | [2, 768] | 1536 | 1536 | 0 |
| 198 | nncf_module.qa_outputs | NNCFLinear | bias | [2] | 2 | 2 | 0 |