Column schema (dtype and observed value range across the dataset):

| column | dtype | range / values |
|---|---|---|
| layer_id | int64 | 0 to 223 |
| name | string | lengths 26 to 32 |
| D | float64 | 0.07 to 0.18 |
| M | int64 | 1.02k to 4.1k |
| N | int64 | 4.1k to 14.3k |
| Q | float64 | 1 to 4 |
| alpha | float64 | 1.67 to 8.98 |
| alpha_weighted | float64 | -8.36 to 1.59 |
| entropy | float64 | 0.8 to 1.31 |
| has_esd | bool | 1 class |
| lambda_max | float32 | 0.03 to 5.52 |
| layer_type | string | 1 value |
| log_alpha_norm | float64 | -8.25 to 1.61 |
| log_norm | float32 | -0.65 to 1.02 |
| log_spectral_norm | float32 | -1.47 to 0.74 |
| matrix_rank | int64 | 64 to 64 |
| norm | float32 | 0.22 to 10.5 |
| num_evals | int64 | 1.02k to 4.1k |
| num_pl_spikes | int64 | 5 to 24 |
| rank_loss | int64 | 960 to 4.03k |
| rf | int64 | 1 to 1 |
| sigma | float64 | 0.14 to 3.57 |
| spectral_norm | float32 | 0.03 to 5.52 |
| stable_rank | float32 | 1.46 to 10.7 |
| status | string | 1 value |
| sv_max | float64 | 0.18 to 2.35 |
| sv_min | float64 | 0 to 0 |
| warning | string | 3 values |
| weak_rank_loss | int64 | 960 to 4.03k |
| xmax | float64 | 0.03 to 5.52 |
| xmin | float64 | 0 to 0.72 |
Per-layer metrics (layer_id 100 to 199, model.layers.14 through model.layers.28):

| layer_id | name | D | M | N | Q | alpha | alpha_weighted | entropy | has_esd | lambda_max | layer_type | log_alpha_norm | log_norm | log_spectral_norm | matrix_rank | norm | num_evals | num_pl_spikes | rank_loss | rf | sigma | spectral_norm | stable_rank | status | sv_max | sv_min | warning | weak_rank_loss | xmax | xmin |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 100 | model.layers.14.mlp.up_proj | 0.131757 | 4,096 | 14,336 | 3.5 | 3.305637 | -0.190549 | 1.247377 | true | 0.875702 | dense | 0.208224 | 0.787785 | -0.057644 | 64 | 6.134576 | 4,096 | 10 | 4,032 | 1 | 0.729106 | 0.875702 | 7.005324 | success | 0.935789 | 0.000003 | | 4,032 | 0.875702 | 0.289319 |
| 101 | model.layers.14.self_attn.k_proj | 0.127773 | 1,024 | 4,096 | 4 | 2.202169 | -2.054667 | 0.931148 | true | 0.116676 | dense | -1.825839 | -0.226163 | -0.93302 | 64 | 0.59407 | 1,024 | 12 | 960 | 1 | 0.347036 | 0.116676 | 5.091632 | success | 0.341578 | 0.000001 | | 960 | 0.116676 | 0.01184 |
| 102 | model.layers.14.self_attn.o_proj | 0.136108 | 4,096 | 4,096 | 1 | 3.720698 | -2.886898 | 1.293951 | true | 0.167532 | dense | -2.644132 | 0.082323 | -0.775902 | 64 | 1.208711 | 4,096 | 9 | 4,032 | 1 | 0.906899 | 0.167532 | 7.21481 | success | 0.409307 | 0 | | 4,032 | 0.167532 | 0.060565 |
| 103 | model.layers.14.self_attn.q_proj | 0.102481 | 4,096 | 4,096 | 1 | 2.354288 | -0.407783 | 1.214213 | true | 0.671106 | dense | -0.295325 | 0.336158 | -0.173209 | 64 | 2.168491 | 4,096 | 10 | 4,032 | 1 | 0.428264 | 0.671106 | 3.231219 | success | 0.819211 | 0 | | 4,032 | 0.671106 | 0.059556 |
| 104 | model.layers.14.self_attn.v_proj | 0.131518 | 1,024 | 4,096 | 4 | 4.733695 | -6.06428 | 0.934887 | true | 0.052349 | dense | -5.994823 | -0.440754 | -1.281088 | 64 | 0.362448 | 1,024 | 6 | 960 | 1 | 1.524274 | 0.052349 | 6.92363 | success | 0.2288 | 0.000001 | | 960 | 0.052349 | 0.021353 |
| 105 | model.layers.15.mlp.down_proj | 0.171227 | 4,096 | 14,336 | 3.5 | 1.99487 | -0.556694 | 1.252345 | true | 0.525941 | dense | -0.042527 | 0.516673 | -0.279063 | 64 | 3.28604 | 4,096 | 16 | 4,032 | 1 | 0.248718 | 0.525941 | 6.247919 | success | 0.725218 | 0.000002 | over-trained | 4,032 | 0.525941 | 0.056064 |
| 106 | model.layers.15.mlp.gate_proj | 0.119746 | 4,096 | 14,336 | 3.5 | 2.603469 | 0.808726 | 1.200097 | true | 2.04472 | dense | 0.959656 | 0.914757 | 0.310634 | 64 | 8.217831 | 4,096 | 9 | 4,032 | 1 | 0.53449 | 2.04472 | 4.019049 | success | 1.429937 | 0.000003 | | 4,032 | 2.04472 | 0.336225 |
| 107 | model.layers.15.mlp.up_proj | 0.147782 | 4,096 | 14,336 | 3.5 | 3.05811 | -0.096554 | 1.254071 | true | 0.92988 | dense | 0.227045 | 0.797362 | -0.031573 | 64 | 6.271364 | 4,096 | 12 | 4,032 | 1 | 0.594125 | 0.92988 | 6.744273 | success | 0.964303 | 0.000003 | | 4,032 | 0.92988 | 0.251676 |
| 108 | model.layers.15.self_attn.k_proj | 0.163836 | 1,024 | 4,096 | 4 | 1.713777 | -1.625523 | 0.928907 | true | 0.112589 | dense | -1.161297 | -0.166777 | -0.948503 | 64 | 0.68112 | 1,024 | 24 | 960 | 1 | 0.145699 | 0.112589 | 6.049599 | success | 0.335543 | 0.000001 | over-trained | 960 | 0.112589 | 0.00327 |
| 109 | model.layers.15.self_attn.o_proj | 0.157265 | 4,096 | 4,096 | 1 | 8.975181 | -7.85196 | 1.30193 | true | 0.133397 | dense | -7.763737 | 0.078522 | -0.874853 | 64 | 1.198179 | 4,096 | 5 | 4,032 | 1 | 3.56661 | 0.133397 | 8.982023 | success | 0.365236 | 0 | under-trained | 4,032 | 0.133397 | 0.089839 |
| 110 | model.layers.15.self_attn.q_proj | 0.082726 | 4,096 | 4,096 | 1 | 2.323039 | -0.420482 | 1.21887 | true | 0.659166 | dense | -0.304926 | 0.340136 | -0.181005 | 64 | 2.188446 | 4,096 | 10 | 4,032 | 1 | 0.418382 | 0.659166 | 3.320023 | success | 0.81189 | 0 | | 4,032 | 0.659166 | 0.060314 |
| 111 | model.layers.15.self_attn.v_proj | 0.139696 | 1,024 | 4,096 | 4 | 5.781924 | -8.355104 | 0.947199 | true | 0.035889 | dense | -8.245389 | -0.485453 | -1.445039 | 64 | 0.326999 | 1,024 | 7 | 960 | 1 | 1.807397 | 0.035889 | 9.111416 | success | 0.189444 | 0.000001 | | 960 | 0.035889 | 0.017794 |
| 112 | model.layers.16.mlp.down_proj | 0.102523 | 4,096 | 14,336 | 3.5 | 2.874258 | -0.30288 | 1.235517 | true | 0.784555 | dense | -0.173643 | 0.533015 | -0.105377 | 64 | 3.412051 | 4,096 | 7 | 4,032 | 1 | 0.708403 | 0.784555 | 4.349028 | success | 0.885751 | 0.000002 | | 4,032 | 0.784555 | 0.172833 |
| 113 | model.layers.16.mlp.gate_proj | 0.123873 | 4,096 | 14,336 | 3.5 | 3.09891 | 1.062916 | 1.195146 | true | 2.20291 | dense | 1.147086 | 0.929532 | 0.342997 | 64 | 8.502209 | 4,096 | 6 | 4,032 | 1 | 0.856877 | 2.20291 | 3.859535 | success | 1.484221 | 0.000003 | | 4,032 | 2.20291 | 0.525521 |
| 114 | model.layers.16.mlp.up_proj | 0.169814 | 4,096 | 14,336 | 3.5 | 3.818406 | -0.336112 | 1.252713 | true | 0.816537 | dense | 0.045831 | 0.797702 | -0.088024 | 64 | 6.276278 | 4,096 | 8 | 4,032 | 1 | 0.996457 | 0.816537 | 7.686459 | success | 0.903624 | 0.000003 | | 4,032 | 0.816537 | 0.360256 |
| 115 | model.layers.16.self_attn.k_proj | 0.111774 | 1,024 | 4,096 | 4 | 1.834971 | -1.671906 | 0.905654 | true | 0.122706 | dense | -1.235767 | -0.158247 | -0.911135 | 64 | 0.694629 | 1,024 | 17 | 960 | 1 | 0.20251 | 0.122706 | 5.660932 | success | 0.350294 | 0.000001 | over-trained | 960 | 0.122706 | 0.006415 |
| 116 | model.layers.16.self_attn.o_proj | 0.173298 | 4,096 | 4,096 | 1 | 5.412847 | -5.165209 | 1.307261 | true | 0.111109 | dense | -4.66492 | 0.070274 | -0.95425 | 64 | 1.175639 | 4,096 | 8 | 4,032 | 1 | 1.560177 | 0.111109 | 10.580933 | success | 0.33333 | 0 | | 4,032 | 0.111109 | 0.068463 |
| 117 | model.layers.16.self_attn.q_proj | 0.121785 | 4,096 | 4,096 | 1 | 2.412913 | -0.405691 | 1.225183 | true | 0.678995 | dense | -0.337676 | 0.330572 | -0.168133 | 64 | 2.140781 | 4,096 | 11 | 4,032 | 1 | 0.426009 | 0.678995 | 3.152866 | success | 0.824012 | 0 | | 4,032 | 0.678995 | 0.057183 |
| 118 | model.layers.16.self_attn.v_proj | 0.138603 | 1,024 | 4,096 | 4 | 5.562963 | -7.838267 | 0.931375 | true | 0.038993 | dense | -7.66819 | -0.473419 | -1.409009 | 64 | 0.336187 | 1,024 | 5 | 960 | 1 | 2.040619 | 0.038993 | 8.621655 | success | 0.197467 | 0.000002 | | 960 | 0.038993 | 0.02216 |
| 119 | model.layers.17.mlp.down_proj | 0.122398 | 4,096 | 14,336 | 3.5 | 2.266409 | -0.025874 | 1.215847 | true | 0.974056 | dense | 0.131994 | 0.560576 | -0.011416 | 64 | 3.635595 | 4,096 | 12 | 4,032 | 1 | 0.365581 | 0.974056 | 3.732431 | success | 0.986943 | 0.000002 | | 4,032 | 0.974056 | 0.092465 |
| 120 | model.layers.17.mlp.gate_proj | 0.170991 | 4,096 | 14,336 | 3.5 | 4.450642 | 1.345386 | 1.209233 | true | 2.005813 | dense | 1.381316 | 0.940736 | 0.30229 | 64 | 8.724406 | 4,096 | 5 | 4,032 | 1 | 1.543174 | 2.005813 | 4.349561 | success | 1.416267 | 0.000003 | | 4,032 | 2.005813 | 0.719006 |
| 121 | model.layers.17.mlp.up_proj | 0.142645 | 4,096 | 14,336 | 3.5 | 3.587006 | -0.113954 | 1.249289 | true | 0.929462 | dense | 0.17591 | 0.797667 | -0.031769 | 64 | 6.275765 | 4,096 | 8 | 4,032 | 1 | 0.914645 | 0.929462 | 6.752044 | success | 0.964086 | 0.000003 | | 4,032 | 0.929462 | 0.345897 |
| 122 | model.layers.17.self_attn.k_proj | 0.066383 | 1,024 | 4,096 | 4 | 2.068498 | -1.309209 | 0.84157 | true | 0.232848 | dense | -1.199792 | -0.137876 | -0.632927 | 64 | 0.727987 | 1,024 | 15 | 960 | 1 | 0.275885 | 0.232848 | 3.126445 | success | 0.482543 | 0.000001 | | 960 | 0.232848 | 0.008742 |
| 123 | model.layers.17.self_attn.o_proj | 0.173403 | 4,096 | 4,096 | 1 | 2.915579 | -2.406947 | 1.299654 | true | 0.149435 | dense | -1.859997 | 0.127951 | -0.825547 | 64 | 1.342613 | 4,096 | 14 | 4,032 | 1 | 0.51196 | 0.149435 | 8.984578 | success | 0.386569 | 0 | | 4,032 | 0.149435 | 0.046568 |
| 124 | model.layers.17.self_attn.q_proj | 0.098829 | 4,096 | 4,096 | 1 | 2.25746 | -0.652985 | 1.236042 | true | 0.51374 | dense | -0.42149 | 0.319594 | -0.289256 | 64 | 2.087345 | 4,096 | 13 | 4,032 | 1 | 0.348757 | 0.51374 | 4.063035 | success | 0.716757 | 0 | | 4,032 | 0.51374 | 0.044559 |
| 125 | model.layers.17.self_attn.v_proj | 0.150562 | 1,024 | 4,096 | 4 | 3.044586 | -4.463512 | 0.943646 | true | 0.034194 | dense | -3.898037 | -0.449787 | -1.466049 | 64 | 0.354987 | 1,024 | 15 | 960 | 1 | 0.52791 | 0.034194 | 10.381537 | success | 0.184916 | 0.000001 | | 960 | 0.034194 | 0.010664 |
| 126 | model.layers.18.mlp.down_proj | 0.101494 | 4,096 | 14,336 | 3.5 | 2.655248 | -0.433637 | 1.247586 | true | 0.686573 | dense | -0.172236 | 0.549447 | -0.163313 | 64 | 3.543616 | 4,096 | 13 | 4,032 | 1 | 0.459083 | 0.686573 | 5.161307 | success | 0.828597 | 0.000002 | | 4,032 | 0.686573 | 0.109767 |
| 127 | model.layers.18.mlp.gate_proj | 0.106351 | 4,096 | 14,336 | 3.5 | 2.48854 | 0.980212 | 1.177173 | true | 2.476798 | dense | 1.112914 | 0.938791 | 0.393891 | 64 | 8.685427 | 4,096 | 9 | 4,032 | 1 | 0.49618 | 2.476798 | 3.506715 | success | 1.573785 | 0.000003 | | 4,032 | 2.476798 | 0.325471 |
| 128 | model.layers.18.mlp.up_proj | 0.159954 | 4,096 | 14,336 | 3.5 | 3.164737 | 0.089185 | 1.247528 | true | 1.06704 | dense | 0.400486 | 0.847518 | 0.028181 | 64 | 7.03912 | 4,096 | 11 | 4,032 | 1 | 0.652693 | 1.06704 | 6.596863 | success | 1.032977 | 0.000003 | | 4,032 | 1.06704 | 0.302959 |
| 129 | model.layers.18.self_attn.k_proj | 0.098069 | 1,024 | 4,096 | 4 | 2.073608 | -1.889186 | 0.911503 | true | 0.122726 | dense | -1.558499 | -0.179031 | -0.911062 | 64 | 0.66217 | 1,024 | 15 | 960 | 1 | 0.277204 | 0.122726 | 5.395501 | success | 0.350323 | 0.000001 | | 960 | 0.122726 | 0.009846 |
| 130 | model.layers.18.self_attn.o_proj | 0.126288 | 4,096 | 4,096 | 1 | 3.261624 | -2.498722 | 1.288925 | true | 0.171357 | dense | -2.095774 | 0.126366 | -0.766097 | 64 | 1.337722 | 4,096 | 13 | 4,032 | 1 | 0.627262 | 0.171357 | 7.806625 | success | 0.413953 | 0 | | 4,032 | 0.171357 | 0.051503 |
| 131 | model.layers.18.self_attn.q_proj | 0.125804 | 4,096 | 4,096 | 1 | 2.773273 | -0.795442 | 1.264608 | true | 0.516625 | dense | -0.659577 | 0.356131 | -0.286824 | 64 | 2.27055 | 4,096 | 7 | 4,032 | 1 | 0.670234 | 0.516625 | 4.394965 | success | 0.718767 | 0 | | 4,032 | 0.516625 | 0.106271 |
| 132 | model.layers.18.self_attn.v_proj | 0.132688 | 1,024 | 4,096 | 4 | 3.232591 | -4.593876 | 0.934964 | true | 0.037922 | dense | -4.118278 | -0.455066 | -1.421113 | 64 | 0.350699 | 1,024 | 12 | 960 | 1 | 0.644494 | 0.037922 | 9.247979 | success | 0.194735 | 0.000002 | | 960 | 0.037922 | 0.012452 |
| 133 | model.layers.19.mlp.down_proj | 0.110463 | 4,096 | 14,336 | 3.5 | 3.511398 | -0.302393 | 1.248383 | true | 0.820129 | dense | -0.222282 | 0.594121 | -0.086118 | 64 | 3.927545 | 4,096 | 7 | 4,032 | 1 | 0.949219 | 0.820129 | 4.788933 | success | 0.90561 | 0.000002 | | 4,032 | 0.820129 | 0.219565 |
| 134 | model.layers.19.mlp.gate_proj | 0.127735 | 4,096 | 14,336 | 3.5 | 1.961178 | 0.908702 | 1.169757 | true | 2.906329 | dense | 1.083771 | 0.975006 | 0.463345 | 64 | 9.440733 | 4,096 | 16 | 4,032 | 1 | 0.240295 | 2.906329 | 3.248336 | success | 1.704796 | 0.000003 | over-trained | 4,032 | 2.906329 | 0.133207 |
| 135 | model.layers.19.mlp.up_proj | 0.108822 | 4,096 | 14,336 | 3.5 | 2.994149 | 0.269234 | 1.238254 | true | 1.230042 | dense | 0.50501 | 0.838824 | 0.08992 | 64 | 6.8996 | 4,096 | 11 | 4,032 | 1 | 0.601259 | 1.230042 | 5.60924 | success | 1.109073 | 0.000003 | | 4,032 | 1.230042 | 0.28134 |
| 136 | model.layers.19.self_attn.k_proj | 0.122538 | 1,024 | 4,096 | 4 | 2.111212 | -2.244048 | 0.938762 | true | 0.086513 | dense | -1.79674 | -0.220276 | -1.062919 | 64 | 0.602177 | 1,024 | 16 | 960 | 1 | 0.277803 | 0.086513 | 6.960551 | success | 0.294131 | 0.000001 | | 960 | 0.086513 | 0.009454 |
| 137 | model.layers.19.self_attn.o_proj | 0.149104 | 4,096 | 4,096 | 1 | 7.248808 | -5.496727 | 1.294596 | true | 0.174464 | dense | -5.382581 | 0.184012 | -0.758294 | 64 | 1.527606 | 4,096 | 5 | 4,032 | 1 | 2.794552 | 0.174464 | 8.755992 | success | 0.417689 | 0 | under-trained | 4,032 | 0.174464 | 0.108327 |
| 138 | model.layers.19.self_attn.q_proj | 0.14307 | 4,096 | 4,096 | 1 | 2.332933 | -1.245261 | 1.28701 | true | 0.292567 | dense | -0.803735 | 0.276818 | -0.533775 | 64 | 1.891552 | 4,096 | 14 | 4,032 | 1 | 0.356241 | 0.292567 | 6.465368 | success | 0.540894 | 0 | | 4,032 | 0.292567 | 0.046604 |
| 139 | model.layers.19.self_attn.v_proj | 0.178021 | 1,024 | 4,096 | 4 | 2.471091 | -3.390252 | 0.931619 | true | 0.042465 | dense | -2.789483 | -0.390873 | -1.371966 | 64 | 0.406562 | 1,024 | 16 | 960 | 1 | 0.367773 | 0.042465 | 9.573986 | success | 0.206071 | 0.000002 | | 960 | 0.042465 | 0.009732 |
| 140 | model.layers.20.mlp.down_proj | 0.134991 | 4,096 | 14,336 | 3.5 | 3.565723 | -0.706781 | 1.259107 | true | 0.633555 | dense | -0.43203 | 0.615181 | -0.198215 | 64 | 4.122694 | 4,096 | 8 | 4,032 | 1 | 0.90712 | 0.633555 | 6.507235 | success | 0.795962 | 0.000002 | | 4,032 | 0.633555 | 0.221898 |
| 141 | model.layers.20.mlp.gate_proj | 0.119756 | 4,096 | 14,336 | 3.5 | 2.5688 | 1.073327 | 1.184726 | true | 2.61717 | dense | 1.217209 | 0.990694 | 0.417832 | 64 | 9.787998 | 4,096 | 8 | 4,032 | 1 | 0.554655 | 2.61717 | 3.739917 | success | 1.617767 | 0.000003 | | 4,032 | 2.61717 | 0.418332 |
| 142 | model.layers.20.mlp.up_proj | 0.095826 | 4,096 | 14,336 | 3.5 | 3.658596 | 0.321758 | 1.244753 | true | 1.224463 | dense | 0.472281 | 0.852261 | 0.087946 | 64 | 7.116415 | 4,096 | 9 | 4,032 | 1 | 0.886199 | 1.224463 | 5.811864 | success | 1.106555 | 0.000003 | | 4,032 | 1.224463 | 0.360461 |
| 143 | model.layers.20.self_attn.k_proj | 0.098899 | 1,024 | 4,096 | 4 | 2.026788 | -1.852291 | 0.896916 | true | 0.121926 | dense | -1.477753 | -0.171408 | -0.913905 | 64 | 0.673895 | 1,024 | 15 | 960 | 1 | 0.265115 | 0.121926 | 5.527092 | success | 0.349179 | 0.000001 | | 960 | 0.121926 | 0.009882 |
| 144 | model.layers.20.self_attn.o_proj | 0.178187 | 4,096 | 4,096 | 1 | 2.849758 | -2.2384 | 1.289553 | true | 0.163881 | dense | -1.477618 | 0.24374 | -0.78547 | 64 | 1.752832 | 4,096 | 16 | 4,032 | 1 | 0.46244 | 0.163881 | 10.69573 | success | 0.404823 | 0 | | 4,032 | 0.163881 | 0.056486 |
| 145 | model.layers.20.self_attn.q_proj | 0.111037 | 4,096 | 4,096 | 1 | 1.913623 | -0.521249 | 1.230252 | true | 0.534086 | dense | -0.216707 | 0.34908 | -0.272389 | 64 | 2.233985 | 4,096 | 17 | 4,032 | 1 | 0.221586 | 0.534086 | 4.182818 | success | 0.730812 | 0 | over-trained | 4,032 | 0.534086 | 0.026308 |
| 146 | model.layers.20.self_attn.v_proj | 0.148586 | 1,024 | 4,096 | 4 | 4.596109 | -5.818303 | 0.928441 | true | 0.05421 | dense | -5.626064 | -0.359667 | -1.265919 | 64 | 0.436851 | 1,024 | 7 | 960 | 1 | 1.359201 | 0.05421 | 8.058464 | success | 0.232831 | 0.000001 | | 960 | 0.05421 | 0.024016 |
| 147 | model.layers.21.mlp.down_proj | 0.08553 | 4,096 | 14,336 | 3.5 | 3.078118 | -0.055919 | 1.243093 | true | 0.959033 | dense | 0.033383 | 0.633628 | -0.018167 | 64 | 4.301581 | 4,096 | 12 | 4,032 | 1 | 0.599901 | 0.959033 | 4.485334 | success | 0.979302 | 0.000002 | | 4,032 | 0.959033 | 0.15947 |
| 148 | model.layers.21.mlp.gate_proj | 0.096828 | 4,096 | 14,336 | 3.5 | 2.392283 | 1.333604 | 1.153122 | true | 3.609612 | dense | 1.40454 | 1.013426 | 0.557461 | 64 | 10.31398 | 4,096 | 10 | 4,032 | 1 | 0.440279 | 3.609612 | 2.857366 | success | 1.899898 | 0.000003 | | 4,032 | 3.609612 | 0.308915 |
| 149 | model.layers.21.mlp.up_proj | 0.113977 | 4,096 | 14,336 | 3.5 | 3.603669 | 0.171321 | 1.250966 | true | 1.115683 | dense | 0.381532 | 0.86402 | 0.047541 | 64 | 7.311728 | 4,096 | 11 | 4,032 | 1 | 0.785036 | 1.115683 | 6.55359 | success | 1.056259 | 0.000003 | | 4,032 | 1.115683 | 0.334261 |
| 150 | model.layers.21.self_attn.k_proj | 0.114759 | 1,024 | 4,096 | 4 | 2.784642 | -2.1044 | 0.882703 | true | 0.175503 | dense | -2.028877 | -0.149946 | -0.755717 | 64 | 0.708033 | 1,024 | 7 | 960 | 1 | 0.674531 | 0.175503 | 4.034318 | success | 0.41893 | 0.000001 | | 960 | 0.175503 | 0.029547 |
| 151 | model.layers.21.self_attn.o_proj | 0.157227 | 4,096 | 4,096 | 1 | 3.486824 | -2.135264 | 1.277466 | true | 0.244129 | dense | -1.671439 | 0.315967 | -0.612381 | 64 | 2.069983 | 4,096 | 12 | 4,032 | 1 | 0.717884 | 0.244129 | 8.479059 | success | 0.494094 | 0 | | 4,032 | 0.244129 | 0.088636 |
| 152 | model.layers.21.self_attn.q_proj | 0.106511 | 4,096 | 4,096 | 1 | 1.895087 | -0.546699 | 1.228855 | true | 0.514657 | dense | -0.166783 | 0.36995 | -0.288482 | 64 | 2.343957 | 4,096 | 17 | 4,032 | 1 | 0.217091 | 0.514657 | 4.554405 | success | 0.717396 | 0 | over-trained | 4,032 | 0.514657 | 0.027371 |
| 153 | model.layers.21.self_attn.v_proj | 0.106714 | 1,024 | 4,096 | 4 | 3.907415 | -4.144983 | 0.897303 | true | 0.086936 | dense | -4.059772 | -0.29554 | -1.060799 | 64 | 0.506361 | 1,024 | 7 | 960 | 1 | 1.0989 | 0.086936 | 5.82451 | success | 0.29485 | 0.000002 | | 960 | 0.086936 | 0.027748 |
| 154 | model.layers.22.mlp.down_proj | 0.097275 | 4,096 | 14,336 | 3.5 | 2.915156 | -0.008983 | 1.239602 | true | 0.99293 | dense | 0.115465 | 0.659474 | -0.003081 | 64 | 4.565351 | 4,096 | 12 | 4,032 | 1 | 0.552858 | 0.99293 | 4.597859 | success | 0.996459 | 0.000002 | | 4,032 | 0.99293 | 0.16519 |
| 155 | model.layers.22.mlp.gate_proj | 0.107711 | 4,096 | 14,336 | 3.5 | 2.556583 | 1.199422 | 1.180395 | true | 2.945442 | dense | 1.308265 | 1.013382 | 0.46915 | 64 | 10.312928 | 4,096 | 9 | 4,032 | 1 | 0.518861 | 2.945442 | 3.501318 | success | 1.716229 | 0.000003 | | 4,032 | 2.945442 | 0.398624 |
| 156 | model.layers.22.mlp.up_proj | 0.108296 | 4,096 | 14,336 | 3.5 | 4.117817 | 0.294864 | 1.249735 | true | 1.179253 | dense | 0.416071 | 0.865351 | 0.071607 | 64 | 7.334171 | 4,096 | 8 | 4,032 | 1 | 1.102315 | 1.179253 | 6.219338 | success | 1.085934 | 0.000003 | | 4,032 | 1.179253 | 0.402714 |
| 157 | model.layers.22.self_attn.k_proj | 0.096766 | 1,024 | 4,096 | 4 | 2.003269 | -1.510586 | 0.867858 | true | 0.176173 | dense | -1.277648 | -0.133088 | -0.75406 | 64 | 0.736057 | 1,024 | 17 | 960 | 1 | 0.243329 | 0.176173 | 4.178032 | success | 0.41973 | 0.000001 | | 960 | 0.176173 | 0.008595 |
| 158 | model.layers.22.self_attn.o_proj | 0.119607 | 4,096 | 4,096 | 1 | 2.752449 | -1.171055 | 1.259424 | true | 0.37544 | dense | -0.918232 | 0.33078 | -0.425459 | 64 | 2.141805 | 4,096 | 13 | 4,032 | 1 | 0.486042 | 0.37544 | 5.704785 | success | 0.612732 | 0 | | 4,032 | 0.37544 | 0.071729 |
| 159 | model.layers.22.self_attn.q_proj | 0.092778 | 4,096 | 4,096 | 1 | 1.908004 | -0.094512 | 1.160549 | true | 0.892206 | dense | 0.113221 | 0.438026 | -0.049535 | 64 | 2.741739 | 4,096 | 17 | 4,032 | 1 | 0.220223 | 0.892206 | 3.072987 | success | 0.944567 | 0 | over-trained | 4,032 | 0.892206 | 0.026006 |
| 160 | model.layers.22.self_attn.v_proj | 0.126951 | 1,024 | 4,096 | 4 | 2.984884 | -3.636414 | 0.918914 | true | 0.060496 | dense | -3.230496 | -0.317237 | -1.218277 | 64 | 0.481685 | 1,024 | 12 | 960 | 1 | 0.572987 | 0.060496 | 7.962322 | success | 0.245958 | 0.000001 | | 960 | 0.060496 | 0.016329 |
| 161 | model.layers.23.mlp.down_proj | 0.136955 | 4,096 | 14,336 | 3.5 | 2.609137 | -0.380544 | 1.251473 | true | 0.714742 | dense | 0.008604 | 0.652099 | -0.145851 | 64 | 4.488472 | 4,096 | 14 | 4,032 | 1 | 0.43006 | 0.714742 | 6.279848 | success | 0.845424 | 0.000002 | | 4,032 | 0.714742 | 0.135923 |
| 162 | model.layers.23.mlp.gate_proj | 0.088552 | 4,096 | 14,336 | 3.5 | 2.535329 | 1.299763 | 1.171325 | true | 3.255821 | dense | 1.373304 | 1.011178 | 0.51266 | 64 | 10.260735 | 4,096 | 11 | 4,032 | 1 | 0.462919 | 3.255821 | 3.151505 | success | 1.804389 | 0.000003 | | 4,032 | 3.255821 | 0.323447 |
| 163 | model.layers.23.mlp.up_proj | 0.085237 | 4,096 | 14,336 | 3.5 | 3.113806 | 0.239665 | 1.24443 | true | 1.193902 | dense | 0.515589 | 0.874181 | 0.076969 | 64 | 7.484814 | 4,096 | 13 | 4,032 | 1 | 0.586264 | 1.193902 | 6.269203 | success | 1.092658 | 0.000003 | | 4,032 | 1.193902 | 0.286408 |
| 164 | model.layers.23.self_attn.k_proj | 0.120284 | 1,024 | 4,096 | 4 | 2.633858 | -2.190326 | 0.895238 | true | 0.147366 | dense | -2.016153 | -0.143303 | -0.831603 | 64 | 0.718948 | 1,024 | 8 | 960 | 1 | 0.577656 | 0.147366 | 4.878661 | success | 0.383882 | 0.000002 | | 960 | 0.147366 | 0.026898 |
| 165 | model.layers.23.self_attn.o_proj | 0.128474 | 4,096 | 4,096 | 1 | 2.745961 | -1.412795 | 1.268655 | true | 0.305845 | dense | -1.036763 | 0.311847 | -0.514499 | 64 | 2.050442 | 4,096 | 14 | 4,032 | 1 | 0.466628 | 0.305845 | 6.704193 | success | 0.553032 | 0 | | 4,032 | 0.305845 | 0.065993 |
| 166 | model.layers.23.self_attn.q_proj | 0.143367 | 4,096 | 4,096 | 1 | 2.07931 | -0.900177 | 1.273627 | true | 0.369045 | dense | -0.434673 | 0.361771 | -0.432921 | 64 | 2.30023 | 4,096 | 16 | 4,032 | 1 | 0.269828 | 0.369045 | 6.232928 | success | 0.607491 | 0 | | 4,032 | 0.369045 | 0.042263 |
| 167 | model.layers.23.self_attn.v_proj | 0.120516 | 1,024 | 4,096 | 4 | 2.162317 | -2.288699 | 0.890689 | true | 0.087408 | dense | -1.878025 | -0.269086 | -1.058448 | 64 | 0.538163 | 1,024 | 14 | 960 | 1 | 0.310642 | 0.087408 | 6.156888 | success | 0.295649 | 0.000002 | | 960 | 0.087408 | 0.010531 |
| 168 | model.layers.24.mlp.down_proj | 0.120728 | 4,096 | 14,336 | 3.5 | 3.102945 | -0.002051 | 1.243654 | true | 0.998479 | dense | 0.07713 | 0.649114 | -0.000661 | 64 | 4.457737 | 4,096 | 11 | 4,032 | 1 | 0.634062 | 0.998479 | 4.464528 | success | 0.999239 | 0.000002 | | 4,032 | 0.998479 | 0.175398 |
| 169 | model.layers.24.mlp.gate_proj | 0.093973 | 4,096 | 14,336 | 3.5 | 2.316717 | 1.206078 | 1.172482 | true | 3.315872 | dense | 1.306348 | 1.022437 | 0.520598 | 64 | 10.530208 | 4,096 | 12 | 4,032 | 1 | 0.380103 | 3.315872 | 3.175698 | success | 1.820954 | 0.000002 | | 4,032 | 3.315872 | 0.271317 |
| 170 | model.layers.24.mlp.up_proj | 0.118981 | 4,096 | 14,336 | 3.5 | 3.208384 | -0.029564 | 1.256468 | true | 0.979006 | dense | 0.336656 | 0.866081 | -0.009215 | 64 | 7.346503 | 4,096 | 14 | 4,032 | 1 | 0.590215 | 0.979006 | 7.504043 | success | 0.989447 | 0.000003 | | 4,032 | 0.979006 | 0.281906 |
| 171 | model.layers.24.self_attn.k_proj | 0.137641 | 1,024 | 4,096 | 4 | 1.972477 | -1.812983 | 0.88751 | true | 0.120465 | dense | -1.39476 | -0.174501 | -0.91914 | 64 | 0.669113 | 1,024 | 17 | 960 | 1 | 0.23586 | 0.120465 | 5.554427 | success | 0.34708 | 0.000002 | over-trained | 960 | 0.120465 | 0.008152 |
| 172 | model.layers.24.self_attn.o_proj | 0.121796 | 4,096 | 4,096 | 1 | 3.490741 | -1.735558 | 1.274527 | true | 0.318281 | dense | -1.516867 | 0.332335 | -0.497189 | 64 | 2.149487 | 4,096 | 11 | 4,032 | 1 | 0.750987 | 0.318281 | 6.75342 | success | 0.564164 | 0 | | 4,032 | 0.318281 | 0.095662 |
| 173 | model.layers.24.self_attn.q_proj | 0.116849 | 4,096 | 4,096 | 1 | 2.127018 | -0.363221 | 1.21705 | true | 0.674892 | dense | -0.170545 | 0.39306 | -0.170765 | 64 | 2.472066 | 4,096 | 11 | 4,032 | 1 | 0.339809 | 0.674892 | 3.662903 | success | 0.821518 | 0 | | 4,032 | 0.674892 | 0.057734 |
| 174 | model.layers.24.self_attn.v_proj | 0.129029 | 1,024 | 4,096 | 4 | 2.129922 | -2.279681 | 0.892357 | true | 0.085053 | dense | -1.850581 | -0.275046 | -1.070312 | 64 | 0.530828 | 1,024 | 16 | 960 | 1 | 0.282481 | 0.085053 | 6.241163 | success | 0.291638 | 0.000002 | | 960 | 0.085053 | 0.009003 |
| 175 | model.layers.25.mlp.down_proj | 0.105713 | 4,096 | 14,336 | 3.5 | 2.85188 | -0.323161 | 1.255649 | true | 0.770344 | dense | -0.07539 | 0.646843 | -0.113315 | 64 | 4.434486 | 4,096 | 13 | 4,032 | 1 | 0.513619 | 0.770344 | 5.756498 | success | 0.877693 | 0.000002 | | 4,032 | 0.770344 | 0.154021 |
| 176 | model.layers.25.mlp.gate_proj | 0.069561 | 4,096 | 14,336 | 3.5 | 2.350949 | 1.589706 | 1.109686 | true | 4.744578 | dense | 1.611528 | 1.019893 | 0.676198 | 64 | 10.468697 | 4,096 | 14 | 4,032 | 1 | 0.361056 | 4.744578 | 2.206455 | success | 2.178205 | 0.000003 | | 4,032 | 4.744578 | 0.206881 |
| 177 | model.layers.25.mlp.up_proj | 0.15032 | 4,096 | 14,336 | 3.5 | 5.763207 | 0.004806 | 1.254436 | true | 1.001922 | dense | 0.112287 | 0.861523 | 0.000834 | 64 | 7.269807 | 4,096 | 5 | 4,032 | 1 | 2.130171 | 1.001922 | 7.255862 | success | 1.00096 | 0.000003 | | 4,032 | 1.001922 | 0.533466 |
| 178 | model.layers.25.self_attn.k_proj | 0.144007 | 1,024 | 4,096 | 4 | 1.998126 | -2.03634 | 0.914757 | true | 0.095692 | dense | -1.625756 | -0.233395 | -1.019125 | 64 | 0.584258 | 1,024 | 17 | 960 | 1 | 0.242081 | 0.095692 | 6.105623 | success | 0.309341 | 0.000001 | over-trained | 960 | 0.095692 | 0.007753 |
| 179 | model.layers.25.self_attn.o_proj | 0.093961 | 4,096 | 4,096 | 1 | 3.637802 | -1.868067 | 1.274864 | true | 0.306538 | dense | -1.728484 | 0.25656 | -0.513515 | 64 | 1.805345 | 4,096 | 9 | 4,032 | 1 | 0.879267 | 0.306538 | 5.889459 | success | 0.553659 | 0 | | 4,032 | 0.306538 | 0.088987 |
| 180 | model.layers.25.self_attn.q_proj | 0.086197 | 4,096 | 4,096 | 1 | 1.80462 | -0.095304 | 1.152994 | true | 0.885501 | dense | 0.063364 | 0.382037 | -0.052811 | 64 | 2.410109 | 4,096 | 17 | 4,032 | 1 | 0.195149 | 0.885501 | 2.721746 | success | 0.941011 | 0 | over-trained | 4,032 | 0.885501 | 0.019111 |
| 181 | model.layers.25.self_attn.v_proj | 0.090123 | 1,024 | 4,096 | 4 | 2.294557 | -2.036341 | 0.869857 | true | 0.129579 | dense | -1.905706 | -0.294062 | -0.887466 | 64 | 0.508087 | 1,024 | 13 | 960 | 1 | 0.359045 | 0.129579 | 3.921064 | success | 0.359971 | 0.000001 | | 960 | 0.129579 | 0.010466 |
| 182 | model.layers.26.mlp.down_proj | 0.120553 | 4,096 | 14,336 | 3.5 | 2.723654 | -0.262714 | 1.251983 | true | 0.800836 | dense | 0.015633 | 0.663817 | -0.096457 | 64 | 4.61123 | 4,096 | 11 | 4,032 | 1 | 0.519701 | 0.800836 | 5.758023 | success | 0.894894 | 0.000002 | | 4,032 | 0.800836 | 0.171132 |
| 183 | model.layers.26.mlp.gate_proj | 0.091337 | 4,096 | 14,336 | 3.5 | 2.344174 | 1.351013 | 1.151821 | true | 3.769882 | dense | 1.414824 | 1.020362 | 0.576328 | 64 | 10.480022 | 4,096 | 13 | 4,032 | 1 | 0.372807 | 3.769882 | 2.779934 | success | 1.941618 | 0.000003 | | 4,032 | 3.769882 | 0.244311 |
| 184 | model.layers.26.mlp.up_proj | 0.086812 | 4,096 | 14,336 | 3.5 | 4.392035 | 0.242671 | 1.251431 | true | 1.135671 | dense | 0.37363 | 0.87386 | 0.055253 | 64 | 7.479284 | 4,096 | 8 | 4,032 | 1 | 1.199265 | 1.135671 | 6.585783 | success | 1.065679 | 0.000003 | | 4,032 | 1.135671 | 0.433704 |
| 185 | model.layers.26.self_attn.k_proj | 0.106872 | 1,024 | 4,096 | 4 | 2.051858 | -1.892762 | 0.875378 | true | 0.119547 | dense | -1.512787 | -0.200754 | -0.922463 | 64 | 0.629864 | 1,024 | 13 | 960 | 1 | 0.291733 | 0.119547 | 5.268767 | success | 0.345755 | 0.000002 | | 960 | 0.119547 | 0.010768 |
| 186 | model.layers.26.self_attn.o_proj | 0.134184 | 4,096 | 4,096 | 1 | 2.441189 | -1.539896 | 1.275169 | true | 0.233993 | dense | -0.939815 | 0.274651 | -0.630798 | 64 | 1.882136 | 4,096 | 15 | 4,032 | 1 | 0.372113 | 0.233993 | 8.04357 | success | 0.483728 | 0 | | 4,032 | 0.233993 | 0.050972 |
| 187 | model.layers.26.self_attn.q_proj | 0.098903 | 4,096 | 4,096 | 1 | 1.94625 | -0.172653 | 1.172286 | true | 0.815248 | dense | 0.02072 | 0.408872 | -0.088711 | 64 | 2.563731 | 4,096 | 13 | 4,032 | 1 | 0.262442 | 0.815248 | 3.144727 | success | 0.902911 | 0 | over-trained | 4,032 | 0.815248 | 0.037628 |
| 188 | model.layers.26.self_attn.v_proj | 0.177327 | 1,024 | 4,096 | 4 | 2.084277 | -2.452292 | 0.921592 | true | 0.066594 | dense | -1.958116 | -0.316788 | -1.176567 | 64 | 0.482183 | 1,024 | 18 | 960 | 1 | 0.255566 | 0.066594 | 7.240673 | success | 0.258057 | 0.000002 | | 960 | 0.066594 | 0.00719 |
| 189 | model.layers.27.mlp.down_proj | 0.086935 | 4,096 | 14,336 | 3.5 | 3.157849 | 0.028879 | 1.237124 | true | 1.021281 | dense | 0.10386 | 0.642767 | 0.009145 | 64 | 4.393056 | 4,096 | 9 | 4,032 | 1 | 0.719283 | 1.021281 | 4.301516 | success | 1.010584 | 0.000002 | | 4,032 | 1.021281 | 0.199568 |
| 190 | model.layers.27.mlp.gate_proj | 0.084258 | 4,096 | 14,336 | 3.5 | 2.359513 | 1.042495 | 1.188402 | true | 2.765834 | dense | 1.179609 | 0.995663 | 0.441826 | 64 | 9.900623 | 4,096 | 13 | 4,032 | 1 | 0.377061 | 2.765834 | 3.579616 | success | 1.66308 | 0.000003 | | 4,032 | 2.765834 | 0.257194 |
| 191 | model.layers.27.mlp.up_proj | 0.11716 | 4,096 | 14,336 | 3.5 | 4.703075 | 0.266932 | 1.251003 | true | 1.139612 | dense | 0.392874 | 0.884432 | 0.056757 | 64 | 7.663578 | 4,096 | 6 | 4,032 | 1 | 1.511774 | 1.139612 | 6.724727 | success | 1.067526 | 0.000003 | | 4,032 | 1.139612 | 0.511535 |
| 192 | model.layers.27.self_attn.k_proj | 0.137354 | 1,024 | 4,096 | 4 | 1.939668 | -1.521124 | 0.858189 | true | 0.164354 | dense | -1.267935 | -0.15881 | -0.784219 | 64 | 0.693729 | 1,024 | 16 | 960 | 1 | 0.234917 | 0.164354 | 4.220938 | success | 0.405406 | 0.000001 | over-trained | 960 | 0.164354 | 0.008371 |
| 193 | model.layers.27.self_attn.o_proj | 0.166313 | 4,096 | 4,096 | 1 | 3.98469 | -1.885441 | 1.259974 | true | 0.336379 | dense | -1.686147 | 0.31561 | -0.473171 | 64 | 2.068284 | 4,096 | 6 | 4,032 | 1 | 1.218495 | 0.336379 | 6.148673 | success | 0.579982 | 0 | | 4,032 | 0.336379 | 0.139734 |
| 194 | model.layers.27.self_attn.q_proj | 0.098414 | 4,096 | 4,096 | 1 | 2.087291 | -0.006988 | 1.15901 | true | 0.992321 | dense | 0.10856 | 0.438895 | -0.003348 | 64 | 2.747227 | 4,096 | 9 | 4,032 | 1 | 0.36243 | 0.992321 | 2.768487 | success | 0.996153 | 0 | | 4,032 | 0.992321 | 0.065965 |
| 195 | model.layers.27.self_attn.v_proj | 0.09186 | 1,024 | 4,096 | 4 | 2.089776 | -2.143677 | 0.886055 | true | 0.094234 | dense | -1.708897 | -0.236603 | -1.025793 | 64 | 0.579959 | 1,024 | 17 | 960 | 1 | 0.26431 | 0.094234 | 6.154463 | success | 0.306975 | 0.000002 | | 960 | 0.094234 | 0.008735 |
| 196 | model.layers.28.mlp.down_proj | 0.097772 | 4,096 | 14,336 | 3.5 | 2.722751 | -0.225053 | 1.245679 | true | 0.826692 | dense | -0.029104 | 0.609328 | -0.082656 | 64 | 4.067506 | 4,096 | 12 | 4,032 | 1 | 0.497315 | 0.826692 | 4.920221 | success | 0.909226 | 0.000002 | | 4,032 | 0.826692 | 0.137727 |
| 197 | model.layers.28.mlp.gate_proj | 0.106238 | 4,096 | 14,336 | 3.5 | 2.577398 | 1.451249 | 1.149888 | true | 3.656516 | dense | 1.502734 | 1.011697 | 0.563067 | 64 | 10.272995 | 4,096 | 7 | 4,032 | 1 | 0.5962 | 3.656516 | 2.809504 | success | 1.912202 | 0.000003 | | 4,032 | 3.656516 | 0.449288 |
| 198 | model.layers.28.mlp.up_proj | 0.110544 | 4,096 | 14,336 | 3.5 | 3.220158 | 0.687891 | 1.226926 | true | 1.635386 | dense | 0.800185 | 0.888572 | 0.21362 | 64 | 7.73698 | 4,096 | 8 | 4,032 | 1 | 0.784945 | 1.635386 | 4.73098 | success | 1.278822 | 0.000003 | | 4,032 | 1.635386 | 0.388154 |
| 199 | model.layers.28.self_attn.k_proj | 0.148201 | 1,024 | 4,096 | 4 | 2.014589 | -2.02633 | 0.901159 | true | 0.098667 | dense | -1.595166 | -0.213194 | -1.005828 | 64 | 0.612077 | 1,024 | 17 | 960 | 1 | 0.246074 | 0.098667 | 6.203452 | success | 0.314113 | 0.000001 | | 960 | 0.098667 | 0.008508 |
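For working with the table itself, a small sketch of how the warning column can be queried once the data is loaded as a DataFrame. The CSV filename carries over from the hypothetical export above; in this section, flagged rows have alpha below 2 (over-trained) or above 6 (under-trained).

```python
import pandas as pd

# Hypothetical filename from the export sketch above.
details = pd.read_csv("ww_details.csv")

# Rows with a non-empty warning; in this section they correspond to
# alpha < 2 ("over-trained") and alpha > 6 ("under-trained").
flagged = details.loc[details["warning"].notna(),
                      ["layer_id", "name", "alpha", "warning"]]
print(flagged.sort_values("alpha"))
```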