ishangarg183 committed
Commit 5ad8b5a · verified · 1 parent: 09cae30

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the complete set.
Files changed (50)
  1. .gitattributes +15 -0
  2. v2-samelayer/assembled_activations/llama32-3b-dpo/checkpoints/final.pt +3 -0
  3. v2-samelayer/assembled_activations/llama32-3b-dpo/features/counterfactual_scores.csv +0 -0
  4. v2-samelayer/assembled_activations/llama32-3b-dpo/features/counterfactual_scores_by_layer.csv +0 -0
  5. v2-samelayer/assembled_activations/llama32-3b-dpo/features/cross_layer_cosine_drift.csv +3 -0
  6. v2-samelayer/assembled_activations/llama32-3b-dpo/features/decoder_layer_profiles.csv +3 -0
  7. v2-samelayer/assembled_activations/llama32-3b-dpo/features/feature_classification.csv +0 -0
  8. v2-samelayer/assembled_activations/llama32-3b-dpo/features/merged_classification.csv +0 -0
  9. v2-samelayer/assembled_activations/llama32-3b-dpo/features/model_layer_stream_patterns.csv +0 -0
  10. v2-samelayer/assembled_activations/llama32-3b-dpo/features/superposition_analysis.json +0 -0
  11. v2-samelayer/assembled_activations/llama32-3b-dpo/metrics/aggregate_metrics.json +388 -0
  12. v2-samelayer/assembled_activations/llama32-3b-dpo/metrics/training_metrics.json +524 -0
  13. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/aligned_decoder_norm_heatmap.png +3 -0
  14. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/base_decoder_norm_heatmap.png +3 -0
  15. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/cf_shift_by_layer.png +3 -0
  16. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/cf_shift_p95_by_layer.png +3 -0
  17. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/class_distribution_multilayer.png +3 -0
  18. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/class_distribution_primary.png +3 -0
  19. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/cross_layer_cosine_drift_by_stream.png +3 -0
  20. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/decoder_norm_ratio_by_layer.png +3 -0
  21. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/feature_layer_trajectories.png +3 -0
  22. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/feature_sharing_ratio_by_layer.png +3 -0
  23. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/fve_by_layer.png +3 -0
  24. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/l0_by_layer.png +3 -0
  25. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/layer_concentration_entropy.png +3 -0
  26. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/loss_curves.png +3 -0
  27. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/max_norm_layer_migration.png +3 -0
  28. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/rho_histogram_by_layer.png +3 -0
  29. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/rho_theta_scatter_by_layer.png +3 -0
  30. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/superposition_by_layer.png +3 -0
  31. v2-samelayer/assembled_activations/llama32-3b-dpo/plots/theta_by_layer.png +3 -0
  32. v2-samelayer/assembled_activations/llama32-3b-grpo/checkpoints/final.pt +3 -0
  33. v2-samelayer/assembled_activations/llama32-3b-grpo/features/cross_layer_cosine_drift.csv +3 -0
  34. v2-samelayer/assembled_activations/llama32-3b-grpo/features/decoder_layer_profiles.csv +3 -0
  35. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/aligned_decoder_norm_heatmap.png +3 -0
  36. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/base_decoder_norm_heatmap.png +3 -0
  37. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/cf_shift_by_layer.png +3 -0
  38. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/cf_shift_p95_by_layer.png +3 -0
  39. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/class_distribution_multilayer.png +3 -0
  40. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/class_distribution_primary.png +3 -0
  41. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/cross_layer_cosine_drift_by_stream.png +3 -0
  42. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/decoder_norm_ratio_by_layer.png +3 -0
  43. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/feature_layer_trajectories.png +3 -0
  44. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/feature_sharing_ratio_by_layer.png +3 -0
  45. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/fve_by_layer.png +3 -0
  46. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/l0_by_layer.png +3 -0
  47. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/layer_concentration_entropy.png +3 -0
  48. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/loss_curves.png +3 -0
  49. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/max_norm_layer_migration.png +3 -0
  50. v2-samelayer/assembled_activations/llama32-3b-grpo/plots/rho_histogram_by_layer.png +3 -0
.gitattributes CHANGED
@@ -186,3 +186,18 @@ v2-samelayer/assembled_activations/smollm3-grpo/features/cross_layer_cosine_drif
  v2-samelayer/assembled_activations/qwen3-4b-dpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
  v2-samelayer/assembled_activations/qwen3-4b-grpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
  v2-samelayer/assembled_activations/qwen3-4b-grpo/features/decoder_layer_profiles.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/qwen3-4b-ppo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-ppo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-ppo/features/model_layer_stream_patterns.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-ppo/features/decoder_layer_profiles.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-kto/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-kto/features/decoder_layer_profiles.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-grpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-grpo/features/decoder_layer_profiles.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/smollm3-simpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-dpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-dpo/features/decoder_layer_profiles.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/qwen3-4b-simpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/llama32-3b-orpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v2-samelayer/assembled_activations/smollm3-kto/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
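The patterns above route the large feature CSVs through Git LFS, so a plain clone only fetches pointer stubs unless LFS is set up. A minimal sketch of pulling just these artifacts through the Hub API instead, assuming a hypothetical `<namespace>/<repo>` dataset id (the repo id is not part of this diff):

```python
# Sketch: download only the LFS-tracked feature CSVs added in this commit.
# repo_id is a placeholder; substitute the actual namespace/name of this repo.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="<namespace>/<repo>",   # hypothetical id, not given in the diff
    repo_type="dataset",            # assumption: this repo is a dataset
    allow_patterns=["v2-samelayer/assembled_activations/*/features/*.csv"],
)
print("Downloaded to", local_dir)
```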
v2-samelayer/assembled_activations/llama32-3b-dpo/checkpoints/final.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00fcd03d23e426a857fd9d36297fc24efae3c9d071c11a0fc88553105cce0ae4
+ size 10873439922
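The checkpoint itself is stored in LFS; the three lines above are only the pointer (spec version, object hash, and a size of roughly 10.9 GB). A minimal sketch of resolving it and inspecting the contents, assuming a hypothetical repo id and that the file is an ordinary pickled PyTorch object:

```python
# Sketch: fetch the final.pt LFS object (~10.9 GB) and peek at the checkpoint.
# repo_id is a placeholder; torch.load usage assumes a standard pickled payload.
import torch
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="<namespace>/<repo>",  # hypothetical id, not given in the diff
    repo_type="dataset",
    filename="v2-samelayer/assembled_activations/llama32-3b-dpo/checkpoints/final.pt",
)
state = torch.load(path, map_location="cpu")  # assumption: pickled state/dict
print(type(state))
```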
v2-samelayer/assembled_activations/llama32-3b-dpo/features/counterfactual_scores.csv ADDED
The diff for this file is too large to render. See raw diff
 
v2-samelayer/assembled_activations/llama32-3b-dpo/features/counterfactual_scores_by_layer.csv ADDED
The diff for this file is too large to render. See raw diff
 
v2-samelayer/assembled_activations/llama32-3b-dpo/features/cross_layer_cosine_drift.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:98369ea77bd0d42503293df68eed91eedeb05452b6d12ffda8e9f6d552ee09e0
+ size 27252306
v2-samelayer/assembled_activations/llama32-3b-dpo/features/decoder_layer_profiles.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f685534b39b1dcd0f85a3934158b2fb28e947472c0c6b9169aa990ac896fa897
+ size 13008950
v2-samelayer/assembled_activations/llama32-3b-dpo/features/feature_classification.csv ADDED
The diff for this file is too large to render. See raw diff
 
v2-samelayer/assembled_activations/llama32-3b-dpo/features/merged_classification.csv ADDED
The diff for this file is too large to render. See raw diff
 
v2-samelayer/assembled_activations/llama32-3b-dpo/features/model_layer_stream_patterns.csv ADDED
The diff for this file is too large to render. See raw diff
 
v2-samelayer/assembled_activations/llama32-3b-dpo/features/superposition_analysis.json ADDED
The diff for this file is too large to render. See raw diff
 
v2-samelayer/assembled_activations/llama32-3b-dpo/metrics/aggregate_metrics.json ADDED
@@ -0,0 +1,388 @@
+ {
+ "crosscoder_kind": "multilayer_sparc",
+ "layers": [
+ 12,
+ 13,
+ 14
+ ],
+ "topk_mode": "model_balanced_layer_agg",
+ "topk": 400,
+ "expansion_factor": 8,
+ "dict_size": 24576,
+ "forced_shared_fraction": 0.06,
+ "class_counts": {
+ "aligned_only": 6255,
+ "shared_redirected": 5381,
+ "shared_intermediate": 5281,
+ "base_only": 3273,
+ "shared_attenuated": 2898,
+ "shared_aligned": 1488
+ },
+ "multilayer_class_counts": {
+ "drifting_or_rotating": 12229,
+ "persistent_aligned_only": 4659,
+ "persistent_shared": 2467,
+ "persistent_base_only": 2016,
+ "localized_aligned_only": 1596,
+ "localized_base_only": 1257,
+ "mixed_or_ambiguous": 352
+ },
+ "classification_thresholds": {
+ "rho_base_only": 0.4,
+ "rho_aligned_only": 0.5571486335286551,
+ "rho_shared_low": 0.441999366421853,
+ "rho_shared_high": 0.5571486335286551
+ },
+ "threshold_sensitivity": {
+ "original": {
+ "aligned_only": 6255,
+ "shared_redirected": 5381,
+ "shared_intermediate": 5281,
+ "base_only": 3273,
+ "shared_attenuated": 2898,
+ "shared_aligned": 1488
+ },
+ "perturbed": {
+ "delta_-0.05": {
+ "base_only": 840,
+ "aligned_only": 2834,
+ "shared_aligned": 1500,
+ "shared_redirected": 9107,
+ "shared_intermediate": 8349,
+ "shared_attenuated": 1946,
+ "other": 0
+ },
+ "delta_+0.05": {
+ "base_only": 6820,
+ "aligned_only": 10851,
+ "shared_aligned": 1476,
+ "shared_redirected": 734,
+ "shared_intermediate": 804,
+ "shared_attenuated": 3891,
+ "other": 0
+ }
+ },
+ "perturbation": 0.05
+ },
+ "class_counts_by_layer": {
+ "12": {
+ "shared_redirected": 7895,
+ "shared_intermediate": 6656,
+ "base_only": 4688,
+ "shared_aligned": 2938,
+ "aligned_only": 2399
+ },
+ "13": {
+ "shared_redirected": 8816,
+ "shared_intermediate": 8466,
+ "base_only": 3035,
+ "shared_aligned": 2802,
+ "aligned_only": 1457
+ },
+ "14": {
+ "shared_redirected": 8569,
+ "base_only": 4460,
+ "shared_intermediate": 4299,
+ "aligned_only": 3222,
+ "shared_attenuated": 2072,
+ "shared_aligned": 1954
+ }
+ },
+ "feature_sharing_ratio_by_layer": {
+ "12": 0.7116292317708334,
+ "13": 0.8172200520833334,
+ "14": 0.6874186197916666
+ },
+ "decoder_amplification_by_layer": {
+ "12": {
+ "median": 0.9999999872344182,
+ "p95": 4.808803178398062
+ },
+ "13": {
+ "median": 0.9999999877864876,
+ "p95": 2.875206066511721
+ },
+ "14": {
+ "median": 0.9999999891683586,
+ "p95": 2.0530812721600027
+ }
+ },
+ "classification_thresholds_by_layer": {
+ "12": {
+ "rho_base_only": 0.3249751650747221,
+ "rho_aligned_only": 0.7504738322947574,
+ "rho_shared_low": 0.3249751650747221,
+ "rho_shared_high": 0.7504738322947574
+ },
+ "13": {
+ "rho_base_only": 0.33311266648506177,
+ "rho_aligned_only": 0.7273791891804369,
+ "rho_shared_low": 0.33311266648506177,
+ "rho_shared_high": 0.7273791891804369
+ },
+ "14": {
+ "rho_base_only": 0.4,
+ "rho_aligned_only": 0.6195673090879186,
+ "rho_shared_low": 0.4332179366876391,
+ "rho_shared_high": 0.6195673090879186
+ }
+ },
+ "threshold_sensitivity_by_layer": {
+ "12": {
+ "original": {
+ "shared_redirected": 7895,
+ "shared_intermediate": 6656,
+ "base_only": 4688,
+ "shared_aligned": 2938,
+ "aligned_only": 2399
+ },
+ "perturbed": {
+ "delta_-0.05": {
+ "base_only": 3192,
+ "aligned_only": 1558,
+ "shared_aligned": 3589,
+ "shared_redirected": 8800,
+ "shared_intermediate": 7437,
+ "shared_attenuated": 0,
+ "other": 0
+ },
+ "delta_+0.05": {
+ "base_only": 6578,
+ "aligned_only": 3609,
+ "shared_aligned": 2147,
+ "shared_redirected": 6691,
+ "shared_intermediate": 5551,
+ "shared_attenuated": 0,
+ "other": 0
+ }
+ },
+ "perturbation": 0.05
+ },
+ "13": {
+ "original": {
+ "shared_redirected": 8816,
+ "shared_intermediate": 8466,
+ "base_only": 3035,
+ "shared_aligned": 2802,
+ "aligned_only": 1457
+ },
+ "perturbed": {
+ "delta_-0.05": {
+ "base_only": 1793,
+ "aligned_only": 829,
+ "shared_aligned": 3190,
+ "shared_redirected": 9548,
+ "shared_intermediate": 9216,
+ "shared_attenuated": 0,
+ "other": 0
+ },
+ "delta_+0.05": {
+ "base_only": 5100,
+ "aligned_only": 2534,
+ "shared_aligned": 2092,
+ "shared_redirected": 7747,
+ "shared_intermediate": 7103,
+ "shared_attenuated": 0,
+ "other": 0
+ }
+ },
+ "perturbation": 0.05
+ },
+ "14": {
+ "original": {
+ "shared_redirected": 8569,
+ "base_only": 4460,
+ "shared_intermediate": 4299,
+ "aligned_only": 3222,
+ "shared_attenuated": 2072,
+ "shared_aligned": 1954
+ },
+ "perturbed": {
+ "delta_-0.05": {
+ "base_only": 1933,
+ "aligned_only": 1278,
+ "shared_aligned": 3019,
+ "shared_redirected": 10753,
+ "shared_intermediate": 6024,
+ "shared_attenuated": 1569,
+ "other": 0
+ },
+ "delta_+0.05": {
+ "base_only": 7807,
+ "aligned_only": 6199,
+ "shared_aligned": 1340,
+ "shared_redirected": 4592,
+ "shared_intermediate": 2047,
+ "shared_attenuated": 2591,
+ "other": 0
+ }
+ },
+ "perturbation": 0.05
+ }
+ },
+ "counterfactual_shift_by_layer": {
+ "12": {
+ "aligned_only": {
+ "mean_shift": 2.4112238498396065e-06,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 6255
+ },
+ "base_only": {
+ "mean_shift": -2.4341085855552528e-06,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 3273
+ },
+ "shared_aligned": {
+ "mean_shift": -7.11400898647972e-05,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.08343747109174728,
+ "count": 1488
+ },
+ "shared_attenuated": {
+ "mean_shift": -1.0171309737362479e-07,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 2898
+ },
+ "shared_intermediate": {
+ "mean_shift": 5.1859217087668957e-08,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 5281
+ },
+ "shared_redirected": {
+ "mean_shift": 2.86653050904218e-09,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 5381
+ }
+ },
+ "13": {
+ "aligned_only": {
+ "mean_shift": 8.161741125300389e-06,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 6255
+ },
+ "base_only": {
+ "mean_shift": -1.1108721304735658e-05,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 3273
+ },
+ "shared_aligned": {
+ "mean_shift": 5.4217558803460264e-05,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.09117132201790806,
+ "count": 1488
+ },
+ "shared_attenuated": {
+ "mean_shift": -2.378168234298194e-06,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 2898
+ },
+ "shared_intermediate": {
+ "mean_shift": -4.839519160503355e-08,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 5281
+ },
+ "shared_redirected": {
+ "mean_shift": 1.3520313741632717e-07,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 5381
+ }
+ },
+ "14": {
+ "aligned_only": {
+ "mean_shift": 3.1487233612113166e-05,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 6255
+ },
+ "base_only": {
+ "mean_shift": -3.585443771814724e-05,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 3273
+ },
+ "shared_aligned": {
+ "mean_shift": 6.549898494149471e-05,
+ "median_shift": 3.887436008653822e-07,
+ "p95_abs_shift": 0.10445111840963363,
+ "count": 1488
+ },
+ "shared_attenuated": {
+ "mean_shift": -1.079474320873924e-05,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 2898
+ },
+ "shared_intermediate": {
+ "mean_shift": 8.715442941091603e-08,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 5281
+ },
+ "shared_redirected": {
+ "mean_shift": 8.519746033905039e-07,
+ "median_shift": 0.0,
+ "p95_abs_shift": 0.0,
+ "count": 5381
+ }
+ }
+ },
+ "total_features": 24576,
+ "fve_base": 0.6943563584881928,
+ "fve_aligned": 0.6857018002664855,
+ "fve_base_by_layer": [
+ 0.6758359645673742,
+ 0.6896311734359302,
+ 0.7176018759842319
+ ],
+ "fve_aligned_by_layer": [
+ 0.6675708861875285,
+ 0.6810908585942853,
+ 0.7084435948526672
+ ],
+ "val_fve_base_by_layer": [
+ 0.6758359645673742,
+ 0.6896311734359302,
+ 0.7176018759842319
+ ],
+ "val_fve_aligned_by_layer": [
+ 0.6675708861875285,
+ 0.6810908585942853,
+ 0.7084435948526672
+ ],
+ "dead_neuron_fraction": 0.9607996527590532,
+ "l0_sparsity_base": 75.38008013901764,
+ "l0_sparsity_aligned": 74.12330600273101,
+ "l0_base_by_layer": [
+ 63.096559398715705,
+ 75.58358873321657,
+ 87.46008464681844
+ ],
+ "l0_aligned_by_layer": [
+ 61.64835084646818,
+ 74.61715557501459,
+ 86.1044038237011
+ ],
+ "val_l0_base": 76.88609194131422,
+ "val_l0_aligned": 75.5361974426589,
+ "val_l0_base_by_layer": [
+ 64.24105585307976,
+ 77.04439356938707,
+ 89.37281851244222
+ ],
+ "val_l0_aligned_by_layer": [
+ 62.69873474280872,
+ 75.9856566583923,
+ 87.92419285799197
+ ],
+ "superposition_fraction": 0.0
+ }
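The file records the crosscoder configuration (layers 12-14, dictionary size 24576, top-k 400) alongside class counts, thresholds, and per-layer FVE/L0 metrics. A minimal sketch of summarizing it after download, using only keys that appear in the JSON above:

```python
# Sketch: summarize aggregate_metrics.json (key names taken from the file above).
import json

with open("aggregate_metrics.json") as f:
    m = json.load(f)

print("layers:", m["layers"], "dict_size:", m["dict_size"], "topk:", m["topk"])
print("FVE base / aligned:", m["fve_base"], m["fve_aligned"])
# Feature classes, largest first
for cls, n in sorted(m["class_counts"].items(), key=lambda kv: -kv[1]):
    print(f"{cls:>22}: {n}")
```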
v2-samelayer/assembled_activations/llama32-3b-dpo/metrics/training_metrics.json ADDED
@@ -0,0 +1,524 @@
+ {
+ "epochs": [
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8
+ ],
+ "train_loss": [
+ 0.07673105311085798,
+ 0.04314152776142414,
+ 0.04106981019553078,
+ 0.039948021885392544,
+ 0.039125458598876305,
+ 0.038466410324308024,
+ 0.03799404853697207,
+ 0.03772751730255091
+ ],
+ "val_loss": [
+ 0.0452033090380786,
+ 0.04240399528860422,
+ 0.04129640658566465,
+ 0.04064904576586803,
+ 0.0401821202008512,
+ 0.03991277757739521,
+ 0.03979530702320693,
+ 0.03977678012083338
+ ],
+ "train_fve_base": [
+ 0.5215789154168545,
+ 0.6578964054480116,
+ 0.6789237529787485,
+ 0.6908673060545779,
+ 0.6998354073720443,
+ 0.7069360464550359,
+ 0.7119182502047332,
+ 0.7145967230905376
+ ],
+ "train_fve_aligned": [
+ 0.5162195712566496,
+ 0.6509691739179764,
+ 0.6700791642809221,
+ 0.6808612814996233,
+ 0.6889950146956257,
+ 0.6955827845165901,
+ 0.7002096909288071,
+ 0.7027074147300698
+ ],
+ "val_fve_base": [
+ 0.6388902236653872,
+ 0.6665667504540289,
+ 0.6772977547495777,
+ 0.6839563450264057,
+ 0.6892214420578242,
+ 0.6923850852781566,
+ 0.6940219399192571,
+ 0.6943563584881928
+ ],
+ "val_fve_aligned": [
+ 0.6334137236260619,
+ 0.6591302406101327,
+ 0.6693885479297937,
+ 0.6759788746609113,
+ 0.6809322253571755,
+ 0.6837883154759232,
+ 0.6854131986333437,
+ 0.6857018002664855
+ ],
+ "val_fve_base_by_layer": [
+ [
+ 0.6264502502860824,
+ 0.633638423774879,
+ 0.6565819432598134
+ ],
+ [
+ 0.6513919614996585,
+ 0.6613985574682345,
+ 0.6869096593707019
+ ],
+ [
+ 0.6611033381591916,
+ 0.6716362203603016,
+ 0.6991536592313756
+ ],
+ [
+ 0.66649499051858,
+ 0.6792081759862251,
+ 0.7061658099059659
+ ],
+ [
+ 0.6711723985472274,
+ 0.6845272938618485,
+ 0.7119645825855395
+ ],
+ [
+ 0.6738163388836447,
+ 0.6877937959750909,
+ 0.7155450829036573
+ ],
+ [
+ 0.6754764951960579,
+ 0.6893106998573423,
+ 0.7172785657238586
+ ],
+ [
+ 0.6758359645673742,
+ 0.6896311734359302,
+ 0.7176018759842319
+ ]
+ ],
+ "val_fve_aligned_by_layer": [
+ [
+ 0.6210345457361631,
+ 0.6278264157435033,
+ 0.651380156659331
+ ],
+ [
+ 0.6446888418721903,
+ 0.6542470848373093,
+ 0.6784547342679887
+ ],
+ [
+ 0.6536089899652291,
+ 0.6642535906187527,
+ 0.6903030060972842
+ ],
+ [
+ 0.6592154839900152,
+ 0.6713800143196945,
+ 0.6973410641959824
+ ],
+ [
+ 0.6634331439178027,
+ 0.6763586255268277,
+ 0.7030048576325022
+ ],
+ [
+ 0.6656399893511028,
+ 0.6792695619048873,
+ 0.7064553421205251
+ ],
+ [
+ 0.6672888201568763,
+ 0.6807850318429358,
+ 0.7081656861679716
+ ],
+ [
+ 0.6675708861875285,
+ 0.6810908585942853,
+ 0.7084435948526672
+ ]
+ ],
+ "train_fve_base_by_layer": [
+ [
+ 0.5129442294299498,
+ 0.5156938191818621,
+ 0.5360986467905373
+ ],
+ [
+ 0.6431764893133739,
+ 0.6530787017494353,
+ 0.67743396191867
+ ],
+ [
+ 0.6618652313129983,
+ 0.6743674225718114,
+ 0.700538551466408
+ ],
+ [
+ 0.6724326563939252,
+ 0.6864327243107908,
+ 0.7137364810555564
+ ],
+ [
+ 0.6801178986331697,
+ 0.6953849959846693,
+ 0.7240032671281489
+ ],
+ [
+ 0.6860084016978636,
+ 0.7023456112307133,
+ 0.7324540658576127
+ ],
+ [
+ 0.6901865686092611,
+ 0.707262370611448,
+ 0.7383057537025969
+ ],
+ [
+ 0.6925097409564016,
+ 0.7099300534907518,
+ 0.7413503145587006
+ ]
+ ],
+ "train_fve_aligned_by_layer": [
+ [
+ 0.5080299372222086,
+ 0.5100825478372278,
+ 0.5305461785073734
+ ],
+ [
+ 0.6369982385802394,
+ 0.6462390441262645,
+ 0.6696701796167578
+ ],
+ [
+ 0.6540195649295123,
+ 0.6656472806476809,
+ 0.6905705887743833
+ ],
+ [
+ 0.6633175232573274,
+ 0.6766819278886286,
+ 0.7025843324608366
+ ],
+ [
+ 0.6700740303133081,
+ 0.6847399561222853,
+ 0.7121709983945936
+ ],
+ [
+ 0.6753198687472124,
+ 0.6911622862055924,
+ 0.7202661430981865
+ ],
+ [
+ 0.679051866255792,
+ 0.6957064838139411,
+ 0.725870660641565
+ ],
+ [
+ 0.6811704615786698,
+ 0.6981789980168518,
+ 0.7287727255467706
+ ]
+ ],
+ "dead_neurons": [
+ 0.8832501577690757,
+ 0.9564017257144837,
+ 0.9614509329834808,
+ 0.9621241365130436,
+ 0.961682792024451,
+ 0.961177955297302,
+ 0.9608592282452224,
+ 0.9607996527590532
+ ],
+ "l0_base": [
+ 120.13765799393796,
+ 71.49038236432095,
+ 70.5635059042695,
+ 71.62221756471365,
+ 73.03186058316116,
+ 74.30840404842307,
+ 75.1019739261233,
+ 75.38008013901764
+ ],
+ "l0_aligned": [
+ 118.25455218313455,
+ 69.83914911879202,
+ 69.18486336275491,
+ 70.40499982149088,
+ 71.87874234272455,
+ 73.09848961771682,
+ 73.85036860442064,
+ 74.12330600273101
+ ],
+ "l0_base_by_layer": [
+ [
+ 110.42726576182136,
+ 118.15745402802102,
+ 131.82824357851723
+ ],
+ [
+ 60.61651707530648,
+ 70.74379743140689,
+ 83.11082530647985
+ ],
+ [
+ 59.78093987157035,
+ 70.1333917104495,
+ 81.77617848803268
+ ],
+ [
+ 60.539860624635146,
+ 71.36715557501459,
+ 82.95962857559836
+ ],
+ [
+ 61.52302247518973,
+ 72.9452714535902,
+ 84.62728035610041
+ ],
+ [
+ 62.357304436660826,
+ 74.39871570344425,
+ 86.16918417980152
+ ],
+ [
+ 62.89430093403386,
+ 75.2756129597198,
+ 87.13600043782837
+ ],
+ [
+ 63.096559398715705,
+ 75.58358873321657,
+ 87.46008464681844
+ ]
+ ],
+ "l0_aligned_by_layer": [
+ [
+ 108.76663747810858,
+ 116.1836325160537,
+ 129.81337565674255
+ ],
+ [
+ 59.149591360186804,
+ 69.2851539696439,
+ 81.08269483362523
+ ],
+ [
+ 58.44850043782837,
+ 68.96836690017513,
+ 80.13771526561588
+ ],
+ [
+ 59.264685493286635,
+ 70.46750948628137,
+ 81.48279699357852
+ ],
+ [
+ 60.21510143023934,
+ 72.08517586106247,
+ 83.3359420607122
+ ],
+ [
+ 60.98376386456509,
+ 73.48206727962639,
+ 84.82963003502627
+ ],
+ [
+ 61.47834573847052,
+ 74.30782983070637,
+ 85.76492265032107
+ ],
+ [
+ 61.64835084646818,
+ 74.61715557501459,
+ 86.1044038237011
+ ]
+ ],
+ "val_l0_base": [
+ 74.9518095485827,
+ 72.04285087385726,
+ 71.85811411273417,
+ 73.11738550475755,
+ 74.9007987376907,
+ 75.9018712867617,
+ 76.71053545886933,
+ 76.88609194131422
+ ],
+ "val_l0_aligned": [
+ 73.2166620624003,
+ 70.44959171155361,
+ 70.45259094238281,
+ 72.05052217014173,
+ 73.63809156168193,
+ 74.55457613480652,
+ 75.40545250857687,
+ 75.5361974426589
+ ],
+ "val_l0_base_by_layer": [
+ [
+ 63.72376746532181,
+ 74.18455497382199,
+ 86.94709861715427
+ ],
+ [
+ 61.39261563286107,
+ 71.40614095158602,
+ 83.32978840773018
+ ],
+ [
+ 61.2749781983061,
+ 70.88192629190016,
+ 83.41743021860172
+ ],
+ [
+ 61.58027924542652,
+ 73.10269416689249,
+ 84.66917539267016
+ ],
+ [
+ 62.88618019862949,
+ 75.00589005235602,
+ 86.81031851244222
+ ],
+ [
+ 63.25599914071447,
+ 76.11076570680628,
+ 88.33884162303664
+ ],
+ [
+ 64.0468477378965,
+ 76.84511346467502,
+ 89.23963788416998
+ ],
+ [
+ 64.24105585307976,
+ 77.04439356938707,
+ 89.37281851244222
+ ]
+ ],
+ "val_l0_aligned_by_layer": [
+ [
+ 62.179482984293195,
+ 72.61747382198953,
+ 84.85302139202338
+ ],
+ [
+ 59.61180192637818,
+ 70.26423429319372,
+ 81.47273126572215
+ ],
+ [
+ 59.49803664921466,
+ 70.10842060668307,
+ 81.75130890052355
+ ],
+ [
+ 60.45980585307975,
+ 72.25725351703105,
+ 83.434500449615
+ ],
+ [
+ 61.48102094240838,
+ 73.94295377881114,
+ 85.49029233443176
+ ],
+ [
+ 61.75714442867259,
+ 74.9690227009239,
+ 86.93755456415147
+ ],
+ [
+ 62.58933246073298,
+ 75.80775523560209,
+ 87.81926267933471
+ ],
+ [
+ 62.69873474280872,
+ 75.9856566583923,
+ 87.92419285799197
+ ]
+ ],
+ "self_recon": [
+ 0.03777978854700793,
+ 0.027052706861036672,
+ 0.025448514492198242,
+ 0.02454077672176346,
+ 0.02385480677675275,
+ 0.02330240188954834,
+ 0.022913188621906323,
+ 0.02270352855320396
+ ],
+ "cross_recon": [
+ 0.03782143883333176,
+ 0.027059119352413005,
+ 0.025455742043601234,
+ 0.02455789306061315,
+ 0.02389682638331143,
+ 0.023368101301714475,
+ 0.022991156307746636,
+ 0.022785278416678562
+ ],
+ "sparsity": [
+ 0.023822688434807986,
+ 0.005265172788959216,
+ 0.0054389986500983445,
+ 0.005584087701795989,
+ 0.0057119209736898775,
+ 0.005816767561171689,
+ 0.005884397146713382,
+ 0.005909877130625823
+ ],
+ "val_self_recon": [
+ 0.028549473235354374,
+ 0.026422248499867802,
+ 0.025580557531321237,
+ 0.025051593956054816,
+ 0.024642081082803417,
+ 0.02439956735644041,
+ 0.024270332832845094,
+ 0.024245827668468365
+ ],
+ "val_cross_recon": [
+ 0.028555416477678335,
+ 0.02642720296042752,
+ 0.02558600322309277,
+ 0.02506005116470197,
+ 0.024656025887861924,
+ 0.0244177212324286,
+ 0.024290522209402778,
+ 0.02426636498909034
+ ],
+ "val_sparsity": [
+ 0.005231669019049531,
+ 0.005410865241788445,
+ 0.0054814473564946216,
+ 0.005573431137188567,
+ 0.00567762855274593,
+ 0.005746121440297334,
+ 0.0058087650418320565,
+ 0.0058244062753404
+ ],
+ "layers": [
+ 12,
+ 13,
+ 14
+ ],
+ "topk_mode": "model_balanced_layer_agg"
+ }
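This file tracks eight epochs of training and validation metrics (losses, FVE, L0, dead-neuron fraction, per-layer breakdowns). A minimal sketch of recreating the loss curves from it; field names match the file above, and matplotlib is an assumed local dependency:

```python
# Sketch: plot train/val loss per epoch from training_metrics.json.
import json
import matplotlib.pyplot as plt

with open("training_metrics.json") as f:
    t = json.load(f)

plt.plot(t["epochs"], t["train_loss"], label="train loss")
plt.plot(t["epochs"], t["val_loss"], label="val loss")
plt.xlabel("epoch")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curves_reproduced.png", dpi=150)  # hypothetical output name
```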
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/aligned_decoder_norm_heatmap.png ADDED

Git LFS Details

  • SHA256: 0b785c0789235e0e09021152111abe2883320037492c285cb1cf161898bf4426
  • Pointer size: 131 Bytes
  • Size of remote file: 194 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/base_decoder_norm_heatmap.png ADDED

Git LFS Details

  • SHA256: 7963e169c56dc9770c48ebefcc48cf8e0fb74093580fa70215efb62ed9e1325b
  • Pointer size: 131 Bytes
  • Size of remote file: 193 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/cf_shift_by_layer.png ADDED

Git LFS Details

  • SHA256: 5bb386efba3e01de7132c86769c5ab2467c86e965676e39b6804d118f6f6e947
  • Pointer size: 130 Bytes
  • Size of remote file: 44 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/cf_shift_p95_by_layer.png ADDED

Git LFS Details

  • SHA256: 1b98235767fb49dc13db68afb0d9b4c78927e833808be066213cd3f90714e32d
  • Pointer size: 131 Bytes
  • Size of remote file: 116 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/class_distribution_multilayer.png ADDED

Git LFS Details

  • SHA256: ff6c710cc3fb0f7f7b4dd3b91cbd73c7516f52833d1434f366486918b81e7ccf
  • Pointer size: 130 Bytes
  • Size of remote file: 83.8 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/class_distribution_primary.png ADDED

Git LFS Details

  • SHA256: 6450e25fffc381fdd660003367ca532e23cb407b552824d0810da1e6cbb5e414
  • Pointer size: 131 Bytes
  • Size of remote file: 146 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/cross_layer_cosine_drift_by_stream.png ADDED

Git LFS Details

  • SHA256: d9a5495cabc23385c43815cd409e968b6155c052712e4add89302f66e868c10e
  • Pointer size: 130 Bytes
  • Size of remote file: 54.7 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/decoder_norm_ratio_by_layer.png ADDED

Git LFS Details

  • SHA256: 95a6ee52212b466029370571e30669187732f4a225cf249ee6b5be1eb76614a0
  • Pointer size: 130 Bytes
  • Size of remote file: 50.2 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/feature_layer_trajectories.png ADDED

Git LFS Details

  • SHA256: b50250e7ab757910a1afec29c7b7b91f669a54518082bb8d0fefd85b0f0c78ac
  • Pointer size: 131 Bytes
  • Size of remote file: 609 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/feature_sharing_ratio_by_layer.png ADDED

Git LFS Details

  • SHA256: 79372f07321e52568481e022c394f15359ead5e999bd541279f0931175d2ac69
  • Pointer size: 130 Bytes
  • Size of remote file: 62.3 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/fve_by_layer.png ADDED

Git LFS Details

  • SHA256: b55d5c6286ab990ae0871f45d91ea5ab804868ca6b5f4fd4f7369462e97fa405
  • Pointer size: 131 Bytes
  • Size of remote file: 104 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/l0_by_layer.png ADDED

Git LFS Details

  • SHA256: 705f648cc6291e9ef79862fdf65a3fb9e6bc7640e8de25d608d71dff7476b781
  • Pointer size: 130 Bytes
  • Size of remote file: 91.6 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/layer_concentration_entropy.png ADDED

Git LFS Details

  • SHA256: 312d9cb7fa53e91947d5e5858294916964b0595d05ac8c0e56b68203c824cd5a
  • Pointer size: 131 Bytes
  • Size of remote file: 107 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/loss_curves.png ADDED

Git LFS Details

  • SHA256: c4e6fa4dfe3a8537641f5d3139ce0130686b2554b04827d8bffbcecc19a5739d
  • Pointer size: 131 Bytes
  • Size of remote file: 327 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/max_norm_layer_migration.png ADDED

Git LFS Details

  • SHA256: 87135a29f45f35bbe7cdc9119b42d5471a261be7b413c570f74bb2a0ff50f1a6
  • Pointer size: 130 Bytes
  • Size of remote file: 80.1 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/rho_histogram_by_layer.png ADDED

Git LFS Details

  • SHA256: fcebb92a72b5be882bcfd1f347ec756a05a1019895002a2e43b7fac4a9a5825a
  • Pointer size: 130 Bytes
  • Size of remote file: 55.3 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/rho_theta_scatter_by_layer.png ADDED

Git LFS Details

  • SHA256: ee8823cc50c293947c198259274ddad737015da8bff88e8d6e095f617e539d5d
  • Pointer size: 132 Bytes
  • Size of remote file: 1.17 MB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/superposition_by_layer.png ADDED

Git LFS Details

  • SHA256: fc8f4a0305ed1c9069347275b1244a1f8a55ed5a755eb3d3a261ac501c52b2a0
  • Pointer size: 131 Bytes
  • Size of remote file: 107 kB
v2-samelayer/assembled_activations/llama32-3b-dpo/plots/theta_by_layer.png ADDED

Git LFS Details

  • SHA256: 1553d348e4ea95b02e7551fd2cd2d9f6fb83b59cf8d0b0d5b0adfe5d2c8610d3
  • Pointer size: 130 Bytes
  • Size of remote file: 51.6 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/checkpoints/final.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab5c226343b2042cb69e8cba11d4f0ff6753f3d104d3d8a1a036e8e217e58153
+ size 10873439922
v2-samelayer/assembled_activations/llama32-3b-grpo/features/cross_layer_cosine_drift.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da7045e64bece542f6164c96fe5d4646c516a3f92c58101857158273d5197575
+ size 27244726
v2-samelayer/assembled_activations/llama32-3b-grpo/features/decoder_layer_profiles.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7e8c1f429a3bfda239e53e7c0638b75f2511ed00bf7556ed10bed296bb20867
+ size 13037100
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/aligned_decoder_norm_heatmap.png ADDED

Git LFS Details

  • SHA256: f179fead9df779fc48f1573c2c8935981a29cbfff6f8d8d718ff48aa8df82c89
  • Pointer size: 131 Bytes
  • Size of remote file: 188 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/base_decoder_norm_heatmap.png ADDED

Git LFS Details

  • SHA256: bfd400890bbf5fa81872f9292127c79bbe7e37c53425f8f94ddab09c91ffa1af
  • Pointer size: 131 Bytes
  • Size of remote file: 187 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/cf_shift_by_layer.png ADDED

Git LFS Details

  • SHA256: 5bb386efba3e01de7132c86769c5ab2467c86e965676e39b6804d118f6f6e947
  • Pointer size: 130 Bytes
  • Size of remote file: 44 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/cf_shift_p95_by_layer.png ADDED

Git LFS Details

  • SHA256: 2f3b143076d09c6c52194fecb5886c0e0f1e098b84123d39c5c86d8f8cb074f5
  • Pointer size: 131 Bytes
  • Size of remote file: 115 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/class_distribution_multilayer.png ADDED

Git LFS Details

  • SHA256: 7c559df4d6cd1e545ff36ab23fd197f9fa87447acfb963436384f9f623a306b9
  • Pointer size: 130 Bytes
  • Size of remote file: 83.8 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/class_distribution_primary.png ADDED

Git LFS Details

  • SHA256: b2c54e345977052040bd492700c2efbfc3faa119ba84c6cdf0c4909996b99c1a
  • Pointer size: 131 Bytes
  • Size of remote file: 146 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/cross_layer_cosine_drift_by_stream.png ADDED

Git LFS Details

  • SHA256: dba3d52fd2c05d42436711a636ec7fec8017f8330e5f53518818204a15237b9d
  • Pointer size: 130 Bytes
  • Size of remote file: 54.8 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/decoder_norm_ratio_by_layer.png ADDED

Git LFS Details

  • SHA256: 2515fba00de16bea47142354e608726d523e141e83683d72869fbf00e3503fb7
  • Pointer size: 130 Bytes
  • Size of remote file: 48.3 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/feature_layer_trajectories.png ADDED

Git LFS Details

  • SHA256: 4a73b77474e8fbf6a78d6cbd0695a5d67f990f1cc292e6031ee8de43627f3547
  • Pointer size: 131 Bytes
  • Size of remote file: 546 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/feature_sharing_ratio_by_layer.png ADDED

Git LFS Details

  • SHA256: dcafa9ca1fa1e781a7d60ecf8502db14008eac8b7c2678709237c2f5b3a9b06e
  • Pointer size: 130 Bytes
  • Size of remote file: 57.6 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/fve_by_layer.png ADDED

Git LFS Details

  • SHA256: 93571c4873c5d7f84f476cd5c6582db96691d24abb6e345facc4d486c30b28ef
  • Pointer size: 131 Bytes
  • Size of remote file: 104 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/l0_by_layer.png ADDED

Git LFS Details

  • SHA256: c2fc980a2f0f5dcc8ba4a644636fb766d284cf60df2ebf64917bfac75ee4d897
  • Pointer size: 130 Bytes
  • Size of remote file: 92.5 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/layer_concentration_entropy.png ADDED

Git LFS Details

  • SHA256: 92f0be3690bcd9243a1f980183b136cd5cbddd0f6e2b504eb3bfa2bac984c79e
  • Pointer size: 131 Bytes
  • Size of remote file: 107 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/loss_curves.png ADDED

Git LFS Details

  • SHA256: 739a40dc8fd23f021c9b72ea77bfad3db88f584c768cd77d7af59920e72f520e
  • Pointer size: 131 Bytes
  • Size of remote file: 323 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/max_norm_layer_migration.png ADDED

Git LFS Details

  • SHA256: 6fa23c28164671245154f076e84ff9cd09b4cf932b309e41403733bc1440b289
  • Pointer size: 130 Bytes
  • Size of remote file: 81.2 kB
v2-samelayer/assembled_activations/llama32-3b-grpo/plots/rho_histogram_by_layer.png ADDED

Git LFS Details

  • SHA256: f278e30528e6592094a892b88b16e4d1a05426c7a4d6694829eb14d121193fa8
  • Pointer size: 130 Bytes
  • Size of remote file: 55.2 kB