norabelrose committed on
Commit
c0d4d78
·
verified ·
1 Parent(s): ca210ff

Upload folder using huggingface_hub

Browse files
config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"sae": {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false}, "batch_size": 4, "grad_acc_steps": 2, "micro_acc_steps": 1, "lr": null, "lr_warmup_steps": 1000, "auxk_alpha": 0.0, "dead_feature_threshold": 10000000, "hookpoints": ["layers.0.mlp", "layers.1.mlp", "layers.2.mlp", "layers.3.mlp", "layers.4.mlp", "layers.5.mlp", "layers.6.mlp", "layers.7.mlp", "layers.8.mlp", "layers.9.mlp", "layers.10.mlp", "layers.11.mlp", "layers.12.mlp", "layers.13.mlp", "layers.14.mlp", "layers.15.mlp"], "init_seeds": [0], "layers": [], "layer_stride": 1, "transcode": false, "distribute_modules": true, "save_every": 1000, "log_to_wandb": true, "run_name": null, "wandb_log_frequency": 1, "model": "meta-llama/Llama-3.2-1B", "dataset": "EleutherAI/rpj-v2-sample", "split": "train", "ctx_len": 2048, "revision": null, "load_in_8bit": false, "max_examples": null, "resume": false, "text_column": "raw_content", "finetune": null, "shuffle_seed": 42, "data_preprocessing_num_proc": 112}
layers.0.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.0.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a96d55b34d4918555af141185814f59b8a318941869a47be39c6503b42c1215
3
+ size 2148016472
layers.1.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.1.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c99dc1dfc2272d614d43a25840d61192309fb5d9ddf7c32efeffb4f23c17762a
3
+ size 2148016472
layers.10.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.10.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:77a7b946e2df8a60640dbf47d2c7bec7baa0b419a00819af6ae851ff6e8eacd6
3
+ size 2148016472
layers.11.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.11.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:07b5d33e9c5846fe9c2365b14d99d82d7b5c7d10d5740b452fda03b524118049
3
+ size 2148016472
layers.12.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.12.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aed25e161b72b18c8fc985ca86907cd69eecaa49dfdf75649c15d3d95cbc351d
3
+ size 2148016472
layers.13.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.13.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0dcfd74a71c891a23a47939eea482a8fdc45bd9179933460abe2098421fb525d
3
+ size 2148016472
layers.14.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.14.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cd8855af21ca11964bdd27170dcf19ded2586bda052352289c92f0431eb0f8cf
3
+ size 2148016472
layers.15.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.15.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4938369aafeebc1ed05ab2120fd107a1d1bd4c82d4fdf53bc6dd67dbaff7beaf
3
+ size 2148016472
layers.2.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.2.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e51b440cc123ccdaa47ab3e8f9449db82f0ddee77463f94bf7ffc6570df9b1a8
3
+ size 2148016472
layers.3.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.3.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cd7c9d4318998f93f32cdd1f00e7f6e3a0b281cac42de131ee28ad1a96f1a1ff
3
+ size 2148016472
layers.4.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.4.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6ee83109c3730d5f447c3abdccd3edf600f363ff62717a2d053173aa28632207
3
+ size 2148016472
layers.5.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.5.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8158c97f62526f737204761c20ea42a0b267f2e922054df2f50d0340f1dc92f9
3
+ size 2148016472
layers.6.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.6.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8bc4655874add8a23ca298b12d08569d35004541c591b1023fd443104d50f85
3
+ size 2148016472
layers.7.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.7.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:48caa0710540f66dea5351d0b9f2bb3e7e7bcc208d393065242d8ef0754d8436
3
+ size 2148016472
layers.8.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.8.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0aed46f99816bdff5ebb22304dcf2379effbcd9aad6589aa4226db07f5b90928
3
+ size 2148016472
layers.9.mlp/cfg.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": false, "d_in": 2048}
layers.9.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4175ee29b3f86075f2c1706777a6cd1eb156dcbe40037cb8e29c2f9d30ad6338
3
+ size 2148016472