Jacob Dunefsky committed on
Commit 01a805b
1 Parent(s): 71e425d

tc10 through tc19 (inclusive)

pythia-2.8B-dun-mlp-tc10/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc10/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8af3401b142ceefb2e13b884d606d2b4c4d8bd304b6b09668f830a5a4a65f742
+ size 629273928
pythia-2.8B-dun-mlp-tc10/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc10", "input_layer": {"layer": 10, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 10, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc10/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc11/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc11/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b487c4177603add3d3f4b13407ef9dd871c2f0087ba32d4741e38b24740342d7
+ size 629273928
pythia-2.8B-dun-mlp-tc11/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc11", "input_layer": {"layer": 11, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 11, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc11/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc12/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc12/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:180a7a0d63bb81fdaac3b78698870a4700303a16a8a796981b1eed62a3c1570c
+ size 629273928
pythia-2.8B-dun-mlp-tc12/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc12", "input_layer": {"layer": 12, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 12, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc12/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc13/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc13/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb9c87d70dac3d347526535333140178d0563ff92514f917b1b8cdfa4598bd9a
+ size 629273928
pythia-2.8B-dun-mlp-tc13/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc13", "input_layer": {"layer": 13, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 13, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc13/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc14/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc14/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e27d71cbb80e9916ceab6072658bbeec2db50b4e51376ae9b145f701362a24e3
+ size 629273928
pythia-2.8B-dun-mlp-tc14/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc14", "input_layer": {"layer": 14, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 14, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc14/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc15/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc15/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c8b258f2f90515ce50c9a24ab82abf647cdac25de371dea00d57a921bc2ab88
+ size 629273928
pythia-2.8B-dun-mlp-tc15/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc15", "input_layer": {"layer": 15, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 15, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc15/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc16/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc16/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ddf99d1cee0c9befdcc30f5fa74eac4512523c0ab664f2bb04531cbc3c8a9d6
+ size 629273928
pythia-2.8B-dun-mlp-tc16/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc16", "input_layer": {"layer": 16, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 16, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc16/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc17/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc17/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0555867cbf00443bce64e5416ccb337ea1465c41a29b51eba00806601a7a3107
+ size 629273928
pythia-2.8B-dun-mlp-tc17/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc17", "input_layer": {"layer": 17, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 17, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc17/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc18/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc18/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45c4785bd2b459c5919561e739944d07d47f3323798d06100c7d2c0c585edcdb
+ size 629273928
pythia-2.8B-dun-mlp-tc18/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc18", "input_layer": {"layer": 18, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 18, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc18/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}
pythia-2.8B-dun-mlp-tc19/sae.json ADDED
@@ -0,0 +1 @@
+ {"d_in": 2560, "num_features": 61440, "d_out": 2560, "dtype": "torch.float32", "act_fn": "top_k", "top_k": 60, "tensors_filename": "sae.safetensors"}
pythia-2.8B-dun-mlp-tc19/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92a4fc94e8d1d6911c3d1c4f264e7a59ae7349d2d0a70720d971d66b5c229aca
+ size 629273928
pythia-2.8B-dun-mlp-tc19/sae_info.json ADDED
@@ -0,0 +1 @@
+ {"name": "pythia-2.8B-dun-mlp-tc19", "input_layer": {"layer": 19, "sublayer": "mlp_in", "parallel_attn_mlp": true}, "output_layer": {"layer": 19, "sublayer": "mlp_out", "parallel_attn_mlp": true}, "short_name": null, "model_path": "pythia-2.8B", "upstreams": [{"path_type": "huggingface", "hf_repo_id": "jacobdunefsky/pythia-2.8B-transcoders", "hf_path": "pythia-2.8B-dun-mlp-tc19/sae_info.json", "absolute_path": null, "relative_path": null}], "sae_filename": "sae.json"}