huypn16 committed
Commit c1d312b
1 Parent(s): dac6ddb

Update layers.8/cfg.json

Files changed (1)
  1. layers.8/cfg.json +23 -1
layers.8/cfg.json CHANGED
@@ -1 +1,23 @@
- {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "d_in": 2048}
+ {
+ "architecture": "topk",
+ "hook_name": "blocks.8.hook_resid_post",
+ "hook_layer": 8,
+ "layer": 8,
+ "k": 32,
+ "activation_fn_str": "relu",
+ "d_sae": 65536,
+ "d_in": 2048,
+ "multi_topk": false,
+ "device": "cuda",
+ "apply_b_dec_to_input": false,
+ "finetuning_scaling_factor": false,
+ "context_size": 1024,
+ "hook_head_index": null,
+ "prepend_bos": true,
+ "normalize_activations": "none",
+ "dtype": "float32",
+ "sae_lens_training_version": "eleuther",
+ "neuronpedia_id": null,
+ "activation_fn_kwargs": {},
+ "model_from_pretrained_kwargs": {}
+ }
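
For reference, the updated config is plain JSON and can be read with the Python standard library. The sketch below is a minimal, illustrative example, not part of this commit; the relative path layers.8/cfg.json is an assumption about where the repo is checked out. It loads the file and sanity-checks that the new fields agree with the old config, i.e. d_sae = expansion_factor (32) * d_in (2048) = 65536 and k = 32.

import json

# Minimal sketch: load the updated cfg.json and check a few invariants.
# The relative path is an assumption; adjust it to your local checkout.
with open("layers.8/cfg.json") as f:
    cfg = json.load(f)

# The old config used expansion_factor 32 with d_in 2048,
# so the new d_sae should be 32 * 2048 = 65536.
assert cfg["d_sae"] == 32 * cfg["d_in"]
assert cfg["architecture"] == "topk" and cfg["k"] == 32 and cfg["multi_topk"] is False

print(f'hook={cfg["hook_name"]} d_in={cfg["d_in"]} d_sae={cfg["d_sae"]} k={cfg["k"]}')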