{
  "architecture": "topk",
  "hook_name": "blocks.8.hook_resid_post",
  "hook_layer": 8,
  "layer": 8,
  "k": 32,
  "activation_fn_str": "relu",
  "d_sae": 65536,
  "d_in": 2048,
  "multi_topk": false,
  "device": "cuda",
  "apply_b_dec_to_input": false,
  "finetuning_scaling_factor": false,
  "context_size": 1024,
  "hook_head_index": null,
  "prepend_bos": true,
  "normalize_activations": "none",
  "dtype": "float32",
  "sae_lens_training_version": "eleuther",
  "neuronpedia_id": null,
  "activation_fn_kwargs": {},
  "model_from_pretrained_kwargs": {}
}