layers.8/cfg.json
{
  "architecture": "topk",
  "hook_name": "blocks.8.hook_resid_post",
  "hook_layer": 8,
  "layer": 8,
  "k": 32,
  "activation_fn_str": "relu",
  "d_sae": 65536,
  "d_in": 2048,
  "multi_topk": false,
  "device": "cuda",
  "apply_b_dec_to_input": false,
  "finetuning_scaling_factor": false,
  "context_size": 1024,
  "hook_head_index": null,
  "prepend_bos": true,
  "normalize_activations": "none",
  "dtype": "float32",
  "sae_lens_training_version": "eleuther",
  "neuronpedia_id": null,
  "activation_fn_kwargs": {},
  "model_from_pretrained_kwargs": {}
}
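
For reference, a minimal sketch of fetching and inspecting this config from the Hub with huggingface_hub. The repo_id below is a placeholder assumption (the owning repository is not shown on this page); the filename matches this file's path. The printed fields reflect the values above: a top-k SAE (k=32) hooked on the residual stream after block 8, mapping d_in=2048 model activations to d_sae=65536 latents.

import json
from huggingface_hub import hf_hub_download

# Placeholder repo id: substitute the actual repository that hosts this file.
cfg_path = hf_hub_download(
    repo_id="huypn16/REPO_NAME",
    filename="layers.8/cfg.json",
)

with open(cfg_path) as f:
    cfg = json.load(f)

# Inspect the key geometry of the SAE this config describes.
print(cfg["architecture"])         # "topk"
print(cfg["k"])                    # 32 active latents per token
print(cfg["d_in"], cfg["d_sae"])   # 2048 -> 65536
print(cfg["hook_name"])            # "blocks.8.hook_resid_post"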