{ "architecture": "standard", "d_in": 4096, "d_sae": 131072, "activation_fn_str": "topk", "activation_fn_kwargs": { "k": 128 }, "apply_b_dec_to_input": true, "normalize_activations": "layer_norm", "finetuning_scaling_factor": false, "dtype": "float32", "context_size": 256, "model_name": "mistral-7b-instruct", "hook_name": "blocks.16.hook_mlp_out", "hook_layer": 16, "hook_head_index": null, "prepend_bos": false, "dataset_path": "monology/pile-uncopyrighted", "dataset_trust_remote_code": false }