tommmcgrath committed
Commit: 8304330
Parent(s): bd1c3e1
Upload folder using huggingface_hub
sae_group_gpt2_blocks.1.hook_mlp_out_24576:v0/cfg.json
ADDED
@@ -0,0 +1 @@
+{"model_name": "gpt2", "model_class_name": "HookedTransformer", "hook_point": "blocks.1.hook_mlp_out", "hook_point_eval": "blocks.{layer}.attn.pattern", "hook_point_layer": 1, "hook_point_head_index": null, "dataset_path": "apollo-research/Skylion007-openwebtext-tokenizer-gpt2", "streaming": true, "is_dataset_tokenized": true, "context_size": 512, "use_cached_activations": false, "cached_activations_path": null, "d_in": 768, "d_sae": 24576, "b_dec_init_method": "zeros", "expansion_factor": 32, "activation_fn": "relu", "normalize_sae_decoder": false, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": false, "decoder_orthogonal_init": false, "decoder_heuristic_init": true, "init_encoder_as_decoder_transpose": true, "n_batches_in_buffer": 64, "training_tokens": 819200000, "finetuning_tokens": 0, "store_batch_size_prompts": 32, "train_batch_size_tokens": 4096, "normalize_activations": true, "device": "cuda", "seed": 42, "dtype": "torch.float32", "prepend_bos": true, "autocast": true, "compile_llm": true, "llm_compilation_mode": null, "compile_sae": true, "sae_compilation_mode": null, "adam_beta1": 0.9, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 5, "lp_norm": 1.0, "scale_sparsity_penalty_by_decoder_norm": true, "l1_warm_up_steps": 10000, "lr": 5e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 5e-06, "lr_decay_steps": 0, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 1000, "dead_feature_window": 1000, "dead_feature_threshold": 0.0001, "n_eval_batches": 40, "eval_batch_size_prompts": 4, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "gpt-2-mlp-outs-reduced-d_sae", "wandb_id": null, "run_name": "24576-L1-5-LR-5e-05-Tokens-8.192e+08", "wandb_entity": null, "wandb_log_frequency": 50, "eval_every_n_wandb_logs": 10, "resume": false, "n_checkpoints": 0, "checkpoint_path": "checkpoints/jkjl04jz/x0nryzbx/b9uspgb2", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {}, "sae_lens_version": "2.1.3", "sae_lens_training_version": "2.1.3", "tokens_per_buffer": 134217728}
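
The config above can be fetched and inspected directly. A minimal sketch, assuming `huggingface_hub` is installed; `your-repo-id` is a hypothetical placeholder, since the commit view does not show the repository id:

```python
# Minimal sketch: download and inspect the SAE training config.
import json

from huggingface_hub import hf_hub_download

cfg_path = hf_hub_download(
    repo_id="your-repo-id",  # placeholder; the actual repo id is not in the commit
    filename="sae_group_gpt2_blocks.1.hook_mlp_out_24576:v0/cfg.json",
)

with open(cfg_path) as f:
    cfg = json.load(f)

# Geometry recorded in the config: d_sae = expansion_factor * d_in.
assert cfg["d_sae"] == cfg["expansion_factor"] * cfg["d_in"]  # 24576 == 32 * 768
print(cfg["model_name"], cfg["hook_point"], cfg["d_in"], cfg["d_sae"])
```
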
sae_group_gpt2_blocks.1.hook_mlp_out_24576:v0/sae_weights.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65e6740f5548afa990185b838b5cf141d71d2ee11604b3fc8f4ece25325c4643
+size 151195024
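
This file is a Git LFS pointer; the actual weights download via LFS or `hf_hub_download`. Once local, they open with `safetensors`. A minimal sketch, assuming the standard SAELens tensor names (`W_enc`, `W_dec`, `b_enc`, `b_dec`), which the pointer file itself does not record:

```python
# Minimal sketch: load the SAE weights and sanity-check their shapes
# against d_in=768 and d_sae=24576 from cfg.json. The tensor names are
# an assumption based on SAELens conventions.
from safetensors.torch import load_file

weights = load_file(
    "sae_group_gpt2_blocks.1.hook_mlp_out_24576:v0/sae_weights.safetensors"
)

print({name: tuple(t.shape) for name, t in weights.items()})
# Expected under the assumed naming:
#   W_enc: (768, 24576), b_enc: (24576,)
#   W_dec: (24576, 768), b_dec: (768,)
```
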
sae_group_gpt2_blocks.1.hook_mlp_out_24576:v0/sparsity.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c6de4c7d3faa1e9471500119fb3e4dbaea0dc7cf6ffcb242c58b10516fc3ae3
+size 98384
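
The sparsity file is small (98384 bytes, consistent with one float32 per feature for d_sae = 24576 plus the safetensors header). A minimal sketch of inspecting it, assuming the SAELens convention that it stores log10 firing frequency per feature; the pointer file itself only records an oid and size:

```python
# Minimal sketch: inspect the per-feature sparsity tensor. Assumes the
# SAELens convention of log10 firing frequency per feature.
import math

from safetensors.torch import load_file

tensors = load_file(
    "sae_group_gpt2_blocks.1.hook_mlp_out_24576:v0/sparsity.safetensors"
)
sparsity = next(iter(tensors.values()))  # expected shape: (24576,)

# Features firing less often than cfg.json's dead_feature_threshold (1e-4)
# count as dead under the assumed log10 encoding.
threshold = math.log10(1e-4)  # -4.0
n_dead = int((sparsity < threshold).sum())
print(f"{n_dead} / {sparsity.numel()} features below threshold")
```
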