{"seed": 49, "batch_size": 4096, "buffer_mult": 384, "lr": 0.0012, "num_tokens": 2000000000, "l1_coeff": 1.1, "beta1": 0.9, "beta2": 0.99, "dict_mult": 32, "seq_len": 128, "enc_dtype": "fp32", "model_name": "gpt2-small", "site": "z", "layer": 4, "device": "cuda", "reinit": "reinit", "head": "cat", "concat_heads": true, "resample_scheme": "anthropic", "anthropic_neuron_resample_scale": 0.2, "dead_direction_cutoff": 1e-06, "re_init_every": 25000, "anthropic_resample_last": 12500, "resample_factor": 0.01, "num_resamples": 4, "wandb_project_name": "gpt2-L4-20240118", "wandb_entity": "ckkissane", "save_state_dict_every": 50000, "b_dec_init": "zeros", "sched_type": "cosine_warmup", "sched_epochs": 1000, "sched_lr_factor": 0.1, "sched_warmup_epochs": 1000, "sched_finish": true, "anthropic_resample_batches": 100, "eval_every": 1000, "model_batch_size": 512, "buffer_size": 1572864, "buffer_batches": 12288, "act_name": "blocks.4.attn.hook_z", "act_size": 768, "dict_size": 24576, "name": "gpt2-small_4_24576_z"}