{
    "seed": 42,
    "model_name_or_path": "EleutherAI/pythia-70m-deduped",
    "hook_point": "gpt_neox.layers.3",
    "dataset_name_or_path": "jbrinkma/pile-300k",
    "activation_size": -1,
    "add_bos_token": false,
    "expansion_factor": 4,
    "b_dec_init_method": "",
    "n_steps": -1,
    "device": "cuda",
    "batch_size": 32,
    "ctx_length": 256,
    "lr": 0.001,
    "min_lr": 0.0,
    "lr_warmup_steps": 5000,
    "sparsity_coefficient": 0.0012642857142857143,
    "evaluation_interval": 200,
    "beta1": 0.9,
    "beta2": 0.999,
    "l1_sqrt": true,
    "cos_sim_reg": false,
    "cos_sim_alpha": 0.0,
    "n_tokens_in_feature_cache": 500000.0,
    "use_ghost_grads": false,
    "output_dir": "outputs",
    "cache_dir": "cache",
    "checkpoint_interval": 200,
    "use_wandb": true,
    "wandb_entity": "best_sae",
    "wandb_project": "best_sae",
    "wandb_name": "sparsity_coefficient_0.001264_20240311220027664677",
    "wandb_group": "L1(Sqrt)_v2"
}