Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes. See raw diff.
- models/csp_bridges2/beeg_config.json +1 -0
- models/csp_sweep1_1x_0.9Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_1x_0.9Mnonzero_afrac0.125/beeg_config.json +1 -0
- models/csp_sweep1_1x_0.9Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_1x_0.9Mnonzero_afrac0.500/beeg_config.json +1 -0
- models/csp_sweep1_1x_1.9Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_1x_1.9Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_sweep1_1x_3.7Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_sweep1_1x_7.4Mnonzero_afrac0.125/beeg_config.json +1 -0
- models/csp_sweep1_2x_0.9Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_2x_1.9Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_2x_1.9Mnonzero_afrac0.125/beeg_config.json +1 -0
- models/csp_sweep1_2x_1.9Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_2x_1.9Mnonzero_afrac0.500/beeg_config.json +1 -0
- models/csp_sweep1_2x_14.8Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_2x_14.8Mnonzero_afrac0.125/beeg_config.json +1 -0
- models/csp_sweep1_2x_14.8Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_2x_14.8Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_sweep1_2x_3.7Mnonzero_afrac0.500/beeg_config.json +1 -0
- models/csp_sweep1_2x_7.4Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_2x_7.4Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_4x_0.9Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_4x_0.9Mnonzero_afrac0.125/beeg_config.json +1 -0
- models/csp_sweep1_4x_0.9Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_4x_0.9Mnonzero_afrac0.500/beeg_config.json +1 -0
- models/csp_sweep1_4x_1.9Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_4x_1.9Mnonzero_afrac0.500/beeg_config.json +1 -0
- models/csp_sweep1_4x_14.8Mnonzero_afrac0.500/beeg_config.json +1 -0
- models/csp_sweep1_4x_14.8Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_sweep1_4x_3.7Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_4x_7.4Mnonzero_afrac0.062/beeg_config.json +1 -0
- models/csp_sweep1_4x_7.4Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_4x_7.4Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_sweep1_8x_1.9Mnonzero_afrac0.500/final_model.pt +0 -0
- models/csp_sweep1_8x_1.9Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_sweep1_8x_14.8Mnonzero_afrac0.250/beeg_config.json +1 -0
- models/csp_sweep1_8x_3.7Mnonzero_afrac0.125/beeg_config.json +1 -0
- models/csp_sweep1_8x_3.7Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_sweep1_8x_7.4Mnonzero_afrac0.500/beeg_config.json +1 -0
- models/csp_sweep1_8x_7.4Mnonzero_afrac1.000/beeg_config.json +1 -0
- models/csp_yolo2/beeg_config.json +1 -0
- models/dense1_4x/beeg_config.json +1 -0
- train_curves/csp_bridges1/progress.json +1 -0
- train_curves/csp_sweep1_1x_0.9Mnonzero_afrac1.000/progress.json +1 -0
- train_curves/csp_sweep1_1x_1.9Mnonzero_afrac0.250/progress.json +1 -0
- train_curves/csp_sweep1_1x_1.9Mnonzero_afrac0.500/progress.json +1 -0
- train_curves/csp_sweep1_1x_3.7Mnonzero_afrac0.125/progress.json +1 -0
- train_curves/csp_sweep1_1x_3.7Mnonzero_afrac0.250/progress.json +1 -0
- train_curves/csp_sweep1_1x_7.4Mnonzero_afrac0.062/progress.json +1 -0
- train_curves/csp_sweep1_1x_7.4Mnonzero_afrac0.500/progress.json +1 -0
models/csp_bridges2/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 4, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.125, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_k,attn_q,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.5, "expansion_factor": 8, "expansion_factor_mlp": 8, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 8192, "enable_bigram_table": true, "learnable_bigram_table": false, "d_pos_emb": null, "dropout_cat_pos_emb": false, "sinusoidal_cat_pos_emb": false, "flash": true, "sink": false, "rtopk": false, "n_embd": null}
models/csp_sweep1_1x_0.9Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_1x_0.9Mnonzero_afrac0.125/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.125, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_1x_0.9Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_1x_0.9Mnonzero_afrac0.500/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.5, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_1x_1.9Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_1x_1.9Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_1x_3.7Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.5, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_1x_7.4Mnonzero_afrac0.125/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 16, "d_head": 16, "d_model": 256, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.125, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 1, "expansion_factor": 1, "expansion_factor_mlp": 1, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 1024, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_0.9Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_1.9Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_1.9Mnonzero_afrac0.125/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.125, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_1.9Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_1.9Mnonzero_afrac0.500/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.5, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_14.8Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 2, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_14.8Mnonzero_afrac0.125/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.125, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 2, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_14.8Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 2, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_14.8Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 2, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_3.7Mnonzero_afrac0.500/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.5, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.5, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_7.4Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 1, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_2x_7.4Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 32, "d_head": 16, "d_model": 512, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 1, "expansion_factor": 2, "expansion_factor_mlp": 2, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 2048, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_0.9Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_0.9Mnonzero_afrac0.125/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.125, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_0.9Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_0.9Mnonzero_afrac0.500/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.5, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.125, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_1.9Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_1.9Mnonzero_afrac0.500/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.5, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.25, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_14.8Mnonzero_afrac0.500/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.5, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 2, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_14.8Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 2, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_3.7Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.5, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_7.4Mnonzero_afrac0.062/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.0625, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 1, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_7.4Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 1, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_4x_7.4Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 1, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null, "n_embd": null}
models/csp_sweep1_8x_1.9Mnonzero_afrac0.500/final_model.pt
ADDED
File without changes
models/csp_sweep1_8x_1.9Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "tokenizer_name": "tinypython_2k", "grad_checkpointing": true, "d_mlp": 8192, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null}
models/csp_sweep1_8x_14.8Mnonzero_afrac0.250/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "tokenizer_name": "tinypython_2k", "grad_checkpointing": true, "d_mlp": 8192, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null}
models/csp_sweep1_8x_3.7Mnonzero_afrac0.125/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.125, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "tokenizer_name": "tinypython_2k", "grad_checkpointing": true, "d_mlp": 8192, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null}
models/csp_sweep1_8x_3.7Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "tokenizer_name": "tinypython_2k", "grad_checkpointing": true, "d_mlp": 8192, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null}
models/csp_sweep1_8x_7.4Mnonzero_afrac0.500/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.5, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "tokenizer_name": "tinypython_2k", "grad_checkpointing": true, "d_mlp": 8192, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null}
models/csp_sweep1_8x_7.4Mnonzero_afrac1.000/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "tokenizer_name": "tinypython_2k", "grad_checkpointing": true, "d_mlp": 8192, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": null}
models/csp_yolo2/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 8, "n_head": 128, "d_head": 16, "d_model": 2048, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": 0.25, "afrac_loctypes": "attn_in,attn_out,mlp_in,mlp_out,mlp_neuron,attn_v,attn_k,attn_q", "afrac_approx": false, "enable_sparse_kernels": false, "ignore_dw_grad": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "tokenizer_name": "tinypython_2k", "pfrac": 0.0625, "expansion_factor": 8, "expansion_factor_mlp": 8, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 8192, "enable_bigram_table": true, "learnable_bigram_table": true, "d_pos_emb": 32, "flash": true, "sink": true, "rtopk": false, "n_embd": null}
models/dense1_4x/beeg_config.json
ADDED
@@ -0,0 +1 @@
{"block_size": 1024, "vocab_size": 2048, "n_layer": 4, "n_head": 64, "d_head": 16, "d_model": 1024, "dropout": 0.0, "bias": true, "ln_bias": true, "rms_norm": true, "residual_activation_type": "identity", "activation_type": "gelu", "afrac": null, "enable_sparse_kernels": false, "debug_nans": false, "tied_unembed": false, "bigram_table_rank": null, "pfrac": 16, "expansion_factor": 4, "expansion_factor_mlp": 4, "debug_exact_topk": true, "grad_checkpointing": true, "sparse_matmul_impl": "cuda", "d_mlp": 4096, "flash": true, "enable_bigram_table": true, "learnable_bigram_table": true, "n_embd": null}
train_curves/csp_bridges1/progress.json
ADDED
@@ -0,0 +1 @@
{"xent": 1.3716938495635986, "test_kl": 0.0, "test_xent": 1.2761914730072021, "grad_norm": 0, "weight_sparsity": 0.9893760681152344, "L0_as_frac_of_orig_params": 0.25989243388175964, "L0": 2251264.0, "L0_non_LN": 2228224.0, "L0_non_embed": 1835008.0, "L0_non_embed_as_frac_of_orig_params": 0.5, "param_norm": 610.7026977539062, "aux_invertable": 0.5948593616485596, "elapsed_tokens": 31992053760, "grad_scale": 8192.0, "lr": 6.354985519237744e-06, "pfrac": 0.5, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 269959.87127701443, "num_alive_neurons/c_fc/layer_0": 8192, "num_alive_neurons/c_fc/layer_1": 8192, "num_alive_neurons/c_fc/layer_2": 8192, "num_alive_neurons/c_fc/layer_3": 8192, "test_kl_bridged": 0.20348259806632996, "test_xent_bridged": 1.365612506866455, "dead_qk/layer_0": 1546, "dead_qk/layer_1": 1711, "dead_qk/layer_2": 1610, "dead_qk/layer_3": 1299, "empirical_L0_frac/transformer.wte.weight": 0.0625, "empirical_L0_frac/transformer.wpe.weight": 0.0625, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 0.0078125, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 0.0625, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 0.0625, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 0.0078125, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 0.0625, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 0.0625, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 0.0078125, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 0.0625, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 0.0625, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 0.0078125, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 0.0625, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 0.0625, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 0.0078125, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 0.0625, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 0.0625, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 0.0078125, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 0.0625, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 0.0625, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 0.0078125, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 0.0625, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 0.0625, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 0.0078125, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 0.0625, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 0.0078125, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 0.0625, "empirical_L0_frac/lm_head.weight": 0.0625, "weight_grad_norm/transformer.wte.weight": 1994.9478759765625, "weight_grad_norm/transformer.wpe.weight": 1419.62451171875, "weight_grad_norm/transformer.h.0.ln_1.weight": 42.788997650146484, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 3418.453125, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 76.63591003417969, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 2001.5572509765625, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 45.17903137207031, 
"weight_grad_norm/transformer.h.0.ln_2.weight": 42.63290023803711, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 4028.899658203125, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 85.2895736694336, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 3970.3828125, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 45.12526321411133, "weight_grad_norm/transformer.h.1.ln_1.weight": 42.63804244995117, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 3463.614990234375, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 76.29949951171875, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 2001.994140625, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 45.133697509765625, "weight_grad_norm/transformer.h.1.ln_2.weight": 41.74234390258789, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 4005.307373046875, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 85.3062973022461, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 3873.67626953125, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 45.10429000854492, "weight_grad_norm/transformer.h.2.ln_1.weight": 41.69755554199219, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 3443.843505859375, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 75.44393920898438, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 1986.8465576171875, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 45.09878158569336, "weight_grad_norm/transformer.h.2.ln_2.weight": 38.98308181762695, "weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 3926.2900390625, "weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 81.8944091796875, "weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 3875.3232421875, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 45.0527458190918, "weight_grad_norm/transformer.h.3.ln_1.weight": 42.81784439086914, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 3390.3291015625, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 74.33521270751953, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 1973.0169677734375, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 45.03806686401367, "weight_grad_norm/transformer.h.3.ln_2.weight": 41.031883239746094, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 3912.853759765625, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 80.599853515625, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 3904.46044921875, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 44.76580810546875, "weight_grad_norm/transformer.ln_f.weight": 43.085914611816406, "weight_grad_norm/lm_head.weight": 2006.2811279296875}
train_curves/csp_sweep1_1x_0.9Mnonzero_afrac1.000/progress.json
ADDED
@@ -0,0 +1 @@
{"xent": 1.3903303146362305, "test_kl": 0.0, "test_xent": 1.2088825702667236, "grad_norm": 0, "weight_sparsity": 0.5641829967498779, "L0_as_frac_of_orig_params": 5151036.0, "L0": 5151036.0, "L0_non_LN": 5144380.0, "L0_non_embed": 851772.0, "L0_non_embed_as_frac_of_orig_params": 851772.0, "param_norm": 57704.1328125, "aux_invertable": 0, "elapsed_tokens": 31992053760, "grad_scale": 262144.0, "lr": 3.594922683996257e-05, "pfrac": 0.125, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 2819883.7604288794, "num_alive_neurons/c_fc/layer_0": 1023, "num_alive_neurons/c_fc/layer_1": 1023, "num_alive_neurons/c_fc/layer_2": 1020, "num_alive_neurons/c_fc/layer_3": 1024, "num_alive_neurons/c_fc/layer_4": 1015, "num_alive_neurons/c_fc/layer_5": 1021, "num_alive_neurons/c_fc/layer_6": 1022, "num_alive_neurons/c_fc/layer_7": 1020, "empirical_L0_frac/transformer.wte.weight": 0.125, "empirical_L0_frac/transformer.wpe.weight": 0.125, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 0.125, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 0.1249847412109375, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 0.1249847412109375, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 0.125, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 0.12497329711914062, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 0.124969482421875, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 0.125, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 0.12493896484375, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 0.12497329711914062, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 0.125, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 0.125, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.4.attn.c_attn.weight": 0.125, "empirical_L0_frac/transformer.h.4.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.4.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.4.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.4.mlp.c_fc.weight": 0.1248626708984375, "empirical_L0_frac/transformer.h.4.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.4.mlp.c_proj.weight": 0.1248626708984375, "empirical_L0_frac/transformer.h.4.mlp.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.5.attn.c_attn.weight": 0.125, 
"empirical_L0_frac/transformer.h.5.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.5.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.5.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.5.mlp.c_fc.weight": 0.1249542236328125, "empirical_L0_frac/transformer.h.5.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.5.mlp.c_proj.weight": 0.1249542236328125, "empirical_L0_frac/transformer.h.5.mlp.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.6.attn.c_attn.weight": 0.125, "empirical_L0_frac/transformer.h.6.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.6.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.6.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.6.mlp.c_fc.weight": 0.124969482421875, "empirical_L0_frac/transformer.h.6.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.6.mlp.c_proj.weight": 0.124969482421875, "empirical_L0_frac/transformer.h.6.mlp.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.7.attn.c_attn.weight": 0.125, "empirical_L0_frac/transformer.h.7.attn.c_attn.bias": 0.125, "empirical_L0_frac/transformer.h.7.attn.c_proj.weight": 0.125, "empirical_L0_frac/transformer.h.7.attn.c_proj.bias": 0.125, "empirical_L0_frac/transformer.h.7.mlp.c_fc.weight": 0.12492752075195312, "empirical_L0_frac/transformer.h.7.mlp.c_fc.bias": 0.125, "empirical_L0_frac/transformer.h.7.mlp.c_proj.weight": 0.12492752075195312, "empirical_L0_frac/transformer.h.7.mlp.c_proj.bias": 0.125, "empirical_L0_frac/lm_head.weight": 0.125, "weight_grad_norm/bigram_table": 631.363037109375, "weight_grad_norm/transformer.wte.weight": 697.4276733398438, "weight_grad_norm/transformer.wpe.weight": 496.9635009765625, "weight_grad_norm/transformer.h.0.ln_1.weight": 15.847907066345215, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 438.6827697753906, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 27.29521942138672, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 255.36196899414062, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 15.937189102172852, "weight_grad_norm/transformer.h.0.ln_2.weight": 15.381803512573242, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 504.02490234375, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 30.562679290771484, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 505.83441162109375, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 15.933929443359375, "weight_grad_norm/transformer.h.1.ln_1.weight": 15.048428535461426, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 435.08270263671875, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 27.138776779174805, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 254.4925079345703, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 15.923842430114746, "weight_grad_norm/transformer.h.1.ln_2.weight": 13.394063949584961, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 503.5754089355469, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 29.319252014160156, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 508.32415771484375, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 15.912269592285156, "weight_grad_norm/transformer.h.2.ln_1.weight": 15.366438865661621, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 438.9326477050781, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 27.141801834106445, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 253.67112731933594, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 15.907684326171875, "weight_grad_norm/transformer.h.2.ln_2.weight": 
14.388676643371582, "weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 504.48150634765625, "weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 29.940105438232422, "weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 505.33050537109375, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 15.893245697021484, "weight_grad_norm/transformer.h.3.ln_1.weight": 15.170600891113281, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 437.91217041015625, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 26.993755340576172, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 253.47360229492188, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 15.870428085327148, "weight_grad_norm/transformer.h.3.ln_2.weight": 15.468531608581543, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 505.47357177734375, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 30.986587524414062, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 500.7970275878906, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 15.869139671325684, "weight_grad_norm/transformer.h.4.ln_1.weight": 15.242147445678711, "weight_grad_norm/transformer.h.4.attn.c_attn.weight": 437.17047119140625, "weight_grad_norm/transformer.h.4.attn.c_attn.bias": 27.009096145629883, "weight_grad_norm/transformer.h.4.attn.c_proj.weight": 251.18423461914062, "weight_grad_norm/transformer.h.4.attn.c_proj.bias": 15.83334732055664, "weight_grad_norm/transformer.h.4.ln_2.weight": 13.827543258666992, "weight_grad_norm/transformer.h.4.mlp.c_fc.weight": 503.4390869140625, "weight_grad_norm/transformer.h.4.mlp.c_fc.bias": 27.57511329650879, "weight_grad_norm/transformer.h.4.mlp.c_proj.weight": 502.7713928222656, "weight_grad_norm/transformer.h.4.mlp.c_proj.bias": 15.806681632995605, "weight_grad_norm/transformer.h.5.ln_1.weight": 15.34961223602295, "weight_grad_norm/transformer.h.5.attn.c_attn.weight": 436.5926513671875, "weight_grad_norm/transformer.h.5.attn.c_attn.bias": 26.74176597595215, "weight_grad_norm/transformer.h.5.attn.c_proj.weight": 250.17210388183594, "weight_grad_norm/transformer.h.5.attn.c_proj.bias": 15.770788192749023, "weight_grad_norm/transformer.h.5.ln_2.weight": 12.854101181030273, "weight_grad_norm/transformer.h.5.mlp.c_fc.weight": 502.6910095214844, "weight_grad_norm/transformer.h.5.mlp.c_fc.bias": 25.82603645324707, "weight_grad_norm/transformer.h.5.mlp.c_proj.weight": 503.8000793457031, "weight_grad_norm/transformer.h.5.mlp.c_proj.bias": 15.757322311401367, "weight_grad_norm/transformer.h.6.ln_1.weight": 15.26122760772705, "weight_grad_norm/transformer.h.6.attn.c_attn.weight": 435.0715637207031, "weight_grad_norm/transformer.h.6.attn.c_attn.bias": 26.61015510559082, "weight_grad_norm/transformer.h.6.attn.c_proj.weight": 249.0816192626953, "weight_grad_norm/transformer.h.6.attn.c_proj.bias": 15.733631134033203, "weight_grad_norm/transformer.h.6.ln_2.weight": 12.8392972946167, "weight_grad_norm/transformer.h.6.mlp.c_fc.weight": 502.6235656738281, "weight_grad_norm/transformer.h.6.mlp.c_fc.bias": 26.23878288269043, "weight_grad_norm/transformer.h.6.mlp.c_proj.weight": 501.96063232421875, "weight_grad_norm/transformer.h.6.mlp.c_proj.bias": 15.706363677978516, "weight_grad_norm/transformer.h.7.ln_1.weight": 14.846925735473633, "weight_grad_norm/transformer.h.7.attn.c_attn.weight": 430.12005615234375, "weight_grad_norm/transformer.h.7.attn.c_attn.bias": 26.234453201293945, "weight_grad_norm/transformer.h.7.attn.c_proj.weight": 251.66656494140625, "weight_grad_norm/transformer.h.7.attn.c_proj.bias": 15.692305564880371, 
"weight_grad_norm/transformer.h.7.ln_2.weight": 13.093469619750977, "weight_grad_norm/transformer.h.7.mlp.c_fc.weight": 504.1043701171875, "weight_grad_norm/transformer.h.7.mlp.c_fc.bias": 27.070175170898438, "weight_grad_norm/transformer.h.7.mlp.c_proj.weight": 503.9578857421875, "weight_grad_norm/transformer.h.7.mlp.c_proj.bias": 15.700504302978516, "weight_grad_norm/transformer.ln_f.weight": 15.409149169921875, "weight_grad_norm/lm_head.weight": 718.8758544921875}
train_curves/csp_sweep1_1x_1.9Mnonzero_afrac0.250/progress.json
ADDED
@@ -0,0 +1 @@
{"xent": 1.4601116180419922, "test_kl": 0.0, "test_xent": 1.2725650072097778, "grad_norm": 0, "weight_sparsity": 0.4838891625404358, "L0_as_frac_of_orig_params": 6100050.0, "L0": 6100050.0, "L0_non_LN": 6091090.0, "L0_non_embed": 1700434.0, "L0_non_embed_as_frac_of_orig_params": 1700434.0, "param_norm": 50600.79296875, "aux_invertable": 0, "elapsed_tokens": 31992053760, "grad_scale": 524288.0, "lr": 2.5419942076950977e-05, "pfrac": 0.25, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 1248905.5550105039, "num_alive_neurons/c_fc/layer_0": 1022, "num_alive_neurons/c_fc/layer_1": 997, "num_alive_neurons/c_fc/layer_2": 1018, "num_alive_neurons/c_fc/layer_3": 1019, "num_alive_neurons/c_fc/layer_4": 1022, "num_alive_neurons/c_fc/layer_5": 1022, "num_alive_neurons/c_fc/layer_6": 1021, "num_alive_neurons/c_fc/layer_7": 1019, "empirical_L0_frac/transformer.wte.weight": 0.25, "empirical_L0_frac/transformer.wpe.weight": 0.2490234375, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 0.24995803833007812, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 0.249969482421875, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 0.24957656860351562, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 0.2495574951171875, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 0.249908447265625, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 0.249908447265625, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 0.203125, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 0.2499237060546875, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 0.2499237060546875, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.4.mlp.c_fc.weight": 0.249969482421875, "empirical_L0_frac/transformer.h.4.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.4.mlp.c_proj.weight": 0.249969482421875, "empirical_L0_frac/transformer.h.4.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.5.attn.c_attn.weight": 0.25, 
"empirical_L0_frac/transformer.h.5.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.5.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.5.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.5.mlp.c_fc.weight": 0.249969482421875, "empirical_L0_frac/transformer.h.5.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.5.mlp.c_proj.weight": 0.249969482421875, "empirical_L0_frac/transformer.h.5.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.6.mlp.c_fc.weight": 0.2499542236328125, "empirical_L0_frac/transformer.h.6.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.6.mlp.c_proj.weight": 0.2499542236328125, "empirical_L0_frac/transformer.h.6.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.7.mlp.c_fc.weight": 0.2499237060546875, "empirical_L0_frac/transformer.h.7.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.7.mlp.c_proj.weight": 0.2499237060546875, "empirical_L0_frac/transformer.h.7.mlp.c_proj.bias": 0.25, "empirical_L0_frac/lm_head.weight": 0.25, "weight_grad_norm/bigram_table": 642.3857421875, "weight_grad_norm/transformer.wte.weight": 680.8904418945312, "weight_grad_norm/transformer.wpe.weight": 487.96136474609375, "weight_grad_norm/transformer.h.0.ln_1.weight": 15.862854957580566, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 429.4355163574219, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 26.872060775756836, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 253.30393981933594, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 15.875631332397461, "weight_grad_norm/transformer.h.0.ln_2.weight": 14.846437454223633, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 492.71527099609375, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 30.62880516052246, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 496.0094909667969, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 15.887850761413574, "weight_grad_norm/transformer.h.1.ln_1.weight": 15.430895805358887, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 434.5604248046875, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 26.415401458740234, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 246.36277770996094, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 15.802570343017578, "weight_grad_norm/transformer.h.1.ln_2.weight": 14.48871898651123, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 490.5267028808594, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 29.25676918029785, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 463.1202087402344, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 15.79340648651123, "weight_grad_norm/transformer.h.2.ln_1.weight": 14.960880279541016, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 431.4499206542969, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 26.389997482299805, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 250.8378143310547, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 15.735588073730469, "weight_grad_norm/transformer.h.2.ln_2.weight": 14.297500610351562, 
"weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 492.275634765625, "weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 28.86977195739746, "weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 451.14678955078125, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 15.741005897521973, "weight_grad_norm/transformer.h.3.ln_1.weight": 15.442708015441895, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 426.93853759765625, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 26.713830947875977, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 245.7864990234375, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 15.718214988708496, "weight_grad_norm/transformer.h.3.ln_2.weight": 13.803006172180176, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 494.4602966308594, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 27.265729904174805, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 471.00201416015625, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 15.745279312133789, "weight_grad_norm/transformer.h.4.ln_1.weight": 15.233202934265137, "weight_grad_norm/transformer.h.4.attn.c_attn.weight": 428.91259765625, "weight_grad_norm/transformer.h.4.attn.c_attn.bias": 26.244455337524414, "weight_grad_norm/transformer.h.4.attn.c_proj.weight": 243.18531799316406, "weight_grad_norm/transformer.h.4.attn.c_proj.bias": 15.66982364654541, "weight_grad_norm/transformer.h.4.ln_2.weight": 13.87956714630127, "weight_grad_norm/transformer.h.4.mlp.c_fc.weight": 494.83514404296875, "weight_grad_norm/transformer.h.4.mlp.c_fc.bias": 27.20918846130371, "weight_grad_norm/transformer.h.4.mlp.c_proj.weight": 458.2650146484375, "weight_grad_norm/transformer.h.4.mlp.c_proj.bias": 15.708174705505371, "weight_grad_norm/transformer.h.5.ln_1.weight": 15.2291259765625, "weight_grad_norm/transformer.h.5.attn.c_attn.weight": 432.7164611816406, "weight_grad_norm/transformer.h.5.attn.c_attn.bias": 26.29124641418457, "weight_grad_norm/transformer.h.5.attn.c_proj.weight": 244.70449829101562, "weight_grad_norm/transformer.h.5.attn.c_proj.bias": 15.59890079498291, "weight_grad_norm/transformer.h.5.ln_2.weight": 13.725266456604004, "weight_grad_norm/transformer.h.5.mlp.c_fc.weight": 489.1258544921875, "weight_grad_norm/transformer.h.5.mlp.c_fc.bias": 25.67348861694336, "weight_grad_norm/transformer.h.5.mlp.c_proj.weight": 428.4815368652344, "weight_grad_norm/transformer.h.5.mlp.c_proj.bias": 15.601716995239258, "weight_grad_norm/transformer.h.6.ln_1.weight": 14.966949462890625, "weight_grad_norm/transformer.h.6.attn.c_attn.weight": 418.1392517089844, "weight_grad_norm/transformer.h.6.attn.c_attn.bias": 26.102476119995117, "weight_grad_norm/transformer.h.6.attn.c_proj.weight": 229.20591735839844, "weight_grad_norm/transformer.h.6.attn.c_proj.bias": 15.603477478027344, "weight_grad_norm/transformer.h.6.ln_2.weight": 12.808850288391113, "weight_grad_norm/transformer.h.6.mlp.c_fc.weight": 487.8836975097656, "weight_grad_norm/transformer.h.6.mlp.c_fc.bias": 24.362924575805664, "weight_grad_norm/transformer.h.6.mlp.c_proj.weight": 434.13580322265625, "weight_grad_norm/transformer.h.6.mlp.c_proj.bias": 15.610278129577637, "weight_grad_norm/transformer.h.7.ln_1.weight": 15.06717586517334, "weight_grad_norm/transformer.h.7.attn.c_attn.weight": 423.3311462402344, "weight_grad_norm/transformer.h.7.attn.c_attn.bias": 25.83054542541504, "weight_grad_norm/transformer.h.7.attn.c_proj.weight": 234.76136779785156, "weight_grad_norm/transformer.h.7.attn.c_proj.bias": 15.414196968078613, "weight_grad_norm/transformer.h.7.ln_2.weight": 
14.320310592651367, "weight_grad_norm/transformer.h.7.mlp.c_fc.weight": 495.8484191894531, "weight_grad_norm/transformer.h.7.mlp.c_fc.bias": 28.441875457763672, "weight_grad_norm/transformer.h.7.mlp.c_proj.weight": 480.0677490234375, "weight_grad_norm/transformer.h.7.mlp.c_proj.bias": 15.47130012512207, "weight_grad_norm/transformer.ln_f.weight": 15.496414184570312, "weight_grad_norm/lm_head.weight": 715.97607421875}
train_curves/csp_sweep1_1x_1.9Mnonzero_afrac0.500/progress.json
ADDED
@@ -0,0 +1 @@
+
{"xent": 1.3872884511947632, "test_kl": 0.0, "test_xent": 1.2092041969299316, "grad_norm": 0, "weight_sparsity": 0.4840371012687683, "L0_as_frac_of_orig_params": 6098301.5, "L0": 6098302.0, "L0_non_LN": 6089342.0, "L0_non_embed": 1698686.0, "L0_non_embed_as_frac_of_orig_params": 1698686.0, "param_norm": 27498.9609375, "aux_invertable": 0, "elapsed_tokens": 31992053760, "grad_scale": 262144.0, "lr": 1.2709971038475488e-05, "pfrac": 0.25, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 1183900.3522366942, "num_alive_neurons/c_fc/layer_0": 1024, "num_alive_neurons/c_fc/layer_1": 1024, "num_alive_neurons/c_fc/layer_2": 1024, "num_alive_neurons/c_fc/layer_3": 1024, "num_alive_neurons/c_fc/layer_4": 622, "num_alive_neurons/c_fc/layer_5": 798, "num_alive_neurons/c_fc/layer_6": 1024, "num_alive_neurons/c_fc/layer_7": 1024, "empirical_L0_frac/transformer.wte.weight": 0.25, "empirical_L0_frac/transformer.wpe.weight": 0.2490234375, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 0.25, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 0.25, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 0.25, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 0.25, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.4.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.4.mlp.c_fc.weight": 0.2436370849609375, "empirical_L0_frac/transformer.h.4.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.4.mlp.c_proj.weight": 0.24364089965820312, "empirical_L0_frac/transformer.h.4.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.5.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.5.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.5.attn.c_proj.weight": 
0.25, "empirical_L0_frac/transformer.h.5.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.5.mlp.c_fc.weight": 0.24634933471679688, "empirical_L0_frac/transformer.h.5.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.5.mlp.c_proj.weight": 0.24634552001953125, "empirical_L0_frac/transformer.h.5.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.6.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.6.mlp.c_fc.weight": 0.25, "empirical_L0_frac/transformer.h.6.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.6.mlp.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.6.mlp.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_attn.weight": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_attn.bias": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.7.attn.c_proj.bias": 0.25, "empirical_L0_frac/transformer.h.7.mlp.c_fc.weight": 0.25, "empirical_L0_frac/transformer.h.7.mlp.c_fc.bias": 0.25, "empirical_L0_frac/transformer.h.7.mlp.c_proj.weight": 0.25, "empirical_L0_frac/transformer.h.7.mlp.c_proj.bias": 0.25, "empirical_L0_frac/lm_head.weight": 0.25, "weight_grad_norm/bigram_table": 628.7498168945312, "weight_grad_norm/transformer.wte.weight": 683.3626708984375, "weight_grad_norm/transformer.wpe.weight": 489.6246337890625, "weight_grad_norm/transformer.h.0.ln_1.weight": 15.813851356506348, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 430.2315979003906, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 27.138795852661133, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 252.97103881835938, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 15.920992851257324, "weight_grad_norm/transformer.h.0.ln_2.weight": 15.587895393371582, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 502.311279296875, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 31.101150512695312, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 503.3676452636719, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 15.926351547241211, "weight_grad_norm/transformer.h.1.ln_1.weight": 15.751385688781738, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 438.7297668457031, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 26.96685791015625, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 253.8997802734375, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 15.88632869720459, "weight_grad_norm/transformer.h.1.ln_2.weight": 15.094093322753906, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 496.9649658203125, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 29.41766929626465, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 492.24285888671875, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 15.861209869384766, "weight_grad_norm/transformer.h.2.ln_1.weight": 15.358555793762207, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 437.3223876953125, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 26.7916202545166, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 250.1053466796875, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 15.845401763916016, "weight_grad_norm/transformer.h.2.ln_2.weight": 14.985011100769043, "weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 501.12469482421875, "weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 29.856380462646484, 
"weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 489.3728332519531, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 15.821051597595215, "weight_grad_norm/transformer.h.3.ln_1.weight": 15.384258270263672, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 433.6177062988281, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 26.935672760009766, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 249.21397399902344, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 15.795930862426758, "weight_grad_norm/transformer.h.3.ln_2.weight": 14.262572288513184, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 496.9120788574219, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 27.23418617248535, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 473.3862609863281, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 15.797994613647461, "weight_grad_norm/transformer.h.4.ln_1.weight": 15.387063980102539, "weight_grad_norm/transformer.h.4.attn.c_attn.weight": 434.7806701660156, "weight_grad_norm/transformer.h.4.attn.c_attn.bias": 26.631128311157227, "weight_grad_norm/transformer.h.4.attn.c_proj.weight": 240.96484375, "weight_grad_norm/transformer.h.4.attn.c_proj.bias": 15.729325294494629, "weight_grad_norm/transformer.h.4.ln_2.weight": 15.474197387695312, "weight_grad_norm/transformer.h.4.mlp.c_fc.weight": 485.5074462890625, "weight_grad_norm/transformer.h.4.mlp.c_fc.bias": 30.48859214782715, "weight_grad_norm/transformer.h.4.mlp.c_proj.weight": 484.0849914550781, "weight_grad_norm/transformer.h.4.mlp.c_proj.bias": 15.767924308776855, "weight_grad_norm/transformer.h.5.ln_1.weight": 15.154948234558105, "weight_grad_norm/transformer.h.5.attn.c_attn.weight": 429.15869140625, "weight_grad_norm/transformer.h.5.attn.c_attn.bias": 26.3605899810791, "weight_grad_norm/transformer.h.5.attn.c_proj.weight": 242.15591430664062, "weight_grad_norm/transformer.h.5.attn.c_proj.bias": 15.700382232666016, "weight_grad_norm/transformer.h.5.ln_2.weight": 15.334463119506836, "weight_grad_norm/transformer.h.5.mlp.c_fc.weight": 407.529052734375, "weight_grad_norm/transformer.h.5.mlp.c_fc.bias": 30.144132614135742, "weight_grad_norm/transformer.h.5.mlp.c_proj.weight": 416.4881896972656, "weight_grad_norm/transformer.h.5.mlp.c_proj.bias": 15.77451229095459, "weight_grad_norm/transformer.h.6.ln_1.weight": 15.342992782592773, "weight_grad_norm/transformer.h.6.attn.c_attn.weight": 429.1058349609375, "weight_grad_norm/transformer.h.6.attn.c_attn.bias": 26.396272659301758, "weight_grad_norm/transformer.h.6.attn.c_proj.weight": 246.26644897460938, "weight_grad_norm/transformer.h.6.attn.c_proj.bias": 15.678738594055176, "weight_grad_norm/transformer.h.6.ln_2.weight": 15.129767417907715, "weight_grad_norm/transformer.h.6.mlp.c_fc.weight": 496.1315612792969, "weight_grad_norm/transformer.h.6.mlp.c_fc.bias": 29.189680099487305, "weight_grad_norm/transformer.h.6.mlp.c_proj.weight": 482.39483642578125, "weight_grad_norm/transformer.h.6.mlp.c_proj.bias": 15.689312934875488, "weight_grad_norm/transformer.h.7.ln_1.weight": 15.06506633758545, "weight_grad_norm/transformer.h.7.attn.c_attn.weight": 426.6111755371094, "weight_grad_norm/transformer.h.7.attn.c_attn.bias": 26.24847412109375, "weight_grad_norm/transformer.h.7.attn.c_proj.weight": 239.05960083007812, "weight_grad_norm/transformer.h.7.attn.c_proj.bias": 15.63977336883545, "weight_grad_norm/transformer.h.7.ln_2.weight": 14.135393142700195, "weight_grad_norm/transformer.h.7.mlp.c_fc.weight": 499.2266845703125, "weight_grad_norm/transformer.h.7.mlp.c_fc.bias": 
26.934202194213867, "weight_grad_norm/transformer.h.7.mlp.c_proj.weight": 493.0384521484375, "weight_grad_norm/transformer.h.7.mlp.c_proj.bias": 15.680523872375488, "weight_grad_norm/transformer.ln_f.weight": 15.445952415466309, "weight_grad_norm/lm_head.weight": 717.0191650390625}
train_curves/csp_sweep1_1x_3.7Mnonzero_afrac0.125/progress.json
ADDED
@@ -0,0 +1 @@
+
{"xent": 1.548852801322937, "test_kl": 0.0, "test_xent": 1.3587499856948853, "grad_norm": 0, "weight_sparsity": 0.33887094259262085, "L0_as_frac_of_orig_params": 7814059.0, "L0": 7814059.0, "L0_non_LN": 7800491.0, "L0_non_embed": 3278763.0, "L0_non_embed_as_frac_of_orig_params": 3278763.0, "param_norm": 18314.7578125, "aux_invertable": 0, "elapsed_tokens": 31992053760, "grad_scale": 262144.0, "lr": 6.354985519237744e-06, "pfrac": 0.5, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 1264825.9617037028, "num_alive_neurons/c_fc/layer_0": 1024, "num_alive_neurons/c_fc/layer_1": 1024, "num_alive_neurons/c_fc/layer_2": 1024, "num_alive_neurons/c_fc/layer_3": 834, "num_alive_neurons/c_fc/layer_4": 822, "num_alive_neurons/c_fc/layer_5": 837, "num_alive_neurons/c_fc/layer_6": 1024, "num_alive_neurons/c_fc/layer_7": 957, "empirical_L0_frac/transformer.wte.weight": 0.5, "empirical_L0_frac/transformer.wpe.weight": 0.2490234375, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 0.49853515625, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 0.5, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 0.5, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 0.5, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 0.4317588806152344, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 0.43979644775390625, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.4.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.4.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.4.attn.c_proj.weight": 0.498687744140625, "empirical_L0_frac/transformer.h.4.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.4.mlp.c_fc.weight": 0.4291229248046875, "empirical_L0_frac/transformer.h.4.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.4.mlp.c_proj.weight": 0.4337615966796875, "empirical_L0_frac/transformer.h.4.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.5.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.5.attn.c_attn.bias": 0.5, 
"empirical_L0_frac/transformer.h.5.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.5.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.5.mlp.c_fc.weight": 0.4347038269042969, "empirical_L0_frac/transformer.h.5.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.5.mlp.c_proj.weight": 0.4418220520019531, "empirical_L0_frac/transformer.h.5.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.6.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.6.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.6.attn.c_proj.weight": 0.4929351806640625, "empirical_L0_frac/transformer.h.6.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_fc.weight": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.7.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.7.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.7.attn.c_proj.weight": 0.4807586669921875, "empirical_L0_frac/transformer.h.7.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.7.mlp.c_fc.weight": 0.4441871643066406, "empirical_L0_frac/transformer.h.7.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.7.mlp.c_proj.weight": 0.4596061706542969, "empirical_L0_frac/transformer.h.7.mlp.c_proj.bias": 0.5, "empirical_L0_frac/lm_head.weight": 0.5, "weight_grad_norm/bigram_table": 660.9458618164062, "weight_grad_norm/transformer.wte.weight": 695.5767822265625, "weight_grad_norm/transformer.wpe.weight": 496.72216796875, "weight_grad_norm/transformer.h.0.ln_1.weight": 15.823344230651855, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 432.80816650390625, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 27.075288772583008, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 253.48460388183594, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 15.92225456237793, "weight_grad_norm/transformer.h.0.ln_2.weight": 15.749990463256836, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 468.2547302246094, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 31.177505493164062, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 481.580322265625, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 15.917108535766602, "weight_grad_norm/transformer.h.1.ln_1.weight": 15.50668716430664, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 435.9496765136719, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 26.631338119506836, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 251.6148681640625, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 15.894259452819824, "weight_grad_norm/transformer.h.1.ln_2.weight": 15.29879093170166, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 496.439697265625, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 29.986461639404297, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 490.737060546875, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 15.85243034362793, "weight_grad_norm/transformer.h.2.ln_1.weight": 15.739998817443848, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 435.5765686035156, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 26.408966064453125, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 246.01123046875, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 15.810622215270996, "weight_grad_norm/transformer.h.2.ln_2.weight": 15.114733695983887, "weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 495.6687316894531, 
"weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 29.42669677734375, "weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 477.9856262207031, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 15.807103157043457, "weight_grad_norm/transformer.h.3.ln_1.weight": 15.58407974243164, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 430.42437744140625, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 26.18344497680664, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 239.53807067871094, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 15.783400535583496, "weight_grad_norm/transformer.h.3.ln_2.weight": 15.344802856445312, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 432.8707580566406, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 29.81864356994629, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 451.7640380859375, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 15.815694808959961, "weight_grad_norm/transformer.h.4.ln_1.weight": 15.476143836975098, "weight_grad_norm/transformer.h.4.attn.c_attn.weight": 430.228759765625, "weight_grad_norm/transformer.h.4.attn.c_attn.bias": 26.028942108154297, "weight_grad_norm/transformer.h.4.attn.c_proj.weight": 241.58656311035156, "weight_grad_norm/transformer.h.4.attn.c_proj.bias": 15.762313842773438, "weight_grad_norm/transformer.h.4.ln_2.weight": 15.771337509155273, "weight_grad_norm/transformer.h.4.mlp.c_fc.weight": 470.2578430175781, "weight_grad_norm/transformer.h.4.mlp.c_fc.bias": 30.183063507080078, "weight_grad_norm/transformer.h.4.mlp.c_proj.weight": 471.57000732421875, "weight_grad_norm/transformer.h.4.mlp.c_proj.bias": 15.801304817199707, "weight_grad_norm/transformer.h.5.ln_1.weight": 15.619426727294922, "weight_grad_norm/transformer.h.5.attn.c_attn.weight": 430.23773193359375, "weight_grad_norm/transformer.h.5.attn.c_attn.bias": 26.09477996826172, "weight_grad_norm/transformer.h.5.attn.c_proj.weight": 240.6260223388672, "weight_grad_norm/transformer.h.5.attn.c_proj.bias": 15.733394622802734, "weight_grad_norm/transformer.h.5.ln_2.weight": 14.746664047241211, "weight_grad_norm/transformer.h.5.mlp.c_fc.weight": 405.469970703125, "weight_grad_norm/transformer.h.5.mlp.c_fc.bias": 29.281816482543945, "weight_grad_norm/transformer.h.5.mlp.c_proj.weight": 432.4727478027344, "weight_grad_norm/transformer.h.5.mlp.c_proj.bias": 15.802350997924805, "weight_grad_norm/transformer.h.6.ln_1.weight": 15.430679321289062, "weight_grad_norm/transformer.h.6.attn.c_attn.weight": 428.226806640625, "weight_grad_norm/transformer.h.6.attn.c_attn.bias": 26.054115295410156, "weight_grad_norm/transformer.h.6.attn.c_proj.weight": 243.30201721191406, "weight_grad_norm/transformer.h.6.attn.c_proj.bias": 15.801026344299316, "weight_grad_norm/transformer.h.6.ln_2.weight": 15.087353706359863, "weight_grad_norm/transformer.h.6.mlp.c_fc.weight": 496.76171875, "weight_grad_norm/transformer.h.6.mlp.c_fc.bias": 28.89158058166504, "weight_grad_norm/transformer.h.6.mlp.c_proj.weight": 474.67626953125, "weight_grad_norm/transformer.h.6.mlp.c_proj.bias": 15.729215621948242, "weight_grad_norm/transformer.h.7.ln_1.weight": 15.337434768676758, "weight_grad_norm/transformer.h.7.attn.c_attn.weight": 429.54766845703125, "weight_grad_norm/transformer.h.7.attn.c_attn.bias": 25.783069610595703, "weight_grad_norm/transformer.h.7.attn.c_proj.weight": 246.78839111328125, "weight_grad_norm/transformer.h.7.attn.c_proj.bias": 15.730786323547363, "weight_grad_norm/transformer.h.7.ln_2.weight": 15.621283531188965, "weight_grad_norm/transformer.h.7.mlp.c_fc.weight": 
494.70135498046875, "weight_grad_norm/transformer.h.7.mlp.c_fc.bias": 29.853933334350586, "weight_grad_norm/transformer.h.7.mlp.c_proj.weight": 479.1886291503906, "weight_grad_norm/transformer.h.7.mlp.c_proj.bias": 15.730098724365234, "weight_grad_norm/transformer.ln_f.weight": 15.664403915405273, "weight_grad_norm/lm_head.weight": 713.345947265625}
train_curves/csp_sweep1_1x_3.7Mnonzero_afrac0.250/progress.json
ADDED
@@ -0,0 +1 @@
+
{"xent": 1.4170283079147339, "test_kl": 0.0, "test_xent": 1.2382739782333374, "grad_norm": 0, "weight_sparsity": 0.3287457823753357, "L0_as_frac_of_orig_params": 7933731.0, "L0": 7933731.0, "L0_non_LN": 7920163.0, "L0_non_embed": 3398435.0, "L0_non_embed_as_frac_of_orig_params": 3398435.0, "param_norm": 42830.45703125, "aux_invertable": 0, "elapsed_tokens": 31992053760, "grad_scale": 524288.0, "lr": 1.7974613419981285e-05, "pfrac": 0.5, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 1250179.4587436917, "num_alive_neurons/c_fc/layer_0": 1024, "num_alive_neurons/c_fc/layer_1": 1014, "num_alive_neurons/c_fc/layer_2": 1015, "num_alive_neurons/c_fc/layer_3": 1021, "num_alive_neurons/c_fc/layer_4": 1023, "num_alive_neurons/c_fc/layer_5": 1022, "num_alive_neurons/c_fc/layer_6": 1024, "num_alive_neurons/c_fc/layer_7": 1024, "empirical_L0_frac/transformer.wte.weight": 0.5, "empirical_L0_frac/transformer.wpe.weight": 0.2490234375, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 0.4999885559082031, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 0.4998435974121094, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 0.4998321533203125, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 0.4998626708984375, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 0.4998626708984375, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 0.465057373046875, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 0.4707489013671875, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 0.4999542236328125, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 0.4999542236328125, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.4.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.4.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.4.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.4.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.4.mlp.c_fc.weight": 0.4999847412109375, "empirical_L0_frac/transformer.h.4.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.4.mlp.c_proj.weight": 0.4999847412109375, "empirical_L0_frac/transformer.h.4.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.5.attn.c_attn.weight": 0.5, 
"empirical_L0_frac/transformer.h.5.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.5.attn.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.5.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.5.mlp.c_fc.weight": 0.4999580383300781, "empirical_L0_frac/transformer.h.5.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.5.mlp.c_proj.weight": 0.4999542236328125, "empirical_L0_frac/transformer.h.5.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.6.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.6.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.6.attn.c_proj.weight": 0.49505615234375, "empirical_L0_frac/transformer.h.6.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_fc.weight": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.6.mlp.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.7.attn.c_attn.weight": 0.5, "empirical_L0_frac/transformer.h.7.attn.c_attn.bias": 0.5, "empirical_L0_frac/transformer.h.7.attn.c_proj.weight": 0.4983062744140625, "empirical_L0_frac/transformer.h.7.attn.c_proj.bias": 0.5, "empirical_L0_frac/transformer.h.7.mlp.c_fc.weight": 0.5, "empirical_L0_frac/transformer.h.7.mlp.c_fc.bias": 0.5, "empirical_L0_frac/transformer.h.7.mlp.c_proj.weight": 0.5, "empirical_L0_frac/transformer.h.7.mlp.c_proj.bias": 0.5, "empirical_L0_frac/lm_head.weight": 0.5, "weight_grad_norm/bigram_table": 637.1761474609375, "weight_grad_norm/transformer.wte.weight": 672.9556884765625, "weight_grad_norm/transformer.wpe.weight": 484.0882568359375, "weight_grad_norm/transformer.h.0.ln_1.weight": 15.855632781982422, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 425.4382019042969, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 26.815513610839844, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 253.05087280273438, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 15.853080749511719, "weight_grad_norm/transformer.h.0.ln_2.weight": 15.006595611572266, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 484.61083984375, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 30.536235809326172, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 489.29168701171875, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 15.87098503112793, "weight_grad_norm/transformer.h.1.ln_1.weight": 15.610502243041992, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 431.5155029296875, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 26.317401885986328, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 245.20465087890625, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 15.74299144744873, "weight_grad_norm/transformer.h.1.ln_2.weight": 14.66415023803711, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 483.6657409667969, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 28.678773880004883, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 479.5257263183594, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 15.771401405334473, "weight_grad_norm/transformer.h.2.ln_1.weight": 14.887096405029297, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 427.34613037109375, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 26.463918685913086, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 244.87908935546875, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 15.73776912689209, "weight_grad_norm/transformer.h.2.ln_2.weight": 14.442747116088867, "weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 
479.25958251953125, "weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 28.33482551574707, "weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 434.62451171875, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 15.677105903625488, "weight_grad_norm/transformer.h.3.ln_1.weight": 15.271175384521484, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 417.3486022949219, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 26.13765525817871, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 249.08181762695312, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 15.637354850769043, "weight_grad_norm/transformer.h.3.ln_2.weight": 13.892548561096191, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 480.3007507324219, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 25.708873748779297, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 447.3494873046875, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 15.640965461730957, "weight_grad_norm/transformer.h.4.ln_1.weight": 15.306427955627441, "weight_grad_norm/transformer.h.4.attn.c_attn.weight": 424.0674743652344, "weight_grad_norm/transformer.h.4.attn.c_attn.bias": 26.101716995239258, "weight_grad_norm/transformer.h.4.attn.c_proj.weight": 241.86062622070312, "weight_grad_norm/transformer.h.4.attn.c_proj.bias": 15.587786674499512, "weight_grad_norm/transformer.h.4.ln_2.weight": 13.841102600097656, "weight_grad_norm/transformer.h.4.mlp.c_fc.weight": 483.63763427734375, "weight_grad_norm/transformer.h.4.mlp.c_fc.bias": 26.3693904876709, "weight_grad_norm/transformer.h.4.mlp.c_proj.weight": 430.3146667480469, "weight_grad_norm/transformer.h.4.mlp.c_proj.bias": 15.641448020935059, "weight_grad_norm/transformer.h.5.ln_1.weight": 15.350500106811523, "weight_grad_norm/transformer.h.5.attn.c_attn.weight": 428.8147277832031, "weight_grad_norm/transformer.h.5.attn.c_attn.bias": 26.239444732666016, "weight_grad_norm/transformer.h.5.attn.c_proj.weight": 235.97409057617188, "weight_grad_norm/transformer.h.5.attn.c_proj.bias": 15.52134895324707, "weight_grad_norm/transformer.h.5.ln_2.weight": 13.71221923828125, "weight_grad_norm/transformer.h.5.mlp.c_fc.weight": 476.86859130859375, "weight_grad_norm/transformer.h.5.mlp.c_fc.bias": 25.133302688598633, "weight_grad_norm/transformer.h.5.mlp.c_proj.weight": 414.2835388183594, "weight_grad_norm/transformer.h.5.mlp.c_proj.bias": 15.522082328796387, "weight_grad_norm/transformer.h.6.ln_1.weight": 15.099194526672363, "weight_grad_norm/transformer.h.6.attn.c_attn.weight": 414.2250671386719, "weight_grad_norm/transformer.h.6.attn.c_attn.bias": 25.807497024536133, "weight_grad_norm/transformer.h.6.attn.c_proj.weight": 231.27359008789062, "weight_grad_norm/transformer.h.6.attn.c_proj.bias": 15.517931938171387, "weight_grad_norm/transformer.h.6.ln_2.weight": 13.300917625427246, "weight_grad_norm/transformer.h.6.mlp.c_fc.weight": 477.05084228515625, "weight_grad_norm/transformer.h.6.mlp.c_fc.bias": 23.798715591430664, "weight_grad_norm/transformer.h.6.mlp.c_proj.weight": 420.84991455078125, "weight_grad_norm/transformer.h.6.mlp.c_proj.bias": 15.516470909118652, "weight_grad_norm/transformer.h.7.ln_1.weight": 15.383352279663086, "weight_grad_norm/transformer.h.7.attn.c_attn.weight": 419.9510803222656, "weight_grad_norm/transformer.h.7.attn.c_attn.bias": 25.629806518554688, "weight_grad_norm/transformer.h.7.attn.c_proj.weight": 235.0347137451172, "weight_grad_norm/transformer.h.7.attn.c_proj.bias": 15.34646987915039, "weight_grad_norm/transformer.h.7.ln_2.weight": 14.88731575012207, 
"weight_grad_norm/transformer.h.7.mlp.c_fc.weight": 489.3669738769531, "weight_grad_norm/transformer.h.7.mlp.c_fc.bias": 28.585243225097656, "weight_grad_norm/transformer.h.7.mlp.c_proj.weight": 476.77410888671875, "weight_grad_norm/transformer.h.7.mlp.c_proj.bias": 15.400300025939941, "weight_grad_norm/transformer.ln_f.weight": 15.572734832763672, "weight_grad_norm/lm_head.weight": 713.6367797851562}
train_curves/csp_sweep1_1x_7.4Mnonzero_afrac0.062/progress.json
ADDED
@@ -0,0 +1 @@
+
{"xent": 1.6472182273864746, "test_kl": 0.0, "test_xent": 1.447981595993042, "grad_norm": 0, "weight_sparsity": 0.016656219959259033, "L0_as_frac_of_orig_params": 11622400.0, "L0": 11622400.0, "L0_non_LN": 11599616.0, "L0_non_embed": 6815744.0, "L0_non_embed_as_frac_of_orig_params": 6815744.0, "param_norm": 29723.248046875, "aux_invertable": 0, "elapsed_tokens": 31992053760, "grad_scale": 8192.0, "lr": 8.987306709990642e-06, "pfrac": 1, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 1272223.3881706428, "num_alive_neurons/c_fc/layer_0": 1024, "num_alive_neurons/c_fc/layer_1": 1024, "num_alive_neurons/c_fc/layer_2": 1024, "num_alive_neurons/c_fc/layer_3": 1024, "num_alive_neurons/c_fc/layer_4": 1024, "num_alive_neurons/c_fc/layer_5": 1024, "num_alive_neurons/c_fc/layer_6": 1024, "num_alive_neurons/c_fc/layer_7": 1024, "empirical_L0_frac/transformer.wte.weight": 1.0, "empirical_L0_frac/transformer.wpe.weight": 0.2490234375, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_proj.bias": 1.0, 
"empirical_L0_frac/transformer.h.5.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.5.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.5.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.5.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_proj.bias": 1.0, "empirical_L0_frac/lm_head.weight": 1.0, "weight_grad_norm/bigram_table": 677.2108154296875, "weight_grad_norm/transformer.wte.weight": 700.6430053710938, "weight_grad_norm/transformer.wpe.weight": 499.02203369140625, "weight_grad_norm/transformer.h.0.ln_1.weight": 15.863765716552734, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 432.4327087402344, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 26.923789978027344, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 254.40492248535156, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 15.932149887084961, "weight_grad_norm/transformer.h.0.ln_2.weight": 14.218550682067871, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 475.14581298828125, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 30.644081115722656, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 497.4815368652344, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 15.898612022399902, "weight_grad_norm/transformer.h.1.ln_1.weight": 15.819245338439941, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 436.310791015625, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 26.56356430053711, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 254.06332397460938, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 15.85494327545166, "weight_grad_norm/transformer.h.1.ln_2.weight": 15.413650512695312, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 456.003173828125, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 30.474130630493164, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 461.7818298339844, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 15.872654914855957, "weight_grad_norm/transformer.h.2.ln_1.weight": 15.639593124389648, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 430.6888732910156, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 26.14330291748047, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 248.5823211669922, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 15.882184028625488, "weight_grad_norm/transformer.h.2.ln_2.weight": 15.332818984985352, "weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 485.013916015625, "weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 29.539077758789062, "weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 459.705810546875, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 15.862210273742676, 
"weight_grad_norm/transformer.h.3.ln_1.weight": 15.793739318847656, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 428.9212341308594, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 26.096940994262695, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 242.24900817871094, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 15.846512794494629, "weight_grad_norm/transformer.h.3.ln_2.weight": 15.285762786865234, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 424.4765930175781, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 28.6768741607666, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 472.64752197265625, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 15.835545539855957, "weight_grad_norm/transformer.h.4.ln_1.weight": 15.731610298156738, "weight_grad_norm/transformer.h.4.attn.c_attn.weight": 427.6908264160156, "weight_grad_norm/transformer.h.4.attn.c_attn.bias": 26.05739974975586, "weight_grad_norm/transformer.h.4.attn.c_proj.weight": 242.27081298828125, "weight_grad_norm/transformer.h.4.attn.c_proj.bias": 15.786231994628906, "weight_grad_norm/transformer.h.4.ln_2.weight": 14.579571723937988, "weight_grad_norm/transformer.h.4.mlp.c_fc.weight": 434.8970031738281, "weight_grad_norm/transformer.h.4.mlp.c_fc.bias": 29.07623863220215, "weight_grad_norm/transformer.h.4.mlp.c_proj.weight": 448.091552734375, "weight_grad_norm/transformer.h.4.mlp.c_proj.bias": 15.830724716186523, "weight_grad_norm/transformer.h.5.ln_1.weight": 15.571245193481445, "weight_grad_norm/transformer.h.5.attn.c_attn.weight": 428.8929138183594, "weight_grad_norm/transformer.h.5.attn.c_attn.bias": 26.06372833251953, "weight_grad_norm/transformer.h.5.attn.c_proj.weight": 245.26419067382812, "weight_grad_norm/transformer.h.5.attn.c_proj.bias": 15.792339324951172, "weight_grad_norm/transformer.h.5.ln_2.weight": 15.04889965057373, "weight_grad_norm/transformer.h.5.mlp.c_fc.weight": 471.4477233886719, "weight_grad_norm/transformer.h.5.mlp.c_fc.bias": 30.27324104309082, "weight_grad_norm/transformer.h.5.mlp.c_proj.weight": 479.19244384765625, "weight_grad_norm/transformer.h.5.mlp.c_proj.bias": 15.827888488769531, "weight_grad_norm/transformer.h.6.ln_1.weight": 15.51347541809082, "weight_grad_norm/transformer.h.6.attn.c_attn.weight": 422.67767333984375, "weight_grad_norm/transformer.h.6.attn.c_attn.bias": 25.973400115966797, "weight_grad_norm/transformer.h.6.attn.c_proj.weight": 240.5559844970703, "weight_grad_norm/transformer.h.6.attn.c_proj.bias": 15.814825057983398, "weight_grad_norm/transformer.h.6.ln_2.weight": 14.705399513244629, "weight_grad_norm/transformer.h.6.mlp.c_fc.weight": 488.697998046875, "weight_grad_norm/transformer.h.6.mlp.c_fc.bias": 28.742870330810547, "weight_grad_norm/transformer.h.6.mlp.c_proj.weight": 477.77960205078125, "weight_grad_norm/transformer.h.6.mlp.c_proj.bias": 15.787850379943848, "weight_grad_norm/transformer.h.7.ln_1.weight": 15.467348098754883, "weight_grad_norm/transformer.h.7.attn.c_attn.weight": 424.7254943847656, "weight_grad_norm/transformer.h.7.attn.c_attn.bias": 25.851058959960938, "weight_grad_norm/transformer.h.7.attn.c_proj.weight": 247.4598388671875, "weight_grad_norm/transformer.h.7.attn.c_proj.bias": 15.773311614990234, "weight_grad_norm/transformer.h.7.ln_2.weight": 14.885380744934082, "weight_grad_norm/transformer.h.7.mlp.c_fc.weight": 496.1335144042969, "weight_grad_norm/transformer.h.7.mlp.c_fc.bias": 29.96689796447754, "weight_grad_norm/transformer.h.7.mlp.c_proj.weight": 487.01776123046875, 
"weight_grad_norm/transformer.h.7.mlp.c_proj.bias": 15.70630168914795, "weight_grad_norm/transformer.ln_f.weight": 15.663735389709473, "weight_grad_norm/lm_head.weight": 713.0973510742188}
train_curves/csp_sweep1_1x_7.4Mnonzero_afrac0.500/progress.json
ADDED
@@ -0,0 +1 @@
+
{"xent": 1.331468939781189, "test_kl": 0.0, "test_xent": 1.158504605293274, "grad_norm": 0, "weight_sparsity": 0.016656219959259033, "L0_as_frac_of_orig_params": 11622400.0, "L0": 11622400.0, "L0_non_LN": 11599616.0, "L0_non_embed": 6815744.0, "L0_non_embed_as_frac_of_orig_params": 6815744.0, "param_norm": 26212.8203125, "aux_invertable": 0, "elapsed_tokens": 31992053760, "grad_scale": 524288.0, "lr": 8.987306709990642e-06, "pfrac": 1, "step": 61020, "did_clip_grad_norm": 0, "tokens_per_second": 1179802.2463828605, "num_alive_neurons/c_fc/layer_0": 1024, "num_alive_neurons/c_fc/layer_1": 1024, "num_alive_neurons/c_fc/layer_2": 1024, "num_alive_neurons/c_fc/layer_3": 1024, "num_alive_neurons/c_fc/layer_4": 1024, "num_alive_neurons/c_fc/layer_5": 1024, "num_alive_neurons/c_fc/layer_6": 1024, "num_alive_neurons/c_fc/layer_7": 1024, "empirical_L0_frac/transformer.wte.weight": 1.0, "empirical_L0_frac/transformer.wpe.weight": 0.2490234375, "empirical_L0_frac/transformer.h.0.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.0.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.0.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.0.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.0.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.1.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.1.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.2.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.2.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.3.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.3.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.4.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.4.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.5.attn.c_proj.bias": 1.0, 
"empirical_L0_frac/transformer.h.5.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.5.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.5.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.5.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.6.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.6.mlp.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_attn.weight": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_attn.bias": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.7.attn.c_proj.bias": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_fc.weight": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_fc.bias": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_proj.weight": 1.0, "empirical_L0_frac/transformer.h.7.mlp.c_proj.bias": 1.0, "empirical_L0_frac/lm_head.weight": 1.0, "weight_grad_norm/bigram_table": 622.0841674804688, "weight_grad_norm/transformer.wte.weight": 672.7935180664062, "weight_grad_norm/transformer.wpe.weight": 483.6532897949219, "weight_grad_norm/transformer.h.0.ln_1.weight": 15.856054306030273, "weight_grad_norm/transformer.h.0.attn.c_attn.weight": 427.2981872558594, "weight_grad_norm/transformer.h.0.attn.c_attn.bias": 27.145584106445312, "weight_grad_norm/transformer.h.0.attn.c_proj.weight": 252.24661254882812, "weight_grad_norm/transformer.h.0.attn.c_proj.bias": 15.871504783630371, "weight_grad_norm/transformer.h.0.ln_2.weight": 15.430551528930664, "weight_grad_norm/transformer.h.0.mlp.c_fc.weight": 482.3642272949219, "weight_grad_norm/transformer.h.0.mlp.c_fc.bias": 30.85920524597168, "weight_grad_norm/transformer.h.0.mlp.c_proj.weight": 493.5689697265625, "weight_grad_norm/transformer.h.0.mlp.c_proj.bias": 15.874482154846191, "weight_grad_norm/transformer.h.1.ln_1.weight": 15.78908920288086, "weight_grad_norm/transformer.h.1.attn.c_attn.weight": 431.7076416015625, "weight_grad_norm/transformer.h.1.attn.c_attn.bias": 26.719778060913086, "weight_grad_norm/transformer.h.1.attn.c_proj.weight": 249.38270568847656, "weight_grad_norm/transformer.h.1.attn.c_proj.bias": 15.832723617553711, "weight_grad_norm/transformer.h.1.ln_2.weight": 15.534907341003418, "weight_grad_norm/transformer.h.1.mlp.c_fc.weight": 477.07177734375, "weight_grad_norm/transformer.h.1.mlp.c_fc.bias": 29.797218322753906, "weight_grad_norm/transformer.h.1.mlp.c_proj.weight": 469.5992736816406, "weight_grad_norm/transformer.h.1.mlp.c_proj.bias": 15.81921672821045, "weight_grad_norm/transformer.h.2.ln_1.weight": 15.646594047546387, "weight_grad_norm/transformer.h.2.attn.c_attn.weight": 427.6214599609375, "weight_grad_norm/transformer.h.2.attn.c_attn.bias": 26.686790466308594, "weight_grad_norm/transformer.h.2.attn.c_proj.weight": 243.83714294433594, "weight_grad_norm/transformer.h.2.attn.c_proj.bias": 15.788347244262695, "weight_grad_norm/transformer.h.2.ln_2.weight": 15.253597259521484, "weight_grad_norm/transformer.h.2.mlp.c_fc.weight": 464.1746826171875, "weight_grad_norm/transformer.h.2.mlp.c_fc.bias": 27.500457763671875, "weight_grad_norm/transformer.h.2.mlp.c_proj.weight": 447.8418884277344, "weight_grad_norm/transformer.h.2.mlp.c_proj.bias": 15.78138256072998, 
"weight_grad_norm/transformer.h.3.ln_1.weight": 15.599883079528809, "weight_grad_norm/transformer.h.3.attn.c_attn.weight": 373.19549560546875, "weight_grad_norm/transformer.h.3.attn.c_attn.bias": 26.479198455810547, "weight_grad_norm/transformer.h.3.attn.c_proj.weight": 244.7146453857422, "weight_grad_norm/transformer.h.3.attn.c_proj.bias": 15.865797996520996, "weight_grad_norm/transformer.h.3.ln_2.weight": 14.730164527893066, "weight_grad_norm/transformer.h.3.mlp.c_fc.weight": 461.1935119628906, "weight_grad_norm/transformer.h.3.mlp.c_fc.bias": 25.492385864257812, "weight_grad_norm/transformer.h.3.mlp.c_proj.weight": 434.82928466796875, "weight_grad_norm/transformer.h.3.mlp.c_proj.bias": 15.766986846923828, "weight_grad_norm/transformer.h.4.ln_1.weight": 15.671966552734375, "weight_grad_norm/transformer.h.4.attn.c_attn.weight": 429.1090087890625, "weight_grad_norm/transformer.h.4.attn.c_attn.bias": 26.557336807250977, "weight_grad_norm/transformer.h.4.attn.c_proj.weight": 244.8657989501953, "weight_grad_norm/transformer.h.4.attn.c_proj.bias": 15.722468376159668, "weight_grad_norm/transformer.h.4.ln_2.weight": 14.905549049377441, "weight_grad_norm/transformer.h.4.mlp.c_fc.weight": 460.77325439453125, "weight_grad_norm/transformer.h.4.mlp.c_fc.bias": 24.70446014404297, "weight_grad_norm/transformer.h.4.mlp.c_proj.weight": 429.946044921875, "weight_grad_norm/transformer.h.4.mlp.c_proj.bias": 15.735456466674805, "weight_grad_norm/transformer.h.5.ln_1.weight": 15.529065132141113, "weight_grad_norm/transformer.h.5.attn.c_attn.weight": 420.5953063964844, "weight_grad_norm/transformer.h.5.attn.c_attn.bias": 26.522525787353516, "weight_grad_norm/transformer.h.5.attn.c_proj.weight": 244.9410400390625, "weight_grad_norm/transformer.h.5.attn.c_proj.bias": 15.70954704284668, "weight_grad_norm/transformer.h.5.ln_2.weight": 15.594764709472656, "weight_grad_norm/transformer.h.5.mlp.c_fc.weight": 484.391845703125, "weight_grad_norm/transformer.h.5.mlp.c_fc.bias": 30.405588150024414, "weight_grad_norm/transformer.h.5.mlp.c_proj.weight": 474.2224426269531, "weight_grad_norm/transformer.h.5.mlp.c_proj.bias": 15.719286918640137, "weight_grad_norm/transformer.h.6.ln_1.weight": 15.618080139160156, "weight_grad_norm/transformer.h.6.attn.c_attn.weight": 423.1441650390625, "weight_grad_norm/transformer.h.6.attn.c_attn.bias": 26.49150276184082, "weight_grad_norm/transformer.h.6.attn.c_proj.weight": 238.97264099121094, "weight_grad_norm/transformer.h.6.attn.c_proj.bias": 15.63322639465332, "weight_grad_norm/transformer.h.6.ln_2.weight": 14.635210037231445, "weight_grad_norm/transformer.h.6.mlp.c_fc.weight": 468.0048828125, "weight_grad_norm/transformer.h.6.mlp.c_fc.bias": 25.421348571777344, "weight_grad_norm/transformer.h.6.mlp.c_proj.weight": 435.7790222167969, "weight_grad_norm/transformer.h.6.mlp.c_proj.bias": 15.6398286819458, "weight_grad_norm/transformer.h.7.ln_1.weight": 15.429731369018555, "weight_grad_norm/transformer.h.7.attn.c_attn.weight": 417.89556884765625, "weight_grad_norm/transformer.h.7.attn.c_attn.bias": 26.508264541625977, "weight_grad_norm/transformer.h.7.attn.c_proj.weight": 239.08877563476562, "weight_grad_norm/transformer.h.7.attn.c_proj.bias": 15.60376262664795, "weight_grad_norm/transformer.h.7.ln_2.weight": 15.297236442565918, "weight_grad_norm/transformer.h.7.mlp.c_fc.weight": 480.86859130859375, "weight_grad_norm/transformer.h.7.mlp.c_fc.bias": 28.626235961914062, "weight_grad_norm/transformer.h.7.mlp.c_proj.weight": 475.55181884765625, 
"weight_grad_norm/transformer.h.7.mlp.c_proj.bias": 15.604207038879395, "weight_grad_norm/transformer.ln_f.weight": 15.549541473388672, "weight_grad_norm/lm_head.weight": 711.9124755859375}