gerou161 committed
Commit 1b37db9
Parent: 8f20568

Add files using upload-large-folder tool
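The upload-large-folder tool named in the commit message is, as far as I know, exposed in huggingface_hub as HfApi.upload_large_folder; a minimal sketch of such an upload (the repo id and local folder below are hypothetical, and a recent huggingface_hub release is assumed):

```python
# Minimal sketch, assuming huggingface_hub >= 0.24 is installed and the user is
# already logged in (huggingface-cli login). Repo id and folder path are hypothetical.
from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="your-username/your-checkpoints-repo",  # hypothetical target repository
    repo_type="model",
    folder_path="checkpoints/",  # local folder holding the files listed below
)
```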

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. attention_2_only_emb/final_model/config.json +19 -0
  2. attention_2_only_emb/model_1000/config.json +19 -0
  3. attention_2_only_emb/model_1000/training_state.json +8 -0
  4. attention_2_only_emb/model_10000/config.json +19 -0
  5. attention_2_only_emb/model_10000/training_state.json +8 -0
  6. attention_2_only_emb/model_11000/config.json +19 -0
  7. attention_2_only_emb/model_11000/training_state.json +8 -0
  8. attention_2_only_emb/model_12000/config.json +19 -0
  9. attention_2_only_emb/model_12000/training_state.json +8 -0
  10. attention_2_only_emb/model_13000/config.json +19 -0
  11. attention_2_only_emb/model_13000/training_state.json +8 -0
  12. attention_2_only_emb/model_14000/config.json +19 -0
  13. attention_2_only_emb/model_14000/training_state.json +8 -0
  14. attention_2_only_emb/model_15000/config.json +19 -0
  15. attention_2_only_emb/model_15000/training_state.json +8 -0
  16. attention_2_only_emb/model_2000/config.json +19 -0
  17. attention_2_only_emb/model_2000/training_state.json +8 -0
  18. attention_2_only_emb/model_3000/config.json +19 -0
  19. attention_2_only_emb/model_3000/training_state.json +8 -0
  20. attention_2_only_emb/model_4000/config.json +19 -0
  21. attention_2_only_emb/model_4000/training_state.json +8 -0
  22. attention_2_only_emb/model_5000/config.json +19 -0
  23. attention_2_only_emb/model_5000/training_state.json +8 -0
  24. attention_2_only_emb/model_6000/config.json +19 -0
  25. attention_2_only_emb/model_6000/training_state.json +8 -0
  26. attention_2_only_emb/model_7000/config.json +19 -0
  27. attention_2_only_emb/model_7000/training_state.json +8 -0
  28. attention_2_only_emb/model_8000/config.json +19 -0
  29. attention_2_only_emb/model_8000/training_state.json +8 -0
  30. attention_2_only_emb/model_9000/config.json +19 -0
  31. attention_2_only_emb/model_9000/training_state.json +8 -0
  32. attention_2_only_emb/training_config.yaml +50 -0
  33. baseline/model_15000/pytorch_model.bin +3 -0
  34. bigram_2_full/model_1000/model_config.json +19 -0
  35. bigram_2_full/model_10000/model_config.json +19 -0
  36. bigram_2_full/model_10000/training_state.json +8 -0
  37. bigram_2_full/model_12000/model_config.json +19 -0
  38. bigram_2_full/model_12000/training_state.json +8 -0
  39. bigram_2_full/model_14000/model_config.json +19 -0
  40. bigram_2_full/model_15000/model_config.json +19 -0
  41. bigram_2_full/model_15000/training_state.json +8 -0
  42. bigram_2_full/model_6000/model_config.json +19 -0
  43. bigram_2_full/model_6000/training_state.json +8 -0
  44. bigram_2_full/model_7000/model_config.json +19 -0
  45. bigram_2_full/model_7000/training_state.json +8 -0
  46. bigram_2_full/model_8000/model_config.json +19 -0
  47. bigram_2_full/model_8000/training_state.json +8 -0
  48. first_layer_1/final_model/config.json +19 -0
  49. first_layer_1/model_1000/config.json +19 -0
  50. first_layer_1/model_1000/training_state.json +8 -0
attention_2_only_emb/final_model/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
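Each checkpoint directory uses the standard Hugging Face layout, so a config like the one above can normally be read with transformers; a minimal sketch, assuming the checkpoint folder has been downloaded locally:

```python
# Minimal sketch, assuming transformers is installed and the checkpoint folder
# (containing config.json) is available locally.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("attention_2_only_emb/final_model")
print(config.model_type)           # "llama"
print(config.hidden_size)          # 1536
print(config.num_hidden_layers)    # 14
print(config.num_attention_heads)  # 24
```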
attention_2_only_emb/model_1000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_1000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 1000,
+ "update_step": 1000,
+ "tokens_seen": 639025152,
+ "tokens_seen_before": 638386176,
+ "update_time": 2.876735210418701,
+ "wandb_id": "sgqffduo"
+ }
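The training_state.json files all share this small schema, which makes checkpoints easy to compare; a minimal sketch of reading one and checking the per-step token count (638,976 tokens = total_batch_size 624 × max_length 1024, per the training_config.yaml further down):

```python
# Minimal sketch, assuming the training_state.json schema shown above.
import json
from pathlib import Path

state = json.loads(Path("attention_2_only_emb/model_1000/training_state.json").read_text())
tokens_per_update = state["tokens_seen"] - state["tokens_seen_before"]
print(state["global_step"], tokens_per_update)  # 1000 638976 (= 624 sequences * 1024 tokens)
```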
attention_2_only_emb/model_10000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_10000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 10000,
+ "update_step": 10000,
+ "tokens_seen": 6389809152,
+ "tokens_seen_before": 6389170176,
+ "update_time": 2.879143476486206,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_11000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_11000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 11000,
+ "update_step": 11000,
+ "tokens_seen": 7028785152,
+ "tokens_seen_before": 7028146176,
+ "update_time": 2.8783798217773438,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_12000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_12000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 12000,
+ "update_step": 12000,
+ "tokens_seen": 7667761152,
+ "tokens_seen_before": 7667122176,
+ "update_time": 2.8790109157562256,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_13000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_13000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 13000,
+ "update_step": 13000,
+ "tokens_seen": 8306737152,
+ "tokens_seen_before": 8306098176,
+ "update_time": 2.877460241317749,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_14000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_14000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 14000,
+ "update_step": 14000,
+ "tokens_seen": 8945713152,
+ "tokens_seen_before": 8945074176,
+ "update_time": 2.8788270950317383,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_15000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_15000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 15000,
+ "update_step": 15000,
+ "tokens_seen": 9584689152,
+ "tokens_seen_before": 9584050176,
+ "update_time": 2.877842426300049,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_2000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_2000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 2000,
+ "update_step": 2000,
+ "tokens_seen": 1278001152,
+ "tokens_seen_before": 1277362176,
+ "update_time": 2.8781590461730957,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_3000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_3000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 3000,
+ "update_step": 3000,
+ "tokens_seen": 1916977152,
+ "tokens_seen_before": 1916338176,
+ "update_time": 2.876859426498413,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_4000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_4000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 4000,
+ "update_step": 4000,
+ "tokens_seen": 2555953152,
+ "tokens_seen_before": 2555314176,
+ "update_time": 2.8781702518463135,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_5000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_5000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 5000,
+ "update_step": 5000,
+ "tokens_seen": 3194929152,
+ "tokens_seen_before": 3194290176,
+ "update_time": 2.8779265880584717,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_6000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_6000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 6000,
+ "update_step": 6000,
+ "tokens_seen": 3833905152,
+ "tokens_seen_before": 3833266176,
+ "update_time": 2.880322217941284,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_7000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_7000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 7000,
+ "update_step": 7000,
+ "tokens_seen": 4472881152,
+ "tokens_seen_before": 4472242176,
+ "update_time": 2.8802781105041504,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_8000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_8000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 8000,
+ "update_step": 8000,
+ "tokens_seen": 5111857152,
+ "tokens_seen_before": 5111218176,
+ "update_time": 2.8798861503601074,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/model_9000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
attention_2_only_emb/model_9000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 9000,
+ "update_step": 9000,
+ "tokens_seen": 5750833152,
+ "tokens_seen_before": 5750194176,
+ "update_time": 2.881277322769165,
+ "wandb_id": "sgqffduo"
+ }
attention_2_only_emb/training_config.yaml ADDED
@@ -0,0 +1,50 @@
+ adam_beta1: 0.9
+ adam_beta2: 0.95
+ adjust_step: 0
+ autoresume: false
+ batch_size: 6
+ clip_grad_norm: 1.0
+ comment: null
+ cycle_length: null
+ dtype: bfloat16
+ emb_freeze: null
+ eval_dataset_path: /work01/yanokazuki/fineweb/valid_data_gpt2/
+ eval_every: 1000
+ first_attention: false
+ first_attention_2: true
+ first_attention_resume: false
+ first_layer: false
+ first_layer_2: false
+ gradient_accumulation: 13
+ keep_checkpoints: null
+ layer_freeze: null
+ layer_freeze_2: false
+ load_optimizer_state_on_resume: true
+ lr: 0.0004
+ max_length: 1024
+ max_train_tokens: null
+ min_lr_ratio: 0.1
+ model_config: model_config/478m.json
+ model_name_or_path: null
+ model_revision: null
+ num_training_steps: 15000
+ optimizer: Adam
+ restart_warmup_steps: null
+ resume_from: null
+ run_name: attention_2_only_emb
+ save_dir: checkpoints/attention_2_only_emb
+ save_every: 1000
+ scheduler: cosine
+ seed: 0
+ shuffle: true
+ skip_batches: !!set {}
+ tags:
+ - 396m-for-680m
+ total_batch_size: 624
+ train_dataset_path: /work01/yanokazuki/fineweb/train_data_gpt2/
+ training_config: training_config/two_stage/attention_2_only_emb.yaml
+ wandb_watch: true
+ warmed_up_model: null
+ warmup_steps: 1500
+ weight_decay: 0.0
+ workers: 8
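A few quantities implied by this config can be checked directly; a minimal sketch, assuming PyYAML is available (the 8 data-parallel ranks are an inference from total_batch_size / (batch_size * gradient_accumulation), not something stated in the file):

```python
# Minimal sketch, assuming PyYAML is installed and the YAML matches the file above.
import yaml

with open("attention_2_only_emb/training_config.yaml") as f:
    cfg = yaml.safe_load(f)

per_rank = cfg["batch_size"] * cfg["gradient_accumulation"]    # 6 * 13 = 78 sequences per rank per step
ranks = cfg["total_batch_size"] // per_rank                    # 624 / 78 = 8 (implied data-parallel ranks)
tokens_per_step = cfg["total_batch_size"] * cfg["max_length"]  # 624 * 1024 = 638,976 tokens
total_tokens = tokens_per_step * cfg["num_training_steps"]     # ~9.58e9 tokens over 15,000 steps
print(per_rank, ranks, tokens_per_step, total_tokens)
```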
baseline/model_15000/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a68abbc43af60bd4c7f2d95c72f10740acbbb95df43e3fd9ba6ae48d8c02ccc
+ size 2533545094
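The .bin entry above is a Git LFS pointer, not the weights themselves; after fetching the real ~2.5 GB file (for example with git lfs pull or the Hub download tools), its SHA-256 should match the oid in the pointer. A minimal sketch of that check:

```python
# Minimal sketch: verify a downloaded weight file against the LFS pointer above.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            h.update(block)
    return h.hexdigest()

digest = sha256_of("baseline/model_15000/pytorch_model.bin")
print(digest == "9a68abbc43af60bd4c7f2d95c72f10740acbbb95df43e3fd9ba6ae48d8c02ccc")
```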
bigram_2_full/model_1000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_10000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_10000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 10000,
+ "update_step": 10000,
+ "tokens_seen": 6389809152,
+ "tokens_seen_before": 6389170176,
+ "update_time": 3.281773805618286,
+ "wandb_id": "7nopmkvs"
+ }
bigram_2_full/model_12000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_12000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 12000,
+ "update_step": 12000,
+ "tokens_seen": 7667761152,
+ "tokens_seen_before": 7667122176,
+ "update_time": 3.285931348800659,
+ "wandb_id": "7nopmkvs"
+ }
bigram_2_full/model_14000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_15000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_15000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 15000,
+ "update_step": 15000,
+ "tokens_seen": 9584689152,
+ "tokens_seen_before": 9584050176,
+ "update_time": 3.2829833030700684,
+ "wandb_id": "7nopmkvs"
+ }
bigram_2_full/model_6000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_6000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 6000,
+ "update_step": 6000,
+ "tokens_seen": 3833905152,
+ "tokens_seen_before": 3833266176,
+ "update_time": 3.283099412918091,
+ "wandb_id": "7nopmkvs"
+ }
bigram_2_full/model_7000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_7000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 7000,
+ "update_step": 7000,
+ "tokens_seen": 4472881152,
+ "tokens_seen_before": 4472242176,
+ "update_time": 3.2802579402923584,
+ "wandb_id": "7nopmkvs"
+ }
bigram_2_full/model_8000/model_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
bigram_2_full/model_8000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 8000,
+ "update_step": 8000,
+ "tokens_seen": 5111857152,
+ "tokens_seen_before": 5111218176,
+ "update_time": 3.285356044769287,
+ "wandb_id": "7nopmkvs"
+ }
first_layer_1/final_model/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
first_layer_1/model_1000/config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "architectures": [
+ "LLaMAForCausalLM"
+ ],
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "intermediate_size": 5376,
+ "initializer_range": 0.02,
+ "max_sequence_length": 1024,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 14,
+ "rms_norm_eps": 1e-05,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "vocab_size": 50257
+ }
first_layer_1/model_1000/training_state.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "global_step": 1000,
+ "update_step": 1000,
+ "tokens_seen": 639025152,
+ "tokens_seen_before": 638386176,
+ "update_time": 0.9770157337188721,
+ "wandb_id": "krzb2185"
+ }