andrew-healey committed
Commit 084266f · verified · 1 Parent(s): 3e1c603

Upload folder using huggingface_hub
attention_kindselective_n_heads2_seed1340/args.json CHANGED
@@ -1 +1 @@
-{"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_2_1340", "n_embd": 128}
+{"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_2_1340", "n_embd": 128}
attention_kindselective_n_heads2_seed1340/log2.txt CHANGED
@@ -1,303 +1,303 @@
 max_steps: 10000
 0 val loss 11.8210
 0 val perplexity 136085.8281
-0 train 11.833096 (lr=7.5000e-07) (hash(x)=164406924)
-100 val loss 8.9632
-100 val perplexity 7809.9502
-100 train 9.364941 (lr=7.5750e-05) (hash(x)=177407419)
-200 val loss 7.6800
-200 val perplexity 2164.7278
-200 train 7.591533 (lr=1.5000e-04) (hash(x)=144903932)
-300 val loss 7.6212
-300 val perplexity 2041.0771
-300 train 7.965369 (lr=1.4997e-04) (hash(x)=173839165)
-400 val loss 7.6203
-400 val perplexity 2039.1880
-400 train 7.712651 (lr=1.4986e-04) (hash(x)=167734596)
-500 val loss 7.6629
-500 val perplexity 2127.8159
-500 train 7.718594 (lr=1.4969e-04) (hash(x)=153224076)
-600 val loss 7.6136
-600 val perplexity 2025.6000
-600 train 7.535222 (lr=1.4945e-04) (hash(x)=149619098)
-700 val loss 7.5827
-700 val perplexity 1963.9933
-700 train 7.542725 (lr=1.4913e-04) (hash(x)=146539909)
-800 val loss 7.5429
-800 val perplexity 1887.2891
-800 train 7.505376 (lr=1.4876e-04) (hash(x)=153710890)
-900 val loss 7.5786
-900 val perplexity 1955.8300
-900 train 7.526095 (lr=1.4831e-04) (hash(x)=155873620)
-1000 val loss 7.4960
-1000 val perplexity 1800.8063
-1000 train 7.421547 (lr=1.4779e-04) (hash(x)=145450636)
-1100 val loss 7.4722
-1100 val perplexity 1758.4231
-1100 train 7.541608 (lr=1.4721e-04) (hash(x)=154123388)
-1200 val loss 7.4480
-1200 val perplexity 1716.4382
-1200 train 7.308753 (lr=1.4656e-04) (hash(x)=145249251)
-1300 val loss 7.4397
-1300 val perplexity 1702.2803
-1300 train 7.368898 (lr=1.4585e-04) (hash(x)=148937127)
-1400 val loss 7.4124
-1400 val perplexity 1656.4629
-1400 train 7.518748 (lr=1.4507e-04) (hash(x)=150475545)
-1500 val loss 7.3749
-1500 val perplexity 1595.4633
-1500 train 7.291466 (lr=1.4422e-04) (hash(x)=154653428)
-1600 val loss 7.3791
-1600 val perplexity 1602.1072
-1600 train 7.349000 (lr=1.4332e-04) (hash(x)=144483776)
-1700 val loss 7.3710
-1700 val perplexity 1589.2416
-1700 train 7.533147 (lr=1.4235e-04) (hash(x)=157395496)
-1800 val loss 7.3049
-1800 val perplexity 1487.5573
-1800 train 7.328281 (lr=1.4131e-04) (hash(x)=157916369)
-1900 val loss 7.2586
-1900 val perplexity 1420.2678
-1900 train 7.444204 (lr=1.4022e-04) (hash(x)=166073923)
-2000 val loss 7.2832
-2000 val perplexity 1455.6466
-2000 train 7.365619 (lr=1.3907e-04) (hash(x)=154856891)
-2100 val loss 7.2291
-2100 val perplexity 1378.9766
-2100 train 7.213472 (lr=1.3786e-04) (hash(x)=151925203)
-2200 val loss 7.1903
-2200 val perplexity 1326.5510
-2200 train 6.975736 (lr=1.3660e-04) (hash(x)=136191502)
-2300 val loss 7.1517
-2300 val perplexity 1276.3109
-2300 train 7.243627 (lr=1.3527e-04) (hash(x)=153273362)
-2400 val loss 7.1472
-2400 val perplexity 1270.4921
-2400 train 7.079123 (lr=1.3390e-04) (hash(x)=148021541)
-2500 val loss 7.1173
-2500 val perplexity 1233.0848
-2500 train 7.058460 (lr=1.3247e-04) (hash(x)=141356608)
-2600 val loss 7.0719
-2600 val perplexity 1178.3792
-2600 train 7.030075 (lr=1.3099e-04) (hash(x)=146005217)
-2700 val loss 7.0647
-2700 val perplexity 1169.9490
-2700 train 6.929686 (lr=1.2946e-04) (hash(x)=144511718)
-2800 val loss 7.0291
-2800 val perplexity 1128.9994
-2800 train 6.924961 (lr=1.2788e-04) (hash(x)=146019502)
-2900 val loss 7.0272
-2900 val perplexity 1126.9019
-2900 train 6.943810 (lr=1.2626e-04) (hash(x)=146496200)
-3000 val loss 6.9735
-3000 val perplexity 1067.9238
-3000 train 6.912158 (lr=1.2459e-04) (hash(x)=150127281)
-3100 val loss 6.9307
-3100 val perplexity 1023.2532
-3100 train 6.903213 (lr=1.2287e-04) (hash(x)=142022255)
-3200 val loss 6.8967
-3200 val perplexity 988.9994
-3200 train 6.954908 (lr=1.2112e-04) (hash(x)=154120875)
-3300 val loss 6.8727
-3300 val perplexity 965.5895
-3300 train 6.896562 (lr=1.1932e-04) (hash(x)=153999717)
-3400 val loss 6.8499
-3400 val perplexity 943.7463
-3400 train 6.647655 (lr=1.1749e-04) (hash(x)=139694097)
-3500 val loss 6.8276
-3500 val perplexity 922.9308
-3500 train 7.016836 (lr=1.1562e-04) (hash(x)=162992732)
-3600 val loss 6.7817
-3600 val perplexity 881.5263
-3600 train 6.761508 (lr=1.1372e-04) (hash(x)=147574101)
-3700 val loss 6.7609
-3700 val perplexity 863.3860
-3700 train 6.856951 (lr=1.1179e-04) (hash(x)=157763099)
-3800 val loss 6.7505
-3800 val perplexity 854.4914
-3800 train 6.888644 (lr=1.0982e-04) (hash(x)=170800034)
-3900 val loss 6.7345
-3900 val perplexity 840.8956
-3900 train 6.812279 (lr=1.0783e-04) (hash(x)=164984528)
-4000 val loss 6.7104
-4000 val perplexity 820.8669
-4000 train 6.596583 (lr=1.0581e-04) (hash(x)=141743323)
-4100 val loss 6.7152
-4100 val perplexity 824.8502
-4100 train 6.724424 (lr=1.0377e-04) (hash(x)=153392872)
-4200 val loss 6.6934
-4200 val perplexity 807.0927
-4200 train 6.549108 (lr=1.0171e-04) (hash(x)=149074933)
-4300 val loss 6.6564
-4300 val perplexity 777.7588
-4300 train 7.006297 (lr=9.9622e-05) (hash(x)=167823423)
-4400 val loss 6.6221
-4400 val perplexity 751.5302
-4400 train 6.387350 (lr=9.7520e-05) (hash(x)=141203114)
-4500 val loss 6.5965
-4500 val perplexity 732.5110
-4500 train 6.602117 (lr=9.5403e-05) (hash(x)=146284780)
-4600 val loss 6.5730
-4600 val perplexity 715.4841
-4600 train 6.407486 (lr=9.3273e-05) (hash(x)=141126464)
-4700 val loss 6.5855
-4700 val perplexity 724.4916
-4700 train 6.621264 (lr=9.1132e-05) (hash(x)=154751926)
-4800 val loss 6.5644
-4800 val perplexity 709.3859
-4800 train 6.642868 (lr=8.8982e-05) (hash(x)=154793198)
-4900 val loss 6.5719
-4900 val perplexity 714.7520
-4900 train 6.330755 (lr=8.6825e-05) (hash(x)=139406392)
-5000 val loss 6.5733
-5000 val perplexity 715.7223
-5000 train 6.305210 (lr=8.4663e-05) (hash(x)=153548741)
-5100 val loss 6.5487
-5100 val perplexity 698.3330
-5100 train 6.591603 (lr=8.2500e-05) (hash(x)=160488568)
-5200 val loss 6.5172
-5200 val perplexity 676.7101
-5200 train 6.530630 (lr=8.0337e-05) (hash(x)=149645053)
-5300 val loss 6.4955
-5300 val perplexity 662.1673
-5300 train 6.574218 (lr=7.8175e-05) (hash(x)=155820556)
-5400 val loss 6.4987
-5400 val perplexity 664.2599
-5400 train 6.438256 (lr=7.6018e-05) (hash(x)=147538134)
-5500 val loss 6.4771
-5500 val perplexity 650.0532
-5500 train 6.676089 (lr=7.3868e-05) (hash(x)=166889307)
-5600 val loss 6.4716
-5600 val perplexity 646.5319
-5600 train 6.210559 (lr=7.1727e-05) (hash(x)=139516699)
-5700 val loss 6.4662
-5700 val perplexity 643.0082
-5700 train 6.176149 (lr=6.9597e-05) (hash(x)=140453511)
-5800 val loss 6.4738
-5800 val perplexity 647.9169
-5800 train 6.412430 (lr=6.7480e-05) (hash(x)=162964847)
-5900 val loss 6.4570
-5900 val perplexity 637.1229
-5900 train 6.458410 (lr=6.5378e-05) (hash(x)=150606634)
-6000 val loss 6.4347
-6000 val perplexity 623.0748
-6000 train 6.529973 (lr=6.3294e-05) (hash(x)=149890857)
-6100 val loss 6.4225
-6100 val perplexity 615.5125
-6100 train 6.529283 (lr=6.1230e-05) (hash(x)=173884145)
-6200 val loss 6.4154
-6200 val perplexity 611.2152
-6200 train 6.487962 (lr=5.9188e-05) (hash(x)=151987098)
-6300 val loss 6.4154
-6300 val perplexity 611.1697
-6300 train 6.371587 (lr=5.7169e-05) (hash(x)=148853562)
-6400 val loss 6.4049
-6400 val perplexity 604.7910
-6400 train 6.175096 (lr=5.5177e-05) (hash(x)=141530101)
-6500 val loss 6.4083
-6500 val perplexity 606.8895
-6500 train 6.273750 (lr=5.3213e-05) (hash(x)=142297809)
-6600 val loss 6.3913
-6600 val perplexity 596.6281
-6600 train 6.285662 (lr=5.1279e-05) (hash(x)=142447782)
-6700 val loss 6.3852
-6700 val perplexity 592.9855
-6700 train 6.334586 (lr=4.9377e-05) (hash(x)=147004686)
-6800 val loss 6.3813
-6800 val perplexity 590.7168
-6800 train 6.122798 (lr=4.7509e-05) (hash(x)=133438702)
-6900 val loss 6.3817
-6900 val perplexity 590.9413
-6900 train 6.400922 (lr=4.5676e-05) (hash(x)=157085143)
-7000 val loss 6.3686
-7000 val perplexity 583.2387
-7000 train 6.254630 (lr=4.3882e-05) (hash(x)=139437666)
-7100 val loss 6.3700
-7100 val perplexity 584.0773
-7100 train 6.429934 (lr=4.2128e-05) (hash(x)=159792986)
-7200 val loss 6.3505
-7200 val perplexity 572.7610
-7200 train 6.315082 (lr=4.0414e-05) (hash(x)=144930687)
-7300 val loss 6.3462
-7300 val perplexity 570.3040
-7300 train 6.420286 (lr=3.8745e-05) (hash(x)=156242690)
-7400 val loss 6.3418
-7400 val perplexity 567.8328
-7400 train 6.196812 (lr=3.7120e-05) (hash(x)=148183719)
-7500 val loss 6.3468
-7500 val perplexity 570.6543
-7500 train 6.363414 (lr=3.5541e-05) (hash(x)=152494758)
-7600 val loss 6.3398
-7600 val perplexity 566.7075
-7600 train 6.055812 (lr=3.4011e-05) (hash(x)=142485027)
-7700 val loss 6.3250
-7700 val perplexity 558.3545
-7700 train 6.183957 (lr=3.2531e-05) (hash(x)=147512165)
-7800 val loss 6.3210
-7800 val perplexity 556.1392
-7800 train 6.250357 (lr=3.1102e-05) (hash(x)=160346994)
-7900 val loss 6.3207
-7900 val perplexity 555.9703
-7900 train 6.175825 (lr=2.9726e-05) (hash(x)=144488254)
-8000 val loss 6.3116
-8000 val perplexity 550.9532
-8000 train 6.130961 (lr=2.8405e-05) (hash(x)=147637019)
-8100 val loss 6.3146
-8100 val perplexity 552.5546
-8100 train 6.177629 (lr=2.7138e-05) (hash(x)=147340534)
-8200 val loss 6.3078
-8200 val perplexity 548.8185
-8200 train 6.342015 (lr=2.5929e-05) (hash(x)=151630665)
-8300 val loss 6.3006
-8300 val perplexity 544.8951
-8300 train 6.376939 (lr=2.4778e-05) (hash(x)=149747064)
-8400 val loss 6.2979
-8400 val perplexity 543.4130
-8400 train 6.496303 (lr=2.3686e-05) (hash(x)=154245770)
-8500 val loss 6.2967
-8500 val perplexity 542.7586
-8500 train 6.180349 (lr=2.2655e-05) (hash(x)=152559100)
-8600 val loss 6.2924
-8600 val perplexity 540.4501
-8600 train 6.865870 (lr=2.1685e-05) (hash(x)=181365926)
-8700 val loss 6.2913
-8700 val perplexity 539.8574
-8700 train 6.130272 (lr=2.0777e-05) (hash(x)=154405991)
-8800 val loss 6.2885
-8800 val perplexity 538.3657
-8800 train 6.236886 (lr=1.9933e-05) (hash(x)=153755904)
-8900 val loss 6.2817
-8900 val perplexity 534.7005
-8900 train 6.219995 (lr=1.9153e-05) (hash(x)=152120568)
-9000 val loss 6.2762
-9000 val perplexity 531.7777
-9000 train 6.081510 (lr=1.8439e-05) (hash(x)=142797279)
-9100 val loss 6.2735
-9100 val perplexity 530.3247
-9100 train 6.146645 (lr=1.7790e-05) (hash(x)=143037503)
-9200 val loss 6.2750
-9200 val perplexity 531.1204
-9200 train 6.164870 (lr=1.7208e-05) (hash(x)=113690273)
-9300 val loss 6.2728
-9300 val perplexity 529.9344
-9300 train 6.205205 (lr=1.6692e-05) (hash(x)=158025077)
-9400 val loss 6.2699
-9400 val perplexity 528.4240
-9400 train 6.318002 (lr=1.6245e-05) (hash(x)=158251718)
-9500 val loss 6.2629
-9500 val perplexity 524.7199
-9500 train 6.295635 (lr=1.5865e-05) (hash(x)=154752610)
-9600 val loss 6.2630
-9600 val perplexity 524.7664
-9600 train 6.138709 (lr=1.5554e-05) (hash(x)=146889093)
-9700 val loss 6.2586
-9700 val perplexity 522.4791
-9700 train 6.264400 (lr=1.5312e-05) (hash(x)=156906516)
-9800 val loss 6.2603
-9800 val perplexity 523.3580
-9800 train 6.122754 (lr=1.5139e-05) (hash(x)=153841927)
-9900 val loss 6.2590
-9900 val perplexity 522.6864
-9900 train 6.450033 (lr=1.5035e-05) (hash(x)=163514334)
-9999 val loss 6.2538
-9999 val perplexity 519.9728
+0 train 11.833096 (lr=2.5000e-07) (hash(x)=164406924)
+100 val loss 10.1314
+100 val perplexity 25120.3730
+100 train 10.435864 (lr=2.5250e-05) (hash(x)=177407419)
+200 val loss 8.5877
+200 val perplexity 5365.3306
+200 train 8.530438 (lr=5.0000e-05) (hash(x)=144903932)
+300 val loss 7.9051
+300 val perplexity 2711.0681
+300 train 8.196055 (lr=4.9988e-05) (hash(x)=173839165)
+400 val loss 7.6891
+400 val perplexity 2184.3062
+400 train 7.772710 (lr=4.9954e-05) (hash(x)=167734596)
+500 val loss 7.5944
+500 val perplexity 1987.0854
+500 train 7.608729 (lr=4.9896e-05) (hash(x)=153224076)
+600 val loss 7.5572
+600 val perplexity 1914.4473
+600 train 7.472777 (lr=4.9815e-05) (hash(x)=149619098)
+700 val loss 7.5444
+700 val perplexity 1890.2152
+700 train 7.501118 (lr=4.9712e-05) (hash(x)=146539909)
+800 val loss 7.5187
+800 val perplexity 1842.2115
+800 train 7.481338 (lr=4.9585e-05) (hash(x)=153710890)
+900 val loss 7.5052
+900 val perplexity 1817.4755
+900 train 7.457563 (lr=4.9436e-05) (hash(x)=155873620)
+1000 val loss 7.4734
+1000 val perplexity 1760.6692
+1000 train 7.401876 (lr=4.9264e-05) (hash(x)=145450636)
+1100 val loss 7.4466
+1100 val perplexity 1714.0623
+1100 train 7.503413 (lr=4.9070e-05) (hash(x)=154123388)
+1200 val loss 7.4085
+1200 val perplexity 1650.0238
+1200 train 7.273043 (lr=4.8854e-05) (hash(x)=145249251)
+1300 val loss 7.3725
+1300 val perplexity 1591.5873
+1300 train 7.281743 (lr=4.8616e-05) (hash(x)=148937127)
+1400 val loss 7.3240
+1400 val perplexity 1516.2676
+1400 train 7.429045 (lr=4.8356e-05) (hash(x)=150475545)
+1500 val loss 7.2841
+1500 val perplexity 1456.8834
+1500 train 7.223948 (lr=4.8074e-05) (hash(x)=154653428)
+1600 val loss 7.2210
+1600 val perplexity 1367.9034
+1600 train 7.197338 (lr=4.7772e-05) (hash(x)=144483776)
+1700 val loss 7.1577
+1700 val perplexity 1283.9692
+1700 train 7.327583 (lr=4.7448e-05) (hash(x)=157395496)
+1800 val loss 7.0988
+1800 val perplexity 1210.5161
+1800 train 7.118510 (lr=4.7105e-05) (hash(x)=157916369)
+1900 val loss 7.0482
+1900 val perplexity 1150.7883
+1900 train 7.250220 (lr=4.6741e-05) (hash(x)=166073923)
+2000 val loss 7.0099
+2000 val perplexity 1107.5649
+2000 train 7.078530 (lr=4.6357e-05) (hash(x)=154856891)
+2100 val loss 6.9708
+2100 val perplexity 1065.0917
+2100 train 6.957214 (lr=4.5954e-05) (hash(x)=151925203)
+2200 val loss 6.9150
+2200 val perplexity 1007.2465
+2200 train 6.623221 (lr=4.5532e-05) (hash(x)=136191502)
+2300 val loss 6.8516
+2300 val perplexity 945.3943
+2300 train 6.992955 (lr=4.5091e-05) (hash(x)=153273362)
+2400 val loss 6.8002
+2400 val perplexity 898.0457
+2400 train 6.748407 (lr=4.4633e-05) (hash(x)=148021541)
+2500 val loss 6.7577
+2500 val perplexity 860.6304
+2500 train 6.699186 (lr=4.4156e-05) (hash(x)=141356608)
+2600 val loss 6.7101
+2600 val perplexity 820.6888
+2600 train 6.662726 (lr=4.3663e-05) (hash(x)=146005217)
+2700 val loss 6.6873
+2700 val perplexity 802.1870
+2700 train 6.533102 (lr=4.3153e-05) (hash(x)=144511718)
+2800 val loss 6.6459
+2800 val perplexity 769.6336
+2800 train 6.522305 (lr=4.2627e-05) (hash(x)=146019502)
+2900 val loss 6.6170
+2900 val perplexity 747.7319
+2900 train 6.526833 (lr=4.2085e-05) (hash(x)=146496200)
+3000 val loss 6.5904
+3000 val perplexity 728.0774
+3000 train 6.532321 (lr=4.1529e-05) (hash(x)=150127281)
+3100 val loss 6.5528
+3100 val perplexity 701.1899
+3100 train 6.537275 (lr=4.0957e-05) (hash(x)=142022255)
+3200 val loss 6.5277
+3200 val perplexity 683.8523
+3200 train 6.587036 (lr=4.0373e-05) (hash(x)=154120875)
+3300 val loss 6.5102
+3300 val perplexity 671.9406
+3300 train 6.563637 (lr=3.9775e-05) (hash(x)=153999717)
+3400 val loss 6.4894
+3400 val perplexity 658.1267
+3400 train 6.270551 (lr=3.9164e-05) (hash(x)=139694097)
+3500 val loss 6.4582
+3500 val perplexity 637.9100
+3500 train 6.644126 (lr=3.8541e-05) (hash(x)=162992732)
+3600 val loss 6.4246
+3600 val perplexity 616.8521
+3600 train 6.398591 (lr=3.7907e-05) (hash(x)=147574101)
+3700 val loss 6.4115
+3700 val perplexity 608.8231
+3700 train 6.528100 (lr=3.7262e-05) (hash(x)=157763099)
+3800 val loss 6.3859
+3800 val perplexity 593.4160
+3800 train 6.540219 (lr=3.6608e-05) (hash(x)=170800034)
+3900 val loss 6.3712
+3900 val perplexity 584.7700
+3900 train 6.450828 (lr=3.5944e-05) (hash(x)=164984528)
+4000 val loss 6.3565
+4000 val perplexity 576.2042
+4000 train 6.230247 (lr=3.5271e-05) (hash(x)=141743323)
+4100 val loss 6.3548
+4100 val perplexity 575.2441
+4100 train 6.368493 (lr=3.4590e-05) (hash(x)=153392872)
+4200 val loss 6.3445
+4200 val perplexity 569.3611
+4200 train 6.203356 (lr=3.3902e-05) (hash(x)=149074933)
+4300 val loss 6.3186
+4300 val perplexity 554.7869
+4300 train 6.702516 (lr=3.3207e-05) (hash(x)=167823423)
+4400 val loss 6.2979
+4400 val perplexity 543.4063
+4400 train 6.061867 (lr=3.2507e-05) (hash(x)=141203114)
+4500 val loss 6.2891
+4500 val perplexity 538.6491
+4500 train 6.289015 (lr=3.1801e-05) (hash(x)=146284780)
+4600 val loss 6.2654
+4600 val perplexity 526.0757
+4600 train 6.073493 (lr=3.1091e-05) (hash(x)=141126464)
+4700 val loss 6.2576
+4700 val perplexity 521.9659
+4700 train 6.287447 (lr=3.0377e-05) (hash(x)=154751926)
+4800 val loss 6.2461
+4800 val perplexity 516.0110
+4800 train 6.331287 (lr=2.9661e-05) (hash(x)=154793198)
+4900 val loss 6.2420
+4900 val perplexity 513.8834
+4900 train 5.956399 (lr=2.8942e-05) (hash(x)=139406392)
+5000 val loss 6.2666
+5000 val perplexity 526.6661
+5000 train 5.975211 (lr=2.8221e-05) (hash(x)=153548741)
+5100 val loss 6.2267
+5100 val perplexity 506.0816
+5100 train 6.303495 (lr=2.7500e-05) (hash(x)=160488568)
+5200 val loss 6.2044
+5200 val perplexity 494.9333
+5200 train 6.213053 (lr=2.6779e-05) (hash(x)=149645053)
+5300 val loss 6.1867
+5300 val perplexity 486.2615
+5300 train 6.276705 (lr=2.6058e-05) (hash(x)=155820556)
+5400 val loss 6.1806
+5400 val perplexity 483.2999
+5400 train 6.128071 (lr=2.5339e-05) (hash(x)=147538134)
+5500 val loss 6.1716
+5500 val perplexity 478.9390
+5500 train 6.377961 (lr=2.4623e-05) (hash(x)=166889307)
+5600 val loss 6.1622
+5600 val perplexity 474.4739
+5600 train 5.880041 (lr=2.3909e-05) (hash(x)=139516699)
+5700 val loss 6.1641
+5700 val perplexity 475.3632
+5700 train 5.843537 (lr=2.3199e-05) (hash(x)=140453511)
+5800 val loss 6.1614
+5800 val perplexity 474.1037
+5800 train 6.102824 (lr=2.2493e-05) (hash(x)=162964847)
+5900 val loss 6.1491
+5900 val perplexity 468.2912
+5900 train 6.153123 (lr=2.1793e-05) (hash(x)=150606634)
+6000 val loss 6.1313
+6000 val perplexity 460.0215
+6000 train 6.210247 (lr=2.1098e-05) (hash(x)=149890857)
+6100 val loss 6.1292
+6100 val perplexity 459.0655
+6100 train 6.234848 (lr=2.0410e-05) (hash(x)=173884145)
+6200 val loss 6.1213
+6200 val perplexity 455.4413
+6200 train 6.167700 (lr=1.9729e-05) (hash(x)=151987098)
+6300 val loss 6.1115
+6300 val perplexity 451.0340
+6300 train 6.057473 (lr=1.9056e-05) (hash(x)=148853562)
+6400 val loss 6.1064
+6400 val perplexity 448.7010
+6400 train 5.853988 (lr=1.8392e-05) (hash(x)=141530101)
+6500 val loss 6.1060
+6500 val perplexity 448.5262
+6500 train 5.964704 (lr=1.7738e-05) (hash(x)=142297809)
+6600 val loss 6.0976
+6600 val perplexity 444.7705
+6600 train 5.990374 (lr=1.7093e-05) (hash(x)=142447782)
+6700 val loss 6.0943
+6700 val perplexity 443.3159
+6700 train 6.063104 (lr=1.6459e-05) (hash(x)=147004686)
+6800 val loss 6.0880
+6800 val perplexity 440.5413
+6800 train 5.831447 (lr=1.5836e-05) (hash(x)=133438702)
+6900 val loss 6.0855
+6900 val perplexity 439.4597
+6900 train 6.103887 (lr=1.5225e-05) (hash(x)=157085143)
+7000 val loss 6.0821
+7000 val perplexity 437.9416
+7000 train 5.957407 (lr=1.4627e-05) (hash(x)=139437666)
+7100 val loss 6.0744
+7100 val perplexity 434.5800
+7100 train 6.096306 (lr=1.4043e-05) (hash(x)=159792986)
+7200 val loss 6.0653
+7200 val perplexity 430.6621
+7200 train 6.041170 (lr=1.3471e-05) (hash(x)=144930687)
+7300 val loss 6.0612
+7300 val perplexity 428.8893
+7300 train 6.113462 (lr=1.2915e-05) (hash(x)=156242690)
+7400 val loss 6.0626
+7400 val perplexity 429.4924
+7400 train 5.896077 (lr=1.2373e-05) (hash(x)=148183719)
+7500 val loss 6.0577
+7500 val perplexity 427.3765
+7500 train 6.091302 (lr=1.1847e-05) (hash(x)=152494758)
+7600 val loss 6.0547
+7600 val perplexity 426.1260
+7600 train 5.754051 (lr=1.1337e-05) (hash(x)=142485027)
+7700 val loss 6.0560
+7700 val perplexity 426.6740
+7700 train 5.928751 (lr=1.0844e-05) (hash(x)=147512165)
+7800 val loss 6.0404
+7800 val perplexity 420.0570
+7800 train 5.927812 (lr=1.0367e-05) (hash(x)=160346994)
+7900 val loss 6.0410
+7900 val perplexity 420.3325
+7900 train 5.907988 (lr=9.9088e-06) (hash(x)=144488254)
+8000 val loss 6.0358
+8000 val perplexity 418.1136
+8000 train 5.854425 (lr=9.4682e-06) (hash(x)=147637019)
+8100 val loss 6.0361
+8100 val perplexity 418.2736
+8100 train 5.901888 (lr=9.0461e-06) (hash(x)=147340534)
+8200 val loss 6.0326
+8200 val perplexity 416.7949
+8200 train 6.087666 (lr=8.6430e-06) (hash(x)=151630665)
+8300 val loss 6.0267
+8300 val perplexity 414.3367
+8300 train 6.126692 (lr=8.2593e-06) (hash(x)=149747064)
+8400 val loss 6.0232
+8400 val perplexity 412.8936
+8400 train 6.212832 (lr=7.8953e-06) (hash(x)=154245770)
+8500 val loss 6.0230
+8500 val perplexity 412.8336
+8500 train 5.904833 (lr=7.5515e-06) (hash(x)=152559100)
+8600 val loss 6.0187
+8600 val perplexity 411.0398
+8600 train 6.535484 (lr=7.2282e-06) (hash(x)=181365926)
+8700 val loss 6.0188
+8700 val perplexity 411.0829
+8700 train 5.863358 (lr=6.9257e-06) (hash(x)=154405991)
+8800 val loss 6.0153
+8800 val perplexity 409.6678
+8800 train 5.970211 (lr=6.6444e-06) (hash(x)=153755904)
+8900 val loss 6.0107
+8900 val perplexity 407.7833
+8900 train 5.926088 (lr=6.3845e-06) (hash(x)=152120568)
+9000 val loss 6.0077
+9000 val perplexity 406.5505
+9000 train 5.799891 (lr=6.1462e-06) (hash(x)=142797279)
+9100 val loss 6.0051
+9100 val perplexity 405.5031
+9100 train 5.871176 (lr=5.9300e-06) (hash(x)=143037503)
+9200 val loss 6.0051
+9200 val perplexity 405.4734
+9200 train 5.892231 (lr=5.7359e-06) (hash(x)=113690273)
+9300 val loss 6.0039
+9300 val perplexity 405.0020
+9300 train 5.939885 (lr=5.5641e-06) (hash(x)=158025077)
+9400 val loss 6.0019
+9400 val perplexity 404.1820
+9400 train 6.068281 (lr=5.4149e-06) (hash(x)=158251718)
+9500 val loss 5.9988
+9500 val perplexity 402.9359
+9500 train 6.039095 (lr=5.2884e-06) (hash(x)=154752610)
+9600 val loss 5.9971
+9600 val perplexity 402.2651
+9600 train 5.874845 (lr=5.1847e-06) (hash(x)=146889093)
+9700 val loss 5.9940
+9700 val perplexity 401.0011
+9700 train 6.001687 (lr=5.1040e-06) (hash(x)=156906516)
+9800 val loss 5.9932
+9800 val perplexity 400.7096
+9800 train 5.831883 (lr=5.0462e-06) (hash(x)=153841927)
+9900 val loss 5.9949
+9900 val perplexity 401.3906
+9900 train 6.192818 (lr=5.0116e-06) (hash(x)=163514334)
+9999 val loss 5.9891
+9999 val perplexity 399.0517
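Two regularities in this log are worth noting. First, the reported val perplexity is simply exp(val loss). Second, the logged lr values are consistent with a nanoGPT-style schedule: linear warmup over warmup_steps=200 up to max_lr, then cosine decay to max_lr/10 at max_steps=10000. A sketch that reproduces the logged values under that assumption (the schedule is inferred from the log, not taken from the training code):

```python
import math

def lr_at(step, max_lr=5e-5, warmup_steps=200, max_steps=10000):
    """Warmup + cosine decay, inferred from the logged lr values."""
    min_lr = max_lr / 10  # the final logged lr is ~max_lr/10
    if step < warmup_steps:
        # Linear warmup: step 0 logs max_lr * 1/200, step 100 logs max_lr * 101/200.
        return max_lr * (step + 1) / warmup_steps
    # Cosine decay from max_lr at step 200 down to min_lr at max_steps.
    decay_ratio = (step - warmup_steps) / (max_steps - warmup_steps)
    coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio))
    return min_lr + coeff * (max_lr - min_lr)

# New run (max_lr=5e-5): matches "0 train ... lr=2.5000e-07" and
# "5100 train ... lr=2.7500e-05" above.
assert f"{lr_at(0):.4e}" == "2.5000e-07"
assert f"{lr_at(5100):.4e}" == "2.7500e-05"
# Old run (max_lr=1.5e-4): matches "5100 train ... lr=8.2500e-05".
assert f"{lr_at(5100, max_lr=1.5e-4):.4e}" == "8.2500e-05"

# Perplexity is exp(loss): ~399.05, matching "9999 val perplexity 399.0517".
print(math.exp(5.9891))
```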
attention_kindselective_n_heads2_seed1340/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7e5fcb57a54c44092399cfd81ff1ac237270f67858a3cbfcd984c918e5f518b3
+oid sha256:e5aeb7b7022a7ef095259330e13212b1ff8ec10d9fe91f9cfe5321726439400e
 size 38587970
attention_kindselective_n_heads2_seed1340/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:85ad5d27f8738f65092f79c3eeee065788669448b947dec7932419f34cd6f1a0
+oid sha256:ad1dcff0aef7e71a44d4bafe3d6fa2064b4970bceaa1db35e56cacedd4bce39b
 size 38587970
attention_kindselective_n_heads2_seed1340/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:74f8572300b137d51dd45501fa67ab3f549533819957f432f7f5d55c9f76c581
+oid sha256:df381c5d857ee4e2185137efa7971ff6ecca01a7071f0e3523bebefff488b94d
 size 38587970
attention_kindselective_n_heads2_seed1340/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:deb71d2e49ed3051060f7ebba35294fc1e13af32124e503566ccc123a84c22dc
+oid sha256:3260dd2ac1f7c16a62ed6d3c8b157a3f0a33efe14c610d63fec80ece19e6295c
 size 38587970
attention_kindselective_n_heads2_seed1340/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7f5bf95d69aa7f485da54020766d70183985312d3ac8860e1443297ebb1e31b8
+oid sha256:2f494e8b5205e169278e98dc46247c57b4eecddb1d447131b03a3e8d70073d9e
 size 70895430
attention_kindselective_n_heads2_seed1340/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:63a4b101db6ae2a081903120ed89323d2034db639836f58e6be1ba93a3bcd119
+oid sha256:c7f3fc56d8050fdf69a5ced1176d19cd3d7141742bed03ead99504850e90b4be
 size 70895430
attention_kindselective_n_heads2_seed1340/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:431eee67e4652c32b9c022648a5dc62e869954c5f106c4d37472fb51133c83c2
+oid sha256:4c05973832a45530c4593bc265a28c7ae61596a93a88b25f70012a7d34e37954
 size 70895430
attention_kindselective_n_heads2_seed1340/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a9c8dbdc83a59eafb4e6d02f7622375e007acbd97945d3ec79e4cc349aaf7c51
+oid sha256:2084254e2cd8f9f5a63850ea73b259adc84ee7fc54797ca79e71efee5351e261
 size 70895430
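The .pt entries above are Git LFS pointer files: only the sha256 oid changes between revisions, so each ~38.6 MB model checkpoint and ~70.9 MB optimizer checkpoint was fully rewritten by the new run. A minimal sketch for fetching and inspecting one checkpoint; the repo_id below is hypothetical (substitute the Hub repo this commit belongs to), and the file is assumed to be a plain torch.save state dict:

```python
import torch
from huggingface_hub import hf_hub_download

# hf_hub_download resolves the LFS pointer and returns a local file path.
path = hf_hub_download(
    repo_id="andrew-healey/wider_is_better_4",  # hypothetical repo id
    filename="attention_kindselective_n_heads2_seed1340/model_09999.pt",
)

state = torch.load(path, map_location="cpu")
# Assuming a flat name -> tensor state dict; adjust if it's a wrapper dict.
for name, tensor in list(state.items())[:5]:
    print(name, tuple(tensor.shape))
```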