{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 4516,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 5.930304099422615, "learning_rate": 4.424778761061947e-08, "loss": 3.0935, "step": 1 },
    { "epoch": 0.0, "grad_norm": 7.305727380493262, "learning_rate": 2.2123893805309737e-07, "loss": 3.1396, "step": 5 },
    { "epoch": 0.0, "grad_norm": 6.4270699174865396, "learning_rate": 4.4247787610619474e-07, "loss": 3.1388, "step": 10 },
    { "epoch": 0.01, "grad_norm": 6.671918513947623, "learning_rate": 6.637168141592922e-07, "loss": 3.1484, "step": 15 },
    { "epoch": 0.01, "grad_norm": 7.0245634924182, "learning_rate": 8.849557522123895e-07, "loss": 3.1184, "step": 20 },
    { "epoch": 0.01, "grad_norm": 6.173190300012323, "learning_rate": 1.106194690265487e-06, "loss": 3.103, "step": 25 },
    { "epoch": 0.01, "grad_norm": 6.460088162533796, "learning_rate": 1.3274336283185843e-06, "loss": 3.0867, "step": 30 },
    { "epoch": 0.02, "grad_norm": 5.767560044921046, "learning_rate": 1.5486725663716816e-06, "loss": 3.1042, "step": 35 },
    { "epoch": 0.02, "grad_norm": 5.379339741370094, "learning_rate": 1.769911504424779e-06, "loss": 3.0784, "step": 40 },
    { "epoch": 0.02, "grad_norm": 4.895121380602072, "learning_rate": 1.991150442477876e-06, "loss": 3.0141, "step": 45 },
    { "epoch": 0.02, "grad_norm": 4.677059038981083, "learning_rate": 2.212389380530974e-06, "loss": 3.0004, "step": 50 },
    { "epoch": 0.02, "grad_norm": 4.752319256653901, "learning_rate": 2.433628318584071e-06, "loss": 2.9576, "step": 55 },
    { "epoch": 0.03, "grad_norm": 3.316390868733562, "learning_rate": 2.6548672566371687e-06, "loss": 2.8668, "step": 60 },
    { "epoch": 0.03, "grad_norm": 2.8593085844165222, "learning_rate": 2.876106194690266e-06, "loss": 2.8042, "step": 65 },
    { "epoch": 0.03, "grad_norm": 2.8115618967029907, "learning_rate": 3.097345132743363e-06, "loss": 2.8078, "step": 70 },
    { "epoch": 0.03, "grad_norm": 2.829539709532706, "learning_rate": 3.3185840707964607e-06, "loss": 2.8016, "step": 75 },
    { "epoch": 0.04, "grad_norm": 2.351243660521748, "learning_rate": 3.539823008849558e-06, "loss": 2.7504, "step": 80 },
    { "epoch": 0.04, "grad_norm": 1.8506066275284163, "learning_rate": 3.7610619469026547e-06, "loss": 2.704, "step": 85 },
    { "epoch": 0.04, "grad_norm": 1.862874080524425, "learning_rate": 3.982300884955752e-06, "loss": 2.6994, "step": 90 },
    { "epoch": 0.04, "grad_norm": 1.656743731541692, "learning_rate": 4.20353982300885e-06, "loss": 2.7037, "step": 95 },
    { "epoch": 0.04, "grad_norm": 1.5392900186032414, "learning_rate": 4.424778761061948e-06, "loss": 2.6752, "step": 100 },
    { "epoch": 0.05, "grad_norm": 1.4811972785348595, "learning_rate": 4.646017699115045e-06, "loss": 2.6402, "step": 105 },
    { "epoch": 0.05, "grad_norm": 1.3348119020905695, "learning_rate": 4.867256637168142e-06, "loss": 2.5909, "step": 110 },
    { "epoch": 0.05, "grad_norm": 1.4456539801680695, "learning_rate": 5.088495575221239e-06, "loss": 2.5723, "step": 115 },
    { "epoch": 0.05, "grad_norm": 1.3067847495988907, "learning_rate": 5.309734513274337e-06, "loss": 2.5968, "step": 120 },
    { "epoch": 0.06, "grad_norm": 1.2110111799173893, "learning_rate": 5.530973451327434e-06, "loss": 2.5543, "step": 125 },
    { "epoch": 0.06, "grad_norm": 1.2297290805278025, "learning_rate": 5.752212389380532e-06, "loss": 2.547, "step": 130 },
    { "epoch": 0.06, "grad_norm": 1.0591855523781752, "learning_rate": 5.973451327433629e-06, "loss": 2.5224, "step": 135 },
    { "epoch": 0.06, "grad_norm": 1.198993960580823, "learning_rate": 6.194690265486726e-06, "loss": 2.5319, "step": 140 },
    { "epoch": 0.06, "grad_norm": 1.0376647316019254, "learning_rate": 6.415929203539823e-06, "loss": 2.5201, "step": 145 },
    { "epoch": 0.07, "grad_norm": 0.9705847367620112, "learning_rate": 6.6371681415929215e-06, "loss": 2.5057, "step": 150 },
    { "epoch": 0.07, "grad_norm": 0.9252024617490701, "learning_rate": 6.858407079646018e-06, "loss": 2.4912, "step": 155 },
    { "epoch": 0.07, "grad_norm": 1.018964018809788, "learning_rate": 7.079646017699116e-06, "loss": 2.4463, "step": 160 },
    { "epoch": 0.07, "grad_norm": 0.9703390541789, "learning_rate": 7.300884955752213e-06, "loss": 2.4624, "step": 165 },
    { "epoch": 0.08, "grad_norm": 0.9057071022587323, "learning_rate": 7.5221238938053095e-06, "loss": 2.4615, "step": 170 },
    { "epoch": 0.08, "grad_norm": 0.815475552596325, "learning_rate": 7.743362831858407e-06, "loss": 2.4454, "step": 175 },
    { "epoch": 0.08, "grad_norm": 0.7996538200625122, "learning_rate": 7.964601769911505e-06, "loss": 2.4415, "step": 180 },
    { "epoch": 0.08, "grad_norm": 0.8320424438332031, "learning_rate": 8.185840707964603e-06, "loss": 2.4079, "step": 185 },
    { "epoch": 0.08, "grad_norm": 0.8335775784452143, "learning_rate": 8.4070796460177e-06, "loss": 2.3985, "step": 190 },
    { "epoch": 0.09, "grad_norm": 0.7297280352692577, "learning_rate": 8.628318584070797e-06, "loss": 2.4109, "step": 195 },
    { "epoch": 0.09, "grad_norm": 0.7489884135512616, "learning_rate": 8.849557522123895e-06, "loss": 2.3918, "step": 200 },
    { "epoch": 0.09, "grad_norm": 0.7152194171185691, "learning_rate": 9.070796460176992e-06, "loss": 2.4075, "step": 205 },
    { "epoch": 0.09, "grad_norm": 0.6879086834140353, "learning_rate": 9.29203539823009e-06, "loss": 2.3503, "step": 210 },
    { "epoch": 0.1, "grad_norm": 0.6573968191388021, "learning_rate": 9.513274336283188e-06, "loss": 2.3712, "step": 215 },
    { "epoch": 0.1, "grad_norm": 0.7478073225800381, "learning_rate": 9.734513274336284e-06, "loss": 2.3479, "step": 220 },
    { "epoch": 0.1, "grad_norm": 0.7750936560448133, "learning_rate": 9.95575221238938e-06, "loss": 2.3588, "step": 225 },
    { "epoch": 0.1, "grad_norm": 0.6814792096209249, "learning_rate": 1.0176991150442479e-05, "loss": 2.3755, "step": 230 },
    { "epoch": 0.1, "grad_norm": 0.6960510920090566, "learning_rate": 1.0398230088495575e-05, "loss": 2.3441, "step": 235 },
    { "epoch": 0.11, "grad_norm": 0.6554225465154023, "learning_rate": 1.0619469026548675e-05, "loss": 2.3577, "step": 240 },
    { "epoch": 0.11, "grad_norm": 0.6572109459056528, "learning_rate": 1.0840707964601771e-05, "loss": 2.3275, "step": 245 },
    { "epoch": 0.11, "grad_norm": 0.6669976244308056, "learning_rate": 1.1061946902654867e-05, "loss": 2.3732, "step": 250 },
    { "epoch": 0.11, "grad_norm": 0.7164325377725764, "learning_rate": 1.1283185840707967e-05, "loss": 2.328, "step": 255 },
    { "epoch": 0.12, "grad_norm": 0.6569844083414794, "learning_rate": 1.1504424778761064e-05, "loss": 2.3491, "step": 260 },
    { "epoch": 0.12, "grad_norm": 0.6445910979637719, "learning_rate": 1.172566371681416e-05, "loss": 2.3367, "step": 265 },
    { "epoch": 0.12, "grad_norm": 0.6470683438720587, "learning_rate": 1.1946902654867258e-05, "loss": 2.3329, "step": 270 },
    { "epoch": 0.12, "grad_norm": 0.7324779037958515, "learning_rate": 1.2168141592920354e-05, "loss": 2.3242, "step": 275 },
    { "epoch": 0.12, "grad_norm": 0.7587858248341571, "learning_rate": 1.2389380530973452e-05, "loss": 2.3032, "step": 280 },
    { "epoch": 0.13, "grad_norm": 0.6242496586661685, "learning_rate": 1.261061946902655e-05, "loss": 2.3124, "step": 285 },
    { "epoch": 0.13, "grad_norm": 0.7245562947017128, "learning_rate": 1.2831858407079647e-05, "loss": 2.3282, "step": 290 },
    { "epoch": 0.13, "grad_norm": 0.6518683680367873, "learning_rate": 1.3053097345132743e-05, "loss": 2.3092, "step": 295 },
    { "epoch": 0.13, "grad_norm": 0.6696195394800407, "learning_rate": 1.3274336283185843e-05, "loss": 2.3308, "step": 300 },
    { "epoch": 0.14, "grad_norm": 0.615647831879852, "learning_rate": 1.349557522123894e-05, "loss": 2.3077, "step": 305 },
    { "epoch": 0.14, "grad_norm": 0.70262400758076, "learning_rate": 1.3716814159292036e-05, "loss": 2.3023, "step": 310 },
    { "epoch": 0.14, "grad_norm": 0.6919739502596038, "learning_rate": 1.3938053097345134e-05, "loss": 2.3016, "step": 315 },
    { "epoch": 0.14, "grad_norm": 0.655280353854059, "learning_rate": 1.4159292035398232e-05, "loss": 2.2784, "step": 320 },
    { "epoch": 0.14, "grad_norm": 0.6260285463133508, "learning_rate": 1.4380530973451328e-05, "loss": 2.2807, "step": 325 },
    { "epoch": 0.15, "grad_norm": 0.6717030543933317, "learning_rate": 1.4601769911504426e-05, "loss": 2.2697, "step": 330 },
    { "epoch": 0.15, "grad_norm": 0.6064561633220698, "learning_rate": 1.4823008849557523e-05, "loss": 2.254, "step": 335 },
    { "epoch": 0.15, "grad_norm": 0.6626479263371015, "learning_rate": 1.5044247787610619e-05, "loss": 2.2579, "step": 340 },
    { "epoch": 0.15, "grad_norm": 0.6081497592276981, "learning_rate": 1.5265486725663717e-05, "loss": 2.2852, "step": 345 },
    { "epoch": 0.16, "grad_norm": 0.7434751510364214, "learning_rate": 1.5486725663716813e-05, "loss": 2.2955, "step": 350 },
    { "epoch": 0.16, "grad_norm": 0.6709582392697053, "learning_rate": 1.5707964601769913e-05, "loss": 2.3011, "step": 355 },
    { "epoch": 0.16, "grad_norm": 0.6236060523180039, "learning_rate": 1.592920353982301e-05, "loss": 2.302, "step": 360 },
    { "epoch": 0.16, "grad_norm": 0.5847305408449913, "learning_rate": 1.6150442477876106e-05, "loss": 2.2925, "step": 365 },
    { "epoch": 0.16, "grad_norm": 0.6411123981033705, "learning_rate": 1.6371681415929206e-05, "loss": 2.3076, "step": 370 },
    { "epoch": 0.17, "grad_norm": 0.5765845621685052, "learning_rate": 1.6592920353982302e-05, "loss": 2.2737, "step": 375 },
    { "epoch": 0.17, "grad_norm": 0.6893803269109151, "learning_rate": 1.68141592920354e-05, "loss": 2.2756, "step": 380 },
    { "epoch": 0.17, "grad_norm": 0.6460524233139842, "learning_rate": 1.7035398230088498e-05, "loss": 2.2554, "step": 385 },
    { "epoch": 0.17, "grad_norm": 0.6298284604270863, "learning_rate": 1.7256637168141594e-05, "loss": 2.2913, "step": 390 },
    { "epoch": 0.17, "grad_norm": 0.6228067438596485, "learning_rate": 1.747787610619469e-05, "loss": 2.2653, "step": 395 },
    { "epoch": 0.18, "grad_norm": 0.6065906413992886, "learning_rate": 1.769911504424779e-05, "loss": 2.2759, "step": 400 },
    { "epoch": 0.18, "grad_norm": 0.609651416291683, "learning_rate": 1.7920353982300887e-05, "loss": 2.2639, "step": 405 },
    { "epoch": 0.18, "grad_norm": 0.6911683656527201, "learning_rate": 1.8141592920353983e-05, "loss": 2.2795, "step": 410 },
    { "epoch": 0.18, "grad_norm": 0.6406982275015309, "learning_rate": 1.8362831858407083e-05, "loss": 2.2792, "step": 415 },
    { "epoch": 0.19, "grad_norm": 0.6515859065407227, "learning_rate": 1.858407079646018e-05, "loss": 2.2585, "step": 420 },
    { "epoch": 0.19, "grad_norm": 0.6228091928618628, "learning_rate": 1.8805309734513276e-05, "loss": 2.2501, "step": 425 },
    { "epoch": 0.19, "grad_norm": 0.6504024479164172, "learning_rate": 1.9026548672566376e-05, "loss": 2.2336, "step": 430 },
    { "epoch": 0.19, "grad_norm": 0.6370910527115533, "learning_rate": 1.9247787610619472e-05, "loss": 2.2508, "step": 435 },
    { "epoch": 0.19, "grad_norm": 0.601000429948101, "learning_rate": 1.946902654867257e-05, "loss": 2.2465, "step": 440 },
    { "epoch": 0.2, "grad_norm": 0.635437140175951, "learning_rate": 1.9690265486725665e-05, "loss": 2.2411, "step": 445 },
    { "epoch": 0.2, "grad_norm": 0.6557815758908471, "learning_rate": 1.991150442477876e-05, "loss": 2.2354, "step": 450 },
    { "epoch": 0.2, "grad_norm": 0.6425952346764991, "learning_rate": 1.9999973109141592e-05, "loss": 2.2483, "step": 455 },
    { "epoch": 0.2, "grad_norm": 0.6128064155933831, "learning_rate": 1.9999808776641724e-05, "loss": 2.2485, "step": 460 },
    { "epoch": 0.21, "grad_norm": 0.6555273483560258, "learning_rate": 1.9999495053459817e-05, "loss": 2.2449, "step": 465 },
    { "epoch": 0.21, "grad_norm": 0.6054675162222453, "learning_rate": 1.999903194428269e-05, "loss": 2.2406, "step": 470 },
    { "epoch": 0.21, "grad_norm": 0.6459922410608205, "learning_rate": 1.99984194560289e-05, "loss": 2.2639, "step": 475 },
    { "epoch": 0.21, "grad_norm": 0.6489337490074292, "learning_rate": 1.999765759784862e-05, "loss": 2.2333, "step": 480 },
    { "epoch": 0.21, "grad_norm": 0.6673842459430778, "learning_rate": 1.9996746381123522e-05, "loss": 2.2226, "step": 485 },
    { "epoch": 0.22, "grad_norm": 0.5874119981990013, "learning_rate": 1.9995685819466593e-05, "loss": 2.2414, "step": 490 },
    { "epoch": 0.22, "grad_norm": 0.6419544612148393, "learning_rate": 1.999447592872194e-05, "loss": 2.2334, "step": 495 },
    { "epoch": 0.22, "grad_norm": 0.6675238498823681, "learning_rate": 1.9993116726964554e-05, "loss": 2.2323, "step": 500 },
    { "epoch": 0.22, "grad_norm": 0.6026085109762018, "learning_rate": 1.9991608234500023e-05, "loss": 2.2134, "step": 505 },
    { "epoch": 0.23, "grad_norm": 0.5994013246983577, "learning_rate": 1.9989950473864254e-05, "loss": 2.2522, "step": 510 },
    { "epoch": 0.23, "grad_norm": 0.5793447158839681, "learning_rate": 1.998814346982312e-05, "loss": 2.2358, "step": 515 },
    { "epoch": 0.23, "grad_norm": 0.5925116103902308, "learning_rate": 1.998618724937209e-05, "loss": 2.2102, "step": 520 },
    { "epoch": 0.23, "grad_norm": 0.5801411320188756, "learning_rate": 1.998408184173584e-05, "loss": 2.2334, "step": 525 },
    { "epoch": 0.23, "grad_norm": 0.6820517692563461, "learning_rate": 1.9981827278367796e-05, "loss": 2.2361, "step": 530 },
    { "epoch": 0.24, "grad_norm": 0.6153960561263684, "learning_rate": 1.9979423592949677e-05, "loss": 2.2187, "step": 535 },
    { "epoch": 0.24, "grad_norm": 0.6460408306015739, "learning_rate": 1.997687082139099e-05, "loss": 2.1972, "step": 540 },
    { "epoch": 0.24, "grad_norm": 0.6348335992926066, "learning_rate": 1.9974169001828495e-05, "loss": 2.2321, "step": 545 },
    { "epoch": 0.24, "grad_norm": 0.6349181990741091, "learning_rate": 1.9971318174625633e-05, "loss": 2.1893, "step": 550 },
    { "epoch": 0.25, "grad_norm": 0.6618352153109166, "learning_rate": 1.9968318382371912e-05, "loss": 2.1962, "step": 555 },
    { "epoch": 0.25, "grad_norm": 0.6733017679374194, "learning_rate": 1.9965169669882293e-05, "loss": 2.1999, "step": 560 },
    { "epoch": 0.25, "grad_norm": 0.5939464772905917, "learning_rate": 1.9961872084196514e-05, "loss": 2.1935, "step": 565 },
    { "epoch": 0.25, "grad_norm": 0.6666717977597533, "learning_rate": 1.9958425674578364e-05, "loss": 2.2195, "step": 570 },
    { "epoch": 0.25, "grad_norm": 0.7006105420553784, "learning_rate": 1.9954830492514984e-05, "loss": 2.2411, "step": 575 },
    { "epoch": 0.26, "grad_norm": 0.6678642596767388, "learning_rate": 1.995108659171607e-05, "loss": 2.222, "step": 580 },
    { "epoch": 0.26, "grad_norm": 0.6137923309553905, "learning_rate": 1.9947194028113072e-05, "loss": 2.2005, "step": 585 },
    { "epoch": 0.26, "grad_norm": 0.6487079453357394, "learning_rate": 1.9943152859858386e-05, "loss": 2.195, "step": 590 },
    { "epoch": 0.26, "grad_norm": 0.6340277998890589, "learning_rate": 1.993896314732445e-05, "loss": 2.2221, "step": 595 },
    { "epoch": 0.27, "grad_norm": 0.6198253490316062, "learning_rate": 1.9934624953102858e-05, "loss": 2.2019, "step": 600 },
    { "epoch": 0.27, "grad_norm": 0.5972267632381862, "learning_rate": 1.993013834200344e-05, "loss": 2.2061, "step": 605 },
    { "epoch": 0.27, "grad_norm": 0.578988572823383, "learning_rate": 1.9925503381053258e-05, "loss": 2.2058, "step": 610 },
    { "epoch": 0.27, "grad_norm": 0.6089924200111341, "learning_rate": 1.9920720139495632e-05, "loss": 2.2161, "step": 615 },
    { "epoch": 0.27, "grad_norm": 0.6457738354072138, "learning_rate": 1.9915788688789107e-05, "loss": 2.2136, "step": 620 },
    { "epoch": 0.28, "grad_norm": 0.6944455933486234, "learning_rate": 1.9910709102606373e-05, "loss": 2.214, "step": 625 },
    { "epoch": 0.28, "grad_norm": 0.6225933279029192, "learning_rate": 1.990548145683315e-05, "loss": 2.1901, "step": 630 },
    { "epoch": 0.28, "grad_norm": 0.6099333431964626, "learning_rate": 1.9900105829567107e-05, "loss": 2.1967, "step": 635 },
    { "epoch": 0.28, "grad_norm": 0.5997046716007125, "learning_rate": 1.9894582301116633e-05, "loss": 2.2032, "step": 640 },
    { "epoch": 0.29, "grad_norm": 0.5805409130278519, "learning_rate": 1.988891095399967e-05, "loss": 2.1968, "step": 645 },
    { "epoch": 0.29, "grad_norm": 0.5603386982718491, "learning_rate": 1.9883091872942484e-05, "loss": 2.1964, "step": 650 },
    { "epoch": 0.29, "grad_norm": 0.5750221553061181, "learning_rate": 1.9877125144878387e-05, "loss": 2.1925, "step": 655 },
    { "epoch": 0.29, "grad_norm": 0.6197165299229819, "learning_rate": 1.9871010858946443e-05, "loss": 2.1971, "step": 660 },
    { "epoch": 0.29, "grad_norm": 0.6009410075114336, "learning_rate": 1.9864749106490128e-05, "loss": 2.1718, "step": 665 },
    { "epoch": 0.3, "grad_norm": 0.6516918727844194, "learning_rate": 1.985833998105598e-05, "loss": 2.1824, "step": 670 },
    { "epoch": 0.3, "grad_norm": 0.5914052739901434, "learning_rate": 1.9851783578392198e-05, "loss": 2.1656, "step": 675 },
    { "epoch": 0.3, "grad_norm": 0.6038328583266748, "learning_rate": 1.984507999644719e-05, "loss": 2.2016, "step": 680 },
    { "epoch": 0.3, "grad_norm": 0.5996471846061533, "learning_rate": 1.9838229335368145e-05, "loss": 2.1878, "step": 685 },
    { "epoch": 0.31, "grad_norm": 0.6795312890587618, "learning_rate": 1.9831231697499515e-05, "loss": 2.1904, "step": 690 },
    { "epoch": 0.31, "grad_norm": 0.6474874816341436, "learning_rate": 1.9824087187381486e-05, "loss": 2.2294, "step": 695 },
    { "epoch": 0.31, "grad_norm": 0.615345278282287, "learning_rate": 1.9816795911748422e-05, "loss": 2.1912, "step": 700 },
    { "epoch": 0.31, "grad_norm": 0.6294736010109583, "learning_rate": 1.9809357979527274e-05, "loss": 2.2011, "step": 705 },
    { "epoch": 0.31, "grad_norm": 0.6277824790650756, "learning_rate": 1.980177350183594e-05, "loss": 2.1862, "step": 710 },
    { "epoch": 0.32, "grad_norm": 0.6153557699875509, "learning_rate": 1.9794042591981615e-05, "loss": 2.1984, "step": 715 },
    { "epoch": 0.32, "grad_norm": 0.5797646270941416, "learning_rate": 1.9786165365459102e-05, "loss": 2.1977, "step": 720 },
    { "epoch": 0.32, "grad_norm": 0.6042494784966351, "learning_rate": 1.977814193994907e-05, "loss": 2.1847, "step": 725 },
    { "epoch": 0.32, "grad_norm": 0.5955605828050561, "learning_rate": 1.976997243531632e-05, "loss": 2.1773, "step": 730 },
    { "epoch": 0.33, "grad_norm": 0.5885029767068523, "learning_rate": 1.976165697360796e-05, "loss": 2.1906, "step": 735 },
    { "epoch": 0.33, "grad_norm": 0.5889597596107146, "learning_rate": 1.975319567905163e-05, "loss": 2.178, "step": 740 },
    { "epoch": 0.33, "grad_norm": 0.5955383595208404, "learning_rate": 1.9744588678053592e-05, "loss": 2.1829, "step": 745 },
    { "epoch": 0.33, "grad_norm": 0.6110614739988084, "learning_rate": 1.9735836099196882e-05, "loss": 2.1858, "step": 750 },
    { "epoch": 0.33, "grad_norm": 0.6048232894110059, "learning_rate": 1.972693807323938e-05, "loss": 2.174, "step": 755 },
    { "epoch": 0.34, "grad_norm": 0.5776013394444761, "learning_rate": 1.971789473311184e-05, "loss": 2.1761, "step": 760 },
    { "epoch": 0.34, "grad_norm": 0.706600785345219, "learning_rate": 1.9708706213915917e-05, "loss": 2.1804, "step": 765 },
    { "epoch": 0.34, "grad_norm": 0.6338816831125917, "learning_rate": 1.9699372652922154e-05, "loss": 2.2129, "step": 770 },
    { "epoch": 0.34, "grad_norm": 0.6456657354311602, "learning_rate": 1.968989418956792e-05, "loss": 2.2087, "step": 775 },
    { "epoch": 0.35, "grad_norm": 0.6251515528997028, "learning_rate": 1.9680270965455343e-05, "loss": 2.1826, "step": 780 },
    { "epoch": 0.35, "grad_norm": 0.6275458922112793, "learning_rate": 1.967050312434916e-05, "loss": 2.1953, "step": 785 },
    { "epoch": 0.35, "grad_norm": 0.5999775128349963, "learning_rate": 1.966059081217461e-05, "loss": 2.167, "step": 790 },
    { "epoch": 0.35, "grad_norm": 0.5935006109382566, "learning_rate": 1.9650534177015233e-05, "loss": 2.1937, "step": 795 },
    { "epoch": 0.35, "grad_norm": 0.6197323449524398, "learning_rate": 1.9640333691106 62e-05, "loss": 2.1657, "step": 800 },
    { "epoch": 0.36, "grad_norm": 0.5963878212445941, "learning_rate": 1.9629988540854373e-05, "loss": 2.176, "step": 805 },
    { "epoch": 0.36, "grad_norm": 0.6741348267377739, "learning_rate": 1.9619499846791426e-05, "loss": 2.2016, "step": 810 },
    { "epoch": 0.36, "grad_norm": 0.6489601186240154, "learning_rate": 1.960886744361612e-05, "loss": 2.1665, "step": 815 },
    { "epoch": 0.36, "grad_norm": 0.6242251289955277, "learning_rate": 1.9598091490169696e-05, "loss": 2.1665, "step": 820 },
    { "epoch": 0.37, "grad_norm": 0.6041320945672697, "learning_rate": 1.958717214743793e-05, "loss": 2.1784, "step": 825 },
    { "epoch": 0.37, "grad_norm": 0.6402051000857503, "learning_rate": 1.9576109578548757e-05, "loss": 2.1756, "step": 830 },
    { "epoch": 0.37, "grad_norm": 0.6020762171036862, "learning_rate": 1.95649039487698e-05, "loss": 2.1588, "step": 835 },
    { "epoch": 0.37, "grad_norm": 0.6148926236382385, "learning_rate": 1.9553555425505933e-05, "loss": 2.1367, "step": 840 },
    { "epoch": 0.37, "grad_norm": 0.6251287885157795, "learning_rate": 1.9542064178296755e-05, "loss": 2.1786, "step": 845 },
    { "epoch": 0.38, "grad_norm": 0.5794398028862306, "learning_rate": 1.953043037881408e-05, "loss": 2.1709, "step": 850 },
    { "epoch": 0.38, "grad_norm": 0.636525494463088, "learning_rate": 1.9518654200859356e-05, "loss": 2.2028, "step": 855 },
    { "epoch": 0.38, "grad_norm": 0.6848233739985045, "learning_rate": 1.9506735820361065e-05, "loss": 2.1683, "step": 860 },
    { "epoch": 0.38, "grad_norm": 0.6686925583606164, "learning_rate": 1.9494675415372123e-05, "loss": 2.1371, "step": 865 },
    { "epoch": 0.39, "grad_norm": 0.6138508127147465, "learning_rate": 1.9482473166067177e-05, "loss": 2.2098, "step": 870 },
    { "epoch": 0.39, "grad_norm": 0.5894403336053977, "learning_rate": 1.9470129254739952e-05, "loss": 2.1817, "step": 875 },
    { "epoch": 0.39, "grad_norm": 0.6052751724091622, "learning_rate": 1.945764386580051e-05, "loss": 2.1842, "step": 880 },
    { "epoch": 0.39, "grad_norm": 0.6127198984647372, "learning_rate": 1.9445017185772493e-05, "loss": 2.1662, "step": 885 },
    { "epoch": 0.39, "grad_norm": 0.6155598360277302, "learning_rate": 1.9432249403290337e-05, "loss": 2.1888, "step": 890 },
    { "epoch": 0.4, "grad_norm": 0.6200181910647699, "learning_rate": 1.941934070909647e-05, "loss": 2.16, "step": 895 },
    { "epoch": 0.4, "grad_norm": 0.6131834645099616, "learning_rate": 1.940629129603844e-05, "loss": 2.1702, "step": 900 },
    { "epoch": 0.4, "grad_norm": 0.6717724002729109, "learning_rate": 1.9393101359066047e-05, "loss": 2.1705, "step": 905 },
    { "epoch": 0.4, "grad_norm": 0.603303091661555, "learning_rate": 1.9379771095228426e-05, "loss": 2.1467, "step": 910 },
    { "epoch": 0.41, "grad_norm": 0.5870283987577958, "learning_rate": 1.9366300703671104e-05, "loss": 2.1664, "step": 915 },
    { "epoch": 0.41, "grad_norm": 0.6093286702784105, "learning_rate": 1.935269038563303e-05, "loss": 2.1474, "step": 920 },
    { "epoch": 0.41, "grad_norm": 0.6147638608441618, "learning_rate": 1.9338940344443564e-05, "loss": 2.1892, "step": 925 },
    { "epoch": 0.41, "grad_norm": 0.5740315005578517, "learning_rate": 1.9325050785519438e-05, "loss": 2.1651, "step": 930 },
    { "epoch": 0.41, "grad_norm": 0.5619548060635118, "learning_rate": 1.9311021916361675e-05, "loss": 2.1608, "step": 935 },
    { "epoch": 0.42, "grad_norm": 0.6105612104485567, "learning_rate": 1.9296853946552532e-05, "loss": 2.1566, "step": 940 },
    { "epoch": 0.42, "grad_norm": 0.6075640457988983, "learning_rate": 1.9282547087752314e-05, "loss": 2.1828, "step": 945 },
    { "epoch": 0.42, "grad_norm": 0.7799838729640503, "learning_rate": 1.9268101553696255e-05, "loss": 2.1465, "step": 950 },
    { "epoch": 0.42, "grad_norm": 0.5713020590972815, "learning_rate": 1.9253517560191292e-05, "loss": 2.1649, "step": 955 },
    { "epoch": 0.43, "grad_norm": 0.6212120747400542, "learning_rate": 1.9238795325112867e-05, "loss": 2.1461, "step": 960 },
    { "epoch": 0.43, "grad_norm": 0.6614452129201494, "learning_rate": 1.9223935068401668e-05, "loss": 2.1614, "step": 965 },
    { "epoch": 0.43, "grad_norm": 0.5937507487574722, "learning_rate": 1.9208937012060316e-05, "loss": 2.1594, "step": 970 },
    { "epoch": 0.43, "grad_norm": 0.5766434351128928, "learning_rate": 1.9193801380150093e-05, "loss": 2.1579, "step": 975 },
    { "epoch": 0.43, "grad_norm": 0.594791992310427, "learning_rate": 1.9178528398787553e-05, "loss": 2.1812, "step": 980 },
    { "epoch": 0.44, "grad_norm": 0.5963780994217495, "learning_rate": 1.9163118296141172e-05, "loss": 2.1474, "step": 985 },
    { "epoch": 0.44, "grad_norm": 0.6091086591295349, "learning_rate": 1.9147571302427927e-05, "loss": 2.1694, "step": 990 },
    { "epoch": 0.44, "grad_norm": 0.5946104127668519, "learning_rate": 1.913188764990986e-05, "loss": 2.1363, "step": 995 },
    { "epoch": 0.44, "grad_norm": 0.6107449553392038, "learning_rate": 1.9116067572890603e-05, "loss": 2.1521, "step": 1000 },
    { "epoch": 0.45, "grad_norm": 0.6140936036865221, "learning_rate": 1.9100111307711888e-05, "loss": 2.1337, "step": 1005 },
    { "epoch": 0.45, "grad_norm": 0.603185509143094, "learning_rate": 1.9084019092750007e-05, "loss": 2.1405, "step": 1010 },
    { "epoch": 0.45, "grad_norm": 0.6641273578455278, "learning_rate": 1.906779116841225e-05, "loss": 2.1518, "step": 1015 },
    { "epoch": 0.45, "grad_norm": 0.6664197867547751, "learning_rate": 1.9051427777133328e-05, "loss": 2.1342, "step": 1020 },
    { "epoch": 0.45, "grad_norm": 0.6069208394060698, "learning_rate": 1.9034929163371726e-05, "loss": 2.1516, "step": 1025 },
    { "epoch": 0.46, "grad_norm": 0.5809790009337985, "learning_rate": 1.901829557360608e-05, "loss": 2.1397, "step": 1030 },
    { "epoch": 0.46, "grad_norm": 0.5789051710344713, "learning_rate": 1.9001527256331474e-05, "loss": 2.1431, "step": 1035 },
    { "epoch": 0.46, "grad_norm": 0.5938467343831092, "learning_rate": 1.8984624462055724e-05, "loss": 2.1409, "step": 1040 },
    { "epoch": 0.46, "grad_norm": 0.6112803686579346, "learning_rate": 1.896758744329567e-05, "loss": 2.16, "step": 1045 },
    { "epoch": 0.47, "grad_norm": 0.6173735877940054, "learning_rate": 1.895041645457335e-05, "loss": 2.121, "step": 1050 },
    { "epoch": 0.47, "grad_norm": 0.5957632011857998, "learning_rate": 1.8933111752412255e-05, "loss": 2.1327, "step": 1055 },
    { "epoch": 0.47, "grad_norm": 0.588369749731721, "learning_rate": 1.8915673595333443e-05, "loss": 2.1737, "step": 1060 },
    { "epoch": 0.47, "grad_norm": 0.6707689167355805, "learning_rate": 1.8898102243851722e-05, "loss": 2.1542, "step": 1065 },
    { "epoch": 0.47, "grad_norm": 0.5926525320214855, "learning_rate": 1.8880397960471724e-05, "loss": 2.1289, "step": 1070 },
    { "epoch": 0.48, "grad_norm": 0.573700133128793, "learning_rate": 1.8862561009684e-05, "loss": 2.1255, "step": 1075 },
    { "epoch": 0.48, "grad_norm": 0.582897997956482, "learning_rate": 1.8844591657961083e-05, "loss": 2.1359, "step": 1080 },
    { "epoch": 0.48, "grad_norm": 0.6324153310918673, "learning_rate": 1.8826490173753464e-05, "loss": 2.1501, "step": 1085 },
    { "epoch": 0.48, "grad_norm": 0.5752649440468934, "learning_rate": 1.880825682748563e-05, "loss": 2.1684, "step": 1090 },
    { "epoch": 0.48, "grad_norm": 0.5611976527302409, "learning_rate": 1.878989189155199e-05, "loss": 2.1497, "step": 1095 },
    { "epoch": 0.49, "grad_norm": 0.6586583264978896, "learning_rate": 1.877139564031282e-05, "loss": 2.1581, "step": 1100 },
    { "epoch": 0.49, "grad_norm": 0.5860606052051305, "learning_rate": 1.8752768350090162e-05, "loss": 2.1254, "step": 1105 },
    { "epoch": 0.49, "grad_norm": 0.6024814550474707, "learning_rate": 1.87340102991637e-05, "loss": 2.1171, "step": 1110 },
    { "epoch": 0.49, "grad_norm": 0.5755204242260478, "learning_rate": 1.871512176776659e-05, "loss": 2.1404, "step": 1115 },
    { "epoch": 0.5, "grad_norm": 0.5960207644423895, "learning_rate": 1.8696103038081297e-05, "loss": 2.1369, "step": 1120 },
    { "epoch": 0.5, "grad_norm": 0.6321212427114619, "learning_rate": 1.8676954394235346e-05, "loss": 2.1115, "step": 1125 },
    { "epoch": 0.5, "grad_norm": 0.5983358362353409, "learning_rate": 1.86576761222971e-05, "loss": 2.1392, "step": 1130 },
    { "epoch": 0.5, "grad_norm": 0.6001723148056742, "learning_rate": 1.8638268510271492e-05, "loss": 2.1291, "step": 1135 },
    { "epoch": 0.5, "grad_norm": 0.6161026670998452, "learning_rate": 1.8618731848095706e-05, "loss": 2.1363, "step": 1140 },
    { "epoch": 0.51, "grad_norm": 0.630135413860988, "learning_rate": 1.859906642763485e-05, "loss": 2.1242, "step": 1145 },
    { "epoch": 0.51, "grad_norm": 0.632447226596799, "learning_rate": 1.8579272542677597e-05, "loss": 2.1589, "step": 1150 },
    { "epoch": 0.51, "grad_norm": 0.6387692877614597, "learning_rate": 1.8559350488931805e-05, "loss": 2.1378, "step": 1155 },
    { "epoch": 0.51, "grad_norm": 0.6158587043682214, "learning_rate": 1.853930056402008e-05, "loss": 2.1465, "step": 1160 },
    { "epoch": 0.52, "grad_norm": 0.6094349922422543, "learning_rate": 1.851912306747535e-05, "loss": 2.1193, "step": 1165 },
    { "epoch": 0.52, "grad_norm": 0.5783593876786368, "learning_rate": 1.849881830073637e-05, "loss": 2.1348, "step": 1170 },
    { "epoch": 0.52, "grad_norm": 0.5815765161412122, "learning_rate": 1.847838656714324e-05, "loss": 2.1212, "step": 1175 },
    { "epoch": 0.52, "grad_norm": 0.6083937111863463, "learning_rate": 1.845782817193286e-05, "loss": 2.1203, "step": 1180 },
    { "epoch": 0.52, "grad_norm": 0.5751837869678675, "learning_rate": 1.843714342223437e-05, "loss": 2.1376, "step": 1185 },
    { "epoch": 0.53, "grad_norm": 0.5672160349049488, "learning_rate": 1.841633262706456e-05, "loss": 2.1429, "step": 1190 },
    { "epoch": 0.53, "grad_norm": 0.5916085781373154, "learning_rate": 1.8395396097323268e-05, "loss": 2.1389, "step": 1195 },
    { "epoch": 0.53, "grad_norm": 0.581595681457954, "learning_rate": 1.8374334145788723e-05, "loss": 2.1281, "step": 1200 },
    { "epoch": 0.53, "grad_norm": 0.6249517784152311, "learning_rate": 1.835314708711287e-05, "loss": 2.1653, "step": 1205 },
    { "epoch": 0.54, "grad_norm": 0.6472309852695914, "learning_rate": 1.833183523781668e-05, "loss": 2.1426, "step": 1210 },
    { "epoch": 0.54, "grad_norm": 0.5906448061620913, "learning_rate": 1.8310398916285403e-05, "loss": 2.1253, "step": 1215 },
    { "epoch": 0.54, "grad_norm": 0.5689154785436862, "learning_rate": 1.8288838442763838e-05, "loss": 2.1093, "step": 1220 },
    { "epoch": 0.54, "grad_norm": 0.5782028604709565, "learning_rate": 1.826715413935153e-05, "loss": 2.1408, "step": 1225 },
    { "epoch": 0.54, "grad_norm": 0.593822360557939, "learning_rate": 1.824534632999796e-05, "loss": 2.1198, "step": 1230 },
    { "epoch": 0.55, "grad_norm": 0.6227369683027493, "learning_rate": 1.8223415340497707e-05, "loss": 2.1202, "step": 1235 },
    { "epoch": 0.55, "grad_norm": 0.5855558786143908, "learning_rate": 1.820136149848559e-05, "loss": 2.1334, "step": 1240 },
    { "epoch": 0.55, "grad_norm": 0.6004300920951476, "learning_rate": 1.8179185133431748e-05, "loss": 2.1371, "step": 1245 },
    { "epoch": 0.55, "grad_norm": 0.5754848054817067, "learning_rate": 1.8156886576636758e-05, "loss": 2.1545, "step": 1250 },
    { "epoch": 0.56, "grad_norm": 0.5972399939673669, "learning_rate": 1.8134466161226644e-05, "loss": 2.1378, "step": 1255 },
    { "epoch": 0.56, "grad_norm": 0.6145239871873435, "learning_rate": 1.8111924222147927e-05, "loss": 2.1259, "step": 1260 },
    { "epoch": 0.56, "grad_norm": 0.6243469445134759, "learning_rate": 1.8089261096162617e-05, "loss": 2.119, "step": 1265 },
    { "epoch": 0.56, "grad_norm": 0.6086851976775106, "learning_rate": 1.8066477121843163e-05, "loss": 2.1242, "step": 1270 },
    { "epoch": 0.56, "grad_norm": 0.6141092603996082, "learning_rate": 1.8043572639567434e-05, "loss": 2.1414, "step": 1275 },
    { "epoch": 0.57, "grad_norm": 0.6302076777353116, "learning_rate": 1.8020547991513583e-05, "loss": 2.1221, "step": 1280 },
    { "epoch": 0.57, "grad_norm": 0.602271815510926, "learning_rate": 1.799740352165498e-05, "loss": 2.1367, "step": 1285 },
    { "epoch": 0.57, "grad_norm": 0.5712061821943022, "learning_rate": 1.7974139575755055e-05, "loss": 2.118, "step": 1290 },
    { "epoch": 0.57, "grad_norm": 0.6472726791787887, "learning_rate": 1.7950756501362122e-05, "loss": 2.145, "step": 1295 },
    { "epoch": 0.58, "grad_norm": 0.6497034135258102, "learning_rate": 1.792725464780421e-05, "loss": 2.1302, "step": 1300 },
    { "epoch": 0.58, "grad_norm": 0.5840066396486088, "learning_rate": 1.790363436618382e-05, "loss": 2.1354, "step": 1305 },
    { "epoch": 0.58, "grad_norm": 0.565550504201554, "learning_rate": 1.7879896009372698e-05, "loss": 2.1278, "step": 1310 },
    { "epoch": 0.58, "grad_norm": 0.5698369791237824, "learning_rate": 1.7856039932006567e-05, "loss": 2.1567, "step": 1315 },
    { "epoch": 0.58, "grad_norm": 0.5719796112677206, "learning_rate": 1.7832066490479797e-05, "loss": 2.1356, "step": 1320 },
    { "epoch": 0.59, "grad_norm": 0.5609821935777941, "learning_rate": 1.780797604294012e-05, "loss": 2.1408, "step": 1325 },
    { "epoch": 0.59, "grad_norm": 0.6099777159178194, "learning_rate": 1.7783768949283258e-05, "loss": 2.1207, "step": 1330 },
    { "epoch": 0.59, "grad_norm": 0.6148720286231735, "learning_rate": 1.7759445571147548e-05, "loss": 2.1432, "step": 1335 },
    { "epoch": 0.59, "grad_norm": 0.7730136547316218, "learning_rate": 1.773500627190854e-05, "loss": 2.1322, "step": 1340 },
    { "epoch": 0.6, "grad_norm": 0.6530875685207002, "learning_rate": 1.771045141667358e-05, "loss": 2.1331, "step": 1345 },
    { "epoch": 0.6, "grad_norm": 0.6144642649540333, "learning_rate": 1.7685781372276338e-05, "loss": 2.14, "step": 1350 },
    { "epoch": 0.6, "grad_norm": 0.5986300351130668, "learning_rate": 1.7660996507271334e-05, "loss": 2.1528, "step": 1355 },
    { "epoch": 0.6, "grad_norm": 0.610048635856303, "learning_rate": 1.7636097191928437e-05, "loss": 2.1019, "step": 1360 },
    { "epoch": 0.6, "grad_norm": 0.5855993560153752, "learning_rate": 1.7611083798227334e-05, "loss": 2.1438, "step": 1365 },
    { "epoch": 0.61, "grad_norm": 0.6269819524464116, "learning_rate": 1.758595669985197e-05, "loss": 2.1309, "step": 1370 },
    { "epoch": 0.61, "grad_norm": 0.6187624097313912, "learning_rate": 1.7560716272184947e-05, "loss": 2.1326, "step": 1375 },
    { "epoch": 0.61, "grad_norm": 0.5983921861208105, "learning_rate": 1.7535362892301953e-05, "loss": 2.1432, "step": 1380 },
    { "epoch": 0.61, "grad_norm": 0.6485102797252494, "learning_rate": 1.7509896938966108e-05, "loss": 2.1289, "step": 1385 },
    { "epoch": 0.62, "grad_norm": 0.6091970961723204, "learning_rate": 1.748431879262229e-05, "loss": 2.1357, "step": 1390 },
    { "epoch": 0.62, "grad_norm": 0.5913528187132204, "learning_rate": 1.7458628835391485e-05, "loss": 2.1453, "step": 1395 },
    { "epoch": 0.62, "grad_norm": 0.6076326725765262, "learning_rate": 1.7432827451065052e-05, "loss": 2.1338, "step": 1400 },
    { "epoch": 0.62, "grad_norm": 0.5925726698617465, "learning_rate": 1.7406915025099005e-05, "loss": 2.1059, "step": 1405 },
    { "epoch": 0.62, "grad_norm": 0.6026048806905011, "learning_rate": 1.7380891944608243e-05, "loss": 2.138, "step": 1410 },
    { "epoch": 0.63, "grad_norm": 0.6168592591329729, "learning_rate": 1.7354758598360778e-05, "loss": 2.0998, "step": 1415 },
    { "epoch": 0.63, "grad_norm": 0.589031703580834, "learning_rate": 1.732851537677191e-05, "loss": 2.1113, "step": 1420 },
    { "epoch": 0.63, "grad_norm": 0.5745099140298806, "learning_rate": 1.730216267189842e-05, "loss": 2.1218, "step": 1425 },
    { "epoch": 0.63, "grad_norm": 0.5870276946472704, "learning_rate": 1.7275700877432693e-05, "loss": 2.0829, "step": 1430 },
    { "epoch": 0.64, "grad_norm": 0.5810749220606013, "learning_rate": 1.7249130388696836e-05, "loss": 2.132, "step": 1435 },
    { "epoch": 0.64, "grad_norm": 0.5849581356958, "learning_rate": 1.7222451602636785e-05, "loss": 2.1023, "step": 1440 },
    { "epoch": 0.64, "grad_norm": 0.5814560865690377, "learning_rate": 1.7195664917816367e-05, "loss": 2.1147, "step": 1445 },
    { "epoch": 0.64, "grad_norm": 0.5748045246682798, "learning_rate": 1.7168770734411344e-05, "loss": 2.1167, "step": 1450 },
    { "epoch": 0.64, "grad_norm": 0.5932359233979072, "learning_rate": 1.7141769454203438e-05, "loss": 2.1273, "step": 1455 },
    { "epoch": 0.65, "grad_norm": 0.6278719805137509, "learning_rate": 1.711466148057433e-05, "loss": 2.0997, "step": 1460 },
    { "epoch": 0.65, "grad_norm": 0.6555092252818303, "learning_rate": 1.7087447218499637e-05, "loss": 2.1226, "step": 1465 },
    { "epoch": 0.65, "grad_norm": 0.6309157472170276, "learning_rate": 1.7060127074542847e-05, "loss": 2.0897, "step": 1470 },
    { "epoch": 0.65, "grad_norm": 0.5738342343183923, "learning_rate": 1.7032701456849253e-05, "loss": 2.1274, "step": 1475 },
    { "epoch": 0.66, "grad_norm": 0.5859960028346695, "learning_rate": 1.700517077513987e-05, "loss": 2.0945, "step": 1480 },
    { "epoch": 0.66, "grad_norm": 0.5949621775745878, "learning_rate": 1.697753544070529e-05, "loss": 2.1101, "step": 1485 },
    { "epoch": 0.66, "grad_norm": 0.6037114018800186, "learning_rate": 1.6949795866399554e-05, "loss": 2.1283, "step": 1490 },
    { "epoch": 0.66, "grad_norm": 0.6132756328750626, "learning_rate": 1.6921952466633985e-05, "loss": 2.1307, "step": 1495 },
    { "epoch": 0.66, "grad_norm": 0.5940249527936792, "learning_rate": 1.689400565737098e-05, "loss": 2.1425, "step": 1500 },
    { "epoch": 0.67, "grad_norm": 0.6089848662184931, "learning_rate": 1.6865955856117814e-05, "loss": 2.1311, "step": 1505 },
    { "epoch": 0.67, "grad_norm": 0.5865489982423884, "learning_rate": 1.6837803481920393e-05, "loss": 2.129, "step": 1510 },
    { "epoch": 0.67, "grad_norm": 0.5621917926479736, "learning_rate": 1.6809548955357e-05, "loss": 2.1347, "step": 1515 },
    { "epoch": 0.67, "grad_norm": 0.5975473251769039, "learning_rate": 1.6781192698532e-05, "loss": 2.0946, "step": 1520 },
    { "epoch": 0.68, "grad_norm": 0.6271561640084149, "learning_rate": 1.6752735135069556e-05, "loss": 2.1328, "step": 1525 },
    { "epoch": 0.68, "grad_norm": 0.6309143491893552, "learning_rate": 1.6724176690107272e-05, "loss": 2.1372, "step": 1530 },
    { "epoch": 0.68, "grad_norm": 0.6515330784087832, "learning_rate": 1.669551779028987e-05, "loss": 2.1318, "step": 1535 },
    { "epoch": 0.68, "grad_norm": 0.607269365823935, "learning_rate": 1.6666758863762796e-05, "loss": 2.1132, "step": 1540 },
    { "epoch": 0.68, "grad_norm": 0.5940405800099899, "learning_rate": 1.6637900340165825e-05, "loss": 2.1021, "step": 1545 },
    { "epoch": 0.69, "grad_norm": 0.617464320553719, "learning_rate": 1.6608942650626655e-05, "loss": 2.1286, "step": 1550 },
    { "epoch": 0.69, "grad_norm": 0.5957314278353825, "learning_rate": 1.6579886227754466e-05, "loss": 2.1193, "step": 1555 },
    { "epoch": 0.69, "grad_norm": 0.5664148029499578, "learning_rate": 1.655073150563343e-05, "loss": 2.0815, "step": 1560 },
    { "epoch": 0.69, "grad_norm": 0.603630754622845, "learning_rate": 1.6521478919816263e-05, "loss": 2.1251, "step": 1565 },
    { "epoch": 0.7, "grad_norm": 0.6042525404005226, "learning_rate": 1.6492128907317696e-05, "loss": 2.0957, "step": 1570 },
    { "epoch": 0.7, "grad_norm": 0.5587880381266731, "learning_rate": 1.6462681906607955e-05, "loss": 2.0884, "step": 1575 },
    { "epoch": 0.7, "grad_norm": 0.5936488613861916, "learning_rate": 1.6433138357606198e-05, "loss": 2.1238, "step": 1580 },
    { "epoch": 0.7, "grad_norm": 0.6025640944009074, "learning_rate": 1.6403498701673966e-05, "loss": 2.1239, "step": 1585 },
    { "epoch": 0.7, "grad_norm": 0.635131758548356, "learning_rate": 1.637376338160856e-05, "loss": 2.1077, "step": 1590 },
    { "epoch": 0.71, "grad_norm": 0.6680740660247405, "learning_rate": 1.6343932841636455e-05, "loss": 2.11, "step": 1595 },
    { "epoch": 0.71, "grad_norm": 0.5736305760712634, "learning_rate": 1.6314007527406643e-05, "loss": 2.1181, "step": 1600 },
    { "epoch": 0.71, "grad_norm": 0.5766722006194103, "learning_rate": 1.6283987885983984e-05, "loss": 2.1304, "step": 1605 },
    { "epoch": 0.71, "grad_norm": 0.6182752621318294, "learning_rate": 1.6253874365842518e-05, "loss": 2.123, "step": 1610 },
    { "epoch": 0.72, "grad_norm": 0.5664601381036513, "learning_rate": 1.6223667416858786e-05, "loss": 2.1093, "step": 1615 },
    { "epoch": 0.72, "grad_norm": 0.5793939414363568, "learning_rate": 1.619336749030509e-05, "loss": 2.1262, "step": 1620 },
    { "epoch": 0.72, "grad_norm": 0.5951644156672043, "learning_rate": 1.6162975038842748e-05, "loss": 2.0933, "step": 1625 },
    { "epoch": 0.72, "grad_norm": 0.5966397949892372, "learning_rate": 1.613249051651535e-05, "loss": 2.1161, "step": 1630 },
    { "epoch": 0.72, "grad_norm": 0.568691581435577, "learning_rate": 1.6101914378741964e-05, "loss": 2.1084, "step": 1635 },
    { "epoch": 0.73, "grad_norm": 0.6110717119582951, "learning_rate": 1.6071247082310337e-05, "loss": 2.1126, "step": 1640 },
    { "epoch": 0.73, "grad_norm": 0.5647272922826961, "learning_rate": 1.6040489085370055e-05, "loss": 2.1271, "step": 1645 },
    { "epoch": 0.73, "grad_norm": 0.6618793669437936, "learning_rate": 1.6009640847425726e-05, "loss": 2.1099, "step": 1650 },
    { "epoch": 0.73, "grad_norm": 0.5714407404279128, "learning_rate": 1.5978702829330086e-05, "loss": 2.1007, "step": 1655 },
    { "epoch": 0.74, "grad_norm": 0.5921086518077258, "learning_rate": 1.594767549327714e-05, "loss": 2.1036, "step": 1660 },
    { "epoch": 0.74, "grad_norm": 0.559814138870615, "learning_rate": 1.591655930279524e-05, "loss": 2.1085, "step": 1665 },
    { "epoch": 0.74, "grad_norm": 0.5851671121337831, "learning_rate": 1.588535472274017e-05, "loss": 2.1065, "step": 1670 },
    { "epoch": 0.74, "grad_norm": 0.563721698981161, "learning_rate": 1.5854062219288188e-05, "loss": 2.0916, "step": 1675 },
    { "epoch": 0.74, "grad_norm": 0.6392995057151976, "learning_rate": 1.5822682259929086e-05, "loss": 2.148, "step": 1680 },
    { "epoch": 0.75, "grad_norm": 0.6144632340271624, "learning_rate": 1.5791215313459172e-05, "loss": 2.101, "step": 1685 },
    { "epoch": 0.75, "grad_norm": 0.5829916350501447, "learning_rate": 1.57596618499743e-05, "loss": 2.0927, "step": 1690 },
    { "epoch": 0.75, "grad_norm": 0.6064032972995057, "learning_rate": 1.572802234086283e-05, "loss": 2.0894, "step": 1695 },
    { "epoch": 0.75, "grad_norm": 0.5873172143593829, "learning_rate": 1.5696297258798573e-05, "loss": 2.1098, "step": 1700 },
    { "epoch": 0.76, "grad_norm": 0.6149790722746036, "learning_rate": 1.566448707773377e-05, "loss": 2.0899, "step": 1705 },
    { "epoch": 0.76, "grad_norm": 0.5776666410081498, "learning_rate": 1.5632592272891964e-05, "loss": 2.0955, "step": 1710 },
    { "epoch": 0.76, "grad_norm": 0.6442450337823722, "learning_rate": 1.560061332076094e-05, "loss": 2.1076, "step": 1715 },
    { "epoch": 0.76, "grad_norm": 0.6432230413252014, "learning_rate": 1.5568550699085574e-05, "loss": 2.0953, "step": 1720 },
    { "epoch": 0.76, "grad_norm": 0.5968306623247007, "learning_rate": 1.5536404886860718e-05, "loss": 2.0904, "step": 1725 },
    { "epoch": 0.77, "grad_norm": 0.5649272208929623, "learning_rate": 1.550417636432404e-05, "loss": 2.1049, "step": 1730 },
    { "epoch": 0.77, "grad_norm": 0.5789701403962935, "learning_rate": 1.547186561294884e-05, "loss": 2.0996, "step": 1735 },
    { "epoch": 0.77, "grad_norm": 0.5969272774469193, "learning_rate": 1.5439473115436872e-05, "loss": 2.1031, "step": 1740 },
    { "epoch": 0.77, "grad_norm": 0.6417501068011607, "learning_rate": 1.540699935571111e-05, "loss": 2.0973, "step": 1745 },
    { "epoch": 0.78, "grad_norm": 0.5807602673972329, "learning_rate": 1.5374444818908553e-05, "loss": 2.0665, "step": 1750 },
    { "epoch": 0.78, "grad_norm": 0.5689407638595382, "learning_rate": 1.5341809991372936e-05, "loss": 2.1004, "step": 1755 },
    { "epoch": 0.78, "grad_norm": 0.6024314510332974, "learning_rate": 1.5309095360647505e-05, "loss": 2.1153, "step": 1760 },
    { "epoch": 0.78, "grad_norm": 0.5904216224717677, "learning_rate": 1.5276301415467703e-05, "loss": 2.1125, "step": 1765 },
    { "epoch": 0.78, "grad_norm": 0.5882514621078204, "learning_rate": 1.5243428645753877e-05, "loss": 2.0956, "step": 1770 },
    { "epoch": 0.79, "grad_norm": 0.6186764200191274, "learning_rate": 1.5210477542603976e-05, "loss": 2.1055, "step": 1775 },
    { "epoch": 0.79, "grad_norm": 0.5899161495395727, "learning_rate": 1.5177448598286182e-05, "loss": 2.1271, "step": 1780 },
    { "epoch": 0.79, "grad_norm": 0.634357316986597, "learning_rate": 1.5144342306231587e-05, "loss": 2.0854, "step": 1785 },
    { "epoch": 0.79, "grad_norm": 0.5980082850417965, "learning_rate": 1.5111159161026802e-05, "loss": 2.1024, "step": 1790 },
    { "epoch": 0.79, "grad_norm": 0.6501130185522511, "learning_rate": 1.5077899658406581e-05, "loss": 2.1008, "step": 1795 },
    { "epoch": 0.8, "grad_norm": 0.558365552497711, "learning_rate": 1.5044564295246395e-05, "loss": 2.0921, "step": 1800 },
    { "epoch": 0.8, "grad_norm": 0.5731388133995782, "learning_rate": 1.501115356955504e-05, "loss": 2.1135, "step": 1805 },
    { "epoch": 0.8, "grad_norm": 0.5797209582126353, "learning_rate": 1.4977667980467162e-05, "loss": 2.1084, "step": 1810 },
    { "epoch": 0.8, "grad_norm": 0.6028997293307427, "learning_rate": 1.4944108028235831e-05, "loss": 2.0967, "step": 1815 },
    { "epoch": 0.81, "grad_norm": 0.5855736108748338, "learning_rate": 1.491047421422505e-05, "loss": 2.1295, "step": 1820 },
    { "epoch": 0.81, "grad_norm": 0.5629699794040355, "learning_rate": 1.4876767040902267e-05, "loss": 2.0865, "step": 1825 },
    { "epoch": 0.81, "grad_norm": 0.5757014775993767, "learning_rate": 1.4842987011830871e-05, "loss": 2.1355, "step": 1830 },
    { "epoch": 0.81, "grad_norm": 0.5861649169139695, "learning_rate": 1.4809134631662672e-05, "loss": 2.051, "step": 1835 },
    { "epoch": 0.81, "grad_norm": 0.6425624935119288, "learning_rate": 1.4775210406130358e-05, "loss": 2.1311, "step": 1840 },
    { "epoch": 0.82, "grad_norm": 0.5943913748968095, "learning_rate": 1.4741214842039939e-05, "loss": 2.0785, "step": 1845 },
    { "epoch": 0.82, "grad_norm": 0.612017489293917, "learning_rate": 1.4707148447263178e-05, "loss": 2.0921, "step": 1850 },
    { "epoch": 0.82, "grad_norm": 0.5764862655973384, "learning_rate": 1.4673011730730001e-05, "loss": 2.1209, "step": 1855 },
    { "epoch": 0.82, "grad_norm": 0.6344038886859545, "learning_rate": 1.4638805202420896e-05, "loss": 2.1175, "step": 1860 },
    { "epoch": 0.83, "grad_norm": 0.6115853328793905, "learning_rate": 1.4604529373359294e-05, "loss": 2.109, "step": 1865 },
    { "epoch": 0.83, "grad_norm": 0.5854059306933792, "learning_rate": 1.4570184755603936e-05, "loss": 2.0959, "step": 1870 },
    { "epoch": 0.83, "grad_norm": 0.580547868040832, "learning_rate": 1.4535771862241218e-05, "loss": 2.0806, "step": 1875 },
    { "epoch": 0.83, "grad_norm": 0.59733509081403, "learning_rate": 1.4501291207377537e-05, "loss": 2.1225, "step": 1880 },
    { "epoch": 0.83, "grad_norm": 0.5882514486206899, "learning_rate": 1.4466743306131594e-05, "loss": 2.1137, "step": 1885 },
    { "epoch": 0.84, "grad_norm": 0.6064508350468485, "learning_rate": 1.4432128674626713e-05, "loss": 2.1015, "step": 1890 },
    { "epoch": 0.84, "grad_norm": 0.6027302170211182, "learning_rate": 1.4397447829983122e-05, "loss": 2.1133, "step": 1895 },
    { "epoch": 0.84, "grad_norm": 0.6092274515107855, "learning_rate": 1.4362701290310234e-05, "loss": 2.0894, "step": 1900 },
    { "epoch": 0.84, "grad_norm": 0.5995665084317409, "learning_rate": 1.43278895746989e-05, "loss": 2.09, "step": 1905 },
    { "epoch": 0.85, "grad_norm": 0.5878688400295196, "learning_rate": 1.4293013203213662e-05, "loss": 2.0881, "step": 1910 },
    { "epoch": 0.85, "grad_norm": 0.582907908620598, "learning_rate": 1.4258072696884966e-05, "loss": 2.0893, "step": 1915 },
    { "epoch": 0.85, "grad_norm": 0.5868011100219134, "learning_rate": 1.422306857770141e-05, "loss": 2.0997, "step": 1920 },
    { "epoch": 0.85, "grad_norm": 0.6045905907850748, "learning_rate": 1.4188001368601918e-05, "loss": 2.0907, "step": 1925 },
    { "epoch": 0.85, "grad_norm": 0.552986694414777, "learning_rate": 1.415287159346793e-05, "loss": 2.1045, "step": 1930 },
    { "epoch": 0.86, "grad_norm": 0.6103467497134711, "learning_rate": 1.4117679777115593e-05, "loss": 2.1089, "step": 1935 },
    { "epoch": 0.86, "grad_norm": 0.5728833442351586, "learning_rate": 1.4082426445287904e-05, "loss": 2.0741, "step": 1940 },
    { "epoch": 0.86, "grad_norm": 0.5471596570564455, "learning_rate": 1.4047112124646864e-05, "loss": 2.0926, "step": 1945 },
    { "epoch": 0.86, "grad_norm": 0.574025403990718, "learning_rate": 1.4011737342765604e-05, "loss": 2.126, "step": 1950 },
    { "epoch": 0.87, "grad_norm": 0.6103452690588049, "learning_rate": 1.3976302628120508e-05, "loss": 2.1295, "step": 1955 },
    { "epoch": 0.87, "grad_norm": 0.6086240497676804, "learning_rate": 1.3940808510083321e-05, "loss": 2.1049, "step": 1960 },
    { "epoch": 0.87, "grad_norm": 0.5819915739338899, "learning_rate": 1.390525551891323e-05, "loss": 2.0891, "step": 1965 },
    { "epoch": 0.87, "grad_norm": 0.5797745176415381, "learning_rate": 1.3869644185748954e-05, "loss": 2.102, "step": 1970 },
    { "epoch": 0.87, "grad_norm": 0.5925630378069294, "learning_rate": 1.3833975042600799e-05, "loss": 2.1285, "step": 1975 },
    { "epoch": 0.88, "grad_norm": 0.5747738372435807, "learning_rate": 1.3798248622342719e-05, "loss": 2.0779, "step": 1980 },
    { "epoch": 0.88, "grad_norm": 0.584382618164055, "learning_rate": 1.3762465458704347e-05, "loss": 2.0988, "step": 1985 },
    { "epoch": 0.88, "grad_norm": 0.5861084222891083, "learning_rate": 1.3726626086263029e-05, "loss": 2.0905, "step": 1990 },
    { "epoch": 0.88, "grad_norm": 0.5643851759202737, "learning_rate": 1.3690731040435832e-05, "loss": 2.0888, "step": 1995 },
    { "epoch": 0.89, "grad_norm": 0.6048893995449998, "learning_rate": 1.3654780857471548e-05, "loss": 2.0838, "step": 2000 },
    { "epoch": 0.89, "grad_norm": 0.567316386534612, "learning_rate": 1.3618776074442685e-05, "loss": 2.1225, "step": 2005 },
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5859046448142026, |
|
"learning_rate": 1.3582717229237434e-05, |
|
"loss": 2.0892, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.6097636079486066, |
|
"learning_rate": 1.3546604860551648e-05, |
|
"loss": 2.1151, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.6032675526034351, |
|
"learning_rate": 1.3510439507880778e-05, |
|
"loss": 2.1161, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.6186215628178273, |
|
"learning_rate": 1.3474221711511827e-05, |
|
"loss": 2.0932, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5710572304732956, |
|
"learning_rate": 1.3437952012515275e-05, |
|
"loss": 2.1053, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.581802008231799, |
|
"learning_rate": 1.3401630952736988e-05, |
|
"loss": 2.0995, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5915503333410738, |
|
"learning_rate": 1.336525907479013e-05, |
|
"loss": 2.1066, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5970554285650588, |
|
"learning_rate": 1.3328836922047058e-05, |
|
"loss": 2.0982, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.58456967436958, |
|
"learning_rate": 1.32923650386312e-05, |
|
"loss": 2.0883, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5873440360250707, |
|
"learning_rate": 1.3255843969408932e-05, |
|
"loss": 2.1057, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5572695471760302, |
|
"learning_rate": 1.321927425998143e-05, |
|
"loss": 2.1128, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5822414777814289, |
|
"learning_rate": 1.318265645667652e-05, |
|
"loss": 2.1021, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.6043347373724505, |
|
"learning_rate": 1.314599110654053e-05, |
|
"loss": 2.0902, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5707239588619374, |
|
"learning_rate": 1.3109278757330098e-05, |
|
"loss": 2.1148, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.6127525778390996, |
|
"learning_rate": 1.3072519957504e-05, |
|
"loss": 2.0816, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5980261254164336, |
|
"learning_rate": 1.3035715256214956e-05, |
|
"loss": 2.0759, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5750845126693687, |
|
"learning_rate": 1.2998865203301424e-05, |
|
"loss": 2.0992, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.601944765424719, |
|
"learning_rate": 1.296197034927938e-05, |
|
"loss": 2.0959, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5773689320272927, |
|
"learning_rate": 1.2925031245334112e-05, |
|
"loss": 2.1006, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.6130426292738024, |
|
"learning_rate": 1.288804844331196e-05, |
|
"loss": 2.1009, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5795780658965399, |
|
"learning_rate": 1.2851022495712092e-05, |
|
"loss": 2.094, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.626231627915789, |
|
"learning_rate": 1.2813953955678243e-05, |
|
"loss": 2.089, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5814682075095083, |
|
"learning_rate": 1.2776843376990448e-05, |
|
"loss": 2.083, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5754124419107088, |
|
"learning_rate": 1.273969131405678e-05, |
|
"loss": 2.1041, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5458027056722354, |
|
"learning_rate": 1.270249832190505e-05, |
|
"loss": 2.0661, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.54114905299215, |
|
"learning_rate": 1.2665264956174532e-05, |
|
"loss": 2.0762, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.5693931222692631, |
|
"learning_rate": 1.2627991773107651e-05, |
|
"loss": 2.0917, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.5950576880381783, |
|
"learning_rate": 1.259067932954168e-05, |
|
"loss": 2.1076, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.6273107471049862, |
|
"learning_rate": 1.2553328182900414e-05, |
|
"loss": 2.1121, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.586482228993905, |
|
"learning_rate": 1.2515938891185856e-05, |
|
"loss": 2.084, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5809857509913409, |
|
"learning_rate": 1.2478512012969864e-05, |
|
"loss": 2.0746, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5893783979335608, |
|
"learning_rate": 1.2441048107385815e-05, |
|
"loss": 2.088, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5953263375713842, |
|
"learning_rate": 1.2403547734120253e-05, |
|
"loss": 2.0987, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5732983166389841, |
|
"learning_rate": 1.2366011453404527e-05, |
|
"loss": 2.0886, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5741249904020347, |
|
"learning_rate": 1.2328439826006415e-05, |
|
"loss": 2.0994, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5874052905748295, |
|
"learning_rate": 1.2290833413221757e-05, |
|
"loss": 2.0874, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5738655674844397, |
|
"learning_rate": 1.2253192776866059e-05, |
|
"loss": 2.0889, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5544624263204088, |
|
"learning_rate": 1.2215518479266108e-05, |
|
"loss": 2.0667, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5483512496643629, |
|
"learning_rate": 1.2177811083251572e-05, |
|
"loss": 2.0565, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.5676189894522147, |
|
"learning_rate": 1.214007115214658e-05, |
|
"loss": 2.0626, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.6146371310899279, |
|
"learning_rate": 1.2102299249761315e-05, |
|
"loss": 2.1122, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.5956439065297068, |
|
"learning_rate": 1.2064495940383602e-05, |
|
"loss": 2.0882, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.5961610719418295, |
|
"learning_rate": 1.2026661788770453e-05, |
|
"loss": 2.0876, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.6365230245544026, |
|
"learning_rate": 1.1988797360139649e-05, |
|
"loss": 2.0871, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.567762554954768, |
|
"learning_rate": 1.1950903220161286e-05, |
|
"loss": 2.0858, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5944522247966103, |
|
"learning_rate": 1.1912979934949331e-05, |
|
"loss": 2.1263, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5775844213725809, |
|
"learning_rate": 1.1875028071053165e-05, |
|
"loss": 2.081, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5972309566692702, |
|
"learning_rate": 1.1837048195449112e-05, |
|
"loss": 2.0961, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5748650002793964, |
|
"learning_rate": 1.1799040875531975e-05, |
|
"loss": 2.0799, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5616028370009407, |
|
"learning_rate": 1.1761006679106552e-05, |
|
"loss": 2.1065, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 2.0951695442199707, |
|
"eval_runtime": 26.3476, |
|
"eval_samples_per_second": 1213.735, |
|
"eval_steps_per_second": 37.954, |
|
"step": 2258 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5527773340311358, |
|
"learning_rate": 1.1722946174379168e-05, |
|
"loss": 2.0977, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.6112372645637757, |
|
"learning_rate": 1.168485992994917e-05, |
|
"loss": 2.0676, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.5749966887334458, |
|
"learning_rate": 1.1646748514800441e-05, |
|
"loss": 2.0853, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.5960665627574333, |
|
"learning_rate": 1.16086124982929e-05, |
|
"loss": 2.0861, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.576974664532595, |
|
"learning_rate": 1.1570452450153992e-05, |
|
"loss": 2.0676, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.577844600467795, |
|
"learning_rate": 1.1532268940470182e-05, |
|
"loss": 2.0798, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.5951322487948181, |
|
"learning_rate": 1.149406253967843e-05, |
|
"loss": 2.0923, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.5479611569179274, |
|
"learning_rate": 1.1455833818557678e-05, |
|
"loss": 2.0834, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.5622922091778799, |
|
"learning_rate": 1.1417583348220322e-05, |
|
"loss": 2.0712, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.5581169265081379, |
|
"learning_rate": 1.1379311700103673e-05, |
|
"loss": 2.0826, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.560318829308528, |
|
"learning_rate": 1.134101944596143e-05, |
|
"loss": 2.091, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5717241173056814, |
|
"learning_rate": 1.1302707157855122e-05, |
|
"loss": 2.0955, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5596805714050055, |
|
"learning_rate": 1.1264375408145582e-05, |
|
"loss": 2.0954, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5748014011724205, |
|
"learning_rate": 1.1226024769484385e-05, |
|
"loss": 2.0649, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5985395562035934, |
|
"learning_rate": 1.118765581480529e-05, |
|
"loss": 2.0664, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5699090787648166, |
|
"learning_rate": 1.1149269117315693e-05, |
|
"loss": 2.0939, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.568093215865292, |
|
"learning_rate": 1.1110865250488047e-05, |
|
"loss": 2.0875, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.5555984174512915, |
|
"learning_rate": 1.1072444788051314e-05, |
|
"loss": 2.1043, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.6693171925796504, |
|
"learning_rate": 1.1034008303982373e-05, |
|
"loss": 2.0733, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.6092675439553443, |
|
"learning_rate": 1.0995556372497467e-05, |
|
"loss": 2.0722, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.625051043078541, |
|
"learning_rate": 1.0957089568043607e-05, |
|
"loss": 2.0778, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.5802806420820981, |
|
"learning_rate": 1.0918608465289993e-05, |
|
"loss": 2.0737, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.5738374441827276, |
|
"learning_rate": 1.088011363911944e-05, |
|
"loss": 2.0655, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.5645836653492234, |
|
"learning_rate": 1.084160566461978e-05, |
|
"loss": 2.0793, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.5528303864059704, |
|
"learning_rate": 1.080308511707527e-05, |
|
"loss": 2.0849, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.5642584459957846, |
|
"learning_rate": 1.0764552571957999e-05, |
|
"loss": 2.071, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.5823468636424974, |
|
"learning_rate": 1.0726008604919296e-05, |
|
"loss": 2.0711, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.6086979958774408, |
|
"learning_rate": 1.0687453791781122e-05, |
|
"loss": 2.0885, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.5515739330908145, |
|
"learning_rate": 1.0648888708527481e-05, |
|
"loss": 2.0815, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.598340419665785, |
|
"learning_rate": 1.0610313931295793e-05, |
|
"loss": 2.0818, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.6138286896112974, |
|
"learning_rate": 1.0571730036368308e-05, |
|
"loss": 2.0563, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.6031187180652934, |
|
"learning_rate": 1.0533137600163488e-05, |
|
"loss": 2.0576, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.5704628628292385, |
|
"learning_rate": 1.0494537199227393e-05, |
|
"loss": 2.0751, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.6051744007777525, |
|
"learning_rate": 1.045592941022507e-05, |
|
"loss": 2.0766, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.5902359354404348, |
|
"learning_rate": 1.0417314809931945e-05, |
|
"loss": 2.0601, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.6228712328080301, |
|
"learning_rate": 1.0378693975225194e-05, |
|
"loss": 2.054, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.5789232266106427, |
|
"learning_rate": 1.0340067483075135e-05, |
|
"loss": 2.0666, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.5913980921419343, |
|
"learning_rate": 1.0301435910536603e-05, |
|
"loss": 2.1175, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.5584410329086974, |
|
"learning_rate": 1.0262799834740334e-05, |
|
"loss": 2.0669, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.6001286222275667, |
|
"learning_rate": 1.0224159832884335e-05, |
|
"loss": 2.1061, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.5743799874130536, |
|
"learning_rate": 1.0185516482225264e-05, |
|
"loss": 2.0742, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.5636242247339052, |
|
"learning_rate": 1.0146870360069819e-05, |
|
"loss": 2.0943, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.5777029674493895, |
|
"learning_rate": 1.0108222043766087e-05, |
|
"loss": 2.0746, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.5815434775204681, |
|
"learning_rate": 1.0069572110694946e-05, |
|
"loss": 2.0382, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.5768545198118096, |
|
"learning_rate": 1.0030921138261422e-05, |
|
"loss": 2.0556, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.6165471237092065, |
|
"learning_rate": 9.992269703886073e-06, |
|
"loss": 2.0863, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.5818581018098411, |
|
"learning_rate": 9.953618384996353e-06, |
|
"loss": 2.0707, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.653087127356428, |
|
"learning_rate": 9.914967759017993e-06, |
|
"loss": 2.0574, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.5722171834036991, |
|
"learning_rate": 9.876318403366371e-06, |
|
"loss": 2.0675, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.5528347937828104, |
|
"learning_rate": 9.83767089543789e-06, |
|
"loss": 2.0928, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.5745800719417502, |
|
"learning_rate": 9.79902581260135e-06, |
|
"loss": 2.0546, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.6082996475323975, |
|
"learning_rate": 9.76038373218931e-06, |
|
"loss": 2.1075, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.5507336537375889, |
|
"learning_rate": 9.721745231489499e-06, |
|
"loss": 2.0441, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.5814273840900069, |
|
"learning_rate": 9.683110887736134e-06, |
|
"loss": 2.094, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.5782967337954177, |
|
"learning_rate": 9.644481278101366e-06, |
|
"loss": 2.0959, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.5915434449312523, |
|
"learning_rate": 9.60585697968659e-06, |
|
"loss": 2.0782, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.5795496817902605, |
|
"learning_rate": 9.567238569513872e-06, |
|
"loss": 2.0512, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.5717933361924271, |
|
"learning_rate": 9.52862662451731e-06, |
|
"loss": 2.0725, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.6025184429872964, |
|
"learning_rate": 9.49002172153442e-06, |
|
"loss": 2.083, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.5443233883979909, |
|
"learning_rate": 9.451424437297494e-06, |
|
"loss": 2.0712, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.5768672614038616, |
|
"learning_rate": 9.412835348425038e-06, |
|
"loss": 2.0857, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.6079345591117705, |
|
"learning_rate": 9.374255031413089e-06, |
|
"loss": 2.0779, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.5722866437088752, |
|
"learning_rate": 9.335684062626669e-06, |
|
"loss": 2.0704, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.5827552590203432, |
|
"learning_rate": 9.297123018291122e-06, |
|
"loss": 2.0532, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.5925758511534281, |
|
"learning_rate": 9.25857247448354e-06, |
|
"loss": 2.0771, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.5815198070386709, |
|
"learning_rate": 9.220033007124135e-06, |
|
"loss": 2.0668, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.5810966855111676, |
|
"learning_rate": 9.181505191967656e-06, |
|
"loss": 2.0669, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.5600798462945777, |
|
"learning_rate": 9.142989604594757e-06, |
|
"loss": 2.0631, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.6038743499184152, |
|
"learning_rate": 9.104486820403438e-06, |
|
"loss": 2.0913, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.5657558451281521, |
|
"learning_rate": 9.06599741460041e-06, |
|
"loss": 2.0853, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.5435496002874937, |
|
"learning_rate": 9.027521962192532e-06, |
|
"loss": 2.0776, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.5667695734621062, |
|
"learning_rate": 8.989061037978196e-06, |
|
"loss": 2.0863, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.5654008215606692, |
|
"learning_rate": 8.950615216538765e-06, |
|
"loss": 2.0996, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.5632877417455171, |
|
"learning_rate": 8.912185072229974e-06, |
|
"loss": 2.0802, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.5843397325897507, |
|
"learning_rate": 8.873771179173339e-06, |
|
"loss": 2.0565, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.6309742147425943, |
|
"learning_rate": 8.83537411124761e-06, |
|
"loss": 2.0812, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.5561980660211074, |
|
"learning_rate": 8.796994442080167e-06, |
|
"loss": 2.0811, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.5872965498615417, |
|
"learning_rate": 8.758632745038478e-06, |
|
"loss": 2.0818, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.5912596130210314, |
|
"learning_rate": 8.720289593221502e-06, |
|
"loss": 2.0752, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.5743520498892418, |
|
"learning_rate": 8.681965559451159e-06, |
|
"loss": 2.0319, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.5822259144979813, |
|
"learning_rate": 8.643661216263744e-06, |
|
"loss": 2.0622, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.5528017113241958, |
|
"learning_rate": 8.605377135901404e-06, |
|
"loss": 2.093, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.5746962666002999, |
|
"learning_rate": 8.567113890303554e-06, |
|
"loss": 2.0781, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.5624683464972017, |
|
"learning_rate": 8.52887205109837e-06, |
|
"loss": 2.0842, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.5659908998943362, |
|
"learning_rate": 8.490652189594212e-06, |
|
"loss": 2.0607, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.6165531739361667, |
|
"learning_rate": 8.452454876771124e-06, |
|
"loss": 2.0667, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.6036586601629782, |
|
"learning_rate": 8.414280683272273e-06, |
|
"loss": 2.111, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.5668609006019637, |
|
"learning_rate": 8.376130179395452e-06, |
|
"loss": 2.078, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5322323646733972, |
|
"learning_rate": 8.338003935084531e-06, |
|
"loss": 2.0791, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5816890357323893, |
|
"learning_rate": 8.299902519920977e-06, |
|
"loss": 2.0759, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5531255003520236, |
|
"learning_rate": 8.2618265031153e-06, |
|
"loss": 2.0794, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5516866583585986, |
|
"learning_rate": 8.223776453498599e-06, |
|
"loss": 2.0708, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5665675543328689, |
|
"learning_rate": 8.185752939514026e-06, |
|
"loss": 2.0506, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.5658015830777628, |
|
"learning_rate": 8.147756529208318e-06, |
|
"loss": 2.0708, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.5815201450481382, |
|
"learning_rate": 8.109787790223285e-06, |
|
"loss": 2.0553, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.5893314619540386, |
|
"learning_rate": 8.071847289787367e-06, |
|
"loss": 2.0614, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.6414534373242382, |
|
"learning_rate": 8.033935594707116e-06, |
|
"loss": 2.0694, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.598007345581066, |
|
"learning_rate": 7.996053271358764e-06, |
|
"loss": 2.086, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.5910268858990115, |
|
"learning_rate": 7.958200885679752e-06, |
|
"loss": 2.0885, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.5680723634435483, |
|
"learning_rate": 7.920379003160255e-06, |
|
"loss": 2.1094, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.5552770411674977, |
|
"learning_rate": 7.88258818883477e-06, |
|
"loss": 2.0788, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.5495012641900135, |
|
"learning_rate": 7.844829007273634e-06, |
|
"loss": 2.0732, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.5874940267814824, |
|
"learning_rate": 7.807102022574631e-06, |
|
"loss": 2.0546, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.5759378150271451, |
|
"learning_rate": 7.769407798354536e-06, |
|
"loss": 2.0565, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.5823894049027681, |
|
"learning_rate": 7.7317468977407e-06, |
|
"loss": 2.0822, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.5612296833138716, |
|
"learning_rate": 7.694119883362644e-06, |
|
"loss": 2.0611, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.5805293821926759, |
|
"learning_rate": 7.65652731734366e-06, |
|
"loss": 2.0488, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.5638320420331838, |
|
"learning_rate": 7.618969761292383e-06, |
|
"loss": 2.0838, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.600777339296587, |
|
"learning_rate": 7.5814477762944435e-06, |
|
"loss": 2.0986, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.5974720852908599, |
|
"learning_rate": 7.5439619229040466e-06, |
|
"loss": 2.0687, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.6160253655569089, |
|
"learning_rate": 7.506512761135627e-06, |
|
"loss": 2.0684, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.5762856204729371, |
|
"learning_rate": 7.4691008504554595e-06, |
|
"loss": 2.0442, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.5610875900690672, |
|
"learning_rate": 7.431726749773322e-06, |
|
"loss": 2.0444, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.5445604834217658, |
|
"learning_rate": 7.394391017434126e-06, |
|
"loss": 2.0824, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.5840776079669121, |
|
"learning_rate": 7.3570942112095955e-06, |
|
"loss": 2.0832, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.5619608619733877, |
|
"learning_rate": 7.3198368882899095e-06, |
|
"loss": 2.0541, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.5844761394452193, |
|
"learning_rate": 7.282619605275409e-06, |
|
"loss": 2.0884, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.6070574954232625, |
|
"learning_rate": 7.245442918168244e-06, |
|
"loss": 2.0555, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.5676178752245379, |
|
"learning_rate": 7.208307382364111e-06, |
|
"loss": 2.0655, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.5462795247032544, |
|
"learning_rate": 7.1712135526439094e-06, |
|
"loss": 2.0825, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.595069094824675, |
|
"learning_rate": 7.134161983165498e-06, |
|
"loss": 2.0728, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.5547056237323679, |
|
"learning_rate": 7.097153227455379e-06, |
|
"loss": 2.0716, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.5837456556363589, |
|
"learning_rate": 7.060187838400451e-06, |
|
"loss": 2.0834, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.5627509121201649, |
|
"learning_rate": 7.023266368239745e-06, |
|
"loss": 2.0474, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.5701325951295837, |
|
"learning_rate": 6.986389368556168e-06, |
|
"loss": 2.0935, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.5708414741976688, |
|
"learning_rate": 6.949557390268272e-06, |
|
"loss": 2.0876, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.5756526602085583, |
|
"learning_rate": 6.912770983622008e-06, |
|
"loss": 2.0729, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.5607085276371621, |
|
"learning_rate": 6.87603069818253e-06, |
|
"loss": 2.0926, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.5536289198774175, |
|
"learning_rate": 6.839337082825954e-06, |
|
"loss": 2.07, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.5781895838365114, |
|
"learning_rate": 6.802690685731197e-06, |
|
"loss": 2.0592, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.5790712059804543, |
|
"learning_rate": 6.766092054371744e-06, |
|
"loss": 2.0826, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.5749107615925475, |
|
"learning_rate": 6.729541735507503e-06, |
|
"loss": 2.097, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.5551440352307594, |
|
"learning_rate": 6.693040275176623e-06, |
|
"loss": 2.0863, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.5772660080516377, |
|
"learning_rate": 6.656588218687341e-06, |
|
"loss": 2.0881, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.5659850187851291, |
|
"learning_rate": 6.62018611060982e-06, |
|
"loss": 2.0462, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.5363757482405287, |
|
"learning_rate": 6.583834494768042e-06, |
|
"loss": 2.082, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.6189273385035811, |
|
"learning_rate": 6.547533914231654e-06, |
|
"loss": 2.0679, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.5978105882298892, |
|
"learning_rate": 6.511284911307883e-06, |
|
"loss": 2.0934, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.5553761429009801, |
|
"learning_rate": 6.475088027533399e-06, |
|
"loss": 2.0878, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.568217220480086, |
|
"learning_rate": 6.4389438036662686e-06, |
|
"loss": 2.0823, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.5921703666188227, |
|
"learning_rate": 6.40285277967784e-06, |
|
"loss": 2.0863, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.5698593276512977, |
|
"learning_rate": 6.3668154947446905e-06, |
|
"loss": 2.0452, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5995785250095343, |
|
"learning_rate": 6.330832487240573e-06, |
|
"loss": 2.0646, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5804556037681438, |
|
"learning_rate": 6.294904294728375e-06, |
|
"loss": 2.0708, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5558363235935789, |
|
"learning_rate": 6.2590314539520695e-06, |
|
"loss": 2.0927, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5696340847088165, |
|
"learning_rate": 6.223214500828729e-06, |
|
"loss": 2.065, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5971642720079355, |
|
"learning_rate": 6.187453970440484e-06, |
|
"loss": 2.0648, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.5954157837795512, |
|
"learning_rate": 6.151750397026556e-06, |
|
"loss": 2.1034, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.5930972913855831, |
|
"learning_rate": 6.116104313975267e-06, |
|
"loss": 2.0657, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.5572014146279308, |
|
"learning_rate": 6.080516253816055e-06, |
|
"loss": 2.0449, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.5553821095894338, |
|
"learning_rate": 6.044986748211556e-06, |
|
"loss": 2.0637, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.5706592731946465, |
|
"learning_rate": 6.009516327949621e-06, |
|
"loss": 2.0589, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.5531132813272496, |
|
"learning_rate": 5.974105522935416e-06, |
|
"loss": 2.0738, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.5653166797049978, |
|
"learning_rate": 5.93875486218348e-06, |
|
"loss": 2.0515, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.5837426456642572, |
|
"learning_rate": 5.903464873809854e-06, |
|
"loss": 2.0576, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.6113399620896649, |
|
"learning_rate": 5.868236085024153e-06, |
|
"loss": 2.0491, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.6028265573779915, |
|
"learning_rate": 5.833069022121727e-06, |
|
"loss": 2.0964, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.5598527925248651, |
|
"learning_rate": 5.797964210475766e-06, |
|
"loss": 2.0339, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.5762519974495129, |
|
"learning_rate": 5.762922174529482e-06, |
|
"loss": 2.0559, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.5888564253804358, |
|
"learning_rate": 5.7279434377882435e-06, |
|
"loss": 2.0928, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.5833066727929147, |
|
"learning_rate": 5.693028522811783e-06, |
|
"loss": 2.0347, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.5591866868143437, |
|
"learning_rate": 5.658177951206367e-06, |
|
"loss": 2.0735, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.5726642686405102, |
|
"learning_rate": 5.6233922436170205e-06, |
|
"loss": 2.0542, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.5703877357888981, |
|
"learning_rate": 5.588671919719735e-06, |
|
"loss": 2.0958, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.5834447412156415, |
|
"learning_rate": 5.5540174982137185e-06, |
|
"loss": 2.053, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.5691237831147797, |
|
"learning_rate": 5.519429496813637e-06, |
|
"loss": 2.0715, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.5493289065838657, |
|
"learning_rate": 5.484908432241889e-06, |
|
"loss": 2.0697, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.6246925504683438, |
|
"learning_rate": 5.4504548202208644e-06, |
|
"loss": 2.0532, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.5830289606439206, |
|
"learning_rate": 5.416069175465274e-06, |
|
"loss": 2.0829, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.5770291253377781, |
|
"learning_rate": 5.381752011674426e-06, |
|
"loss": 2.1069, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.59236769745392, |
|
"learning_rate": 5.347503841524582e-06, |
|
"loss": 2.0695, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.5619994598924011, |
|
"learning_rate": 5.313325176661268e-06, |
|
"loss": 2.0615, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.5848552643647227, |
|
"learning_rate": 5.279216527691657e-06, |
|
"loss": 2.0912, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.5494971686532062, |
|
"learning_rate": 5.24517840417693e-06, |
|
"loss": 2.0483, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.5751937447176411, |
|
"learning_rate": 5.211211314624653e-06, |
|
"loss": 2.0832, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.5996417456881793, |
|
"learning_rate": 5.177315766481204e-06, |
|
"loss": 2.053, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.5758051720529916, |
|
"learning_rate": 5.143492266124164e-06, |
|
"loss": 2.0957, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.542941584217134, |
|
"learning_rate": 5.1097413188547805e-06, |
|
"loss": 2.0651, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.551644732396901, |
|
"learning_rate": 5.076063428890393e-06, |
|
"loss": 2.0726, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.5593477883272872, |
|
"learning_rate": 5.042459099356925e-06, |
|
"loss": 2.0643, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.5784316909609539, |
|
"learning_rate": 5.008928832281339e-06, |
|
"loss": 2.0561, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.5521629620854346, |
|
"learning_rate": 4.975473128584167e-06, |
|
"loss": 2.0738, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.5850672666437684, |
|
"learning_rate": 4.942092488072e-06, |
|
"loss": 2.0692, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.6062338884368135, |
|
"learning_rate": 4.908787409430044e-06, |
|
"loss": 2.0541, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.54076739975257, |
|
"learning_rate": 4.875558390214652e-06, |
|
"loss": 2.0631, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.5596620397642873, |
|
"learning_rate": 4.842405926845906e-06, |
|
"loss": 2.0729, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.5741142208224713, |
|
"learning_rate": 4.8093305146001815e-06, |
|
"loss": 2.0458, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.578505283715987, |
|
"learning_rate": 4.776332647602774e-06, |
|
"loss": 2.0902, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.5549331942474833, |
|
"learning_rate": 4.743412818820488e-06, |
|
"loss": 2.0477, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.5842249886396319, |
|
"learning_rate": 4.710571520054302e-06, |
|
"loss": 2.0813, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.5394085936858433, |
|
"learning_rate": 4.677809241931994e-06, |
|
"loss": 2.058, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.555464125864039, |
|
"learning_rate": 4.645126473900839e-06, |
|
"loss": 2.0685, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.5466391317247038, |
|
"learning_rate": 4.612523704220264e-06, |
|
"loss": 2.0673, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.6046306763120934, |
|
"learning_rate": 4.580001419954593e-06, |
|
"loss": 2.0611, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.5767596169539325, |
|
"learning_rate": 4.5475601069657304e-06, |
|
"loss": 2.0723, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.5793964285430453, |
|
"learning_rate": 4.51520024990594e-06, |
|
"loss": 2.05, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.6056394334021761, |
|
"learning_rate": 4.482922332210569e-06, |
|
"loss": 2.0684, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.5647907230296755, |
|
"learning_rate": 4.45072683609086e-06, |
|
"loss": 2.0222, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.5655807754063283, |
|
"learning_rate": 4.418614242526717e-06, |
|
"loss": 2.0855, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.5666681644161747, |
|
"learning_rate": 4.386585031259541e-06, |
|
"loss": 2.0529, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.5609970826183392, |
|
"learning_rate": 4.354639680785059e-06, |
|
"loss": 2.0856, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.5584979378480206, |
|
"learning_rate": 4.322778668346158e-06, |
|
"loss": 2.1002, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.594902997426089, |
|
"learning_rate": 4.291002469925782e-06, |
|
"loss": 2.064, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.5467648254535455, |
|
"learning_rate": 4.259311560239804e-06, |
|
"loss": 2.067, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.5627121427858941, |
|
"learning_rate": 4.227706412729943e-06, |
|
"loss": 2.0657, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.5518500010210696, |
|
"learning_rate": 4.196187499556672e-06, |
|
"loss": 2.07, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.5487607197426364, |
|
"learning_rate": 4.1647552915922e-06, |
|
"loss": 2.0601, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.5621361968208098, |
|
"learning_rate": 4.133410258413394e-06, |
|
"loss": 2.0672, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.5597561843146505, |
|
"learning_rate": 4.1021528682948064e-06, |
|
"loss": 2.0378, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.5629323084854226, |
|
"learning_rate": 4.070983588201643e-06, |
|
"loss": 2.0612, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5443035677857906, |
|
"learning_rate": 4.039902883782814e-06, |
|
"loss": 2.09, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5541350170918213, |
|
"learning_rate": 4.008911219363956e-06, |
|
"loss": 2.0986, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5502103094835759, |
|
"learning_rate": 3.978009057940518e-06, |
|
"loss": 2.0446, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5588520138156516, |
|
"learning_rate": 3.947196861170818e-06, |
|
"loss": 2.0586, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5536845364599826, |
|
"learning_rate": 3.916475089369175e-06, |
|
"loss": 2.0684, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.5362574486998208, |
|
"learning_rate": 3.8858442014990005e-06, |
|
"loss": 2.069, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.5530091914842842, |
|
"learning_rate": 3.855304655165978e-06, |
|
"loss": 2.0839, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.5545491457122881, |
|
"learning_rate": 3.824856906611188e-06, |
|
"loss": 2.0736, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.5603180643382628, |
|
"learning_rate": 3.794501410704331e-06, |
|
"loss": 2.0606, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.5685698576728719, |
|
"learning_rate": 3.764238620936892e-06, |
|
"loss": 2.0526, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.5362971575714992, |
|
"learning_rate": 3.7340689894154023e-06, |
|
"loss": 2.0395, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.5724617878761099, |
|
"learning_rate": 3.7039929668546636e-06, |
|
"loss": 2.0654, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.5340423554272131, |
|
"learning_rate": 3.674011002571022e-06, |
|
"loss": 2.0479, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.584358312998901, |
|
"learning_rate": 3.6441235444756474e-06, |
|
"loss": 2.0632, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.550986181649296, |
|
"learning_rate": 3.6143310390678544e-06, |
|
"loss": 2.0507, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.5601064709679608, |
|
"learning_rate": 3.5846339314284283e-06, |
|
"loss": 2.0772, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.5623905123341948, |
|
"learning_rate": 3.555032665212964e-06, |
|
"loss": 2.0967, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.5599781620806531, |
|
"learning_rate": 3.5255276826452568e-06, |
|
"loss": 2.0718, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.5625441672306304, |
|
"learning_rate": 3.496119424510678e-06, |
|
"loss": 2.0418, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.5866362542708108, |
|
"learning_rate": 3.466808330149607e-06, |
|
"loss": 2.0234, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.584027756142362, |
|
"learning_rate": 3.4375948374508516e-06, |
|
"loss": 2.0506, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.5513278147742432, |
|
"learning_rate": 3.4084793828451212e-06, |
|
"loss": 2.0542, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.5662734847784412, |
|
"learning_rate": 3.3794624012984913e-06, |
|
"loss": 2.0713, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.5583271775861873, |
|
"learning_rate": 3.3505443263059225e-06, |
|
"loss": 2.0526, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.5766432453605096, |
|
"learning_rate": 3.3217255898847635e-06, |
|
"loss": 2.077, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.5672547394564335, |
|
"learning_rate": 3.2930066225683245e-06, |
|
"loss": 2.0743, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.599180811938737, |
|
"learning_rate": 3.2643878533994145e-06, |
|
"loss": 2.0622, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.57167754691101, |
|
"learning_rate": 3.2358697099239587e-06, |
|
"loss": 2.0377, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.5485475587826795, |
|
"learning_rate": 3.20745261818459e-06, |
|
"loss": 2.0648, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.5641467390521516, |
|
"learning_rate": 3.1791370027143e-06, |
|
"loss": 2.0633, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.555047481455344, |
|
"learning_rate": 3.1509232865300886e-06, |
|
"loss": 2.0587, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.5488538540545334, |
|
"learning_rate": 3.1228118911266492e-06, |
|
"loss": 2.0618, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.5401835111054806, |
|
"learning_rate": 3.09480323647006e-06, |
|
"loss": 2.0498, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.6194716984424374, |
|
"learning_rate": 3.0668977409915313e-06, |
|
"loss": 2.0693, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.5431718707553973, |
|
"learning_rate": 3.039095821581127e-06, |
|
"loss": 2.0643, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.540925633971234, |
|
"learning_rate": 3.011397893581568e-06, |
|
"loss": 2.07, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.558669798292064, |
|
"learning_rate": 2.983804370781996e-06, |
|
"loss": 2.0627, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.5724618424347248, |
|
"learning_rate": 2.9563156654118185e-06, |
|
"loss": 2.0932, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.549959983330695, |
|
"learning_rate": 2.9289321881345257e-06, |
|
"loss": 2.0597, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.5680563384909777, |
|
"learning_rate": 2.9016543480415792e-06, |
|
"loss": 2.0448, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.574427841699941, |
|
"learning_rate": 2.8744825526462882e-06, |
|
"loss": 2.0877, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.5451057256624539, |
|
"learning_rate": 2.847417207877714e-06, |
|
"loss": 2.0659, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.5264181150974605, |
|
"learning_rate": 2.8204587180746256e-06, |
|
"loss": 2.0709, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.5796631302393072, |
|
"learning_rate": 2.793607485979435e-06, |
|
"loss": 2.0922, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.5526007681586762, |
|
"learning_rate": 2.7668639127322084e-06, |
|
"loss": 2.0692, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.5517600480742954, |
|
"learning_rate": 2.7402283978646436e-06, |
|
"loss": 2.0491, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.560411245567384, |
|
"learning_rate": 2.713701339294129e-06, |
|
"loss": 2.0668, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.5320482638168332, |
|
"learning_rate": 2.687283133317774e-06, |
|
"loss": 2.0565, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.5958292298602899, |
|
"learning_rate": 2.66097417460651e-06, |
|
"loss": 2.0666, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.5633323232989965, |
|
"learning_rate": 2.6347748561991815e-06, |
|
"loss": 2.0513, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.56151870282004, |
|
"learning_rate": 2.6086855694966795e-06, |
|
"loss": 2.0393, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.5540776884196815, |
|
"learning_rate": 2.5827067042560848e-06, |
|
"loss": 2.0604, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.5632630801317332, |
|
"learning_rate": 2.5568386485848663e-06, |
|
"loss": 2.0782, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.5881111533036575, |
|
"learning_rate": 2.5310817889350526e-06, |
|
"loss": 2.0801, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.5616707812779389, |
|
"learning_rate": 2.505436510097494e-06, |
|
"loss": 2.0744, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.5472849874991459, |
|
"learning_rate": 2.4799031951960784e-06, |
|
"loss": 2.0763, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.6287534555479183, |
|
"learning_rate": 2.45448222568204e-06, |
|
"loss": 2.0676, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.5698089677189078, |
|
"learning_rate": 2.4291739813282324e-06, |
|
"loss": 2.0619, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.5829906823304772, |
|
"learning_rate": 2.4039788402234787e-06, |
|
"loss": 2.0288, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.5506516838712906, |
|
"learning_rate": 2.3788971787669023e-06, |
|
"loss": 2.0711, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.552719116539876, |
|
"learning_rate": 2.3539293716623268e-06, |
|
"loss": 2.0901, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.5379962879355252, |
|
"learning_rate": 2.3290757919126516e-06, |
|
"loss": 2.0633, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.5545159173596396, |
|
"learning_rate": 2.304336810814305e-06, |
|
"loss": 2.0659, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.5590413512920404, |
|
"learning_rate": 2.2797127979516742e-06, |
|
"loss": 2.0735, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.5441185660566144, |
|
"learning_rate": 2.2552041211916052e-06, |
|
"loss": 2.0492, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.6001099874809708, |
|
"learning_rate": 2.230811146677896e-06, |
|
"loss": 2.072, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.5591027208489258, |
|
"learning_rate": 2.2065342388258193e-06, |
|
"loss": 2.0315, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.5613843093701428, |
|
"learning_rate": 2.182373760316694e-06, |
|
"loss": 2.0248, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.5629378640545444, |
|
"learning_rate": 2.1583300720924604e-06, |
|
"loss": 2.0511, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.5463602944639252, |
|
"learning_rate": 2.1344035333502878e-06, |
|
"loss": 2.0306, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.5945927514050589, |
|
"learning_rate": 2.1105945015371985e-06, |
|
"loss": 2.0807, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.5543960785521562, |
|
"learning_rate": 2.086903332344752e-06, |
|
"loss": 2.0587, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.5611823027794715, |
|
"learning_rate": 2.063330379703702e-06, |
|
"loss": 2.0804, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.5389176983968915, |
|
"learning_rate": 2.039875995778735e-06, |
|
"loss": 2.0519, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.5460529140680597, |
|
"learning_rate": 2.016540530963188e-06, |
|
"loss": 2.0575, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.6055111821940942, |
|
"learning_rate": 1.9933243338738328e-06, |
|
"loss": 2.0646, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.5772124945152812, |
|
"learning_rate": 1.9702277513456493e-06, |
|
"loss": 2.0682, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.5525547871753611, |
|
"learning_rate": 1.9472511284266604e-06, |
|
"loss": 2.0844, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.5465140529346122, |
|
"learning_rate": 1.9243948083727626e-06, |
|
"loss": 2.0588, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.5849356114529488, |
|
"learning_rate": 1.9016591326426148e-06, |
|
"loss": 2.0747, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.5318951332961098, |
|
"learning_rate": 1.879044440892517e-06, |
|
"loss": 2.0622, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.5421157945030903, |
|
"learning_rate": 1.8565510709713574e-06, |
|
"loss": 2.0968, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.5920448668331559, |
|
"learning_rate": 1.8341793589155444e-06, |
|
"loss": 2.0209, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.5659639149165588, |
|
"learning_rate": 1.8119296389440067e-06, |
|
"loss": 2.0408, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.5586140420829416, |
|
"learning_rate": 1.789802243453178e-06, |
|
"loss": 2.081, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.5467186331610561, |
|
"learning_rate": 1.7677975030120554e-06, |
|
"loss": 2.0775, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.6054343067129582, |
|
"learning_rate": 1.7459157463572396e-06, |
|
"loss": 2.0867, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.5404374233870032, |
|
"learning_rate": 1.724157300388042e-06, |
|
"loss": 2.0621, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.5787065531860801, |
|
"learning_rate": 1.7025224901615811e-06, |
|
"loss": 2.0401, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.5458827554222345, |
|
"learning_rate": 1.681011638887946e-06, |
|
"loss": 2.0631, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.557594773469663, |
|
"learning_rate": 1.6596250679253568e-06, |
|
"loss": 2.0735, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.5786124262528648, |
|
"learning_rate": 1.6383630967753628e-06, |
|
"loss": 2.0479, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.5679215749233313, |
|
"learning_rate": 1.6172260430780772e-06, |
|
"loss": 2.0608, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.6174462138499704, |
|
"learning_rate": 1.596214222607424e-06, |
|
"loss": 2.0694, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.5546472699096427, |
|
"learning_rate": 1.5753279492664264e-06, |
|
"loss": 2.024, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5633392281015893, |
|
"learning_rate": 1.5545675350825097e-06, |
|
"loss": 2.0787, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5668297305400977, |
|
"learning_rate": 1.5339332902028537e-06, |
|
"loss": 2.0699, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5276050490941967, |
|
"learning_rate": 1.5134255228897376e-06, |
|
"loss": 2.0569, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5803720324508781, |
|
"learning_rate": 1.493044539515961e-06, |
|
"loss": 2.0773, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.5487310188704412, |
|
"learning_rate": 1.4727906445602425e-06, |
|
"loss": 2.0679, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.5687253193676447, |
|
"learning_rate": 1.4526641406026898e-06, |
|
"loss": 2.0723, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.5600288221120294, |
|
"learning_rate": 1.432665328320263e-06, |
|
"loss": 2.0639, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.5502091991762269, |
|
"learning_rate": 1.4127945064823023e-06, |
|
"loss": 2.0448, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.5733961910045919, |
|
"learning_rate": 1.3930519719460411e-06, |
|
"loss": 2.0323, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.5578609623095209, |
|
"learning_rate": 1.3734380196521923e-06, |
|
"loss": 2.0585, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.5808549222065049, |
|
"learning_rate": 1.35395294262053e-06, |
|
"loss": 2.0516, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.5460038342098935, |
|
"learning_rate": 1.334597031945517e-06, |
|
"loss": 2.0665, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.538478341190344, |
|
"learning_rate": 1.3153705767919478e-06, |
|
"loss": 2.0362, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5739461798200457, |
|
"learning_rate": 1.296273864390646e-06, |
|
"loss": 2.0795, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5428267489387266, |
|
"learning_rate": 1.2773071800341497e-06, |
|
"loss": 2.0473, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5688645407372156, |
|
"learning_rate": 1.2584708070724738e-06, |
|
"loss": 2.0546, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5531424930513219, |
|
"learning_rate": 1.2397650269088557e-06, |
|
"loss": 2.0563, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5640526713665361, |
|
"learning_rate": 1.2211901189955689e-06, |
|
"loss": 2.0411, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5475288826783192, |
|
"learning_rate": 1.2027463608297308e-06, |
|
"loss": 2.0502, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5675024103250322, |
|
"learning_rate": 1.1844340279491772e-06, |
|
"loss": 2.0581, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5650379481310613, |
|
"learning_rate": 1.166253393928325e-06, |
|
"loss": 2.0519, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.550221388225615, |
|
"learning_rate": 1.1482047303740996e-06, |
|
"loss": 2.0903, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5751814833435122, |
|
"learning_rate": 1.1302883069218773e-06, |
|
"loss": 2.0425, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.5354968233749302, |
|
"learning_rate": 1.1125043912314438e-06, |
|
"loss": 2.0409, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.5423531875054028, |
|
"learning_rate": 1.0948532489830121e-06, |
|
"loss": 2.0624, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.5492917305314641, |
|
"learning_rate": 1.0773351438732392e-06, |
|
"loss": 2.0546, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.5649496617816726, |
|
"learning_rate": 1.0599503376113017e-06, |
|
"loss": 2.0596, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.5638945024313866, |
|
"learning_rate": 1.0426990899149658e-06, |
|
"loss": 2.0897, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.5422873063911972, |
|
"learning_rate": 1.0255816585067302e-06, |
|
"loss": 2.0668, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.590370361549812, |
|
"learning_rate": 1.0085982991099585e-06, |
|
"loss": 2.0303, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.5479138336575772, |
|
"learning_rate": 9.9174926544507e-07, |
|
"loss": 2.089, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.5820601876183154, |
|
"learning_rate": 9.750348092257368e-07, |
|
"loss": 2.0671, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.5578865447639919, |
|
"learning_rate": 9.58455180155139e-07, |
|
"loss": 2.0749, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.5539677477037274, |
|
"learning_rate": 9.420106259222184e-07, |
|
"loss": 2.0558, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.5491594627916455, |
|
"learning_rate": 9.25701392197994e-07, |
|
"loss": 2.0685, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.5400411167735509, |
|
"learning_rate": 9.095277226318766e-07, |
|
"loss": 2.0738, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.5491328342697185, |
|
"learning_rate": 8.934898588480434e-07, |
|
"loss": 2.0723, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.5391611273790669, |
|
"learning_rate": 8.775880404418113e-07, |
|
"loss": 2.0709, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.5633567583678802, |
|
"learning_rate": 8.618225049760787e-07, |
|
"loss": 2.0652, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.5406960546050117, |
|
"learning_rate": 8.461934879777545e-07, |
|
"loss": 2.078, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.5443208828013373, |
|
"learning_rate": 8.307012229342581e-07, |
|
"loss": 2.0661, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.5395340166973847, |
|
"learning_rate": 8.153459412900156e-07, |
|
"loss": 2.0737, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.5655876237562419, |
|
"learning_rate": 8.001278724430173e-07, |
|
"loss": 2.0518, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.556993099999802, |
|
"learning_rate": 7.850472437413748e-07, |
|
"loss": 2.0522, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.5482487155202556, |
|
"learning_rate": 7.701042804799419e-07, |
|
"loss": 2.0639, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.5612668831011128, |
|
"learning_rate": 7.552992058969299e-07, |
|
"loss": 2.0563, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.5373258036943281, |
|
"learning_rate": 7.406322411705891e-07, |
|
"loss": 2.0618, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.5646394333182677, |
|
"learning_rate": 7.261036054158965e-07, |
|
"loss": 2.0513, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.5526362277439785, |
|
"learning_rate": 7.117135156812849e-07, |
|
"loss": 2.0686, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.5507311491511547, |
|
"learning_rate": 6.974621869453924e-07, |
|
"loss": 2.0762, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.5855709483535855, |
|
"learning_rate": 6.833498321138665e-07, |
|
"loss": 2.0427, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.5842514363104292, |
|
"learning_rate": 6.693766620161691e-07, |
|
"loss": 2.0312, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.5578119511698102, |
|
"learning_rate": 6.555428854024304e-07, |
|
"loss": 2.0788, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.5459901910588468, |
|
"learning_rate": 6.418487089403392e-07, |
|
"loss": 2.0448, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5492384738167058, |
|
"learning_rate": 6.282943372120399e-07, |
|
"loss": 2.1001, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5657546545315134, |
|
"learning_rate": 6.148799727110911e-07, |
|
"loss": 2.0533, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5946901729626537, |
|
"learning_rate": 6.016058158394278e-07, |
|
"loss": 2.0704, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.563539319197655, |
|
"learning_rate": 5.884720649043807e-07, |
|
"loss": 2.0587, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5758781450778095, |
|
"learning_rate": 5.754789161157004e-07, |
|
"loss": 2.064, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.5596215250460662, |
|
"learning_rate": 5.626265635826367e-07, |
|
"loss": 2.0342, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.5338664266068491, |
|
"learning_rate": 5.499151993110286e-07, |
|
"loss": 2.0553, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.5661258373015575, |
|
"learning_rate": 5.373450132004499e-07, |
|
"loss": 2.0559, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.5331205843490355, |
|
"learning_rate": 5.249161930413549e-07, |
|
"loss": 2.0715, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.5458160800504736, |
|
"learning_rate": 5.126289245122906e-07, |
|
"loss": 2.0443, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.5758379630852071, |
|
"learning_rate": 5.004833911771045e-07, |
|
"loss": 2.0513, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.5509984382957713, |
|
"learning_rate": 4.884797744822212e-07, |
|
"loss": 2.0728, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.5790337271309526, |
|
"learning_rate": 4.7661825375391767e-07, |
|
"loss": 2.0372, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.5415868549099945, |
|
"learning_rate": 4.648990061956493e-07, |
|
"loss": 2.0318, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.5472932561767699, |
|
"learning_rate": 4.5332220688540263e-07, |
|
"loss": 2.0793, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.5517133417713413, |
|
"learning_rate": 4.418880287730798e-07, |
|
"loss": 2.0668, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.551493081487752, |
|
"learning_rate": 4.305966426779118e-07, |
|
"loss": 2.0594, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.5480924947566852, |
|
"learning_rate": 4.194482172859127e-07, |
|
"loss": 2.0899, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.5465321947806794, |
|
"learning_rate": 4.08442919147356e-07, |
|
"loss": 2.0697, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.5671185964851608, |
|
"learning_rate": 3.9758091267428245e-07, |
|
"loss": 2.0705, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.5537391848914435, |
|
"learning_rate": 3.8686236013805387e-07, |
|
"loss": 2.0462, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.5412635038024728, |
|
"learning_rate": 3.762874216669166e-07, |
|
"loss": 2.076, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.5617295286944065, |
|
"learning_rate": 3.658562552436207e-07, |
|
"loss": 2.0551, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.5543296109053409, |
|
"learning_rate": 3.555690167030512e-07, |
|
"loss": 2.0588, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.570734365615978, |
|
"learning_rate": 3.454258597299065e-07, |
|
"loss": 2.0462, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.5444769814916149, |
|
"learning_rate": 3.354269358563966e-07, |
|
"loss": 2.0335, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.5568631229208969, |
|
"learning_rate": 3.2557239445998534e-07, |
|
"loss": 2.037, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.5666986604150396, |
|
"learning_rate": 3.158623827611529e-07, |
|
"loss": 2.0572, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.5473217851705103, |
|
"learning_rate": 3.062970458212e-07, |
|
"loss": 2.0783, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.561186898693919, |
|
"learning_rate": 2.968765265400808e-07, |
|
"loss": 2.0699, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.5634998042629299, |
|
"learning_rate": 2.876009656542655e-07, |
|
"loss": 2.0645, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.5594140324992009, |
|
"learning_rate": 2.784705017346423e-07, |
|
"loss": 2.0628, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5532992640525732, |
|
"learning_rate": 2.6948527118444313e-07, |
|
"loss": 2.0814, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5538450367775031, |
|
"learning_rate": 2.606454082372045e-07, |
|
"loss": 2.0862, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5609198884417353, |
|
"learning_rate": 2.519510449547691e-07, |
|
"loss": 2.0347, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5721217264915227, |
|
"learning_rate": 2.4340231122530477e-07, |
|
"loss": 2.0742, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.5420905032636947, |
|
"learning_rate": 2.3499933476137215e-07, |
|
"loss": 2.0517, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.5730584800738652, |
|
"learning_rate": 2.2674224109800913e-07, |
|
"loss": 2.1013, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.571138087850591, |
|
"learning_rate": 2.186311535908603e-07, |
|
"loss": 2.066, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.5690707945520317, |
|
"learning_rate": 2.106661934143317e-07, |
|
"loss": 2.0192, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.5685222643277975, |
|
"learning_rate": 2.0284747955978346e-07, |
|
"loss": 2.049, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.5448239388538445, |
|
"learning_rate": 1.9517512883374667e-07, |
|
"loss": 2.0342, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.5475361023003688, |
|
"learning_rate": 1.87649255856186e-07, |
|
"loss": 2.0629, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.5474633727513906, |
|
"learning_rate": 1.802699730587798e-07, |
|
"loss": 2.056, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.5537494585606899, |
|
"learning_rate": 1.73037390683245e-07, |
|
"loss": 2.0271, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.5298180410852568, |
|
"learning_rate": 1.659516167796904e-07, |
|
"loss": 2.0514, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.5481552679951542, |
|
"learning_rate": 1.5901275720499821e-07, |
|
"loss": 2.059, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.5525970793231114, |
|
"learning_rate": 1.522209156212484e-07, |
|
"loss": 2.0601, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.5449820333871138, |
|
"learning_rate": 1.4557619349416574e-07, |
|
"loss": 2.0586, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.5530952097088393, |
|
"learning_rate": 1.3907869009160525e-07, |
|
"loss": 2.0567, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.5319055165598181, |
|
"learning_rate": 1.3272850248206905e-07, |
|
"loss": 2.0387, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.5709194678106907, |
|
"learning_rate": 1.265257255332586e-07, |
|
"loss": 2.0462, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.538720885627274, |
|
"learning_rate": 1.2047045191065144e-07, |
|
"loss": 2.0708, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.5782187587932515, |
|
"learning_rate": 1.1456277207612554e-07, |
|
"loss": 2.0601, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.5903912474115653, |
|
"learning_rate": 1.0880277428659935e-07, |
|
"loss": 2.0915, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.5442220297516078, |
|
"learning_rate": 1.0319054459271837e-07, |
|
"loss": 2.0548, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.5451744551127824, |
|
"learning_rate": 9.77261668375673e-08, |
|
"loss": 2.0743, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.5620230161100563, |
|
"learning_rate": 9.240972265541992e-08, |
|
"loss": 2.0584, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.568779993004579, |
|
"learning_rate": 8.724129147051786e-08, |
|
"loss": 2.044, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.5722921538404754, |
|
"learning_rate": 8.222095049588264e-08, |
|
"loss": 2.0624, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.5604540898963536, |
|
"learning_rate": 7.734877473216329e-08, |
|
"loss": 2.0723, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.5639040563435611, |
|
"learning_rate": 7.262483696652167e-08, |
|
"loss": 2.0696, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.5444502986069792, |
|
"learning_rate": 6.804920777153112e-08, |
|
"loss": 2.0384, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.5511132839769315, |
|
"learning_rate": 6.362195550413953e-08, |
|
"loss": 2.0604, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.5313461092371143, |
|
"learning_rate": 5.934314630463234e-08, |
|
"loss": 2.063, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.5460063823773216, |
|
"learning_rate": 5.521284409565675e-08, |
|
"loss": 2.0535, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.5528907367904549, |
|
"learning_rate": 5.123111058125574e-08, |
|
"loss": 2.0844, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.5539186535554169, |
|
"learning_rate": 4.739800524595884e-08, |
|
"loss": 2.0612, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.5779122207064056, |
|
"learning_rate": 4.371358535388059e-08, |
|
"loss": 2.0533, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.5636179253461239, |
|
"learning_rate": 4.017790594787574e-08, |
|
"loss": 2.0659, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.578098911440071, |
|
"learning_rate": 3.679101984870759e-08, |
|
"loss": 2.0521, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.555120944929623, |
|
"learning_rate": 3.355297765426868e-08, |
|
"loss": 2.0689, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.5894533238870514, |
|
"learning_rate": 3.046382773881584e-08, |
|
"loss": 2.0598, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.564289460074182, |
|
"learning_rate": 2.7523616252252972e-08, |
|
"loss": 2.033, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.5321566368429799, |
|
"learning_rate": 2.4732387119440483e-08, |
|
"loss": 2.0494, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.5260589282752344, |
|
"learning_rate": 2.2090182039538055e-08, |
|
"loss": 2.0399, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.589081690743795, |
|
"learning_rate": 1.9597040485380692e-08, |
|
"loss": 2.058, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.5415180849098572, |
|
"learning_rate": 1.7252999702894736e-08, |
|
"loss": 2.0501, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.5580416141751612, |
|
"learning_rate": 1.5058094710533877e-08, |
|
"loss": 2.0765, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.5501754417944723, |
|
"learning_rate": 1.3012358298760686e-08, |
|
"loss": 2.05, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.5259651171197626, |
|
"learning_rate": 1.1115821029555884e-08, |
|
"loss": 2.0725, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5475473045792645, |
|
"learning_rate": 9.368511235958722e-09, |
|
"loss": 2.0702, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5276264105357096, |
|
"learning_rate": 7.770455021651746e-09, |
|
"loss": 2.0532, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5702447196540836, |
|
"learning_rate": 6.3216762605589064e-09, |
|
"loss": 2.0521, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5474870087552693, |
|
"learning_rate": 5.022196596501383e-09, |
|
"loss": 2.0554, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5403069523221226, |
|
"learning_rate": 3.87203544286563e-09, |
|
"loss": 2.0864, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.558174766559108, |
|
"learning_rate": 2.8712099823147156e-09, |
|
"loss": 2.0462, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.5753711088485489, |
|
"learning_rate": 2.019735166534087e-09, |
|
"loss": 2.0521, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.5398095963232096, |
|
"learning_rate": 1.3176237160095195e-09, |
|
"loss": 2.0768, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.5449384582755612, |
|
"learning_rate": 7.648861198306101e-10, |
|
"loss": 2.0765, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.526913643719635, |
|
"learning_rate": 3.6153063554089653e-10, |
|
"loss": 2.046, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.5639829795453959, |
|
"learning_rate": 1.0756328901018188e-10, |
|
"loss": 2.0689, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.5551736446591217, |
|
"learning_rate": 2.987874346827013e-12, |
|
"loss": 2.0646, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 2.0746941566467285, |
|
"eval_runtime": 26.3825, |
|
"eval_samples_per_second": 1212.13, |
|
"eval_steps_per_second": 37.904, |
|
"step": 4516 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 4516, |
|
"total_flos": 13637863342080.0, |
|
"train_loss": 2.136634065948618, |
|
"train_runtime": 1187.976, |
|
"train_samples_per_second": 486.421, |
|
"train_steps_per_second": 3.801 |
|
} |
|
  ],
  "logging_steps": 5,
  "max_steps": 4516,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "total_flos": 13637863342080.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}