|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 200,
  "global_step": 3742,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0002672367717797969, "grad_norm": 8.823081418058603, "learning_rate": 5.3333333333333334e-08, "loss": 1.906, "step": 1},
    {"epoch": 0.0013361838588989846, "grad_norm": 9.375402635266639, "learning_rate": 2.666666666666667e-07, "loss": 1.8044, "step": 5},
    {"epoch": 0.002672367717797969, "grad_norm": 8.65210459980887, "learning_rate": 5.333333333333335e-07, "loss": 1.8182, "step": 10},
    {"epoch": 0.004008551576696953, "grad_norm": 6.51103320753397, "learning_rate": 8.000000000000001e-07, "loss": 1.7509, "step": 15},
    {"epoch": 0.005344735435595938, "grad_norm": 4.268692783385918, "learning_rate": 1.066666666666667e-06, "loss": 1.767, "step": 20},
    {"epoch": 0.006680919294494923, "grad_norm": 4.3863193537387355, "learning_rate": 1.3333333333333334e-06, "loss": 1.7519, "step": 25},
    {"epoch": 0.008017103153393906, "grad_norm": 3.465625691597637, "learning_rate": 1.6000000000000001e-06, "loss": 1.7079, "step": 30},
    {"epoch": 0.00935328701229289, "grad_norm": 2.6102827946343203, "learning_rate": 1.8666666666666669e-06, "loss": 1.6753, "step": 35},
    {"epoch": 0.010689470871191877, "grad_norm": 2.3789490657646937, "learning_rate": 2.133333333333334e-06, "loss": 1.6337, "step": 40},
    {"epoch": 0.012025654730090861, "grad_norm": 2.0835126121909235, "learning_rate": 2.4000000000000003e-06, "loss": 1.6641, "step": 45},
    {"epoch": 0.013361838588989846, "grad_norm": 1.8559179146822158, "learning_rate": 2.666666666666667e-06, "loss": 1.6296, "step": 50},
    {"epoch": 0.01469802244788883, "grad_norm": 1.8106018636994041, "learning_rate": 2.9333333333333338e-06, "loss": 1.5509, "step": 55},
    {"epoch": 0.016034206306787813, "grad_norm": 1.8755467245728914, "learning_rate": 3.2000000000000003e-06, "loss": 1.5601, "step": 60},
    {"epoch": 0.017370390165686797, "grad_norm": 1.7318124276404359, "learning_rate": 3.4666666666666672e-06, "loss": 1.5308, "step": 65},
    {"epoch": 0.01870657402458578, "grad_norm": 1.6976770309274594, "learning_rate": 3.7333333333333337e-06, "loss": 1.5609, "step": 70},
    {"epoch": 0.020042757883484766, "grad_norm": 1.7565922656582902, "learning_rate": 4.000000000000001e-06, "loss": 1.5082, "step": 75},
    {"epoch": 0.021378941742383754, "grad_norm": 1.7393962369808933, "learning_rate": 4.266666666666668e-06, "loss": 1.5306, "step": 80},
    {"epoch": 0.022715125601282738, "grad_norm": 1.720884117447266, "learning_rate": 4.533333333333334e-06, "loss": 1.5378, "step": 85},
    {"epoch": 0.024051309460181722, "grad_norm": 1.7101397134036984, "learning_rate": 4.800000000000001e-06, "loss": 1.4813, "step": 90},
    {"epoch": 0.025387493319080707, "grad_norm": 1.615617842911665, "learning_rate": 5.0666666666666676e-06, "loss": 1.4817, "step": 95},
    {"epoch": 0.02672367717797969, "grad_norm": 2.691934178931898, "learning_rate": 5.333333333333334e-06, "loss": 1.4931, "step": 100},
    {"epoch": 0.028059861036878676, "grad_norm": 1.5821332066035256, "learning_rate": 5.600000000000001e-06, "loss": 1.4285, "step": 105},
    {"epoch": 0.02939604489577766, "grad_norm": 1.7190020107955757, "learning_rate": 5.8666666666666675e-06, "loss": 1.4481, "step": 110},
    {"epoch": 0.030732228754676644, "grad_norm": 1.6645689315016792, "learning_rate": 6.133333333333334e-06, "loss": 1.465, "step": 115},
    {"epoch": 0.032068412613575625, "grad_norm": 1.6462290185572064, "learning_rate": 6.4000000000000006e-06, "loss": 1.4665, "step": 120},
    {"epoch": 0.03340459647247461, "grad_norm": 1.649228040380627, "learning_rate": 6.666666666666667e-06, "loss": 1.4209, "step": 125},
    {"epoch": 0.034740780331373594, "grad_norm": 1.6335763283148064, "learning_rate": 6.9333333333333344e-06, "loss": 1.4151, "step": 130},
    {"epoch": 0.03607696419027258, "grad_norm": 1.6194942414847415, "learning_rate": 7.2000000000000005e-06, "loss": 1.4377, "step": 135},
    {"epoch": 0.03741314804917156, "grad_norm": 1.7105888808318117, "learning_rate": 7.4666666666666675e-06, "loss": 1.3934, "step": 140},
    {"epoch": 0.03874933190807055, "grad_norm": 1.4814484480334174, "learning_rate": 7.733333333333334e-06, "loss": 1.4015, "step": 145},
    {"epoch": 0.04008551576696953, "grad_norm": 1.5777144338014444, "learning_rate": 8.000000000000001e-06, "loss": 1.4021, "step": 150},
    {"epoch": 0.041421699625868516, "grad_norm": 1.5930794844441725, "learning_rate": 8.266666666666667e-06, "loss": 1.434, "step": 155},
    {"epoch": 0.04275788348476751, "grad_norm": 1.6091950467097091, "learning_rate": 8.533333333333335e-06, "loss": 1.3956, "step": 160},
    {"epoch": 0.04409406734366649, "grad_norm": 1.6975335096598412, "learning_rate": 8.8e-06, "loss": 1.3996, "step": 165},
    {"epoch": 0.045430251202565476, "grad_norm": 1.5343847156625996, "learning_rate": 9.066666666666667e-06, "loss": 1.3799, "step": 170},
    {"epoch": 0.04676643506146446, "grad_norm": 1.770466644954361, "learning_rate": 9.333333333333334e-06, "loss": 1.3545, "step": 175},
    {"epoch": 0.048102618920363445, "grad_norm": 1.5660422280689605, "learning_rate": 9.600000000000001e-06, "loss": 1.4292, "step": 180},
    {"epoch": 0.04943880277926243, "grad_norm": 1.536676360877846, "learning_rate": 9.866666666666668e-06, "loss": 1.4028, "step": 185},
    {"epoch": 0.050774986638161414, "grad_norm": 1.4804803919916396, "learning_rate": 1.0133333333333335e-05, "loss": 1.3899, "step": 190},
    {"epoch": 0.0521111704970604, "grad_norm": 1.6476917950123358, "learning_rate": 1.04e-05, "loss": 1.4214, "step": 195},
    {"epoch": 0.05344735435595938, "grad_norm": 1.5933336407770893, "learning_rate": 1.0666666666666667e-05, "loss": 1.3663, "step": 200},
    {"epoch": 0.05344735435595938, "eval_loss": 1.3954896926879883, "eval_runtime": 525.9836, "eval_samples_per_second": 25.183, "eval_steps_per_second": 3.148, "step": 200},
    {"epoch": 0.05478353821485837, "grad_norm": 1.55465727793257, "learning_rate": 1.0933333333333334e-05, "loss": 1.3566, "step": 205},
    {"epoch": 0.05611972207375735, "grad_norm": 1.6465837885812638, "learning_rate": 1.1200000000000001e-05, "loss": 1.3572, "step": 210},
    {"epoch": 0.057455905932656336, "grad_norm": 1.6314586937231732, "learning_rate": 1.1466666666666668e-05, "loss": 1.4066, "step": 215},
    {"epoch": 0.05879208979155532, "grad_norm": 1.5940186416338837, "learning_rate": 1.1733333333333335e-05, "loss": 1.3727, "step": 220},
    {"epoch": 0.060128273650454304, "grad_norm": 1.587832961506543, "learning_rate": 1.2e-05, "loss": 1.3755, "step": 225},
    {"epoch": 0.06146445750935329, "grad_norm": 1.5731554504380694, "learning_rate": 1.2266666666666667e-05, "loss": 1.4007, "step": 230},
    {"epoch": 0.06280064136825227, "grad_norm": 1.5722181818634398, "learning_rate": 1.2533333333333336e-05, "loss": 1.3783, "step": 235},
    {"epoch": 0.06413682522715125, "grad_norm": 1.467532784965517, "learning_rate": 1.2800000000000001e-05, "loss": 1.4386, "step": 240},
    {"epoch": 0.06547300908605024, "grad_norm": 1.5125828644122057, "learning_rate": 1.3066666666666668e-05, "loss": 1.3186, "step": 245},
    {"epoch": 0.06680919294494922, "grad_norm": 1.5183168300505157, "learning_rate": 1.3333333333333333e-05, "loss": 1.3551, "step": 250},
    {"epoch": 0.0681453768038482, "grad_norm": 1.4941218649910812, "learning_rate": 1.3600000000000002e-05, "loss": 1.3664, "step": 255},
    {"epoch": 0.06948156066274719, "grad_norm": 1.4869296436066686, "learning_rate": 1.3866666666666669e-05, "loss": 1.3576, "step": 260},
    {"epoch": 0.07081774452164617, "grad_norm": 1.4293831791321558, "learning_rate": 1.4133333333333334e-05, "loss": 1.3556, "step": 265},
    {"epoch": 0.07215392838054516, "grad_norm": 1.5191533813334088, "learning_rate": 1.4400000000000001e-05, "loss": 1.3366, "step": 270},
    {"epoch": 0.07349011223944414, "grad_norm": 1.4810082339187982, "learning_rate": 1.4666666666666666e-05, "loss": 1.3573, "step": 275},
    {"epoch": 0.07482629609834313, "grad_norm": 1.5063612979974181, "learning_rate": 1.4933333333333335e-05, "loss": 1.3427, "step": 280},
    {"epoch": 0.07616247995724211, "grad_norm": 1.618950454636469, "learning_rate": 1.5200000000000002e-05, "loss": 1.3554, "step": 285},
    {"epoch": 0.0774986638161411, "grad_norm": 1.5626468747954396, "learning_rate": 1.546666666666667e-05, "loss": 1.3771, "step": 290},
    {"epoch": 0.07883484767504008, "grad_norm": 1.5292548773347097, "learning_rate": 1.5733333333333334e-05, "loss": 1.3918, "step": 295},
    {"epoch": 0.08017103153393906, "grad_norm": 1.5358359721754233, "learning_rate": 1.6000000000000003e-05, "loss": 1.3815, "step": 300},
    {"epoch": 0.08150721539283805, "grad_norm": 1.3953477584618263, "learning_rate": 1.6266666666666668e-05, "loss": 1.3854, "step": 305},
    {"epoch": 0.08284339925173703, "grad_norm": 1.4468372312833726, "learning_rate": 1.6533333333333333e-05, "loss": 1.3727, "step": 310},
    {"epoch": 0.08417958311063603, "grad_norm": 1.488344751961309, "learning_rate": 1.6800000000000002e-05, "loss": 1.3706, "step": 315},
    {"epoch": 0.08551576696953501, "grad_norm": 1.4093480949221249, "learning_rate": 1.706666666666667e-05, "loss": 1.3491, "step": 320},
    {"epoch": 0.086851950828434, "grad_norm": 1.5432094268322356, "learning_rate": 1.7333333333333336e-05, "loss": 1.3586, "step": 325},
    {"epoch": 0.08818813468733298, "grad_norm": 1.4550377653252384, "learning_rate": 1.76e-05, "loss": 1.39, "step": 330},
    {"epoch": 0.08952431854623197, "grad_norm": 1.4281640050412778, "learning_rate": 1.7866666666666666e-05, "loss": 1.3669, "step": 335},
    {"epoch": 0.09086050240513095, "grad_norm": 1.4426556416465324, "learning_rate": 1.8133333333333335e-05, "loss": 1.3876, "step": 340},
    {"epoch": 0.09219668626402994, "grad_norm": 1.406168800248235, "learning_rate": 1.8400000000000003e-05, "loss": 1.3686, "step": 345},
    {"epoch": 0.09353287012292892, "grad_norm": 1.4301962338673828, "learning_rate": 1.866666666666667e-05, "loss": 1.3689, "step": 350},
    {"epoch": 0.0948690539818279, "grad_norm": 1.6044731560961005, "learning_rate": 1.8933333333333334e-05, "loss": 1.4118, "step": 355},
    {"epoch": 0.09620523784072689, "grad_norm": 1.4879888050516272, "learning_rate": 1.9200000000000003e-05, "loss": 1.3791, "step": 360},
    {"epoch": 0.09754142169962587, "grad_norm": 1.4698368446133456, "learning_rate": 1.9466666666666668e-05, "loss": 1.348, "step": 365},
    {"epoch": 0.09887760555852486, "grad_norm": 1.4186637789128058, "learning_rate": 1.9733333333333336e-05, "loss": 1.3325, "step": 370},
    {"epoch": 0.10021378941742384, "grad_norm": 1.5476694754206417, "learning_rate": 2e-05, "loss": 1.3642, "step": 375},
    {"epoch": 0.10154997327632283, "grad_norm": 1.4733865828906345, "learning_rate": 1.9999891176487904e-05, "loss": 1.3807, "step": 380},
    {"epoch": 0.10288615713522181, "grad_norm": 1.4551876993221122, "learning_rate": 1.9999564708320124e-05, "loss": 1.3679, "step": 385},
    {"epoch": 0.1042223409941208, "grad_norm": 1.5257129447187303, "learning_rate": 1.999902060260214e-05, "loss": 1.4111, "step": 390},
    {"epoch": 0.10555852485301978, "grad_norm": 1.3923714980063469, "learning_rate": 1.9998258871176252e-05, "loss": 1.3729, "step": 395},
    {"epoch": 0.10689470871191876, "grad_norm": 1.4311098965377238, "learning_rate": 1.999727953062132e-05, "loss": 1.3413, "step": 400},
    {"epoch": 0.10689470871191876, "eval_loss": 1.3721706867218018, "eval_runtime": 526.6891, "eval_samples_per_second": 25.15, "eval_steps_per_second": 3.144, "step": 400},
    {"epoch": 0.10823089257081775, "grad_norm": 1.3829932271044623, "learning_rate": 1.99960826022524e-05, "loss": 1.3466, "step": 405},
    {"epoch": 0.10956707642971673, "grad_norm": 1.3521931502381987, "learning_rate": 1.9994668112120283e-05, "loss": 1.3713, "step": 410},
    {"epoch": 0.11090326028861572, "grad_norm": 1.4976307839881127, "learning_rate": 1.9993036091010924e-05, "loss": 1.3564, "step": 415},
    {"epoch": 0.1122394441475147, "grad_norm": 1.3701705887791993, "learning_rate": 1.999118657444477e-05, "loss": 1.3666, "step": 420},
    {"epoch": 0.11357562800641369, "grad_norm": 1.4279953676541446, "learning_rate": 1.9989119602676007e-05, "loss": 1.3498, "step": 425},
    {"epoch": 0.11491181186531267, "grad_norm": 1.4077392862076545, "learning_rate": 1.9986835220691662e-05, "loss": 1.3655, "step": 430},
    {"epoch": 0.11624799572421166, "grad_norm": 1.5303947503986481, "learning_rate": 1.9984333478210622e-05, "loss": 1.3758, "step": 435},
    {"epoch": 0.11758417958311064, "grad_norm": 1.4002227761633332, "learning_rate": 1.9981614429682576e-05, "loss": 1.338, "step": 440},
    {"epoch": 0.11892036344200962, "grad_norm": 1.3305325078036485, "learning_rate": 1.9978678134286796e-05, "loss": 1.3502, "step": 445},
    {"epoch": 0.12025654730090861, "grad_norm": 1.4218348465021207, "learning_rate": 1.9975524655930884e-05, "loss": 1.3526, "step": 450},
    {"epoch": 0.1215927311598076, "grad_norm": 1.3929817535643747, "learning_rate": 1.997215406324936e-05, "loss": 1.3575, "step": 455},
    {"epoch": 0.12292891501870658, "grad_norm": 1.4164344515855345, "learning_rate": 1.9968566429602166e-05, "loss": 1.3548, "step": 460},
    {"epoch": 0.12426509887760556, "grad_norm": 1.3940688971110364, "learning_rate": 1.996476183307308e-05, "loss": 1.3651, "step": 465},
    {"epoch": 0.12560128273650453, "grad_norm": 1.4429246742185509, "learning_rate": 1.996074035646802e-05, "loss": 1.3566, "step": 470},
    {"epoch": 0.12693746659540353, "grad_norm": 1.4139926227619217, "learning_rate": 1.9956502087313217e-05, "loss": 1.4029, "step": 475},
    {"epoch": 0.1282736504543025, "grad_norm": 1.36708702924336, "learning_rate": 1.9952047117853345e-05, "loss": 1.3558, "step": 480},
    {"epoch": 0.1296098343132015, "grad_norm": 1.4333918113560515, "learning_rate": 1.994737554504949e-05, "loss": 1.3735, "step": 485},
    {"epoch": 0.13094601817210047, "grad_norm": 1.3176020208151544, "learning_rate": 1.994248747057704e-05, "loss": 1.341, "step": 490},
    {"epoch": 0.13228220203099947, "grad_norm": 1.4308466916521525, "learning_rate": 1.9937383000823485e-05, "loss": 1.405, "step": 495},
    {"epoch": 0.13361838588989844, "grad_norm": 1.4229715319544036, "learning_rate": 1.9932062246886087e-05, "loss": 1.3655, "step": 500},
    {"epoch": 0.13495456974879744, "grad_norm": 1.3858117778473207, "learning_rate": 1.992652532456947e-05, "loss": 1.3533, "step": 505},
    {"epoch": 0.1362907536076964, "grad_norm": 1.3134241462309153, "learning_rate": 1.992077235438311e-05, "loss": 1.3152, "step": 510},
    {"epoch": 0.1376269374665954, "grad_norm": 1.365018139049316, "learning_rate": 1.991480346153868e-05, "loss": 1.3399, "step": 515},
    {"epoch": 0.13896312132549438, "grad_norm": 1.426928066531938, "learning_rate": 1.9908618775947364e-05, "loss": 1.3933, "step": 520},
    {"epoch": 0.14029930518439337, "grad_norm": 1.428258632489371, "learning_rate": 1.9902218432216996e-05, "loss": 1.4028, "step": 525},
    {"epoch": 0.14163548904329235, "grad_norm": 1.4241733392483251, "learning_rate": 1.989560256964916e-05, "loss": 1.4106, "step": 530},
    {"epoch": 0.14297167290219134, "grad_norm": 1.3754571749169886, "learning_rate": 1.9888771332236137e-05, "loss": 1.383, "step": 535},
    {"epoch": 0.14430785676109031, "grad_norm": 1.3253761034149971, "learning_rate": 1.9881724868657768e-05, "loss": 1.3846, "step": 540},
    {"epoch": 0.1456440406199893, "grad_norm": 1.3292314064664583, "learning_rate": 1.9874463332278245e-05, "loss": 1.3831, "step": 545},
    {"epoch": 0.14698022447888828, "grad_norm": 1.5134929576884686, "learning_rate": 1.9866986881142737e-05, "loss": 1.3535, "step": 550},
    {"epoch": 0.14831640833778728, "grad_norm": 1.4571661850393143, "learning_rate": 1.9859295677973988e-05, "loss": 1.3482, "step": 555},
    {"epoch": 0.14965259219668625, "grad_norm": 1.3720756399964669, "learning_rate": 1.9851389890168738e-05, "loss": 1.3677, "step": 560},
    {"epoch": 0.15098877605558525, "grad_norm": 1.3902503536690078, "learning_rate": 1.9843269689794114e-05, "loss": 1.3201, "step": 565},
    {"epoch": 0.15232495991448422, "grad_norm": 1.4845499289340192, "learning_rate": 1.983493525358385e-05, "loss": 1.4121, "step": 570},
    {"epoch": 0.15366114377338322, "grad_norm": 1.5074167691063687, "learning_rate": 1.982638676293448e-05, "loss": 1.3842, "step": 575},
    {"epoch": 0.1549973276322822, "grad_norm": 1.386731826246787, "learning_rate": 1.981762440390136e-05, "loss": 1.3248, "step": 580},
    {"epoch": 0.1563335114911812, "grad_norm": 1.398496405386941, "learning_rate": 1.9808648367194614e-05, "loss": 1.4116, "step": 585},
    {"epoch": 0.15766969535008016, "grad_norm": 1.3353006312197904, "learning_rate": 1.9799458848175023e-05, "loss": 1.3557, "step": 590},
    {"epoch": 0.15900587920897916, "grad_norm": 1.3229490481790953, "learning_rate": 1.9790056046849726e-05, "loss": 1.3425, "step": 595},
    {"epoch": 0.16034206306787813, "grad_norm": 1.3413421989191214, "learning_rate": 1.97804401678679e-05, "loss": 1.365, "step": 600},
    {"epoch": 0.16034206306787813, "eval_loss": 1.3631840944290161, "eval_runtime": 523.8522, "eval_samples_per_second": 25.286, "eval_steps_per_second": 3.161, "step": 600},
    {"epoch": 0.16167824692677712, "grad_norm": 1.3574397972892758, "learning_rate": 1.9770611420516286e-05, "loss": 1.3677, "step": 605},
    {"epoch": 0.1630144307856761, "grad_norm": 1.3451601369327404, "learning_rate": 1.9760570018714647e-05, "loss": 1.3509, "step": 610},
    {"epoch": 0.1643506146445751, "grad_norm": 1.384864628214446, "learning_rate": 1.975031618101111e-05, "loss": 1.3258, "step": 615},
    {"epoch": 0.16568679850347406, "grad_norm": 1.3682344543460991, "learning_rate": 1.9739850130577393e-05, "loss": 1.3552, "step": 620},
    {"epoch": 0.16702298236237306, "grad_norm": 1.4305243117138575, "learning_rate": 1.9729172095203977e-05, "loss": 1.3849, "step": 625},
    {"epoch": 0.16835916622127206, "grad_norm": 1.3723669913018262, "learning_rate": 1.9718282307295115e-05, "loss": 1.3659, "step": 630},
    {"epoch": 0.16969535008017103, "grad_norm": 1.3516044026149312, "learning_rate": 1.970718100386381e-05, "loss": 1.3736, "step": 635},
    {"epoch": 0.17103153393907003, "grad_norm": 1.436359994097034, "learning_rate": 1.969586842652662e-05, "loss": 1.3818, "step": 640},
    {"epoch": 0.172367717797969, "grad_norm": 1.4157888272139716, "learning_rate": 1.9684344821498432e-05, "loss": 1.3342, "step": 645},
    {"epoch": 0.173703901656868, "grad_norm": 1.3227193318099222, "learning_rate": 1.9672610439587073e-05, "loss": 1.3862, "step": 650},
    {"epoch": 0.17504008551576697, "grad_norm": 1.3534338266553816, "learning_rate": 1.9660665536187875e-05, "loss": 1.3617, "step": 655},
    {"epoch": 0.17637626937466597, "grad_norm": 1.3075745113464525, "learning_rate": 1.9648510371278106e-05, "loss": 1.351, "step": 660},
    {"epoch": 0.17771245323356494, "grad_norm": 1.3543581869904175, "learning_rate": 1.9636145209411318e-05, "loss": 1.3866, "step": 665},
    {"epoch": 0.17904863709246394, "grad_norm": 1.3462890650035784, "learning_rate": 1.9623570319711574e-05, "loss": 1.3754, "step": 670},
    {"epoch": 0.1803848209513629, "grad_norm": 1.2991571392936196, "learning_rate": 1.9610785975867608e-05, "loss": 1.323, "step": 675},
    {"epoch": 0.1817210048102619, "grad_norm": 1.434385743767408, "learning_rate": 1.9597792456126855e-05, "loss": 1.383, "step": 680},
    {"epoch": 0.18305718866916088, "grad_norm": 1.3595317685749773, "learning_rate": 1.9584590043289416e-05, "loss": 1.2787, "step": 685},
    {"epoch": 0.18439337252805987, "grad_norm": 1.6484220578907431, "learning_rate": 1.957117902470187e-05, "loss": 1.3747, "step": 690},
    {"epoch": 0.18572955638695884, "grad_norm": 1.2945139560173622, "learning_rate": 1.9557559692251047e-05, "loss": 1.3621, "step": 695},
    {"epoch": 0.18706574024585784, "grad_norm": 1.343012210724551, "learning_rate": 1.9543732342357664e-05, "loss": 1.3205, "step": 700},
    {"epoch": 0.1884019241047568, "grad_norm": 1.3223543378479201, "learning_rate": 1.9529697275969876e-05, "loss": 1.3101, "step": 705},
    {"epoch": 0.1897381079636558, "grad_norm": 1.3069901527523717, "learning_rate": 1.951545479855673e-05, "loss": 1.3752, "step": 710},
    {"epoch": 0.19107429182255478, "grad_norm": 1.3767101616598136, "learning_rate": 1.9501005220101507e-05, "loss": 1.3854, "step": 715},
    {"epoch": 0.19241047568145378, "grad_norm": 1.3595423389248131, "learning_rate": 1.948634885509498e-05, "loss": 1.3708, "step": 720},
    {"epoch": 0.19374665954035275, "grad_norm": 1.269545504198035, "learning_rate": 1.947148602252858e-05, "loss": 1.3443, "step": 725},
    {"epoch": 0.19508284339925175, "grad_norm": 1.2745742850192638, "learning_rate": 1.9456417045887423e-05, "loss": 1.3339, "step": 730},
    {"epoch": 0.19641902725815072, "grad_norm": 1.4182072631536495, "learning_rate": 1.944114225314331e-05, "loss": 1.3103, "step": 735},
    {"epoch": 0.19775521111704972, "grad_norm": 1.3233791325230955, "learning_rate": 1.9425661976747552e-05, "loss": 1.3625, "step": 740},
    {"epoch": 0.1990913949759487, "grad_norm": 1.4760222437081327, "learning_rate": 1.9409976553623767e-05, "loss": 1.3624, "step": 745},
    {"epoch": 0.20042757883484769, "grad_norm": 1.4210858490294298, "learning_rate": 1.9394086325160515e-05, "loss": 1.3599, "step": 750},
    {"epoch": 0.20176376269374666, "grad_norm": 1.3258331784580841, "learning_rate": 1.9377991637203894e-05, "loss": 1.3411, "step": 755},
    {"epoch": 0.20309994655264565, "grad_norm": 1.5486410395081083, "learning_rate": 1.9361692840049997e-05, "loss": 1.4124, "step": 760},
    {"epoch": 0.20443613041154463, "grad_norm": 1.3108858505753092, "learning_rate": 1.9345190288437292e-05, "loss": 1.3323, "step": 765},
    {"epoch": 0.20577231427044362, "grad_norm": 1.3401186763256265, "learning_rate": 1.9328484341538903e-05, "loss": 1.3589, "step": 770},
    {"epoch": 0.2071084981293426, "grad_norm": 1.3258497911117144, "learning_rate": 1.93115753629548e-05, "loss": 1.3263, "step": 775},
    {"epoch": 0.2084446819882416, "grad_norm": 1.279705605242652, "learning_rate": 1.929446372070386e-05, "loss": 1.3577, "step": 780},
    {"epoch": 0.20978086584714056, "grad_norm": 1.3688317662377307, "learning_rate": 1.9277149787215893e-05, "loss": 1.3325, "step": 785},
    {"epoch": 0.21111704970603956, "grad_norm": 1.3308089715501914, "learning_rate": 1.9259633939323504e-05, "loss": 1.3594, "step": 790},
    {"epoch": 0.21245323356493853, "grad_norm": 1.3306187188699843, "learning_rate": 1.924191655825391e-05, "loss": 1.3706, "step": 795},
    {"epoch": 0.21378941742383753, "grad_norm": 1.3079863145055592, "learning_rate": 1.922399802962064e-05, "loss": 1.33, "step": 800},
    {"epoch": 0.21378941742383753, "eval_loss": 1.3532133102416992, "eval_runtime": 523.8533, "eval_samples_per_second": 25.286, "eval_steps_per_second": 3.161, "step": 800},
    {"epoch": 0.2151256012827365, "grad_norm": 1.2760851031416467, "learning_rate": 1.9205878743415137e-05, "loss": 1.321, "step": 805},
    {"epoch": 0.2164617851416355, "grad_norm": 1.278778697243769, "learning_rate": 1.9187559093998275e-05, "loss": 1.3632, "step": 810},
    {"epoch": 0.21779796900053447, "grad_norm": 1.3321470791378587, "learning_rate": 1.916903948009177e-05, "loss": 1.3079, "step": 815},
    {"epoch": 0.21913415285943347, "grad_norm": 1.3698830796691455, "learning_rate": 1.915032030476951e-05, "loss": 1.3835, "step": 820},
    {"epoch": 0.22047033671833244, "grad_norm": 1.314469593484434, "learning_rate": 1.913140197544877e-05, "loss": 1.3423, "step": 825},
    {"epoch": 0.22180652057723144, "grad_norm": 1.3585983787357763, "learning_rate": 1.911228490388136e-05, "loss": 1.3038, "step": 830},
    {"epoch": 0.2231427044361304, "grad_norm": 1.309351148797077, "learning_rate": 1.9092969506144653e-05, "loss": 1.3711, "step": 835},
    {"epoch": 0.2244788882950294, "grad_norm": 1.2522351293958003, "learning_rate": 1.907345620263254e-05, "loss": 1.3436, "step": 840},
    {"epoch": 0.22581507215392838, "grad_norm": 1.2176016475207827, "learning_rate": 1.9053745418046257e-05, "loss": 1.3544, "step": 845},
    {"epoch": 0.22715125601282737, "grad_norm": 1.3937619861187576, "learning_rate": 1.903383758138517e-05, "loss": 1.3563, "step": 850},
    {"epoch": 0.22848743987172634, "grad_norm": 1.3085996735753536, "learning_rate": 1.9013733125937412e-05, "loss": 1.3149, "step": 855},
    {"epoch": 0.22982362373062534, "grad_norm": 1.5210379330796175, "learning_rate": 1.8993432489270484e-05, "loss": 1.3202, "step": 860},
    {"epoch": 0.2311598075895243, "grad_norm": 1.3435267655155434, "learning_rate": 1.8972936113221696e-05, "loss": 1.357, "step": 865},
    {"epoch": 0.2324959914484233, "grad_norm": 1.32208190242327, "learning_rate": 1.8952244443888573e-05, "loss": 1.3838, "step": 870},
    {"epoch": 0.23383217530732228, "grad_norm": 1.3123583179163099, "learning_rate": 1.8931357931619143e-05, "loss": 1.3576, "step": 875},
    {"epoch": 0.23516835916622128, "grad_norm": 1.2321415331674137, "learning_rate": 1.8910277031002125e-05, "loss": 1.3413, "step": 880},
    {"epoch": 0.23650454302512025, "grad_norm": 1.2717606000257338, "learning_rate": 1.888900220085706e-05, "loss": 1.3412, "step": 885},
    {"epoch": 0.23784072688401925, "grad_norm": 1.3184808872543174, "learning_rate": 1.886753390422428e-05, "loss": 1.3616, "step": 890},
    {"epoch": 0.23917691074291822, "grad_norm": 1.2849347258583887, "learning_rate": 1.8845872608354877e-05, "loss": 1.3021, "step": 895},
    {"epoch": 0.24051309460181722, "grad_norm": 1.2973872521270147, "learning_rate": 1.882401878470052e-05, "loss": 1.3365, "step": 900},
    {"epoch": 0.2418492784607162, "grad_norm": 1.3250631489181373, "learning_rate": 1.8801972908903162e-05, "loss": 1.3433, "step": 905},
    {"epoch": 0.2431854623196152, "grad_norm": 1.263181234454277, "learning_rate": 1.877973546078474e-05, "loss": 1.3703, "step": 910},
    {"epoch": 0.24452164617851416, "grad_norm": 1.379416457226466, "learning_rate": 1.875730692433669e-05, "loss": 1.353, "step": 915},
    {"epoch": 0.24585783003741316, "grad_norm": 1.293087775842146, "learning_rate": 1.873468778770944e-05, "loss": 1.3255, "step": 920},
    {"epoch": 0.24719401389631213, "grad_norm": 1.3395508283607358, "learning_rate": 1.8711878543201757e-05, "loss": 1.2999, "step": 925},
    {"epoch": 0.24853019775521112, "grad_norm": 1.368607821812935, "learning_rate": 1.8688879687250067e-05, "loss": 1.3305, "step": 930},
    {"epoch": 0.2498663816141101, "grad_norm": 1.323555417532407, "learning_rate": 1.8665691720417624e-05, "loss": 1.3662, "step": 935},
    {"epoch": 0.25120256547300907, "grad_norm": 1.367652347855113, "learning_rate": 1.8642315147383628e-05, "loss": 1.3469, "step": 940},
    {"epoch": 0.25253874933190806, "grad_norm": 1.2966904825916514, "learning_rate": 1.8618750476932237e-05, "loss": 1.3071, "step": 945},
    {"epoch": 0.25387493319080706, "grad_norm": 1.4450003246232057, "learning_rate": 1.8594998221941482e-05, "loss": 1.3721, "step": 950},
    {"epoch": 0.25521111704970606, "grad_norm": 1.284360919841786, "learning_rate": 1.857105889937213e-05, "loss": 1.3638, "step": 955},
    {"epoch": 0.256547300908605, "grad_norm": 1.288974034075832, "learning_rate": 1.8546933030256417e-05, "loss": 1.3349, "step": 960},
    {"epoch": 0.257883484767504, "grad_norm": 1.3405220877515678, "learning_rate": 1.85226211396867e-05, "loss": 1.3052, "step": 965},
    {"epoch": 0.259219668626403, "grad_norm": 1.3868830711147844, "learning_rate": 1.8498123756804038e-05, "loss": 1.3609, "step": 970},
    {"epoch": 0.260555852485302, "grad_norm": 1.3022776914197354, "learning_rate": 1.8473441414786692e-05, "loss": 1.3385, "step": 975},
    {"epoch": 0.26189203634420094, "grad_norm": 1.4058613068210792, "learning_rate": 1.8448574650838477e-05, "loss": 1.3234, "step": 980},
    {"epoch": 0.26322822020309994, "grad_norm": 1.3008663043779487, "learning_rate": 1.842352400617712e-05, "loss": 1.3201, "step": 985},
    {"epoch": 0.26456440406199894, "grad_norm": 1.274233066139097, "learning_rate": 1.8398290026022444e-05, "loss": 1.3372, "step": 990},
    {"epoch": 0.26590058792089794, "grad_norm": 1.2895370237805952, "learning_rate": 1.8372873259584517e-05, "loss": 1.3082, "step": 995},
    {"epoch": 0.2672367717797969, "grad_norm": 1.2250435736281928, "learning_rate": 1.83472742600517e-05, "loss": 1.3219, "step": 1000},
    {"epoch": 0.2672367717797969, "eval_loss": 1.3463348150253296, "eval_runtime": 523.6844, "eval_samples_per_second": 25.294, "eval_steps_per_second": 3.162, "step": 1000},
    {"epoch": 0.2685729556386959, "grad_norm": 1.335279277755242, "learning_rate": 1.83214935845786e-05, "loss": 1.3195, "step": 1005},
    {"epoch": 0.2699091394975949, "grad_norm": 1.3383554327189036, "learning_rate": 1.8295531794273948e-05, "loss": 1.3471, "step": 1010},
    {"epoch": 0.2712453233564939, "grad_norm": 1.272300682321573, "learning_rate": 1.826938945418837e-05, "loss": 1.3156, "step": 1015},
    {"epoch": 0.2725815072153928, "grad_norm": 1.4984409833622696, "learning_rate": 1.8243067133302143e-05, "loss": 1.3528, "step": 1020},
    {"epoch": 0.2739176910742918, "grad_norm": 1.2475449536511878, "learning_rate": 1.8216565404512732e-05, "loss": 1.341, "step": 1025},
    {"epoch": 0.2752538749331908, "grad_norm": 1.251734878893183, "learning_rate": 1.818988484462238e-05, "loss": 1.3106, "step": 1030},
    {"epoch": 0.2765900587920898, "grad_norm": 1.322345839521324, "learning_rate": 1.8163026034325532e-05, "loss": 1.294, "step": 1035},
    {"epoch": 0.27792624265098875, "grad_norm": 1.2838160402093648, "learning_rate": 1.8135989558196207e-05, "loss": 1.3484, "step": 1040},
    {"epoch": 0.27926242650988775, "grad_norm": 1.2943916155535866, "learning_rate": 1.8108776004675255e-05, "loss": 1.3052, "step": 1045},
    {"epoch": 0.28059861036878675, "grad_norm": 1.2762154467420836, "learning_rate": 1.808138596605758e-05, "loss": 1.3284, "step": 1050},
    {"epoch": 0.28193479422768575, "grad_norm": 1.255384319187328, "learning_rate": 1.8053820038479214e-05, "loss": 1.3686, "step": 1055},
    {"epoch": 0.2832709780865847, "grad_norm": 1.2673273813526882, "learning_rate": 1.802607882190437e-05, "loss": 1.3382, "step": 1060},
    {"epoch": 0.2846071619454837, "grad_norm": 1.2940248666834164, "learning_rate": 1.799816292011237e-05, "loss": 1.3498, "step": 1065},
    {"epoch": 0.2859433458043827, "grad_norm": 1.2046683889500032, "learning_rate": 1.7970072940684514e-05, "loss": 1.3329, "step": 1070},
    {"epoch": 0.2872795296632817, "grad_norm": 1.366921247819638, "learning_rate": 1.7941809494990838e-05, "loss": 1.3152, "step": 1075},
    {"epoch": 0.28861571352218063, "grad_norm": 1.2494608185941236, "learning_rate": 1.7913373198176832e-05, "loss": 1.3242, "step": 1080},
    {"epoch": 0.2899518973810796, "grad_norm": 1.2279808317447078, "learning_rate": 1.7884764669150035e-05, "loss": 1.3359, "step": 1085},
    {"epoch": 0.2912880812399786, "grad_norm": 1.2317749726693008, "learning_rate": 1.7855984530566564e-05, "loss": 1.3186, "step": 1090},
    {"epoch": 0.2926242650988776, "grad_norm": 1.3570393319531886, "learning_rate": 1.7827033408817573e-05, "loss": 1.3449, "step": 1095},
    {"epoch": 0.29396044895777657, "grad_norm": 1.247405852772324, "learning_rate": 1.779791193401561e-05, "loss": 1.3416, "step": 1100},
    {"epoch": 0.29529663281667556, "grad_norm": 1.2893644804363795, "learning_rate": 1.776862073998091e-05, "loss": 1.3674, "step": 1105},
    {"epoch": 0.29663281667557456, "grad_norm": 1.2741258542191471, "learning_rate": 1.7739160464227593e-05, "loss": 1.3291, "step": 1110},
    {"epoch": 0.29796900053447356, "grad_norm": 1.4051921967322472, "learning_rate": 1.7709531747949796e-05, "loss": 1.3592, "step": 1115},
    {"epoch": 0.2993051843933725, "grad_norm": 1.2562313322258833, "learning_rate": 1.7679735236007715e-05, "loss": 1.3259, "step": 1120},
    {"epoch": 0.3006413682522715, "grad_norm": 1.316830880638165, "learning_rate": 1.7649771576913553e-05, "loss": 1.3448, "step": 1125},
    {"epoch": 0.3019775521111705, "grad_norm": 1.3518115028546631, "learning_rate": 1.7619641422817446e-05, "loss": 1.3291, "step": 1130},
    {"epoch": 0.3033137359700695, "grad_norm": 1.3279266806644043, "learning_rate": 1.758934542949323e-05, "loss": 1.3589, "step": 1135},
    {"epoch": 0.30464991982896844, "grad_norm": 1.2655330414005816, "learning_rate": 1.755888425632418e-05, "loss": 1.3267, "step": 1140},
    {"epoch": 0.30598610368786744, "grad_norm": 1.2109654835513972, "learning_rate": 1.7528258566288666e-05, "loss": 1.3264, "step": 1145},
    {"epoch": 0.30732228754676644, "grad_norm": 1.269808597072721, "learning_rate": 1.7497469025945722e-05, "loss": 1.2766, "step": 1150},
    {"epoch": 0.30865847140566544, "grad_norm": 1.2456464717450948, "learning_rate": 1.7466516305420524e-05, "loss": 1.3352, "step": 1155},
    {"epoch": 0.3099946552645644, "grad_norm": 1.2359484363944293, "learning_rate": 1.743540107838983e-05, "loss": 1.3295, "step": 1160},
    {"epoch": 0.3113308391234634, "grad_norm": 1.2244387854880765, "learning_rate": 1.74041240220673e-05, "loss": 1.2754, "step": 1165},
    {"epoch": 0.3126670229823624, "grad_norm": 1.34164790224033, "learning_rate": 1.7372685817188747e-05, "loss": 1.3066, "step": 1170},
    {"epoch": 0.3140032068412614, "grad_norm": 1.274512676357063, "learning_rate": 1.734108714799735e-05, "loss": 1.337, "step": 1175},
    {"epoch": 0.3153393907001603, "grad_norm": 1.1877061220307676, "learning_rate": 1.7309328702228742e-05, "loss": 1.304, "step": 1180},
    {"epoch": 0.3166755745590593, "grad_norm": 1.307991502348843, "learning_rate": 1.7277411171096042e-05, "loss": 1.3234, "step": 1185},
    {"epoch": 0.3180117584179583, "grad_norm": 1.2459886266896583, "learning_rate": 1.7245335249274818e-05, "loss": 1.344, "step": 1190},
    {"epoch": 0.3193479422768573, "grad_norm": 1.3088574044809078, "learning_rate": 1.7213101634887968e-05, "loss": 1.3213, "step": 1195},
    {"epoch": 0.32068412613575625, "grad_norm": 1.401051770726762, "learning_rate": 1.718071102949051e-05, "loss": 1.3355, "step": 1200},
    {"epoch": 0.32068412613575625, "eval_loss": 1.3390916585922241, "eval_runtime": 523.6061, "eval_samples_per_second": 25.298, "eval_steps_per_second": 3.163, "step": 1200},
    {"epoch": 0.32202030999465525, "grad_norm": 1.2972562697211982, "learning_rate": 1.7148164138054333e-05, "loss": 1.3181, "step": 1205},
    {"epoch": 0.32335649385355425, "grad_norm": 1.2825002290000433, "learning_rate": 1.7115461668952848e-05, "loss": 1.3422, "step": 1210},
    {"epoch": 0.32469267771245325, "grad_norm": 1.2161188921788924, "learning_rate": 1.7082604333945557e-05, "loss": 1.3505, "step": 1215},
    {"epoch": 0.3260288615713522, "grad_norm": 1.2075785701291617, "learning_rate": 1.7049592848162583e-05, "loss": 1.331, "step": 1220},
    {"epoch": 0.3273650454302512, "grad_norm": 1.3030995176122255, "learning_rate": 1.701642793008909e-05, "loss": 1.3266, "step": 1225},
    {"epoch": 0.3287012292891502, "grad_norm": 1.2590894105748518, "learning_rate": 1.6983110301549652e-05, "loss": 1.3199, "step": 1230},
    {"epoch": 0.3300374131480492, "grad_norm": 1.2284782881775649, "learning_rate": 1.6949640687692535e-05, "loss": 1.3243, "step": 1235},
    {"epoch": 0.33137359700694813, "grad_norm": 1.2623721858490347, "learning_rate": 1.691601981697393e-05, "loss": 1.3299, "step": 1240},
    {"epoch": 0.3327097808658471, "grad_norm": 1.226343404396135, "learning_rate": 1.688224842114208e-05, "loss": 1.3031, "step": 1245},
    {"epoch": 0.3340459647247461, "grad_norm": 1.2453664354622114, "learning_rate": 1.6848327235221368e-05, "loss": 1.3047, "step": 1250},
    {"epoch": 0.3353821485836451, "grad_norm": 1.2917212982794357, "learning_rate": 1.681425699749631e-05, "loss": 1.3367, "step": 1255},
    {"epoch": 0.3367183324425441, "grad_norm": 1.1431548737930348, "learning_rate": 1.6780038449495492e-05, "loss": 1.2894, "step": 1260},
    {"epoch": 0.33805451630144306, "grad_norm": 1.305490289585266, "learning_rate": 1.674567233597542e-05, "loss": 1.3303, "step": 1265},
    {"epoch": 0.33939070016034206, "grad_norm": 1.3295423667445367, "learning_rate": 1.6711159404904346e-05, "loss": 1.3795, "step": 1270},
    {"epoch": 0.34072688401924106, "grad_norm": 1.2557371261898338, "learning_rate": 1.667650040744593e-05, "loss": 1.3923, "step": 1275},
    {"epoch": 0.34206306787814006, "grad_norm": 1.2709385762973946, "learning_rate": 1.6641696097942937e-05, "loss": 1.3633, "step": 1280},
    {"epoch": 0.343399251737039, "grad_norm": 1.2297417601415581, "learning_rate": 1.6606747233900816e-05, "loss": 1.3475, "step": 1285},
    {"epoch": 0.344735435595938, "grad_norm": 1.270772993810448, "learning_rate": 1.6571654575971186e-05, "loss": 1.3215, "step": 1290},
    {"epoch": 0.346071619454837, "grad_norm": 1.243136160915021, "learning_rate": 1.6536418887935307e-05, "loss": 1.2909, "step": 1295},
    {"epoch": 0.347407803313736, "grad_norm": 1.2453939190767478, "learning_rate": 1.6501040936687444e-05, "loss": 1.299, "step": 1300},
    {"epoch": 0.34874398717263494, "grad_norm": 1.3190132616463197, "learning_rate": 1.6465521492218175e-05, "loss": 1.3242, "step": 1305},
    {"epoch": 0.35008017103153394, "grad_norm": 1.2836129069270563, "learning_rate": 1.6429861327597643e-05, "loss": 1.3257, "step": 1310},
    {"epoch": 0.35141635489043294, "grad_norm": 1.3046224543331126, "learning_rate": 1.6394061218958714e-05, "loss": 1.3735, "step": 1315},
    {"epoch": 0.35275253874933193, "grad_norm": 1.195969330334029, "learning_rate": 1.63581219454801e-05, "loss": 1.3547, "step": 1320},
    {"epoch": 0.3540887226082309, "grad_norm": 1.2293037135379614, "learning_rate": 1.63220442893694e-05, "loss": 1.3258, "step": 1325},
    {"epoch": 0.3554249064671299, "grad_norm": 1.221432152844297, "learning_rate": 1.6285829035846057e-05, "loss": 1.3368, "step": 1330},
    {"epoch": 0.3567610903260289, "grad_norm": 1.2802671063330058, "learning_rate": 1.624947697312429e-05, "loss": 1.3497, "step": 1335},
    {"epoch": 0.35809727418492787, "grad_norm": 1.408250486602362, "learning_rate": 1.621298889239592e-05, "loss": 1.3856, "step": 1340},
    {"epoch": 0.3594334580438268, "grad_norm": 1.2601768753005949, "learning_rate": 1.617636558781318e-05, "loss": 1.3151, "step": 1345},
    {"epoch": 0.3607696419027258, "grad_norm": 1.2756859712312687, "learning_rate": 1.6139607856471377e-05, "loss": 1.2932, "step": 1350},
    {"epoch": 0.3621058257616248, "grad_norm": 1.2546446591863418, "learning_rate": 1.610271649839161e-05, "loss": 1.3112, "step": 1355},
    {"epoch": 0.3634420096205238, "grad_norm": 1.2883557482491337, "learning_rate": 1.6065692316503306e-05, "loss": 1.3155, "step": 1360},
    {"epoch": 0.36477819347942275, "grad_norm": 1.2350958249428101, "learning_rate": 1.6028536116626763e-05, "loss": 1.2951, "step": 1365},
    {"epoch": 0.36611437733832175, "grad_norm": 1.209684203322871, "learning_rate": 1.5991248707455614e-05, "loss": 1.3159, "step": 1370},
    {"epoch": 0.36745056119722075, "grad_norm": 1.3434534905824467, "learning_rate": 1.595383090053923e-05, "loss": 1.3079, "step": 1375},
    {"epoch": 0.36878674505611975, "grad_norm": 1.2129683097633361, "learning_rate": 1.5916283510265037e-05, "loss": 1.2949, "step": 1380},
    {"epoch": 0.3701229289150187, "grad_norm": 1.3085166024177262, "learning_rate": 1.5878607353840814e-05, "loss": 1.3557, "step": 1385},
    {"epoch": 0.3714591127739177, "grad_norm": 1.2946759032750124, "learning_rate": 1.5840803251276892e-05, "loss": 1.3005, "step": 1390},
    {"epoch": 0.3727952966328167, "grad_norm": 1.283448494880763, "learning_rate": 1.5802872025368316e-05, "loss": 1.2877, "step": 1395},
    {"epoch": 0.3741314804917157, "grad_norm": 1.2309029767719335, "learning_rate": 1.576481450167693e-05, "loss": 1.334, "step": 1400},
    {"epoch": 0.3741314804917157, "eval_loss": 1.330536127090454, "eval_runtime": 523.7724, "eval_samples_per_second": 25.29, "eval_steps_per_second": 3.162, "step": 1400},
    {"epoch": 0.3754676643506146, "grad_norm": 1.2496837334635877, "learning_rate": 1.5726631508513412e-05, "loss": 1.3089, "step": 1405},
    {"epoch": 0.3768038482095136, "grad_norm": 1.374132042278566, "learning_rate": 1.568832387691924e-05, "loss": 1.3465, "step": 1410},
    {"epoch": 0.3781400320684126, "grad_norm": 1.2412100387519944, "learning_rate": 1.5649892440648625e-05, "loss": 1.338, "step": 1415},
    {"epoch": 0.3794762159273116, "grad_norm": 1.2515878984452657, "learning_rate": 1.5611338036150338e-05, "loss": 1.302, "step": 1420},
    {"epoch": 0.38081239978621056, "grad_norm": 1.3524756121075554, "learning_rate": 1.5572661502549514e-05, "loss": 1.3297, "step": 1425},
    {"epoch": 0.38214858364510956, "grad_norm": 1.3613331896664973, "learning_rate": 1.5533863681629404e-05, "loss": 1.3375, "step": 1430},
    {"epoch": 0.38348476750400856, "grad_norm": 1.3234108123677346, "learning_rate": 1.5494945417813034e-05, "loss": 1.28, "step": 1435},
    {"epoch": 0.38482095136290756, "grad_norm": 1.2464116209347194, "learning_rate": 1.545590755814483e-05, "loss": 1.339, "step": 1440},
    {"epoch": 0.3861571352218065, "grad_norm": 1.2082007561781767, "learning_rate": 1.5416750952272198e-05, "loss": 1.2924, "step": 1445},
    {"epoch": 0.3874933190807055, "grad_norm": 1.2460568233344762, "learning_rate": 1.537747645242701e-05, "loss": 1.2913, "step": 1450},
    {"epoch": 0.3888295029396045, "grad_norm": 1.2643768624756646, "learning_rate": 1.5338084913407067e-05, "loss": 1.3385, "step": 1455},
    {"epoch": 0.3901656867985035, "grad_norm": 1.2393822310314082, "learning_rate": 1.5298577192557487e-05, "loss": 1.2918, "step": 1460},
    {"epoch": 0.39150187065740244, "grad_norm": 1.2212527378031937, "learning_rate": 1.525895414975207e-05, "loss": 1.3496, "step": 1465},
    {"epoch": 0.39283805451630144, "grad_norm": 1.2539579056942822, "learning_rate": 1.5219216647374546e-05, "loss": 1.3285, "step": 1470},
    {"epoch": 0.39417423837520044, "grad_norm": 1.194899743310905, "learning_rate": 1.5179365550299823e-05, "loss": 1.2987, "step": 1475},
    {"epoch": 0.39551042223409943, "grad_norm": 1.2361033901459717, "learning_rate": 1.513940172587518e-05, "loss": 1.3018, "step": 1480},
    {"epoch": 0.3968466060929984, "grad_norm": 1.2040496820635649, "learning_rate": 1.5099326043901361e-05, "loss": 1.3144, "step": 1485},
    {"epoch": 0.3981827899518974, "grad_norm": 1.2436110116124757, "learning_rate": 1.5059139376613652e-05, "loss": 1.3147, "step": 1490},
    {"epoch": 0.3995189738107964, "grad_norm": 1.229222024704698, "learning_rate": 1.5018842598662913e-05, "loss": 1.3084, "step": 1495},
    {"epoch": 0.40085515766969537, "grad_norm": 1.2280023763221926, "learning_rate": 1.4978436587096526e-05, "loss": 1.2993, "step": 1500},
    {"epoch": 0.4021913415285943, "grad_norm": 1.2379509698653712, "learning_rate": 1.4937922221339303e-05, "loss": 1.3448, "step": 1505},
    {"epoch": 0.4035275253874933, "grad_norm": 1.1819300792108958, "learning_rate": 1.4897300383174362e-05, "loss": 1.3093, "step": 1510},
    {"epoch": 0.4048637092463923, "grad_norm": 1.2646351619539313, "learning_rate": 1.4856571956723924e-05, "loss": 1.3292, "step": 1515},
    {"epoch": 0.4061998931052913, "grad_norm": 1.20105031681724, "learning_rate": 1.4815737828430068e-05, "loss": 1.3275, "step": 1520},
    {"epoch": 0.40753607696419025, "grad_norm": 1.2322477247241055, "learning_rate": 1.4774798887035446e-05, "loss": 1.3273, "step": 1525},
    {"epoch": 0.40887226082308925, "grad_norm": 1.2203562838881301, "learning_rate": 1.4733756023563932e-05, "loss": 1.3046, "step": 1530},
    {"epoch": 0.41020844468198825, "grad_norm": 1.3701923681759842, "learning_rate": 1.4692610131301242e-05, "loss": 1.3468, "step": 1535},
    {"epoch": 0.41154462854088725, "grad_norm": 1.2104295209543379, "learning_rate": 1.4651362105775471e-05, "loss": 1.3292, "step": 1540},
    {"epoch": 0.4128808123997862, "grad_norm": 1.178262832900831, "learning_rate": 1.4610012844737622e-05, "loss": 1.3095, "step": 1545},
    {"epoch": 0.4142169962586852, "grad_norm": 1.2034684530693827, "learning_rate": 1.4568563248142058e-05, "loss": 1.277, "step": 1550},
    {"epoch": 0.4155531801175842, "grad_norm": 1.2346570794875227, "learning_rate": 1.4527014218126913e-05, "loss": 1.3323, "step": 1555},
    {"epoch": 0.4168893639764832, "grad_norm": 1.2924089782717, "learning_rate": 1.4485366658994463e-05, "loss": 1.3117, "step": 1560},
    {"epoch": 0.4182255478353821, "grad_norm": 1.222906017451485, "learning_rate": 1.4443621477191434e-05, "loss": 1.3185, "step": 1565},
    {"epoch": 0.4195617316942811, "grad_norm": 1.2534027609567953, "learning_rate": 1.440177958128929e-05, "loss": 1.2978, "step": 1570},
    {"epoch": 0.4208979155531801, "grad_norm": 1.2140810113206562, "learning_rate": 1.4359841881964445e-05, "loss": 1.3162, "step": 1575},
    {"epoch": 0.4222340994120791, "grad_norm": 1.1684011954978797, "learning_rate": 1.4317809291978442e-05, "loss": 1.3333, "step": 1580},
    {"epoch": 0.42357028327097807, "grad_norm": 1.214319379461352, "learning_rate": 1.4275682726158092e-05, "loss": 1.2867, "step": 1585},
    {"epoch": 0.42490646712987706, "grad_norm": 1.182224144852539, "learning_rate": 1.4233463101375569e-05, "loss": 1.3039, "step": 1590},
    {"epoch": 0.42624265098877606, "grad_norm": 1.2314041097519686, "learning_rate": 1.4191151336528441e-05, "loss": 1.3488, "step": 1595},
    {"epoch": 0.42757883484767506, "grad_norm": 1.2641228680933205, "learning_rate": 1.4148748352519677e-05, "loss": 1.3183, "step": 1600},
    {"epoch": 0.42757883484767506, "eval_loss": 1.3232654333114624, "eval_runtime": 523.7168, "eval_samples_per_second": 25.292, "eval_steps_per_second": 3.162, "step": 1600},
    {"epoch": 0.428915018706574, "grad_norm": 1.2410818763020894, "learning_rate": 1.4106255072237605e-05, "loss": 1.2977, "step": 1605},
    {"epoch": 0.430251202565473, "grad_norm": 1.2216210579429683, "learning_rate": 1.406367242053583e-05, "loss": 1.3388, "step": 1610},
    {"epoch": 0.431587386424372, "grad_norm": 1.2380524456449333, "learning_rate": 1.402100132421309e-05, "loss": 1.3048, "step": 1615},
    {"epoch": 0.432923570283271, "grad_norm": 1.222444425938849, "learning_rate": 1.39782427119931e-05, "loss": 1.3129, "step": 1620},
    {"epoch": 0.43425975414216994, "grad_norm": 1.2084516798428055, "learning_rate": 1.3935397514504332e-05, "loss": 1.3312, "step": 1625},
    {"epoch": 0.43559593800106894, "grad_norm": 1.3138679343999906, "learning_rate": 1.3892466664259756e-05, "loss": 1.3358, "step": 1630},
    {"epoch": 0.43693212185996794, "grad_norm": 1.325108022318488, "learning_rate": 1.3849451095636555e-05, "loss": 1.3197, "step": 1635},
    {"epoch": 0.43826830571886694, "grad_norm": 1.239099901058549, "learning_rate": 1.3806351744855781e-05, "loss": 1.3276, "step": 1640},
    {"epoch": 0.4396044895777659, "grad_norm": 1.2701728016347087, "learning_rate": 1.3763169549961976e-05, "loss": 1.3087, "step": 1645},
    {"epoch": 0.4409406734366649, "grad_norm": 1.3093633737758703, "learning_rate": 1.371990545080276e-05, "loss": 1.3334, "step": 1650},
    {"epoch": 0.4422768572955639, "grad_norm": 1.2721317308394575, "learning_rate": 1.3676560389008378e-05, "loss": 1.3568, "step": 1655},
    {"epoch": 0.4436130411544629, "grad_norm": 1.2184083694463035, "learning_rate": 1.3633135307971204e-05, "loss": 1.3279, "step": 1660},
    {"epoch": 0.4449492250133618, "grad_norm": 1.3001651314508893, "learning_rate": 1.3589631152825197e-05, "loss": 1.3176, "step": 1665},
    {"epoch": 0.4462854088722608, "grad_norm": 1.2226816442485073, "learning_rate": 1.3546048870425356e-05, "loss": 1.2705, "step": 1670},
    {"epoch": 0.4476215927311598, "grad_norm": 1.4176804415374429, "learning_rate": 1.3502389409327087e-05, "loss": 1.2679, "step": 1675},
    {"epoch": 0.4489577765900588, "grad_norm": 1.2388040805053258, "learning_rate": 1.3458653719765564e-05, "loss": 1.3046, "step": 1680},
    {"epoch": 0.45029396044895775, "grad_norm": 1.2129122448318554, "learning_rate": 1.341484275363506e-05, "loss": 1.3149, "step": 1685},
    {"epoch": 0.45163014430785675, "grad_norm": 1.244763644565545, "learning_rate": 1.3370957464468213e-05, "loss": 1.3436, "step": 1690},
    {"epoch": 0.45296632816675575, "grad_norm": 1.2759739357160127, "learning_rate": 1.332699880741528e-05, "loss": 1.3569, "step": 1695},
    {"epoch": 0.45430251202565475, "grad_norm": 1.2281614718249263, "learning_rate": 1.3282967739223357e-05, "loss": 1.3075, "step": 1700},
    {"epoch": 0.4556386958845537, "grad_norm": 1.2189899125191879, "learning_rate": 1.3238865218215535e-05, "loss": 1.3017, "step": 1705},
    {"epoch": 0.4569748797434527, "grad_norm": 1.231210053539351, "learning_rate": 1.3194692204270063e-05, "loss": 1.3366, "step": 1710},
    {"epoch": 0.4583110636023517, "grad_norm": 1.2018193616808162, "learning_rate": 1.3150449658799442e-05, "loss": 1.3535, "step": 1715},
    {"epoch": 0.4596472474612507, "grad_norm": 1.2128311963623164, "learning_rate": 1.3106138544729511e-05, "loss": 1.3495, "step": 1720},
    {"epoch": 0.46098343132014963, "grad_norm": 1.2304855721971648, "learning_rate": 1.3061759826478477e-05, "loss": 1.308, "step": 1725},
    {"epoch": 0.4623196151790486, "grad_norm": 1.183612891624708, "learning_rate": 1.3017314469935942e-05, "loss": 1.2771, "step": 1730},
    {"epoch": 0.4636557990379476, "grad_norm": 1.2035215718306922, "learning_rate": 1.2972803442441863e-05, "loss": 1.3374, "step": 1735},
    {"epoch": 0.4649919828968466, "grad_norm": 1.2078408808291714, "learning_rate": 1.2928227712765504e-05, "loss": 1.3155, "step": 1740},
    {"epoch": 0.46632816675574557, "grad_norm": 1.2685296349837316, "learning_rate": 1.2883588251084362e-05, "loss": 1.3224, "step": 1745},
    {"epoch": 0.46766435061464456, "grad_norm": 1.2298326258192636, "learning_rate": 1.2838886028963038e-05, "loss": 1.3332, "step": 1750},
    {"epoch": 0.46900053447354356, "grad_norm": 1.1803857708163348, "learning_rate": 1.2794122019332087e-05, "loss": 1.2889, "step": 1755},
    {"epoch": 0.47033671833244256, "grad_norm": 1.274081083989054, "learning_rate": 1.2749297196466861e-05, "loss": 1.289, "step": 1760},
    {"epoch": 0.4716729021913415, "grad_norm": 1.3238957747747422, "learning_rate": 1.270441253596629e-05, "loss": 1.3189, "step": 1765},
    {"epoch": 0.4730090860502405, "grad_norm": 1.2097611285289955, "learning_rate": 1.265946901473166e-05, "loss": 1.3071, "step": 1770},
    {"epoch": 0.4743452699091395, "grad_norm": 1.1625021555183794, "learning_rate": 1.2614467610945323e-05, "loss": 1.2987, "step": 1775},
    {"epoch": 0.4756814537680385, "grad_norm": 1.1887076661153424, "learning_rate": 1.256940930404945e-05, "loss": 1.2783,
"loss": 1.2783, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.47701763762693744, |
|
"grad_norm": 1.2645632943006766, |
|
"learning_rate": 1.2524295074724683e-05, |
|
"loss": 1.3448, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.47835382148583644, |
|
"grad_norm": 1.2361406321440211, |
|
"learning_rate": 1.2479125904868795e-05, |
|
"loss": 1.2726, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.47969000534473544, |
|
"grad_norm": 1.274165207428913, |
|
"learning_rate": 1.2433902777575326e-05, |
|
"loss": 1.2907, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.48102618920363444, |
|
"grad_norm": 1.2418881744061736, |
|
"learning_rate": 1.2388626677112185e-05, |
|
"loss": 1.334, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.48102618920363444, |
|
"eval_loss": 1.316095232963562, |
|
"eval_runtime": 523.7124, |
|
"eval_samples_per_second": 25.293, |
|
"eval_steps_per_second": 3.162, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.4823623730625334, |
|
"grad_norm": 1.4406988030285752, |
|
"learning_rate": 1.2343298588900226e-05, |
|
"loss": 1.3175, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.4836985569214324, |
|
"grad_norm": 1.1708167122953599, |
|
"learning_rate": 1.2297919499491797e-05, |
|
"loss": 1.3048, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.4850347407803314, |
|
"grad_norm": 1.2437310171766487, |
|
"learning_rate": 1.2252490396549282e-05, |
|
"loss": 1.3287, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.4863709246392304, |
|
"grad_norm": 1.5815332852740502, |
|
"learning_rate": 1.220701226882358e-05, |
|
"loss": 1.3344, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.4877071084981293, |
|
"grad_norm": 1.2057093168260966, |
|
"learning_rate": 1.2161486106132612e-05, |
|
"loss": 1.3623, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.4890432923570283, |
|
"grad_norm": 1.1494411879995097, |
|
"learning_rate": 1.2115912899339757e-05, |
|
"loss": 1.283, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.4903794762159273, |
|
"grad_norm": 1.1721905423708292, |
|
"learning_rate": 1.2070293640332306e-05, |
|
"loss": 1.3124, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.4917156600748263, |
|
"grad_norm": 1.165222069970401, |
|
"learning_rate": 1.202462932199985e-05, |
|
"loss": 1.3094, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.49305184393372525, |
|
"grad_norm": 1.250148064441355, |
|
"learning_rate": 1.1978920938212691e-05, |
|
"loss": 1.3363, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.49438802779262425, |
|
"grad_norm": 1.1969490785809054, |
|
"learning_rate": 1.1933169483800203e-05, |
|
"loss": 1.3057, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.49572421165152325, |
|
"grad_norm": 1.2469012012408995, |
|
"learning_rate": 1.1887375954529167e-05, |
|
"loss": 1.3053, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.49706039551042225, |
|
"grad_norm": 1.1816859874185814, |
|
"learning_rate": 1.1841541347082134e-05, |
|
"loss": 1.2982, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.4983965793693212, |
|
"grad_norm": 1.258761404972035, |
|
"learning_rate": 1.1795666659035682e-05, |
|
"loss": 1.2963, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.4997327632282202, |
|
"grad_norm": 1.2450588185319733, |
|
"learning_rate": 1.1749752888838754e-05, |
|
"loss": 1.3272, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.5010689470871192, |
|
"grad_norm": 1.2052446498628289, |
|
"learning_rate": 1.1703801035790897e-05, |
|
"loss": 1.2946, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.5024051309460181, |
|
"grad_norm": 1.2142678023982578, |
|
"learning_rate": 1.1657812100020507e-05, |
|
"loss": 1.302, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.5037413148049171, |
|
"grad_norm": 1.2721562041773744, |
|
"learning_rate": 1.1611787082463095e-05, |
|
"loss": 1.3236, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.5050774986638161, |
|
"grad_norm": 1.3019577911767723, |
|
"learning_rate": 1.1565726984839471e-05, |
|
"loss": 1.3, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.5064136825227151, |
|
"grad_norm": 1.2326550134057832, |
|
"learning_rate": 1.1519632809633954e-05, |
|
"loss": 1.3449, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.5077498663816141, |
|
"grad_norm": 1.2108121941048178, |
|
"learning_rate": 1.1473505560072543e-05, |
|
"loss": 1.3003, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.5090860502405131, |
|
"grad_norm": 1.2351796577016483, |
|
"learning_rate": 1.1427346240101105e-05, |
|
"loss": 1.294, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.5104222340994121, |
|
"grad_norm": 1.210376427103751, |
|
"learning_rate": 1.1381155854363503e-05, |
|
"loss": 1.269, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.5117584179583111, |
|
"grad_norm": 1.1902823799298627, |
|
"learning_rate": 1.1334935408179736e-05, |
|
"loss": 1.2871, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.51309460181721, |
|
"grad_norm": 1.2389975794814998, |
|
"learning_rate": 1.1288685907524057e-05, |
|
"loss": 1.3092, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.514430785676109, |
|
"grad_norm": 1.228008698129948, |
|
"learning_rate": 1.1242408359003091e-05, |
|
"loss": 1.287, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.515766969535008, |
|
"grad_norm": 1.1816697296808294, |
|
"learning_rate": 1.1196103769833908e-05, |
|
"loss": 1.3128, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.517103153393907, |
|
"grad_norm": 1.1835653007689506, |
|
"learning_rate": 1.1149773147822112e-05, |
|
"loss": 1.2925, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.518439337252806, |
|
"grad_norm": 1.2380413372980732, |
|
"learning_rate": 1.1103417501339903e-05, |
|
"loss": 1.3405, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.519775521111705, |
|
"grad_norm": 1.2784541489247507, |
|
"learning_rate": 1.1057037839304135e-05, |
|
"loss": 1.2585, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.521111704970604, |
|
"grad_norm": 1.2402864740991135, |
|
"learning_rate": 1.1010635171154342e-05, |
|
"loss": 1.2867, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.522447888829503, |
|
"grad_norm": 1.2346852083660054, |
|
"learning_rate": 1.0964210506830802e-05, |
|
"loss": 1.2781, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.5237840726884019, |
|
"grad_norm": 1.2195618023918173, |
|
"learning_rate": 1.0917764856752509e-05, |
|
"loss": 1.289, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.5251202565473009, |
|
"grad_norm": 1.1817447386492328, |
|
"learning_rate": 1.0871299231795214e-05, |
|
"loss": 1.2883, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.5264564404061999, |
|
"grad_norm": 1.1727533453851813, |
|
"learning_rate": 1.082481464326942e-05, |
|
"loss": 1.32, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.5277926242650989, |
|
"grad_norm": 1.181864666573128, |
|
"learning_rate": 1.077831210289837e-05, |
|
"loss": 1.3068, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.5291288081239979, |
|
"grad_norm": 1.2249148804887326, |
|
"learning_rate": 1.0731792622796007e-05, |
|
"loss": 1.3044, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.5304649919828969, |
|
"grad_norm": 1.2263040268107137, |
|
"learning_rate": 1.0685257215444975e-05, |
|
"loss": 1.3002, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.5318011758417959, |
|
"grad_norm": 1.2237762133974603, |
|
"learning_rate": 1.0638706893674571e-05, |
|
"loss": 1.3138, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.5331373597006949, |
|
"grad_norm": 1.324928279536264, |
|
"learning_rate": 1.059214267063869e-05, |
|
"loss": 1.3364, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.5344735435595938, |
|
"grad_norm": 1.2493747053579303, |
|
"learning_rate": 1.0545565559793796e-05, |
|
"loss": 1.3013, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.5344735435595938, |
|
"eval_loss": 1.3086645603179932, |
|
"eval_runtime": 523.908, |
|
"eval_samples_per_second": 25.283, |
|
"eval_steps_per_second": 3.161, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.5358097274184928, |
|
"grad_norm": 1.2142470124401028, |
|
"learning_rate": 1.0498976574876838e-05, |
|
"loss": 1.2563, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.5371459112773918, |
|
"grad_norm": 1.1948400261490024, |
|
"learning_rate": 1.0452376729883216e-05, |
|
"loss": 1.3066, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.5384820951362908, |
|
"grad_norm": 1.206399971819588, |
|
"learning_rate": 1.0405767039044688e-05, |
|
"loss": 1.2928, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.5398182789951897, |
|
"grad_norm": 1.2064545160074232, |
|
"learning_rate": 1.0359148516807302e-05, |
|
"loss": 1.3377, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.5411544628540887, |
|
"grad_norm": 1.2352197196893993, |
|
"learning_rate": 1.0312522177809326e-05, |
|
"loss": 1.32, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.5424906467129877, |
|
"grad_norm": 1.1837145257724762, |
|
"learning_rate": 1.0265889036859146e-05, |
|
"loss": 1.2636, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.5438268305718867, |
|
"grad_norm": 1.2342337040554485, |
|
"learning_rate": 1.0219250108913206e-05, |
|
"loss": 1.2928, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.5451630144307856, |
|
"grad_norm": 1.2371903390877288, |
|
"learning_rate": 1.0172606409053887e-05, |
|
"loss": 1.3451, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.5464991982896846, |
|
"grad_norm": 1.2027906126810077, |
|
"learning_rate": 1.0125958952467439e-05, |
|
"loss": 1.3274, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.5478353821485836, |
|
"grad_norm": 1.176177465912769, |
|
"learning_rate": 1.0079308754421877e-05, |
|
"loss": 1.2996, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.5491715660074826, |
|
"grad_norm": 1.2416193442138757, |
|
"learning_rate": 1.003265683024487e-05, |
|
"loss": 1.2798, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.5505077498663816, |
|
"grad_norm": 1.2583071246909772, |
|
"learning_rate": 9.986004195301679e-06, |
|
"loss": 1.2727, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.5518439337252806, |
|
"grad_norm": 1.1935438605799258, |
|
"learning_rate": 9.939351864973006e-06, |
|
"loss": 1.256, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.5531801175841796, |
|
"grad_norm": 1.1633338976111967, |
|
"learning_rate": 9.892700854632945e-06, |
|
"loss": 1.2946, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.5545163014430786, |
|
"grad_norm": 1.2341873796862335, |
|
"learning_rate": 9.846052179626854e-06, |
|
"loss": 1.2695, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.5558524853019775, |
|
"grad_norm": 1.2110901028037866, |
|
"learning_rate": 9.799406855249261e-06, |
|
"loss": 1.2939, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.5571886691608765, |
|
"grad_norm": 1.2285980383554018, |
|
"learning_rate": 9.75276589672177e-06, |
|
"loss": 1.2646, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.5585248530197755, |
|
"grad_norm": 1.2442431465515202, |
|
"learning_rate": 9.706130319170968e-06, |
|
"loss": 1.284, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.5598610368786745, |
|
"grad_norm": 1.2387684585320866, |
|
"learning_rate": 9.659501137606317e-06, |
|
"loss": 1.245, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.5611972207375735, |
|
"grad_norm": 1.1969981629519848, |
|
"learning_rate": 9.61287936689808e-06, |
|
"loss": 1.3233, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.5625334045964725, |
|
"grad_norm": 1.2647984251708813, |
|
"learning_rate": 9.566266021755224e-06, |
|
"loss": 1.3229, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.5638695884553715, |
|
"grad_norm": 1.211725302821127, |
|
"learning_rate": 9.519662116703333e-06, |
|
"loss": 1.2725, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.5652057723142705, |
|
"grad_norm": 1.1879063545622028, |
|
"learning_rate": 9.473068666062535e-06, |
|
"loss": 1.2781, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.5665419561731694, |
|
"grad_norm": 1.3276654575701659, |
|
"learning_rate": 9.426486683925412e-06, |
|
"loss": 1.3065, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.5678781400320684, |
|
"grad_norm": 1.237229888093536, |
|
"learning_rate": 9.379917184134949e-06, |
|
"loss": 1.2736, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.5692143238909674, |
|
"grad_norm": 1.1968148529216311, |
|
"learning_rate": 9.33336118026245e-06, |
|
"loss": 1.3061, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.5705505077498664, |
|
"grad_norm": 1.178529508943571, |
|
"learning_rate": 9.286819685585482e-06, |
|
"loss": 1.285, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.5718866916087654, |
|
"grad_norm": 1.1976436960246757, |
|
"learning_rate": 9.240293713065826e-06, |
|
"loss": 1.3261, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.5732228754676644, |
|
"grad_norm": 1.293767414098443, |
|
"learning_rate": 9.193784275327439e-06, |
|
"loss": 1.3081, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.5745590593265634, |
|
"grad_norm": 1.1715456586189346, |
|
"learning_rate": 9.14729238463438e-06, |
|
"loss": 1.2754, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.5758952431854624, |
|
"grad_norm": 1.2266737106027843, |
|
"learning_rate": 9.100819052868818e-06, |
|
"loss": 1.3039, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.5772314270443613, |
|
"grad_norm": 1.2249867841645912, |
|
"learning_rate": 9.054365291508998e-06, |
|
"loss": 1.2862, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.5785676109032603, |
|
"grad_norm": 1.2137804099395924, |
|
"learning_rate": 9.007932111607202e-06, |
|
"loss": 1.3038, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.5799037947621593, |
|
"grad_norm": 1.1366732855918937, |
|
"learning_rate": 8.961520523767777e-06, |
|
"loss": 1.2751, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.5812399786210583, |
|
"grad_norm": 1.2241697536325484, |
|
"learning_rate": 8.915131538125124e-06, |
|
"loss": 1.3154, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.5825761624799572, |
|
"grad_norm": 1.1917760108841098, |
|
"learning_rate": 8.868766164321704e-06, |
|
"loss": 1.3126, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.5839123463388562, |
|
"grad_norm": 1.1679731332413519, |
|
"learning_rate": 8.822425411486087e-06, |
|
"loss": 1.2923, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.5852485301977552, |
|
"grad_norm": 1.160012615589374, |
|
"learning_rate": 8.776110288210964e-06, |
|
"loss": 1.295, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.5865847140566542, |
|
"grad_norm": 1.2221113014345002, |
|
"learning_rate": 8.729821802531213e-06, |
|
"loss": 1.2981, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.5879208979155531, |
|
"grad_norm": 1.1659553272788548, |
|
"learning_rate": 8.683560961901952e-06, |
|
"loss": 1.3156, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.5879208979155531, |
|
"eval_loss": 1.301594853401184, |
|
"eval_runtime": 526.3671, |
|
"eval_samples_per_second": 25.165, |
|
"eval_steps_per_second": 3.146, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.5892570817744521, |
|
"grad_norm": 1.171579385550598, |
|
"learning_rate": 8.637328773176605e-06, |
|
"loss": 1.3301, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.5905932656333511, |
|
"grad_norm": 1.1943778685731268, |
|
"learning_rate": 8.591126242585007e-06, |
|
"loss": 1.3187, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.5919294494922501, |
|
"grad_norm": 1.1944105035087198, |
|
"learning_rate": 8.544954375711487e-06, |
|
"loss": 1.2836, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.5932656333511491, |
|
"grad_norm": 1.2318242169174625, |
|
"learning_rate": 8.498814177472987e-06, |
|
"loss": 1.3131, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.5946018172100481, |
|
"grad_norm": 1.2254563935614524, |
|
"learning_rate": 8.452706652097187e-06, |
|
"loss": 1.2875, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.5959380010689471, |
|
"grad_norm": 1.1704675679433372, |
|
"learning_rate": 8.406632803100665e-06, |
|
"loss": 1.2853, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.5972741849278461, |
|
"grad_norm": 1.1396029283635678, |
|
"learning_rate": 8.360593633267024e-06, |
|
"loss": 1.29, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.598610368786745, |
|
"grad_norm": 1.1872398900997034, |
|
"learning_rate": 8.314590144625102e-06, |
|
"loss": 1.3049, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.599946552645644, |
|
"grad_norm": 1.182289479761897, |
|
"learning_rate": 8.268623338427139e-06, |
|
"loss": 1.2826, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.601282736504543, |
|
"grad_norm": 1.202378486908649, |
|
"learning_rate": 8.222694215126985e-06, |
|
"loss": 1.2919, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.602618920363442, |
|
"grad_norm": 1.1561336328785552, |
|
"learning_rate": 8.176803774358356e-06, |
|
"loss": 1.2615, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.603955104222341, |
|
"grad_norm": 1.2171931903507043, |
|
"learning_rate": 8.130953014913025e-06, |
|
"loss": 1.2887, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.60529128808124, |
|
"grad_norm": 1.1818497689115386, |
|
"learning_rate": 8.085142934719131e-06, |
|
"loss": 1.2627, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.606627471940139, |
|
"grad_norm": 1.2158971480650371, |
|
"learning_rate": 8.039374530819444e-06, |
|
"loss": 1.2829, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.607963655799038, |
|
"grad_norm": 1.275466968098271, |
|
"learning_rate": 7.993648799349646e-06, |
|
"loss": 1.3204, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.6092998396579369, |
|
"grad_norm": 1.2170453783162276, |
|
"learning_rate": 7.947966735516682e-06, |
|
"loss": 1.3045, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.6106360235168359, |
|
"grad_norm": 1.204876466973939, |
|
"learning_rate": 7.902329333577072e-06, |
|
"loss": 1.285, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.6119722073757349, |
|
"grad_norm": 1.2259462458581631, |
|
"learning_rate": 7.856737586815292e-06, |
|
"loss": 1.2937, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.6133083912346339, |
|
"grad_norm": 1.2108988682821065, |
|
"learning_rate": 7.811192487522141e-06, |
|
"loss": 1.2896, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.6146445750935329, |
|
"grad_norm": 1.1922326929306413, |
|
"learning_rate": 7.765695026973155e-06, |
|
"loss": 1.2888, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.6159807589524319, |
|
"grad_norm": 1.2082232502962738, |
|
"learning_rate": 7.72024619540702e-06, |
|
"loss": 1.3027, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.6173169428113309, |
|
"grad_norm": 1.1935181964760695, |
|
"learning_rate": 7.674846982004033e-06, |
|
"loss": 1.2726, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.6186531266702299, |
|
"grad_norm": 1.1988170924470665, |
|
"learning_rate": 7.62949837486456e-06, |
|
"loss": 1.3105, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.6199893105291288, |
|
"grad_norm": 1.1788468720871008, |
|
"learning_rate": 7.584201360987544e-06, |
|
"loss": 1.2962, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.6213254943880278, |
|
"grad_norm": 1.2164638957688612, |
|
"learning_rate": 7.538956926249013e-06, |
|
"loss": 1.2918, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.6226616782469268, |
|
"grad_norm": 1.2540807555588527, |
|
"learning_rate": 7.493766055380622e-06, |
|
"loss": 1.3032, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.6239978621058258, |
|
"grad_norm": 1.2021644980450277, |
|
"learning_rate": 7.44862973194823e-06, |
|
"loss": 1.2784, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.6253340459647247, |
|
"grad_norm": 1.1374900868431708, |
|
"learning_rate": 7.403548938330487e-06, |
|
"loss": 1.2615, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.6266702298236237, |
|
"grad_norm": 1.223926027057831, |
|
"learning_rate": 7.358524655697445e-06, |
|
"loss": 1.3324, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.6280064136825227, |
|
"grad_norm": 1.1663009699556086, |
|
"learning_rate": 7.31355786398922e-06, |
|
"loss": 1.3073, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.6293425975414217, |
|
"grad_norm": 1.1605825631923092, |
|
"learning_rate": 7.268649541894658e-06, |
|
"loss": 1.3167, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.6306787814003206, |
|
"grad_norm": 1.181802501051011, |
|
"learning_rate": 7.223800666830013e-06, |
|
"loss": 1.272, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.6320149652592196, |
|
"grad_norm": 1.2124656825123712, |
|
"learning_rate": 7.1790122149177135e-06, |
|
"loss": 1.2783, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.6333511491181186, |
|
"grad_norm": 1.159478422052137, |
|
"learning_rate": 7.134285160965091e-06, |
|
"loss": 1.3114, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.6346873329770176, |
|
"grad_norm": 1.2191673865530408, |
|
"learning_rate": 7.089620478443152e-06, |
|
"loss": 1.2873, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.6360235168359166, |
|
"grad_norm": 1.2212422633172386, |
|
"learning_rate": 7.045019139465434e-06, |
|
"loss": 1.2976, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.6373597006948156, |
|
"grad_norm": 1.2849058652407424, |
|
"learning_rate": 7.000482114766798e-06, |
|
"loss": 1.3026, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.6386958845537146, |
|
"grad_norm": 1.1693633671456403, |
|
"learning_rate": 6.956010373682334e-06, |
|
"loss": 1.2827, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.6400320684126136, |
|
"grad_norm": 1.160020552936478, |
|
"learning_rate": 6.911604884126259e-06, |
|
"loss": 1.2515, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.6413682522715125, |
|
"grad_norm": 1.1828314546763101, |
|
"learning_rate": 6.867266612570833e-06, |
|
"loss": 1.3092, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.6413682522715125, |
|
"eval_loss": 1.2952780723571777, |
|
"eval_runtime": 525.8904, |
|
"eval_samples_per_second": 25.188, |
|
"eval_steps_per_second": 3.149, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.6427044361304115, |
|
"grad_norm": 1.137455706013817, |
|
"learning_rate": 6.822996524025343e-06, |
|
"loss": 1.2891, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.6440406199893105, |
|
"grad_norm": 1.3754307767427276, |
|
"learning_rate": 6.778795582015096e-06, |
|
"loss": 1.2754, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.6453768038482095, |
|
"grad_norm": 1.216856677079751, |
|
"learning_rate": 6.734664748560437e-06, |
|
"loss": 1.3014, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.6467129877071085, |
|
"grad_norm": 1.2592962476705394, |
|
"learning_rate": 6.690604984155826e-06, |
|
"loss": 1.3221, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.6480491715660075, |
|
"grad_norm": 1.1929612361301374, |
|
"learning_rate": 6.646617247748926e-06, |
|
"loss": 1.2768, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.6493853554249065, |
|
"grad_norm": 1.1709139264257042, |
|
"learning_rate": 6.602702496719724e-06, |
|
"loss": 1.2549, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.6507215392838055, |
|
"grad_norm": 1.2183672529081806, |
|
"learning_rate": 6.558861686859712e-06, |
|
"loss": 1.2764, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.6520577231427044, |
|
"grad_norm": 1.1975495934510272, |
|
"learning_rate": 6.515095772351072e-06, |
|
"loss": 1.2929, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.6533939070016034, |
|
"grad_norm": 1.1613807938703284, |
|
"learning_rate": 6.471405705745906e-06, |
|
"loss": 1.2684, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.6547300908605024, |
|
"grad_norm": 1.2736187330390891, |
|
"learning_rate": 6.427792437945516e-06, |
|
"loss": 1.262, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.6560662747194014, |
|
"grad_norm": 1.174927022230355, |
|
"learning_rate": 6.384256918179692e-06, |
|
"loss": 1.2958, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.6574024585783004, |
|
"grad_norm": 1.1737222195085288, |
|
"learning_rate": 6.340800093986071e-06, |
|
"loss": 1.2987, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.6587386424371994, |
|
"grad_norm": 1.1988962500744893, |
|
"learning_rate": 6.297422911189499e-06, |
|
"loss": 1.2469, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.6600748262960984, |
|
"grad_norm": 1.138343574152133, |
|
"learning_rate": 6.254126313881448e-06, |
|
"loss": 1.2564, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.6614110101549974, |
|
"grad_norm": 1.161565466877101, |
|
"learning_rate": 6.210911244399477e-06, |
|
"loss": 1.305, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.6627471940138963, |
|
"grad_norm": 1.1944909640364771, |
|
"learning_rate": 6.167778643306717e-06, |
|
"loss": 1.2749, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.6640833778727953, |
|
"grad_norm": 1.2250102533436973, |
|
"learning_rate": 6.1247294493713845e-06, |
|
"loss": 1.2892, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.6654195617316943, |
|
"grad_norm": 1.211997261711134, |
|
"learning_rate": 6.0817645995463845e-06, |
|
"loss": 1.3064, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.6667557455905933, |
|
"grad_norm": 1.2199059761183304, |
|
"learning_rate": 6.038885028948889e-06, |
|
"loss": 1.2591, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.6680919294494923, |
|
"grad_norm": 1.204780340659424, |
|
"learning_rate": 5.996091670839983e-06, |
|
"loss": 1.2981, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.6694281133083912, |
|
"grad_norm": 1.1622142280662515, |
|
"learning_rate": 5.953385456604377e-06, |
|
"loss": 1.2837, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.6707642971672902, |
|
"grad_norm": 1.2595949655572636, |
|
"learning_rate": 5.910767315730119e-06, |
|
"loss": 1.3098, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.6721004810261892, |
|
"grad_norm": 1.1991193943391245, |
|
"learning_rate": 5.868238175788355e-06, |
|
"loss": 1.2844, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.6734366648850882, |
|
"grad_norm": 1.1286538895272766, |
|
"learning_rate": 5.825798962413164e-06, |
|
"loss": 1.2963, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.6747728487439871, |
|
"grad_norm": 1.2065687187980167, |
|
"learning_rate": 5.783450599281395e-06, |
|
"loss": 1.3002, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.6761090326028861, |
|
"grad_norm": 1.167072268529669, |
|
"learning_rate": 5.7411940080925685e-06, |
|
"loss": 1.2479, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.6774452164617851, |
|
"grad_norm": 1.1698208994032941, |
|
"learning_rate": 5.6990301085488235e-06, |
|
"loss": 1.2999, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.6787814003206841, |
|
"grad_norm": 1.1710233370221095, |
|
"learning_rate": 5.656959818334873e-06, |
|
"loss": 1.2595, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.6801175841795831, |
|
"grad_norm": 1.386755510116527, |
|
"learning_rate": 5.614984053098076e-06, |
|
"loss": 1.2879, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.6814537680384821, |
|
"grad_norm": 1.132940281229991, |
|
"learning_rate": 5.5731037264284735e-06, |
|
"loss": 1.2559, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.6827899518973811, |
|
"grad_norm": 1.1926751219485074, |
|
"learning_rate": 5.531319749838903e-06, |
|
"loss": 1.2906, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.6841261357562801, |
|
"grad_norm": 1.2244655010133048, |
|
"learning_rate": 5.489633032745185e-06, |
|
"loss": 1.2589, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.685462319615179, |
|
"grad_norm": 1.177667766929752, |
|
"learning_rate": 5.448044482446317e-06, |
|
"loss": 1.2896, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.686798503474078, |
|
"grad_norm": 1.1792403166242948, |
|
"learning_rate": 5.406555004104712e-06, |
|
"loss": 1.2724, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.688134687332977, |
|
"grad_norm": 1.1752737928249541, |
|
"learning_rate": 5.365165500726518e-06, |
|
"loss": 1.3095, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.689470871191876, |
|
"grad_norm": 1.2006301934171553, |
|
"learning_rate": 5.323876873141973e-06, |
|
"loss": 1.2682, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.690807055050775, |
|
"grad_norm": 1.232283420067014, |
|
"learning_rate": 5.282690019985756e-06, |
|
"loss": 1.2973, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.692143238909674, |
|
"grad_norm": 1.8591318420869134, |
|
"learning_rate": 5.241605837677481e-06, |
|
"loss": 1.2748, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.693479422768573, |
|
"grad_norm": 1.1409257082194615, |
|
"learning_rate": 5.200625220402139e-06, |
|
"loss": 1.2786, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.694815606627472, |
|
"grad_norm": 1.1874265547469547, |
|
"learning_rate": 5.159749060090675e-06, |
|
"loss": 1.2518, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.694815606627472, |
|
"eval_loss": 1.2894538640975952, |
|
"eval_runtime": 524.2545, |
|
"eval_samples_per_second": 25.266, |
|
"eval_steps_per_second": 3.159, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.6961517904863709, |
|
"grad_norm": 1.1436341244774122, |
|
"learning_rate": 5.118978246400555e-06, |
|
"loss": 1.2868, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.6974879743452699, |
|
"grad_norm": 1.1773055221354787, |
|
"learning_rate": 5.078313666696404e-06, |
|
"loss": 1.2865, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.6988241582041689, |
|
"grad_norm": 1.205692219736226, |
|
"learning_rate": 5.0377562060307e-06, |
|
"loss": 1.271, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.7001603420630679, |
|
"grad_norm": 1.1681619335726294, |
|
"learning_rate": 4.997306747124508e-06, |
|
"loss": 1.281, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.7014965259219669, |
|
"grad_norm": 1.1239087952042888, |
|
"learning_rate": 4.9569661703482585e-06, |
|
"loss": 1.2933, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.7028327097808659, |
|
"grad_norm": 1.1618547501473921, |
|
"learning_rate": 4.916735353702603e-06, |
|
"loss": 1.2556, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.7041688936397649, |
|
"grad_norm": 1.1446438877087823, |
|
"learning_rate": 4.876615172799294e-06, |
|
"loss": 1.2964, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.7055050774986639, |
|
"grad_norm": 1.2196306868529612, |
|
"learning_rate": 4.836606500842129e-06, |
|
"loss": 1.2934, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.7068412613575628, |
|
"grad_norm": 1.1966884099651143, |
|
"learning_rate": 4.7967102086079485e-06, |
|
"loss": 1.2432, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.7081774452164618, |
|
"grad_norm": 1.1483128968540317, |
|
"learning_rate": 4.756927164427685e-06, |
|
"loss": 1.2795, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.7095136290753608, |
|
"grad_norm": 1.1539714350859465, |
|
"learning_rate": 4.717258234167448e-06, |
|
"loss": 1.2494, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.7108498129342598, |
|
"grad_norm": 1.1421161621669704, |
|
"learning_rate": 4.677704281209707e-06, |
|
"loss": 1.2721, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.7121859967931587, |
|
"grad_norm": 1.1666778493912067, |
|
"learning_rate": 4.6382661664344665e-06, |
|
"loss": 1.2805, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.7135221806520577, |
|
"grad_norm": 1.2152799023877705, |
|
"learning_rate": 4.59894474820057e-06, |
|
"loss": 1.2781, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.7148583645109567, |
|
"grad_norm": 1.1802067902023374, |
|
"learning_rate": 4.559740882326984e-06, |
|
"loss": 1.2889, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.7161945483698557, |
|
"grad_norm": 1.1615396840856154, |
|
"learning_rate": 4.520655422074176e-06, |
|
"loss": 1.2295, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.7175307322287546, |
|
"grad_norm": 1.1290092637035325, |
|
"learning_rate": 4.481689218125561e-06, |
|
"loss": 1.2812, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.7188669160876536, |
|
"grad_norm": 1.1477861204230764, |
|
"learning_rate": 4.442843118568976e-06, |
|
"loss": 1.2565, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.7202030999465526, |
|
"grad_norm": 1.2347710658904285, |
|
"learning_rate": 4.4041179688782095e-06, |
|
"loss": 1.2959, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.7215392838054516, |
|
"grad_norm": 1.2180531197328832, |
|
"learning_rate": 4.365514611894623e-06, |
|
"loss": 1.2976, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.7228754676643506, |
|
"grad_norm": 1.1757278803891242, |
|
"learning_rate": 4.327033887808802e-06, |
|
"loss": 1.2761, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.7242116515232496, |
|
"grad_norm": 1.1645726922846997, |
|
"learning_rate": 4.288676634142247e-06, |
|
"loss": 1.2595, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.7255478353821486, |
|
"grad_norm": 1.1411088256784303, |
|
"learning_rate": 4.25044368572917e-06, |
|
"loss": 1.2714, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.7268840192410476, |
|
"grad_norm": 1.1671327639274043, |
|
"learning_rate": 4.2123358746983225e-06, |
|
"loss": 1.2876, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.7282202030999465, |
|
"grad_norm": 1.1581886797505927, |
|
"learning_rate": 4.1743540304548615e-06, |
|
"loss": 1.2859, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.7295563869588455, |
|
"grad_norm": 1.1510537093782165, |
|
"learning_rate": 4.13649897966233e-06, |
|
"loss": 1.2437, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.7308925708177445, |
|
"grad_norm": 1.1962597941965873, |
|
"learning_rate": 4.098771546224643e-06, |
|
"loss": 1.2697, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.7322287546766435, |
|
"grad_norm": 1.176978028412522, |
|
"learning_rate": 4.061172551268162e-06, |
|
"loss": 1.3069, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.7335649385355425, |
|
"grad_norm": 1.2344307343981975, |
|
"learning_rate": 4.023702813123828e-06, |
|
"loss": 1.3034, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.7349011223944415, |
|
"grad_norm": 1.1490296827570874, |
|
"learning_rate": 3.986363147309332e-06, |
|
"loss": 1.2934, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.7362373062533405, |
|
"grad_norm": 1.128655104477458, |
|
"learning_rate": 3.949154366511395e-06, |
|
"loss": 1.3227, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.7375734901122395, |
|
"grad_norm": 1.1377152493000582, |
|
"learning_rate": 3.9120772805680575e-06, |
|
"loss": 1.2837, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.7389096739711384, |
|
"grad_norm": 1.1144595543031406, |
|
"learning_rate": 3.8751326964510615e-06, |
|
"loss": 1.2451, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.7402458578300374, |
|
"grad_norm": 1.1528921550358049, |
|
"learning_rate": 3.838321418248288e-06, |
|
"loss": 1.2615, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.7415820416889364, |
|
"grad_norm": 1.2690943955678684, |
|
"learning_rate": 3.8016442471462524e-06, |
|
"loss": 1.2829, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.7429182255478354, |
|
"grad_norm": 1.2308517139392547, |
|
"learning_rate": 3.7651019814126656e-06, |
|
"loss": 1.2617, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.7442544094067344, |
|
"grad_norm": 1.2017097006317634, |
|
"learning_rate": 3.7286954163790734e-06, |
|
"loss": 1.2364, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.7455905932656334, |
|
"grad_norm": 1.1828941772780268, |
|
"learning_rate": 3.6924253444235224e-06, |
|
"loss": 1.3087, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.7469267771245324, |
|
"grad_norm": 1.1861330848081788, |
|
"learning_rate": 3.6562925549533355e-06, |
|
"loss": 1.2789, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.7482629609834314, |
|
"grad_norm": 1.1526594671507095, |
|
"learning_rate": 3.6202978343879337e-06, |
|
"loss": 1.2617, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.7482629609834314, |
|
"eval_loss": 1.2845991849899292, |
|
"eval_runtime": 523.8103, |
|
"eval_samples_per_second": 25.288, |
|
"eval_steps_per_second": 3.161, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.7495991448423303, |
|
"grad_norm": 1.1298081744323016, |
|
"learning_rate": 3.5844419661416886e-06, |
|
"loss": 1.2902, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.7509353287012293, |
|
"grad_norm": 1.1561421830885714, |
|
"learning_rate": 3.5487257306069054e-06, |
|
"loss": 1.29, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.7522715125601283, |
|
"grad_norm": 1.1536531647083885, |
|
"learning_rate": 3.5131499051368254e-06, |
|
"loss": 1.2614, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.7536076964190273, |
|
"grad_norm": 1.1856993866727077, |
|
"learning_rate": 3.4777152640286984e-06, |
|
"loss": 1.2861, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.7549438802779262, |
|
"grad_norm": 1.2042645879668878, |
|
"learning_rate": 3.4424225785069444e-06, |
|
"loss": 1.2658, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.7562800641368252, |
|
"grad_norm": 1.1759218780648395, |
|
"learning_rate": 3.4072726167063617e-06, |
|
"loss": 1.2613, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.7576162479957242, |
|
"grad_norm": 1.1941131261396993, |
|
"learning_rate": 3.3722661436554104e-06, |
|
"loss": 1.2926, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.7589524318546232, |
|
"grad_norm": 1.2349621736436946, |
|
"learning_rate": 3.337403921259559e-06, |
|
"loss": 1.2852, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.7602886157135221, |
|
"grad_norm": 1.243962877866245, |
|
"learning_rate": 3.3026867082847058e-06, |
|
"loss": 1.3087, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.7616247995724211, |
|
"grad_norm": 1.2755840307358004, |
|
"learning_rate": 3.268115260340654e-06, |
|
"loss": 1.2574, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.7629609834313201, |
|
"grad_norm": 1.256694820928362, |
|
"learning_rate": 3.233690329864684e-06, |
|
"loss": 1.2477, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.7642971672902191, |
|
"grad_norm": 1.153504157549798, |
|
"learning_rate": 3.1994126661051628e-06, |
|
"loss": 1.2669, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.7656333511491181, |
|
"grad_norm": 1.2272283092280594, |
|
"learning_rate": 3.1652830151052416e-06, |
|
"loss": 1.2919, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.7669695350080171, |
|
"grad_norm": 1.1512364936411144, |
|
"learning_rate": 3.131302119686621e-06, |
|
"loss": 1.2736, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.7683057188669161, |
|
"grad_norm": 1.1564153021008827, |
|
"learning_rate": 3.097470719433373e-06, |
|
"loss": 1.2556, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.7696419027258151, |
|
"grad_norm": 1.164737683509797, |
|
"learning_rate": 3.0637895506758597e-06, |
|
"loss": 1.2672, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.770978086584714, |
|
"grad_norm": 1.1685661560913843, |
|
"learning_rate": 3.0302593464746944e-06, |
|
"loss": 1.2907, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.772314270443613, |
|
"grad_norm": 1.1699267498621475, |
|
"learning_rate": 2.9968808366047942e-06, |
|
"loss": 1.2699, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.773650454302512, |
|
"grad_norm": 1.211199795955048, |
|
"learning_rate": 2.963654747539494e-06, |
|
"loss": 1.2693, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.774986638161411, |
|
"grad_norm": 1.1816669846421493, |
|
"learning_rate": 2.9305818024347378e-06, |
|
"loss": 1.2651, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.77632282202031, |
|
"grad_norm": 1.1914669242505636, |
|
"learning_rate": 2.897662721113328e-06, |
|
"loss": 1.2737, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.777659005879209, |
|
"grad_norm": 1.1738719241452067, |
|
"learning_rate": 2.864898220049277e-06, |
|
"loss": 1.2564, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.778995189738108, |
|
"grad_norm": 1.1840077986894364, |
|
"learning_rate": 2.832289012352203e-06, |
|
"loss": 1.2522, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.780331373597007, |
|
"grad_norm": 1.233343112626304, |
|
"learning_rate": 2.7998358077517975e-06, |
|
"loss": 1.2448, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.7816675574559059, |
|
"grad_norm": 1.1877039129056828, |
|
"learning_rate": 2.7675393125824144e-06, |
|
"loss": 1.2563, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.7830037413148049, |
|
"grad_norm": 1.1982422068810221, |
|
"learning_rate": 2.735400229767652e-06, |
|
"loss": 1.2403, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.7843399251737039, |
|
"grad_norm": 1.1935298921878634, |
|
"learning_rate": 2.7034192588050845e-06, |
|
"loss": 1.2701, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.7856761090326029, |
|
"grad_norm": 1.151254746126473, |
|
"learning_rate": 2.671597095751033e-06, |
|
"loss": 1.2509, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.7870122928915019, |
|
"grad_norm": 1.190278414424794, |
|
"learning_rate": 2.6399344332054e-06, |
|
"loss": 1.2914, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.7883484767504009, |
|
"grad_norm": 1.168908881155384, |
|
"learning_rate": 2.6084319602966167e-06, |
|
"loss": 1.2739, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.7896846606092999, |
|
"grad_norm": 1.1495974699891471, |
|
"learning_rate": 2.577090362666631e-06, |
|
"loss": 1.2822, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.7910208444681989, |
|
"grad_norm": 1.199562609813528, |
|
"learning_rate": 2.54591032245599e-06, |
|
"loss": 1.2833, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.7923570283270978, |
|
"grad_norm": 1.172598488602994, |
|
"learning_rate": 2.514892518288988e-06, |
|
"loss": 1.2427, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.7936932121859968, |
|
"grad_norm": 1.2023007809260173, |
|
"learning_rate": 2.484037625258908e-06, |
|
"loss": 1.3116, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.7950293960448958, |
|
"grad_norm": 1.1808013233447734, |
|
"learning_rate": 2.4533463149133073e-06, |
|
"loss": 1.2367, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.7963655799037948, |
|
"grad_norm": 1.2065243432367916, |
|
"learning_rate": 2.422819255239427e-06, |
|
"loss": 1.2995, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.7977017637626937, |
|
"grad_norm": 1.1490648428460022, |
|
"learning_rate": 2.392457110649634e-06, |
|
"loss": 1.2676, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.7990379476215927, |
|
"grad_norm": 1.136037346488201, |
|
"learning_rate": 2.362260541966972e-06, |
|
"loss": 1.2816, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.8003741314804917, |
|
"grad_norm": 1.149577173830776, |
|
"learning_rate": 2.3322302064107762e-06, |
|
"loss": 1.2948, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.8017103153393907, |
|
"grad_norm": 1.185488122515381, |
|
"learning_rate": 2.302366757582355e-06, |
|
"loss": 1.3041, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.8017103153393907, |
|
"eval_loss": 1.2808870077133179, |
|
"eval_runtime": 523.4307, |
|
"eval_samples_per_second": 25.306, |
|
"eval_steps_per_second": 3.164, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.8030464991982896, |
|
"grad_norm": 1.1513855940527316, |
|
"learning_rate": 2.272670845450791e-06, |
|
"loss": 1.277, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.8043826830571886, |
|
"grad_norm": 1.1602967857229551, |
|
"learning_rate": 2.243143116338773e-06, |
|
"loss": 1.2653, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.8057188669160876, |
|
"grad_norm": 1.197568564758537, |
|
"learning_rate": 2.2137842129085396e-06, |
|
"loss": 1.3524, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.8070550507749866, |
|
"grad_norm": 1.1421243027571122, |
|
"learning_rate": 2.1845947741478857e-06, |
|
"loss": 1.2651, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.8083912346338856, |
|
"grad_norm": 1.167663307330092, |
|
"learning_rate": 2.1555754353562643e-06, |
|
"loss": 1.2337, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.8097274184927846, |
|
"grad_norm": 1.1570704863531982, |
|
"learning_rate": 2.1267268281309418e-06, |
|
"loss": 1.2671, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.8110636023516836, |
|
"grad_norm": 1.1646116509684428, |
|
"learning_rate": 2.098049580353273e-06, |
|
"loss": 1.2831, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.8123997862105826, |
|
"grad_norm": 1.1929244270961756, |
|
"learning_rate": 2.069544316175025e-06, |
|
"loss": 1.2631, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.8137359700694815, |
|
"grad_norm": 1.1495579309904203, |
|
"learning_rate": 2.041211656004781e-06, |
|
"loss": 1.2891, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.8150721539283805, |
|
"grad_norm": 1.158971184848035, |
|
"learning_rate": 2.0130522164944666e-06, |
|
"loss": 1.2543, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.8164083377872795, |
|
"grad_norm": 1.1993305810419228, |
|
"learning_rate": 1.985066610525904e-06, |
|
"loss": 1.3294, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.8177445216461785, |
|
"grad_norm": 1.1523443888711082, |
|
"learning_rate": 1.9572554471974723e-06, |
|
"loss": 1.2771, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.8190807055050775, |
|
"grad_norm": 1.1695216750777881, |
|
"learning_rate": 1.9296193318108723e-06, |
|
"loss": 1.2578, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.8204168893639765, |
|
"grad_norm": 1.1732642497713914, |
|
"learning_rate": 1.9021588658579249e-06, |
|
"loss": 1.2513, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.8217530732228755, |
|
"grad_norm": 1.1519213257119236, |
|
"learning_rate": 1.8748746470075029e-06, |
|
"loss": 1.2977, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.8230892570817745, |
|
"grad_norm": 1.1516668595048642, |
|
"learning_rate": 1.847767269092511e-06, |
|
"loss": 1.2321, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.8244254409406734, |
|
"grad_norm": 1.1909108579078609, |
|
"learning_rate": 1.820837322096961e-06, |
|
"loss": 1.2484, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.8257616247995724, |
|
"grad_norm": 1.101081523016669, |
|
"learning_rate": 1.7940853921431378e-06, |
|
"loss": 1.2575, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.8270978086584714, |
|
"grad_norm": 1.178074475938639, |
|
"learning_rate": 1.7675120614788367e-06, |
|
"loss": 1.2664, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.8284339925173704, |
|
"grad_norm": 1.1731584765176883, |
|
"learning_rate": 1.7411179084646879e-06, |
|
"loss": 1.2804, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.8297701763762694, |
|
"grad_norm": 1.1586731249421351, |
|
"learning_rate": 1.7149035075615795e-06, |
|
"loss": 1.2905, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.8311063602351684, |
|
"grad_norm": 1.169289922029439, |
|
"learning_rate": 1.6888694293181462e-06, |
|
"loss": 1.2836, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.8324425440940674, |
|
"grad_norm": 1.1719327462563847, |
|
"learning_rate": 1.6630162403583538e-06, |
|
"loss": 1.2513, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.8337787279529664, |
|
"grad_norm": 1.1435061474054447, |
|
"learning_rate": 1.637344503369167e-06, |
|
"loss": 1.2635, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.8351149118118654, |
|
"grad_norm": 1.1521864026183537, |
|
"learning_rate": 1.6118547770883031e-06, |
|
"loss": 1.3022, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.8364510956707643, |
|
"grad_norm": 1.2218598184249856, |
|
"learning_rate": 1.5865476162920658e-06, |
|
"loss": 1.293, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.8377872795296633, |
|
"grad_norm": 1.1461871951868958, |
|
"learning_rate": 1.5614235717832838e-06, |
|
"loss": 1.284, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.8391234633885623, |
|
"grad_norm": 1.2002653441412816, |
|
"learning_rate": 1.536483190379302e-06, |
|
"loss": 1.2608, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.8404596472474613, |
|
"grad_norm": 1.194888981517886, |
|
"learning_rate": 1.5117270149001061e-06, |
|
"loss": 1.2728, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.8417958311063602, |
|
"grad_norm": 1.1669886636546563, |
|
"learning_rate": 1.4871555841564889e-06, |
|
"loss": 1.2936, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.8431320149652592, |
|
"grad_norm": 1.178341085561249, |
|
"learning_rate": 1.4627694329383245e-06, |
|
"loss": 1.2737, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.8444681988241582, |
|
"grad_norm": 1.173822019467738, |
|
"learning_rate": 1.4385690920029394e-06, |
|
"loss": 1.2389, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.8458043826830572, |
|
"grad_norm": 1.172444929862991, |
|
"learning_rate": 1.4145550880635551e-06, |
|
"loss": 1.283, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.8471405665419561, |
|
"grad_norm": 1.157959236772995, |
|
"learning_rate": 1.3907279437778154e-06, |
|
"loss": 1.3089, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.8484767504008551, |
|
"grad_norm": 1.142956839312928, |
|
"learning_rate": 1.3670881777364276e-06, |
|
"loss": 1.2385, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.8498129342597541, |
|
"grad_norm": 1.1394377032150336, |
|
"learning_rate": 1.3436363044518685e-06, |
|
"loss": 1.3002, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.8511491181186531, |
|
"grad_norm": 1.1081112610690127, |
|
"learning_rate": 1.3203728343471766e-06, |
|
"loss": 1.2471, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.8524853019775521, |
|
"grad_norm": 1.1233094984645868, |
|
"learning_rate": 1.2972982737448582e-06, |
|
"loss": 1.2511, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.8538214858364511, |
|
"grad_norm": 1.168807245788649, |
|
"learning_rate": 1.27441312485586e-06, |
|
"loss": 1.2487, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.8551576696953501, |
|
"grad_norm": 1.2213102334394346, |
|
"learning_rate": 1.2517178857686318e-06, |
|
"loss": 1.3102, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.8551576696953501, |
|
"eval_loss": 1.278082013130188, |
|
"eval_runtime": 523.3237, |
|
"eval_samples_per_second": 25.311, |
|
"eval_steps_per_second": 3.164, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.8564938535542491, |
|
"grad_norm": 1.120564972312021, |
|
"learning_rate": 1.2292130504383037e-06, |
|
"loss": 1.2578, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.857830037413148, |
|
"grad_norm": 1.1580496245252512, |
|
"learning_rate": 1.2068991086759175e-06, |
|
"loss": 1.2983, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.859166221272047, |
|
"grad_norm": 1.1198656733420578, |
|
"learning_rate": 1.1847765461377757e-06, |
|
"loss": 1.2245, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.860502405130946, |
|
"grad_norm": 1.2004671948391095, |
|
"learning_rate": 1.1628458443148716e-06, |
|
"loss": 1.2737, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.861838588989845, |
|
"grad_norm": 1.114311359039494, |
|
"learning_rate": 1.1411074805223997e-06, |
|
"loss": 1.2583, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.863174772848744, |
|
"grad_norm": 1.1608634318847668, |
|
"learning_rate": 1.119561927889381e-06, |
|
"loss": 1.2543, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.864510956707643, |
|
"grad_norm": 1.178010917617475, |
|
"learning_rate": 1.0982096553483568e-06, |
|
"loss": 1.3171, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.865847140566542, |
|
"grad_norm": 1.1868881875439512, |
|
"learning_rate": 1.077051127625185e-06, |
|
"loss": 1.2985, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.867183324425441, |
|
"grad_norm": 1.1357672595216541, |
|
"learning_rate": 1.0560868052289253e-06, |
|
"loss": 1.2634, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.8685195082843399, |
|
"grad_norm": 1.171433935969695, |
|
"learning_rate": 1.0353171444418187e-06, |
|
"loss": 1.2759, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.8698556921432389, |
|
"grad_norm": 1.1224025445608299, |
|
"learning_rate": 1.014742597309346e-06, |
|
"loss": 1.267, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.8711918760021379, |
|
"grad_norm": 1.1874141914261405, |
|
"learning_rate": 9.943636116304068e-07, |
|
"loss": 1.2509, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.8725280598610369, |
|
"grad_norm": 1.1508324966317753, |
|
"learning_rate": 9.741806309475588e-07, |
|
"loss": 1.2647, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.8738642437199359, |
|
"grad_norm": 1.1671717510783461, |
|
"learning_rate": 9.541940945373718e-07, |
|
"loss": 1.2357, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.8752004275788349, |
|
"grad_norm": 1.1214376758027105, |
|
"learning_rate": 9.344044374008632e-07, |
|
"loss": 1.2598, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.8765366114377339, |
|
"grad_norm": 1.1745237243989533, |
|
"learning_rate": 9.148120902540281e-07, |
|
"loss": 1.238, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.8778727952966329, |
|
"grad_norm": 1.1566262573536532, |
|
"learning_rate": 8.954174795184756e-07, |
|
"loss": 1.2464, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.8792089791555318, |
|
"grad_norm": 1.158731873705362, |
|
"learning_rate": 8.762210273121363e-07, |
|
"loss": 1.2783, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.8805451630144308, |
|
"grad_norm": 1.1187300362715888, |
|
"learning_rate": 8.572231514400775e-07, |
|
"loss": 1.2692, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.8818813468733298, |
|
"grad_norm": 1.1820432928944034, |
|
"learning_rate": 8.384242653854146e-07, |
|
"loss": 1.3092, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.8832175307322288, |
|
"grad_norm": 1.1315388199619425, |
|
"learning_rate": 8.198247783003133e-07, |
|
"loss": 1.2581, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.8845537145911277, |
|
"grad_norm": 1.135539300633719, |
|
"learning_rate": 8.014250949970704e-07, |
|
"loss": 1.2726, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.8858898984500267, |
|
"grad_norm": 1.147749224885632, |
|
"learning_rate": 7.832256159393181e-07, |
|
"loss": 1.2426, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.8872260823089257, |
|
"grad_norm": 1.1681170491391923, |
|
"learning_rate": 7.652267372333056e-07, |
|
"loss": 1.2487, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.8885622661678247, |
|
"grad_norm": 1.1503659735195155, |
|
"learning_rate": 7.474288506192662e-07, |
|
"loss": 1.2898, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.8898984500267236, |
|
"grad_norm": 1.205235234079246, |
|
"learning_rate": 7.298323434629095e-07, |
|
"loss": 1.3094, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.8912346338856226, |
|
"grad_norm": 1.1557951328425964, |
|
"learning_rate": 7.124375987469767e-07, |
|
"loss": 1.2326, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.8925708177445216, |
|
"grad_norm": 1.1334618687477662, |
|
"learning_rate": 6.952449950629103e-07, |
|
"loss": 1.2679, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.8939070016034206, |
|
"grad_norm": 1.14695924636925, |
|
"learning_rate": 6.782549066026145e-07, |
|
"loss": 1.3017, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.8952431854623196, |
|
"grad_norm": 1.147346259057884, |
|
"learning_rate": 6.614677031503059e-07, |
|
"loss": 1.2705, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.8965793693212186, |
|
"grad_norm": 1.1656452627988076, |
|
"learning_rate": 6.448837500744742e-07, |
|
"loss": 1.2475, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.8979155531801176, |
|
"grad_norm": 1.1415605090418526, |
|
"learning_rate": 6.285034083199216e-07, |
|
"loss": 1.273, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.8992517370390166, |
|
"grad_norm": 1.1324906184307248, |
|
"learning_rate": 6.123270343999132e-07, |
|
"loss": 1.2817, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.9005879208979155, |
|
"grad_norm": 1.1353501329617395, |
|
"learning_rate": 5.963549803884128e-07, |
|
"loss": 1.2734, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.9019241047568145, |
|
"grad_norm": 1.2284321012966415, |
|
"learning_rate": 5.80587593912425e-07, |
|
"loss": 1.276, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.9032602886157135, |
|
"grad_norm": 1.1122913016411495, |
|
"learning_rate": 5.650252181444215e-07, |
|
"loss": 1.2524, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.9045964724746125, |
|
"grad_norm": 1.1797725331714115, |
|
"learning_rate": 5.496681917948809e-07, |
|
"loss": 1.2336, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.9059326563335115, |
|
"grad_norm": 1.159426377293814, |
|
"learning_rate": 5.345168491049124e-07, |
|
"loss": 1.242, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.9072688401924105, |
|
"grad_norm": 1.1645037741741007, |
|
"learning_rate": 5.195715198389784e-07, |
|
"loss": 1.3, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.9086050240513095, |
|
"grad_norm": 1.1480424103028632, |
|
"learning_rate": 5.048325292777279e-07, |
|
"loss": 1.2675, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.9086050240513095, |
|
"eval_loss": 1.2765072584152222, |
|
"eval_runtime": 523.9483, |
|
"eval_samples_per_second": 25.281, |
|
"eval_steps_per_second": 3.161, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.9099412079102085, |
|
"grad_norm": 1.1694724018883438, |
|
"learning_rate": 4.903001982109002e-07, |
|
"loss": 1.2881, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.9112773917691074, |
|
"grad_norm": 1.149736399066079, |
|
"learning_rate": 4.759748429303579e-07, |
|
"loss": 1.2993, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.9126135756280064, |
|
"grad_norm": 1.1394564788621853, |
|
"learning_rate": 4.618567752231962e-07, |
|
"loss": 1.2486, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.9139497594869054, |
|
"grad_norm": 1.1538669258463885, |
|
"learning_rate": 4.479463023649555e-07, |
|
"loss": 1.2618, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.9152859433458044, |
|
"grad_norm": 1.1791184429266262, |
|
"learning_rate": 4.342437271129396e-07, |
|
"loss": 1.2583, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.9166221272047034, |
|
"grad_norm": 1.1326513415617325, |
|
"learning_rate": 4.207493476996205e-07, |
|
"loss": 1.2408, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.9179583110636024, |
|
"grad_norm": 1.1222236287232992, |
|
"learning_rate": 4.074634578261516e-07, |
|
"loss": 1.2379, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.9192944949225014, |
|
"grad_norm": 1.1253440717297474, |
|
"learning_rate": 3.9438634665597165e-07, |
|
"loss": 1.2616, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.9206306787814004, |
|
"grad_norm": 1.1510397092293756, |
|
"learning_rate": 3.815182988085153e-07, |
|
"loss": 1.2685, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.9219668626402993, |
|
"grad_norm": 1.1879818159354434, |
|
"learning_rate": 3.6885959435301156e-07, |
|
"loss": 1.2365, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.9233030464991983, |
|
"grad_norm": 1.1515610393737254, |
|
"learning_rate": 3.564105088023984e-07, |
|
"loss": 1.2501, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.9246392303580973, |
|
"grad_norm": 1.1434666832922344, |
|
"learning_rate": 3.441713131073177e-07, |
|
"loss": 1.2604, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.9259754142169963, |
|
"grad_norm": 1.2537303724295639, |
|
"learning_rate": 3.3214227365022e-07, |
|
"loss": 1.2954, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.9273115980758952, |
|
"grad_norm": 1.1324300056445142, |
|
"learning_rate": 3.2032365223957253e-07, |
|
"loss": 1.2553, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.9286477819347942, |
|
"grad_norm": 1.1623455952898252, |
|
"learning_rate": 3.0871570610415124e-07, |
|
"loss": 1.2578, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.9299839657936932, |
|
"grad_norm": 1.1816451537266295, |
|
"learning_rate": 2.97318687887449e-07, |
|
"loss": 1.2512, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.9313201496525922, |
|
"grad_norm": 1.1900717072648277, |
|
"learning_rate": 2.861328456421775e-07, |
|
"loss": 1.275, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.9326563335114911, |
|
"grad_norm": 1.1275013968941647, |
|
"learning_rate": 2.7515842282486274e-07, |
|
"loss": 1.2653, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.9339925173703901, |
|
"grad_norm": 1.1553817658383019, |
|
"learning_rate": 2.6439565829055267e-07, |
|
"loss": 1.2572, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.9353287012292891, |
|
"grad_norm": 1.1368557006469668, |
|
"learning_rate": 2.5384478628761586e-07, |
|
"loss": 1.263, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.9366648850881881, |
|
"grad_norm": 1.1505999909872684, |
|
"learning_rate": 2.435060364526387e-07, |
|
"loss": 1.2504, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.9380010689470871, |
|
"grad_norm": 1.158821621163466, |
|
"learning_rate": 2.3337963380543726e-07, |
|
"loss": 1.2609, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.9393372528059861, |
|
"grad_norm": 1.1699141096438583, |
|
"learning_rate": 2.234657987441502e-07, |
|
"loss": 1.2396, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.9406734366648851, |
|
"grad_norm": 1.1460631269115045, |
|
"learning_rate": 2.1376474704044693e-07, |
|
"loss": 1.2503, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.9420096205237841, |
|
"grad_norm": 1.1207186839548462, |
|
"learning_rate": 2.0427668983483361e-07, |
|
"loss": 1.246, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.943345804382683, |
|
"grad_norm": 1.1268601862855379, |
|
"learning_rate": 1.9500183363205029e-07, |
|
"loss": 1.2367, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.944681988241582, |
|
"grad_norm": 1.1458026011683493, |
|
"learning_rate": 1.85940380296582e-07, |
|
"loss": 1.2363, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.946018172100481, |
|
"grad_norm": 1.126828597537144, |
|
"learning_rate": 1.7709252704826485e-07, |
|
"loss": 1.2771, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.94735435595938, |
|
"grad_norm": 1.159174309108889, |
|
"learning_rate": 1.6845846645799025e-07, |
|
"loss": 1.2846, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.948690539818279, |
|
"grad_norm": 1.144716451177437, |
|
"learning_rate": 1.6003838644351843e-07, |
|
"loss": 1.2711, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.950026723677178, |
|
"grad_norm": 1.2115861792213716, |
|
"learning_rate": 1.5183247026538505e-07, |
|
"loss": 1.2658, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.951362907536077, |
|
"grad_norm": 1.1183503620411726, |
|
"learning_rate": 1.4384089652291544e-07, |
|
"loss": 1.2687, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.952699091394976, |
|
"grad_norm": 1.1482250830428042, |
|
"learning_rate": 1.3606383915033217e-07, |
|
"loss": 1.2471, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.9540352752538749, |
|
"grad_norm": 1.1465209714509406, |
|
"learning_rate": 1.2850146741297586e-07, |
|
"loss": 1.2776, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.9553714591127739, |
|
"grad_norm": 1.1496013307054171, |
|
"learning_rate": 1.2115394590361595e-07, |
|
"loss": 1.2509, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.9567076429716729, |
|
"grad_norm": 1.1607454473870606, |
|
"learning_rate": 1.1402143453887238e-07, |
|
"loss": 1.2887, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.9580438268305719, |
|
"grad_norm": 1.169980738830098, |
|
"learning_rate": 1.0710408855573173e-07, |
|
"loss": 1.2912, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.9593800106894709, |
|
"grad_norm": 1.131820961749528, |
|
"learning_rate": 1.0040205850817109e-07, |
|
"loss": 1.2405, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.9607161945483699, |
|
"grad_norm": 1.1643105760459518, |
|
"learning_rate": 9.391549026387948e-08, |
|
"loss": 1.2845, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.9620523784072689, |
|
"grad_norm": 1.1545962184900476, |
|
"learning_rate": 8.764452500108711e-08, |
|
"loss": 1.2978, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.9620523784072689, |
|
"eval_loss": 1.275867223739624, |
|
"eval_runtime": 523.9419, |
|
"eval_samples_per_second": 25.281, |
|
"eval_steps_per_second": 3.161, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.9633885622661679, |
|
"grad_norm": 1.1593993216116114, |
|
"learning_rate": 8.158929920548342e-08, |
|
"loss": 1.2391, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.9647247461250668, |
|
"grad_norm": 1.1553562511912872, |
|
"learning_rate": 7.574994466725827e-08, |
|
"loss": 1.2495, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.9660609299839658, |
|
"grad_norm": 1.177204123718693, |
|
"learning_rate": 7.012658847822428e-08, |
|
"loss": 1.2905, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.9673971138428648, |
|
"grad_norm": 1.1296251792868242, |
|
"learning_rate": 6.471935302905574e-08, |
|
"loss": 1.2825, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.9687332977017638, |
|
"grad_norm": 1.1818608486775184, |
|
"learning_rate": 5.952835600662288e-08, |
|
"loss": 1.2327, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.9700694815606627, |
|
"grad_norm": 1.1499671622397054, |
|
"learning_rate": 5.455371039143176e-08, |
|
"loss": 1.2795, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.9714056654195617, |
|
"grad_norm": 1.1349247593874663, |
|
"learning_rate": 4.9795524455164e-08, |
|
"loss": 1.2644, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.9727418492784607, |
|
"grad_norm": 1.1311236324206604, |
|
"learning_rate": 4.52539017583209e-08, |
|
"loss": 1.2385, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.9740780331373597, |
|
"grad_norm": 1.1732868443594786, |
|
"learning_rate": 4.0928941147966306e-08, |
|
"loss": 1.2908, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.9754142169962586, |
|
"grad_norm": 1.1619085838301122, |
|
"learning_rate": 3.682073675558395e-08, |
|
"loss": 1.278, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.9767504008551576, |
|
"grad_norm": 1.1539440287056817, |
|
"learning_rate": 3.2929377995019054e-08, |
|
"loss": 1.2386, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.9780865847140566, |
|
"grad_norm": 1.1170137832632987, |
|
"learning_rate": 2.9254949560535428e-08, |
|
"loss": 1.2589, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.9794227685729556, |
|
"grad_norm": 1.1423040789553527, |
|
"learning_rate": 2.5797531424976983e-08, |
|
"loss": 1.2896, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.9807589524318546, |
|
"grad_norm": 1.1680612690612637, |
|
"learning_rate": 2.2557198838019102e-08, |
|
"loss": 1.2404, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.9820951362907536, |
|
"grad_norm": 1.114027530261495, |
|
"learning_rate": 1.9534022324536606e-08, |
|
"loss": 1.2802, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.9834313201496526, |
|
"grad_norm": 1.1636519127752734, |
|
"learning_rate": 1.6728067683066117e-08, |
|
"loss": 1.2701, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.9847675040085516, |
|
"grad_norm": 1.1842605033839753, |
|
"learning_rate": 1.4139395984377191e-08, |
|
"loss": 1.2858, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.9861036878674505, |
|
"grad_norm": 1.1431541821634774, |
|
"learning_rate": 1.1768063570136712e-08, |
|
"loss": 1.2662, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.9874398717263495, |
|
"grad_norm": 1.1466055572304683, |
|
"learning_rate": 9.614122051689878e-09, |
|
"loss": 1.2773, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.9887760555852485, |
|
"grad_norm": 1.180031565283721, |
|
"learning_rate": 7.67761830893443e-09, |
|
"loss": 1.2449, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.9901122394441475, |
|
"grad_norm": 1.1492882566559037, |
|
"learning_rate": 5.958594489295921e-09, |
|
"loss": 1.2521, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.9914484233030465, |
|
"grad_norm": 1.1745365142675883, |
|
"learning_rate": 4.457088006816213e-09, |
|
"loss": 1.2739, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.9927846071619455, |
|
"grad_norm": 1.1360910701337543, |
|
"learning_rate": 3.173131541338581e-09, |
|
"loss": 1.2512, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.9941207910208445, |
|
"grad_norm": 1.1874182237790705, |
|
"learning_rate": 2.1067530377927305e-09, |
|
"loss": 1.2587, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.9954569748797435, |
|
"grad_norm": 1.1682623889051527, |
|
"learning_rate": 1.2579757055897202e-09, |
|
"loss": 1.2834, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.9967931587386424, |
|
"grad_norm": 1.1413958118113652, |
|
"learning_rate": 6.268180181157047e-10, |
|
"loss": 1.2642, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.9981293425975414, |
|
"grad_norm": 1.1586136947062726, |
|
"learning_rate": 2.1329371232892138e-10, |
|
"loss": 1.211, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.9994655264564404, |
|
"grad_norm": 1.196251896759201, |
|
"learning_rate": 1.7411788463261858e-11, |
|
"loss": 1.2948, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 3742, |
|
"total_flos": 99431605075968.0, |
|
"train_loss": 1.3181866798281097, |
|
"train_runtime": 27158.0723, |
|
"train_samples_per_second": 4.409, |
|
"train_steps_per_second": 0.138 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 3742, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 99431605075968.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|