{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1563,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006397952655150352,
      "grad_norm": 19.596042230632786,
      "learning_rate": 1.2738853503184715e-07,
      "loss": 1.9254,
      "step": 1
    },
    {
      "epoch": 0.003198976327575176,
      "grad_norm": 19.320989757503053,
      "learning_rate": 6.369426751592357e-07,
      "loss": 1.8985,
      "step": 5
    },
    {
      "epoch": 0.006397952655150352,
      "grad_norm": 10.48203671169689,
      "learning_rate": 1.2738853503184715e-06,
      "loss": 1.8034,
      "step": 10
    },
    {
      "epoch": 0.009596928982725527,
      "grad_norm": 8.35295489819408,
      "learning_rate": 1.9108280254777074e-06,
      "loss": 1.6227,
      "step": 15
    },
    {
      "epoch": 0.012795905310300703,
      "grad_norm": 6.366704733146609,
      "learning_rate": 2.547770700636943e-06,
      "loss": 1.5116,
      "step": 20
    },
    {
      "epoch": 0.01599488163787588,
      "grad_norm": 5.982203446460906,
      "learning_rate": 3.1847133757961785e-06,
      "loss": 1.4995,
      "step": 25
    },
    {
      "epoch": 0.019193857965451054,
      "grad_norm": 5.819962966473566,
      "learning_rate": 3.821656050955415e-06,
      "loss": 1.4278,
      "step": 30
    },
    {
      "epoch": 0.022392834293026232,
      "grad_norm": 6.029586520004324,
      "learning_rate": 4.45859872611465e-06,
      "loss": 1.4064,
      "step": 35
    },
    {
      "epoch": 0.025591810620601407,
      "grad_norm": 6.155880824723257,
      "learning_rate": 5.095541401273886e-06,
      "loss": 1.4693,
      "step": 40
    },
    {
      "epoch": 0.028790786948176585,
      "grad_norm": 6.183603254054322,
      "learning_rate": 5.732484076433121e-06,
      "loss": 1.4451,
      "step": 45
    },
    {
      "epoch": 0.03198976327575176,
      "grad_norm": 6.0739281893318,
      "learning_rate": 6.369426751592357e-06,
      "loss": 1.4503,
      "step": 50
    },
    {
      "epoch": 0.035188739603326934,
      "grad_norm": 5.666112734989155,
      "learning_rate": 7.006369426751593e-06,
      "loss": 1.4281,
      "step": 55
    },
    {
      "epoch": 0.03838771593090211,
      "grad_norm": 6.215484436208105,
      "learning_rate": 7.64331210191083e-06,
      "loss": 1.4641,
      "step": 60
    },
    {
      "epoch": 0.04158669225847729,
      "grad_norm": 5.91078945631238,
      "learning_rate": 8.280254777070064e-06,
      "loss": 1.4672,
      "step": 65
    },
    {
      "epoch": 0.044785668586052464,
      "grad_norm": 5.661599095084434,
      "learning_rate": 8.9171974522293e-06,
      "loss": 1.4572,
      "step": 70
    },
    {
      "epoch": 0.04798464491362764,
      "grad_norm": 6.399453722973696,
      "learning_rate": 9.554140127388536e-06,
      "loss": 1.4781,
      "step": 75
    },
    {
      "epoch": 0.05118362124120281,
      "grad_norm": 6.272431960129444,
      "learning_rate": 1.0191082802547772e-05,
      "loss": 1.4413,
      "step": 80
    },
    {
      "epoch": 0.05438259756877799,
      "grad_norm": 5.809472680223379,
      "learning_rate": 1.0828025477707008e-05,
      "loss": 1.4618,
      "step": 85
    },
    {
      "epoch": 0.05758157389635317,
      "grad_norm": 5.8795539576606854,
      "learning_rate": 1.1464968152866242e-05,
      "loss": 1.4612,
      "step": 90
    },
    {
      "epoch": 0.060780550223928344,
      "grad_norm": 6.353272916584155,
      "learning_rate": 1.2101910828025478e-05,
      "loss": 1.4663,
      "step": 95
    },
    {
      "epoch": 0.06397952655150352,
      "grad_norm": 5.8194943048004975,
      "learning_rate": 1.2738853503184714e-05,
      "loss": 1.4683,
      "step": 100
    },
    {
      "epoch": 0.0671785028790787,
      "grad_norm": 5.748599336291074,
      "learning_rate": 1.337579617834395e-05,
      "loss": 1.4635,
      "step": 105
    },
    {
      "epoch": 0.07037747920665387,
      "grad_norm": 5.446443880371817,
      "learning_rate": 1.4012738853503186e-05,
      "loss": 1.4833,
      "step": 110
    },
    {
      "epoch": 0.07357645553422905,
      "grad_norm": 5.846022334061706,
      "learning_rate": 1.464968152866242e-05,
      "loss": 1.4841,
      "step": 115
    },
    {
      "epoch": 0.07677543186180422,
      "grad_norm": 5.8983508370893585,
      "learning_rate": 1.528662420382166e-05,
      "loss": 1.5009,
      "step": 120
    },
    {
      "epoch": 0.0799744081893794,
      "grad_norm": 5.580182994348059,
      "learning_rate": 1.5923566878980894e-05,
      "loss": 1.5282,
      "step": 125
    },
    {
      "epoch": 0.08317338451695458,
      "grad_norm": 7.00216131929732,
      "learning_rate": 1.6560509554140128e-05,
      "loss": 1.5138,
      "step": 130
    },
    {
      "epoch": 0.08637236084452975,
      "grad_norm": 5.395228647689322,
      "learning_rate": 1.7197452229299365e-05,
      "loss": 1.5527,
      "step": 135
    },
    {
      "epoch": 0.08957133717210493,
      "grad_norm": 6.4381395397364924,
      "learning_rate": 1.78343949044586e-05,
      "loss": 1.5202,
      "step": 140
    },
    {
      "epoch": 0.0927703134996801,
      "grad_norm": 7.298084151887251,
      "learning_rate": 1.8471337579617837e-05,
      "loss": 1.5144,
      "step": 145
    },
    {
      "epoch": 0.09596928982725528,
      "grad_norm": 5.820918003601176,
      "learning_rate": 1.910828025477707e-05,
      "loss": 1.4815,
      "step": 150
    },
    {
      "epoch": 0.09916826615483046,
      "grad_norm": 5.261025002970225,
      "learning_rate": 1.9745222929936306e-05,
      "loss": 1.5365,
      "step": 155
    },
    {
      "epoch": 0.10236724248240563,
      "grad_norm": 5.822872418153333,
      "learning_rate": 1.9999775332635076e-05,
      "loss": 1.495,
      "step": 160
    },
    {
      "epoch": 0.10556621880998081,
      "grad_norm": 5.990794687517629,
      "learning_rate": 1.999840240196313e-05,
      "loss": 1.5071,
      "step": 165
    },
    {
      "epoch": 0.10876519513755598,
      "grad_norm": 5.613973413489411,
      "learning_rate": 1.9995781526975738e-05,
      "loss": 1.5319,
      "step": 170
    },
    {
      "epoch": 0.11196417146513116,
      "grad_norm": 6.584077464363587,
      "learning_rate": 1.9991913034795768e-05,
      "loss": 1.5861,
      "step": 175
    },
    {
      "epoch": 0.11516314779270634,
      "grad_norm": 5.848500499551572,
      "learning_rate": 1.9986797408266636e-05,
      "loss": 1.5,
      "step": 180
    },
    {
      "epoch": 0.1183621241202815,
      "grad_norm": 5.564555003715003,
      "learning_rate": 1.9980435285892056e-05,
      "loss": 1.5481,
      "step": 185
    },
    {
      "epoch": 0.12156110044785669,
      "grad_norm": 6.028564963206183,
      "learning_rate": 1.9972827461756335e-05,
      "loss": 1.578,
      "step": 190
    },
    {
      "epoch": 0.12476007677543186,
      "grad_norm": 5.926547243421822,
      "learning_rate": 1.9963974885425267e-05,
      "loss": 1.5555,
      "step": 195
    },
    {
      "epoch": 0.12795905310300704,
      "grad_norm": 5.966985587917129,
      "learning_rate": 1.9953878661827603e-05,
      "loss": 1.5288,
      "step": 200
    },
    {
      "epoch": 0.13115802943058222,
      "grad_norm": 6.010709482959101,
      "learning_rate": 1.994254005111715e-05,
      "loss": 1.5387,
      "step": 205
    },
    {
      "epoch": 0.1343570057581574,
      "grad_norm": 6.628579244426843,
      "learning_rate": 1.992996046851548e-05,
      "loss": 1.5874,
      "step": 210
    },
    {
      "epoch": 0.13755598208573255,
      "grad_norm": 6.675308221300031,
      "learning_rate": 1.9916141484135297e-05,
      "loss": 1.5012,
      "step": 215
    },
    {
      "epoch": 0.14075495841330773,
      "grad_norm": 6.371390174895763,
      "learning_rate": 1.990108482278446e-05,
      "loss": 1.5824,
      "step": 220
    },
    {
      "epoch": 0.14395393474088292,
      "grad_norm": 5.64251865194712,
      "learning_rate": 1.9884792363750684e-05,
      "loss": 1.5324,
      "step": 225
    },
    {
      "epoch": 0.1471529110684581,
      "grad_norm": 6.410849237786982,
      "learning_rate": 1.9867266140567024e-05,
      "loss": 1.5654,
      "step": 230
    },
    {
      "epoch": 0.15035188739603328,
      "grad_norm": 7.066430464872344,
      "learning_rate": 1.9848508340758014e-05,
      "loss": 1.5612,
      "step": 235
    },
    {
      "epoch": 0.15355086372360843,
      "grad_norm": 6.525361010219877,
      "learning_rate": 1.9828521305566647e-05,
      "loss": 1.5963,
      "step": 240
    },
    {
      "epoch": 0.15674984005118361,
      "grad_norm": 27.981136788668703,
      "learning_rate": 1.9807307529662175e-05,
      "loss": 1.6072,
      "step": 245
    },
    {
      "epoch": 0.1599488163787588,
      "grad_norm": 14.850012678039711,
      "learning_rate": 1.9784869660828708e-05,
      "loss": 1.6261,
      "step": 250
    },
    {
      "epoch": 0.16314779270633398,
      "grad_norm": 9.017990600325932,
      "learning_rate": 1.9761210499634754e-05,
      "loss": 1.5862,
      "step": 255
    },
    {
      "epoch": 0.16634676903390916,
      "grad_norm": 6.009881994757128,
      "learning_rate": 1.973633299908365e-05,
      "loss": 1.5476,
      "step": 260
    },
    {
      "epoch": 0.1695457453614843,
      "grad_norm": 7.772193560607181,
      "learning_rate": 1.9710240264245005e-05,
      "loss": 1.5568,
      "step": 265
    },
    {
      "epoch": 0.1727447216890595,
      "grad_norm": 6.0046356700151575,
      "learning_rate": 1.9682935551867132e-05,
      "loss": 1.5652,
      "step": 270
    },
    {
      "epoch": 0.17594369801663468,
      "grad_norm": 7.259518897268176,
      "learning_rate": 1.9654422269970545e-05,
      "loss": 1.5152,
      "step": 275
    },
    {
      "epoch": 0.17914267434420986,
      "grad_norm": 5.849933171807072,
      "learning_rate": 1.9624703977422624e-05,
      "loss": 1.5411,
      "step": 280
    },
    {
      "epoch": 0.18234165067178504,
      "grad_norm": 6.900313411361811,
      "learning_rate": 1.959378438349338e-05,
      "loss": 1.5514,
      "step": 285
    },
    {
      "epoch": 0.1855406269993602,
      "grad_norm": 5.976493878932652,
      "learning_rate": 1.956166734739251e-05,
      "loss": 1.5491,
      "step": 290
    },
    {
      "epoch": 0.18873960332693537,
      "grad_norm": 6.839976739709417,
      "learning_rate": 1.95283568777877e-05,
      "loss": 1.5808,
      "step": 295
    },
    {
      "epoch": 0.19193857965451055,
      "grad_norm": 23.36116832288107,
      "learning_rate": 1.9493857132304295e-05,
      "loss": 1.5103,
      "step": 300
    },
    {
      "epoch": 0.19513755598208574,
      "grad_norm": 5.907471018056248,
      "learning_rate": 1.9458172417006347e-05,
      "loss": 1.5491,
      "step": 305
    },
    {
      "epoch": 0.19833653230966092,
      "grad_norm": 5.462387821543604,
      "learning_rate": 1.942130718585919e-05,
      "loss": 1.4817,
      "step": 310
    },
    {
      "epoch": 0.20153550863723607,
      "grad_norm": 5.445829839166847,
      "learning_rate": 1.938326604017349e-05,
      "loss": 1.5581,
      "step": 315
    },
    {
      "epoch": 0.20473448496481125,
      "grad_norm": 6.17671143618823,
      "learning_rate": 1.9344053728030952e-05,
      "loss": 1.4874,
      "step": 320
    },
    {
      "epoch": 0.20793346129238643,
      "grad_norm": 5.454424295381491,
      "learning_rate": 1.9303675143691683e-05,
      "loss": 1.5252,
      "step": 325
    },
    {
      "epoch": 0.21113243761996162,
      "grad_norm": 5.820669131673871,
      "learning_rate": 1.9262135326983326e-05,
      "loss": 1.5198,
      "step": 330
    },
    {
      "epoch": 0.2143314139475368,
      "grad_norm": 5.912753814527551,
      "learning_rate": 1.921943946267201e-05,
      "loss": 1.587,
      "step": 335
    },
    {
      "epoch": 0.21753039027511195,
      "grad_norm": 7.487537515670172,
      "learning_rate": 1.9175592879815217e-05,
      "loss": 1.5657,
      "step": 340
    },
    {
      "epoch": 0.22072936660268713,
      "grad_norm": 5.853172355021576,
      "learning_rate": 1.9130601051096655e-05,
      "loss": 1.4938,
      "step": 345
    },
    {
      "epoch": 0.22392834293026231,
      "grad_norm": 5.709149350487184,
      "learning_rate": 1.9084469592143154e-05,
      "loss": 1.5328,
      "step": 350
    },
    {
      "epoch": 0.2271273192578375,
      "grad_norm": 7.575317890582474,
      "learning_rate": 1.9037204260823788e-05,
      "loss": 1.4863,
      "step": 355
    },
    {
      "epoch": 0.23032629558541268,
      "grad_norm": 6.140971076057151,
      "learning_rate": 1.89888109565312e-05,
      "loss": 1.5559,
      "step": 360
    },
    {
      "epoch": 0.23352527191298783,
      "grad_norm": 6.7321671777819745,
      "learning_rate": 1.893929571944527e-05,
      "loss": 1.4776,
      "step": 365
    },
    {
      "epoch": 0.236724248240563,
      "grad_norm": 5.673506383692647,
      "learning_rate": 1.8888664729779205e-05,
      "loss": 1.5505,
      "step": 370
    },
    {
      "epoch": 0.2399232245681382,
      "grad_norm": 6.7549167620013755,
      "learning_rate": 1.883692430700818e-05,
      "loss": 1.5136,
      "step": 375
    },
    {
      "epoch": 0.24312220089571338,
      "grad_norm": 5.657108216640427,
      "learning_rate": 1.8784080909080568e-05,
      "loss": 1.4313,
      "step": 380
    },
    {
      "epoch": 0.24632117722328856,
      "grad_norm": 5.706894107517054,
      "learning_rate": 1.8730141131611882e-05,
      "loss": 1.5071,
      "step": 385
    },
    {
      "epoch": 0.2495201535508637,
      "grad_norm": 5.492551858258093,
      "learning_rate": 1.867511170706157e-05,
      "loss": 1.4764,
      "step": 390
    },
    {
      "epoch": 0.2527191298784389,
      "grad_norm": 17.304144834459898,
      "learning_rate": 1.861899950389269e-05,
      "loss": 1.5008,
      "step": 395
    },
    {
      "epoch": 0.2559181062060141,
      "grad_norm": 6.5102974464825705,
      "learning_rate": 1.856181152571463e-05,
      "loss": 1.5829,
      "step": 400
    },
    {
      "epoch": 0.2591170825335892,
      "grad_norm": 5.948826356074738,
      "learning_rate": 1.850355491040897e-05,
      "loss": 1.5518,
      "step": 405
    },
    {
      "epoch": 0.26231605886116444,
      "grad_norm": 5.971106058355866,
      "learning_rate": 1.8444236929238556e-05,
      "loss": 1.5757,
      "step": 410
    },
    {
      "epoch": 0.2655150351887396,
      "grad_norm": 5.8679997341744885,
      "learning_rate": 1.8383864985939944e-05,
      "loss": 1.5092,
      "step": 415
    },
    {
      "epoch": 0.2687140115163148,
      "grad_norm": 5.6678931253995275,
      "learning_rate": 1.8322446615799317e-05,
      "loss": 1.5914,
      "step": 420
    },
    {
      "epoch": 0.27191298784388995,
      "grad_norm": 5.73166539396345,
      "learning_rate": 1.825998948471197e-05,
      "loss": 1.5175,
      "step": 425
    },
    {
      "epoch": 0.2751119641714651,
      "grad_norm": 6.505274488122578,
      "learning_rate": 1.819650138822548e-05,
      "loss": 1.5308,
      "step": 430
    },
    {
      "epoch": 0.2783109404990403,
      "grad_norm": 6.1831675736655365,
      "learning_rate": 1.8131990250566733e-05,
      "loss": 1.4676,
      "step": 435
    },
    {
      "epoch": 0.28150991682661547,
      "grad_norm": 5.898782759058662,
      "learning_rate": 1.8066464123652857e-05,
      "loss": 1.4624,
      "step": 440
    },
    {
      "epoch": 0.2847088931541907,
      "grad_norm": 6.0743511266401455,
      "learning_rate": 1.7999931186086225e-05,
      "loss": 1.497,
      "step": 445
    },
    {
      "epoch": 0.28790786948176583,
      "grad_norm": 6.439702879767435,
      "learning_rate": 1.793239974213364e-05,
      "loss": 1.4987,
      "step": 450
    },
    {
      "epoch": 0.291106845809341,
      "grad_norm": 5.652845631220745,
      "learning_rate": 1.786387822068987e-05,
      "loss": 1.4731,
      "step": 455
    },
    {
      "epoch": 0.2943058221369162,
      "grad_norm": 6.32893452010987,
      "learning_rate": 1.7794375174225565e-05,
      "loss": 1.5686,
      "step": 460
    },
    {
      "epoch": 0.29750479846449135,
      "grad_norm": 6.832606404525833,
      "learning_rate": 1.7723899277719815e-05,
      "loss": 1.5616,
      "step": 465
    },
    {
      "epoch": 0.30070377479206656,
      "grad_norm": 6.376151858944639,
      "learning_rate": 1.7652459327577377e-05,
      "loss": 1.5418,
      "step": 470
    },
    {
      "epoch": 0.3039027511196417,
      "grad_norm": 5.100702734889225,
      "learning_rate": 1.7580064240530746e-05,
      "loss": 1.4835,
      "step": 475
    },
    {
      "epoch": 0.30710172744721687,
      "grad_norm": 5.99984161717874,
      "learning_rate": 1.7506723052527243e-05,
      "loss": 1.5465,
      "step": 480
    },
    {
      "epoch": 0.3103007037747921,
      "grad_norm": 6.053485182766263,
      "learning_rate": 1.7432444917601183e-05,
      "loss": 1.5017,
      "step": 485
    },
    {
      "epoch": 0.31349968010236723,
      "grad_norm": 5.6631171211389875,
      "learning_rate": 1.735723910673132e-05,
      "loss": 1.4774,
      "step": 490
    },
    {
      "epoch": 0.31669865642994244,
      "grad_norm": 5.584804357150484,
      "learning_rate": 1.7281115006683687e-05,
      "loss": 1.4851,
      "step": 495
    },
    {
      "epoch": 0.3198976327575176,
      "grad_norm": 6.342021402984585,
      "learning_rate": 1.7204082118840035e-05,
      "loss": 1.4825,
      "step": 500
    },
    {
      "epoch": 0.32309660908509275,
      "grad_norm": 5.579391027670954,
      "learning_rate": 1.712615005801185e-05,
      "loss": 1.472,
      "step": 505
    },
    {
      "epoch": 0.32629558541266795,
      "grad_norm": 5.394518012610248,
      "learning_rate": 1.704732855124037e-05,
      "loss": 1.4966,
      "step": 510
    },
    {
      "epoch": 0.3294945617402431,
      "grad_norm": 5.980215145783222,
      "learning_rate": 1.6967627436582445e-05,
      "loss": 1.4075,
      "step": 515
    },
    {
      "epoch": 0.3326935380678183,
      "grad_norm": 5.808270410776529,
      "learning_rate": 1.6887056661882644e-05,
      "loss": 1.4771,
      "step": 520
    },
    {
      "epoch": 0.33589251439539347,
      "grad_norm": 7.152355179367295,
      "learning_rate": 1.6805626283531592e-05,
      "loss": 1.4722,
      "step": 525
    },
    {
      "epoch": 0.3390914907229686,
      "grad_norm": 6.939335206870156,
      "learning_rate": 1.6723346465210815e-05,
      "loss": 1.5168,
      "step": 530
    },
    {
      "epoch": 0.34229046705054383,
      "grad_norm": 5.552572033728716,
      "learning_rate": 1.6640227476624163e-05,
      "loss": 1.5027,
      "step": 535
    },
    {
      "epoch": 0.345489443378119,
      "grad_norm": 5.492220368766092,
      "learning_rate": 1.655627969221598e-05,
      "loss": 1.4868,
      "step": 540
    },
    {
      "epoch": 0.3486884197056942,
      "grad_norm": 5.994401409437794,
      "learning_rate": 1.6471513589876247e-05,
      "loss": 1.4977,
      "step": 545
    },
    {
      "epoch": 0.35188739603326935,
      "grad_norm": 5.997814866621087,
      "learning_rate": 1.638593974963278e-05,
      "loss": 1.4191,
      "step": 550
    },
    {
      "epoch": 0.3550863723608445,
      "grad_norm": 5.829666218564288,
      "learning_rate": 1.6299568852330703e-05,
      "loss": 1.4315,
      "step": 555
    },
    {
      "epoch": 0.3582853486884197,
      "grad_norm": 5.61328215827393,
      "learning_rate": 1.6212411678299306e-05,
      "loss": 1.5058,
      "step": 560
    },
    {
      "epoch": 0.36148432501599487,
      "grad_norm": 5.327552350734913,
      "learning_rate": 1.612447910600652e-05,
      "loss": 1.4737,
      "step": 565
    },
    {
      "epoch": 0.3646833013435701,
      "grad_norm": 5.69563503987059,
      "learning_rate": 1.6035782110701125e-05,
      "loss": 1.4831,
      "step": 570
    },
    {
      "epoch": 0.36788227767114523,
      "grad_norm": 5.10594196156975,
      "learning_rate": 1.594633176304287e-05,
      "loss": 1.5169,
      "step": 575
    },
    {
      "epoch": 0.3710812539987204,
      "grad_norm": 6.293737941786795,
      "learning_rate": 1.5856139227720714e-05,
      "loss": 1.5339,
      "step": 580
    },
    {
      "epoch": 0.3742802303262956,
      "grad_norm": 5.995452546156939,
      "learning_rate": 1.5765215762059304e-05,
      "loss": 1.4663,
      "step": 585
    },
    {
      "epoch": 0.37747920665387075,
      "grad_norm": 6.0346764414373775,
      "learning_rate": 1.5673572714613886e-05,
      "loss": 1.4116,
      "step": 590
    },
    {
      "epoch": 0.38067818298144596,
      "grad_norm": 6.020240628226962,
      "learning_rate": 1.558122152375387e-05,
      "loss": 1.5372,
      "step": 595
    },
    {
      "epoch": 0.3838771593090211,
      "grad_norm": 6.736733472792626,
      "learning_rate": 1.548817371623513e-05,
      "loss": 1.4495,
      "step": 600
    },
    {
      "epoch": 0.38707613563659626,
      "grad_norm": 22.11767334193536,
      "learning_rate": 1.539444090576132e-05,
      "loss": 1.4788,
      "step": 605
    },
    {
      "epoch": 0.3902751119641715,
      "grad_norm": 5.323640782312709,
      "learning_rate": 1.5300034791534297e-05,
      "loss": 1.4732,
      "step": 610
    },
    {
      "epoch": 0.3934740882917466,
      "grad_norm": 5.3327689435484835,
      "learning_rate": 1.520496715679391e-05,
      "loss": 1.5138,
      "step": 615
    },
    {
      "epoch": 0.39667306461932184,
      "grad_norm": 5.7610778542310985,
      "learning_rate": 1.5109249867347276e-05,
      "loss": 1.4399,
      "step": 620
    },
    {
      "epoch": 0.399872040946897,
      "grad_norm": 5.399701474557813,
      "learning_rate": 1.5012894870087751e-05,
      "loss": 1.3912,
      "step": 625
    },
    {
      "epoch": 0.40307101727447214,
      "grad_norm": 5.480600811287772,
      "learning_rate": 1.4915914191503792e-05,
      "loss": 1.4894,
      "step": 630
    },
    {
      "epoch": 0.40626999360204735,
      "grad_norm": 5.441970253275154,
      "learning_rate": 1.4818319936177885e-05,
      "loss": 1.4936,
      "step": 635
    },
    {
      "epoch": 0.4094689699296225,
      "grad_norm": 6.296326244418185,
      "learning_rate": 1.4720124285275703e-05,
      "loss": 1.4706,
      "step": 640
    },
    {
      "epoch": 0.4126679462571977,
      "grad_norm": 5.893181005202456,
      "learning_rate": 1.4621339495025731e-05,
      "loss": 1.4504,
      "step": 645
    },
    {
      "epoch": 0.41586692258477287,
      "grad_norm": 5.276496357891099,
      "learning_rate": 1.4521977895189518e-05,
      "loss": 1.5316,
      "step": 650
    },
    {
      "epoch": 0.419065898912348,
      "grad_norm": 5.355395453864716,
      "learning_rate": 1.4422051887522735e-05,
      "loss": 1.4182,
      "step": 655
    },
    {
      "epoch": 0.42226487523992323,
      "grad_norm": 5.750131591812706,
      "learning_rate": 1.4321573944227254e-05,
      "loss": 1.4162,
      "step": 660
    },
    {
      "epoch": 0.4254638515674984,
      "grad_norm": 5.6704431690976715,
      "learning_rate": 1.4220556606394465e-05,
      "loss": 1.4115,
      "step": 665
    },
    {
      "epoch": 0.4286628278950736,
      "grad_norm": 5.594312503139955,
      "learning_rate": 1.4119012482439929e-05,
      "loss": 1.4828,
      "step": 670
    },
    {
      "epoch": 0.43186180422264875,
      "grad_norm": 5.501849792689381,
      "learning_rate": 1.4016954246529697e-05,
      "loss": 1.4409,
      "step": 675
    },
    {
      "epoch": 0.4350607805502239,
      "grad_norm": 5.694559255173223,
      "learning_rate": 1.3914394636998374e-05,
      "loss": 1.4415,
      "step": 680
    },
    {
      "epoch": 0.4382597568777991,
      "grad_norm": 5.7273106418478745,
      "learning_rate": 1.3811346454759211e-05,
      "loss": 1.4229,
      "step": 685
    },
    {
      "epoch": 0.44145873320537427,
      "grad_norm": 5.072530356298814,
      "learning_rate": 1.3707822561706336e-05,
      "loss": 1.4428,
      "step": 690
    },
    {
      "epoch": 0.4446577095329495,
      "grad_norm": 5.158739577606318,
      "learning_rate": 1.3603835879109442e-05,
      "loss": 1.409,
      "step": 695
    },
    {
      "epoch": 0.44785668586052463,
      "grad_norm": 6.061233482330587,
      "learning_rate": 1.349939938600099e-05,
      "loss": 1.3806,
      "step": 700
    },
    {
      "epoch": 0.4510556621880998,
      "grad_norm": 5.707331290817792,
      "learning_rate": 1.3394526117556277e-05,
      "loss": 1.3426,
      "step": 705
    },
    {
      "epoch": 0.454254638515675,
      "grad_norm": 5.398255971340936,
      "learning_rate": 1.3289229163466421e-05,
      "loss": 1.3656,
      "step": 710
    },
    {
      "epoch": 0.45745361484325014,
      "grad_norm": 5.2746408411360965,
      "learning_rate": 1.3183521666304611e-05,
      "loss": 1.3791,
      "step": 715
    },
    {
      "epoch": 0.46065259117082535,
      "grad_norm": 6.106490023435743,
      "learning_rate": 1.3077416819885707e-05,
      "loss": 1.4025,
      "step": 720
    },
    {
      "epoch": 0.4638515674984005,
      "grad_norm": 9.033703923623596,
      "learning_rate": 1.297092786761946e-05,
      "loss": 1.3972,
      "step": 725
    },
    {
      "epoch": 0.46705054382597566,
      "grad_norm": 5.203446436783156,
      "learning_rate": 1.2864068100857565e-05,
      "loss": 1.4624,
      "step": 730
    },
    {
      "epoch": 0.47024952015355087,
      "grad_norm": 5.783521080834399,
      "learning_rate": 1.2756850857234686e-05,
      "loss": 1.3224,
      "step": 735
    },
    {
      "epoch": 0.473448496481126,
      "grad_norm": 5.1601982559203075,
      "learning_rate": 1.2649289519003739e-05,
      "loss": 1.4001,
      "step": 740
    },
    {
      "epoch": 0.47664747280870123,
      "grad_norm": 6.162375731265669,
      "learning_rate": 1.2541397511365584e-05,
      "loss": 1.3971,
      "step": 745
    },
    {
      "epoch": 0.4798464491362764,
      "grad_norm": 5.44449446975744,
      "learning_rate": 1.2433188300793399e-05,
      "loss": 1.3678,
      "step": 750
    },
    {
      "epoch": 0.48304542546385154,
      "grad_norm": 5.742001125711013,
      "learning_rate": 1.2324675393351818e-05,
      "loss": 1.3902,
      "step": 755
    },
    {
      "epoch": 0.48624440179142675,
      "grad_norm": 6.588679903273224,
      "learning_rate": 1.221587233301123e-05,
      "loss": 1.3967,
      "step": 760
    },
    {
      "epoch": 0.4894433781190019,
      "grad_norm": 4.749871970824734,
      "learning_rate": 1.2106792699957264e-05,
      "loss": 1.3323,
      "step": 765
    },
    {
      "epoch": 0.4926423544465771,
      "grad_norm": 5.241600068506743,
      "learning_rate": 1.1997450108895807e-05,
      "loss": 1.3341,
      "step": 770
    },
    {
      "epoch": 0.49584133077415227,
      "grad_norm": 6.669804856668056,
      "learning_rate": 1.1887858207353678e-05,
      "loss": 1.4422,
      "step": 775
    },
    {
      "epoch": 0.4990403071017274,
      "grad_norm": 6.267377222844033,
      "learning_rate": 1.1778030673975227e-05,
      "loss": 1.3734,
      "step": 780
    },
    {
      "epoch": 0.5022392834293026,
      "grad_norm": 5.387079839641074,
      "learning_rate": 1.166798121681505e-05,
      "loss": 1.3142,
      "step": 785
    },
    {
      "epoch": 0.5054382597568778,
      "grad_norm": 5.257949564475725,
      "learning_rate": 1.1557723571627016e-05,
      "loss": 1.412,
      "step": 790
    },
    {
      "epoch": 0.508637236084453,
      "grad_norm": 6.286120326860924,
      "learning_rate": 1.1447271500149849e-05,
      "loss": 1.3923,
      "step": 795
    },
    {
      "epoch": 0.5118362124120281,
      "grad_norm": 4.8250523003825325,
      "learning_rate": 1.1336638788389473e-05,
      "loss": 1.3933,
      "step": 800
    },
    {
      "epoch": 0.5150351887396033,
      "grad_norm": 5.309455908621953,
      "learning_rate": 1.122583924489832e-05,
      "loss": 1.3419,
      "step": 805
    },
    {
      "epoch": 0.5182341650671785,
      "grad_norm": 5.4051345865376845,
      "learning_rate": 1.1114886699051803e-05,
      "loss": 1.3372,
      "step": 810
    },
    {
      "epoch": 0.5214331413947537,
      "grad_norm": 5.4776811813134385,
      "learning_rate": 1.1003794999322246e-05,
      "loss": 1.4053,
      "step": 815
    },
    {
      "epoch": 0.5246321177223289,
      "grad_norm": 6.028813751150013,
      "learning_rate": 1.089257801155037e-05,
      "loss": 1.2717,
      "step": 820
    },
    {
      "epoch": 0.527831094049904,
      "grad_norm": 5.6893885646734725,
      "learning_rate": 1.0781249617214642e-05,
      "loss": 1.3647,
      "step": 825
    },
    {
      "epoch": 0.5310300703774792,
      "grad_norm": 7.845081962784706,
      "learning_rate": 1.0669823711698668e-05,
      "loss": 1.3118,
      "step": 830
    },
    {
      "epoch": 0.5342290467050543,
      "grad_norm": 5.974302837219022,
      "learning_rate": 1.0558314202556866e-05,
      "loss": 1.3196,
      "step": 835
    },
    {
      "epoch": 0.5374280230326296,
      "grad_norm": 6.2079916851397945,
      "learning_rate": 1.0446735007778563e-05,
      "loss": 1.3387,
      "step": 840
    },
    {
      "epoch": 0.5406269993602048,
      "grad_norm": 5.710214267091366,
      "learning_rate": 1.0335100054050877e-05,
      "loss": 1.3401,
      "step": 845
    },
    {
      "epoch": 0.5438259756877799,
      "grad_norm": 5.245166732528631,
      "learning_rate": 1.0223423275020431e-05,
      "loss": 1.3537,
      "step": 850
    },
    {
      "epoch": 0.5470249520153551,
      "grad_norm": 5.47319375038005,
      "learning_rate": 1.0111718609554254e-05,
      "loss": 1.3702,
      "step": 855
    },
    {
      "epoch": 0.5502239283429302,
      "grad_norm": 5.563044035160415,
      "learning_rate": 1e-05,
      "loss": 1.3042,
      "step": 860
    },
    {
      "epoch": 0.5534229046705055,
      "grad_norm": 5.32197020170691,
      "learning_rate": 9.888281390445747e-06,
      "loss": 1.3702,
      "step": 865
    },
    {
      "epoch": 0.5566218809980806,
      "grad_norm": 4.9805710370306935,
      "learning_rate": 9.776576724979572e-06,
      "loss": 1.3279,
      "step": 870
    },
    {
      "epoch": 0.5598208573256558,
      "grad_norm": 5.517527208389846,
      "learning_rate": 9.664899945949128e-06,
      "loss": 1.3328,
      "step": 875
    },
    {
      "epoch": 0.5630198336532309,
      "grad_norm": 5.7898575142466955,
      "learning_rate": 9.553264992221442e-06,
      "loss": 1.3181,
      "step": 880
    },
    {
      "epoch": 0.5662188099808061,
      "grad_norm": 5.573656915945259,
      "learning_rate": 9.441685797443138e-06,
      "loss": 1.2587,
      "step": 885
    },
    {
      "epoch": 0.5694177863083814,
      "grad_norm": 5.276092647196823,
      "learning_rate": 9.330176288301332e-06,
      "loss": 1.2959,
      "step": 890
    },
    {
      "epoch": 0.5726167626359565,
      "grad_norm": 5.343314007371001,
      "learning_rate": 9.21875038278536e-06,
      "loss": 1.352,
      "step": 895
    },
    {
      "epoch": 0.5758157389635317,
      "grad_norm": 5.6426664531262425,
      "learning_rate": 9.107421988449632e-06,
      "loss": 1.292,
      "step": 900
    },
    {
      "epoch": 0.5790147152911068,
      "grad_norm": 5.640737666551658,
      "learning_rate": 8.996205000677758e-06,
      "loss": 1.3715,
      "step": 905
    },
    {
      "epoch": 0.582213691618682,
      "grad_norm": 5.9612820213670945,
      "learning_rate": 8.885113300948199e-06,
      "loss": 1.2402,
      "step": 910
    },
    {
      "epoch": 0.5854126679462572,
      "grad_norm": 5.163441313443216,
      "learning_rate": 8.774160755101685e-06,
      "loss": 1.2863,
      "step": 915
    },
    {
      "epoch": 0.5886116442738324,
      "grad_norm": 6.145271777985554,
      "learning_rate": 8.663361211610529e-06,
      "loss": 1.3555,
      "step": 920
    },
    {
      "epoch": 0.5918106206014075,
      "grad_norm": 5.4418317157557405,
      "learning_rate": 8.552728499850153e-06,
      "loss": 1.3657,
      "step": 925
    },
    {
      "epoch": 0.5950095969289827,
      "grad_norm": 5.467010402521484,
      "learning_rate": 8.442276428372986e-06,
      "loss": 1.2939,
      "step": 930
    },
    {
      "epoch": 0.5982085732565579,
      "grad_norm": 5.375591925949063,
      "learning_rate": 8.332018783184952e-06,
      "loss": 1.3256,
      "step": 935
    },
    {
      "epoch": 0.6014075495841331,
      "grad_norm": 5.669641606911128,
      "learning_rate": 8.221969326024776e-06,
      "loss": 1.2655,
      "step": 940
    },
    {
      "epoch": 0.6046065259117083,
      "grad_norm": 5.107615949778542,
      "learning_rate": 8.112141792646326e-06,
      "loss": 1.2487,
      "step": 945
    },
    {
      "epoch": 0.6078055022392834,
      "grad_norm": 5.917321765317,
      "learning_rate": 8.002549891104196e-06,
      "loss": 1.3936,
      "step": 950
    },
    {
      "epoch": 0.6110044785668586,
      "grad_norm": 5.405584578362852,
      "learning_rate": 7.89320730004274e-06,
      "loss": 1.3401,
      "step": 955
    },
    {
      "epoch": 0.6142034548944337,
      "grad_norm": 5.217914222418895,
      "learning_rate": 7.784127666988774e-06,
      "loss": 1.2734,
      "step": 960
    },
    {
      "epoch": 0.617402431222009,
      "grad_norm": 5.41366943915775,
      "learning_rate": 7.675324606648187e-06,
      "loss": 1.3577,
      "step": 965
    },
    {
      "epoch": 0.6206014075495841,
      "grad_norm": 5.613307742444461,
      "learning_rate": 7.566811699206604e-06,
      "loss": 1.2305,
      "step": 970
    },
    {
      "epoch": 0.6238003838771593,
      "grad_norm": 6.161058161659451,
      "learning_rate": 7.458602488634416e-06,
      "loss": 1.2949,
      "step": 975
    },
    {
      "epoch": 0.6269993602047345,
      "grad_norm": 5.8815820932534075,
      "learning_rate": 7.350710480996266e-06,
      "loss": 1.3216,
      "step": 980
    },
    {
      "epoch": 0.6301983365323096,
      "grad_norm": 5.8377871027181625,
      "learning_rate": 7.243149142765317e-06,
      "loss": 1.2566,
      "step": 985
    },
    {
      "epoch": 0.6333973128598849,
      "grad_norm": 6.007065423720387,
      "learning_rate": 7.135931899142438e-06,
      "loss": 1.1938,
      "step": 990
    },
    {
      "epoch": 0.63659628918746,
      "grad_norm": 5.665836143013,
      "learning_rate": 7.029072132380543e-06,
      "loss": 1.2106,
      "step": 995
    },
    {
      "epoch": 0.6397952655150352,
      "grad_norm": 5.976229117153174,
      "learning_rate": 6.922583180114299e-06,
      "loss": 1.2839,
      "step": 1000
    },
    {
      "epoch": 0.6429942418426103,
      "grad_norm": 6.016028155992671,
      "learning_rate": 6.816478333695392e-06,
      "loss": 1.2725,
      "step": 1005
    },
    {
      "epoch": 0.6461932181701855,
      "grad_norm": 5.83046161596007,
      "learning_rate": 6.710770836533584e-06,
      "loss": 1.2883,
      "step": 1010
    },
    {
      "epoch": 0.6493921944977608,
      "grad_norm": 5.2835178908360625,
      "learning_rate": 6.605473882443725e-06,
      "loss": 1.286,
      "step": 1015
    },
    {
      "epoch": 0.6525911708253359,
      "grad_norm": 5.405038918816778,
      "learning_rate": 6.5006006139990115e-06,
      "loss": 1.2575,
      "step": 1020
    },
    {
      "epoch": 0.6557901471529111,
      "grad_norm": 5.345367266134731,
      "learning_rate": 6.396164120890562e-06,
      "loss": 1.3175,
      "step": 1025
    },
    {
      "epoch": 0.6589891234804862,
      "grad_norm": 5.327195492441683,
      "learning_rate": 6.292177438293665e-06,
      "loss": 1.2368,
      "step": 1030
    },
    {
      "epoch": 0.6621880998080614,
      "grad_norm": 5.429158143917414,
      "learning_rate": 6.188653545240793e-06,
      "loss": 1.2618,
      "step": 1035
    },
    {
      "epoch": 0.6653870761356366,
      "grad_norm": 9.442946173046769,
      "learning_rate": 6.085605363001628e-06,
      "loss": 1.2635,
      "step": 1040
    },
    {
      "epoch": 0.6685860524632118,
      "grad_norm": 5.461550264996468,
      "learning_rate": 5.983045753470308e-06,
      "loss": 1.2292,
      "step": 1045
    },
    {
      "epoch": 0.6717850287907869,
      "grad_norm": 5.433865692907909,
      "learning_rate": 5.880987517560075e-06,
      "loss": 1.2533,
      "step": 1050
    },
    {
      "epoch": 0.6749840051183621,
      "grad_norm": 5.360876312233277,
      "learning_rate": 5.779443393605536e-06,
      "loss": 1.2302,
      "step": 1055
    },
    {
      "epoch": 0.6781829814459372,
      "grad_norm": 5.597034291592108,
      "learning_rate": 5.678426055772746e-06,
      "loss": 1.2621,
      "step": 1060
    },
    {
      "epoch": 0.6813819577735125,
      "grad_norm": 5.364563467233611,
      "learning_rate": 5.577948112477271e-06,
      "loss": 1.2081,
      "step": 1065
    },
    {
      "epoch": 0.6845809341010877,
      "grad_norm": 5.479671671356431,
      "learning_rate": 5.478022104810483e-06,
      "loss": 1.2343,
      "step": 1070
    },
    {
      "epoch": 0.6877799104286628,
      "grad_norm": 5.42208058407067,
      "learning_rate": 5.378660504974271e-06,
      "loss": 1.1894,
      "step": 1075
    },
    {
      "epoch": 0.690978886756238,
      "grad_norm": 5.318236870909474,
      "learning_rate": 5.2798757147242986e-06,
      "loss": 1.2133,
      "step": 1080
    },
    {
      "epoch": 0.6941778630838131,
      "grad_norm": 7.743258878956998,
      "learning_rate": 5.1816800638221176e-06,
      "loss": 1.2861,
      "step": 1085
    },
    {
      "epoch": 0.6973768394113884,
      "grad_norm": 5.071086020945074,
      "learning_rate": 5.084085808496211e-06,
      "loss": 1.1672,
      "step": 1090
    },
    {
      "epoch": 0.7005758157389635,
      "grad_norm": 5.938216173933617,
      "learning_rate": 4.987105129912252e-06,
      "loss": 1.2525,
      "step": 1095
    },
    {
      "epoch": 0.7037747920665387,
      "grad_norm": 5.14560931343335,
      "learning_rate": 4.890750132652724e-06,
      "loss": 1.2527,
      "step": 1100
    },
    {
      "epoch": 0.7069737683941139,
      "grad_norm": 5.824953341068154,
      "learning_rate": 4.795032843206091e-06,
      "loss": 1.2699,
      "step": 1105
    },
    {
      "epoch": 0.710172744721689,
      "grad_norm": 5.823852748558207,
      "learning_rate": 4.699965208465702e-06,
      "loss": 1.2413,
      "step": 1110
    },
    {
      "epoch": 0.7133717210492643,
      "grad_norm": 6.159332363176924,
      "learning_rate": 4.605559094238681e-06,
      "loss": 1.2449,
      "step": 1115
    },
    {
      "epoch": 0.7165706973768394,
      "grad_norm": 5.491076270333848,
      "learning_rate": 4.511826283764872e-06,
      "loss": 1.2375,
      "step": 1120
    },
    {
      "epoch": 0.7197696737044146,
      "grad_norm": 5.998578981936084,
      "learning_rate": 4.418778476246132e-06,
      "loss": 1.2537,
      "step": 1125
    },
    {
      "epoch": 0.7229686500319897,
      "grad_norm": 5.387463063273167,
      "learning_rate": 4.326427285386118e-06,
      "loss": 1.1868,
      "step": 1130
    },
    {
      "epoch": 0.7261676263595649,
      "grad_norm": 5.769559852193052,
      "learning_rate": 4.234784237940705e-06,
      "loss": 1.2264,
      "step": 1135
    },
    {
      "epoch": 0.7293666026871402,
      "grad_norm": 5.68912164929325,
      "learning_rate": 4.143860772279287e-06,
      "loss": 1.1669,
      "step": 1140
    },
    {
      "epoch": 0.7325655790147153,
      "grad_norm": 6.169259924681037,
      "learning_rate": 4.053668236957135e-06,
      "loss": 1.188,
      "step": 1145
    },
    {
      "epoch": 0.7357645553422905,
      "grad_norm": 5.775273351173123,
      "learning_rate": 3.964217889298882e-06,
      "loss": 1.2167,
      "step": 1150
    },
    {
      "epoch": 0.7389635316698656,
      "grad_norm": 6.66491466045373,
      "learning_rate": 3.875520893993482e-06,
      "loss": 1.2407,
      "step": 1155
    },
    {
      "epoch": 0.7421625079974408,
      "grad_norm": 5.4584893738231814,
      "learning_rate": 3.787588321700697e-06,
      "loss": 1.1773,
      "step": 1160
    },
    {
      "epoch": 0.745361484325016,
      "grad_norm": 5.495714202965497,
      "learning_rate": 3.7004311476692977e-06,
      "loss": 1.263,
      "step": 1165
    },
    {
      "epoch": 0.7485604606525912,
      "grad_norm": 5.367093577033402,
      "learning_rate": 3.6140602503672217e-06,
      "loss": 1.1816,
      "step": 1170
    },
    {
      "epoch": 0.7517594369801663,
      "grad_norm": 5.929562727552283,
      "learning_rate": 3.528486410123758e-06,
      "loss": 1.287,
      "step": 1175
    },
    {
      "epoch": 0.7549584133077415,
      "grad_norm": 5.16200286713459,
      "learning_rate": 3.443720307784022e-06,
      "loss": 1.1518,
      "step": 1180
    },
    {
      "epoch": 0.7581573896353166,
      "grad_norm": 5.617358144040829,
      "learning_rate": 3.359772523375837e-06,
      "loss": 1.126,
      "step": 1185
    },
    {
      "epoch": 0.7613563659628919,
      "grad_norm": 5.274480247725789,
      "learning_rate": 3.276653534789185e-06,
      "loss": 1.1226,
      "step": 1190
    },
    {
      "epoch": 0.7645553422904671,
      "grad_norm": 5.087961244651565,
      "learning_rate": 3.1943737164684094e-06,
      "loss": 1.1843,
      "step": 1195
    },
    {
      "epoch": 0.7677543186180422,
      "grad_norm": 5.165537670382836,
      "learning_rate": 3.11294333811736e-06,
      "loss": 1.2111,
      "step": 1200
    },
    {
      "epoch": 0.7709532949456174,
      "grad_norm": 5.141713986869319,
      "learning_rate": 3.032372563417556e-06,
      "loss": 1.1842,
      "step": 1205
    },
    {
      "epoch": 0.7741522712731925,
      "grad_norm": 5.0964117361663455,
      "learning_rate": 2.952671448759631e-06,
      "loss": 1.2247,
      "step": 1210
    },
    {
      "epoch": 0.7773512476007678,
      "grad_norm": 5.3243216832528715,
      "learning_rate": 2.8738499419881517e-06,
      "loss": 1.1762,
      "step": 1215
    },
    {
      "epoch": 0.780550223928343,
      "grad_norm": 4.851797186739384,
      "learning_rate": 2.795917881159973e-06,
      "loss": 1.1502,
      "step": 1220
    },
    {
      "epoch": 0.7837492002559181,
      "grad_norm": 5.496612395138414,
      "learning_rate": 2.718884993316311e-06,
      "loss": 1.1772,
      "step": 1225
    },
    {
      "epoch": 0.7869481765834933,
      "grad_norm": 5.442736560179264,
      "learning_rate": 2.642760893268684e-06,
      "loss": 1.1553,
      "step": 1230
    },
    {
      "epoch": 0.7901471529110684,
      "grad_norm": 5.17508160986582,
      "learning_rate": 2.5675550823988184e-06,
      "loss": 1.156,
      "step": 1235
    },
    {
      "epoch": 0.7933461292386437,
      "grad_norm": 5.549015221247355,
      "learning_rate": 2.493276947472756e-06,
      "loss": 1.1863,
      "step": 1240
    },
    {
      "epoch": 0.7965451055662188,
      "grad_norm": 5.482924269402519,
      "learning_rate": 2.4199357594692564e-06,
      "loss": 1.1136,
      "step": 1245
    },
    {
      "epoch": 0.799744081893794,
      "grad_norm": 6.019191778587459,
      "learning_rate": 2.347540672422627e-06,
      "loss": 1.1587,
      "step": 1250
    },
    {
      "epoch": 0.8029430582213691,
      "grad_norm": 5.49048264574569,
      "learning_rate": 2.2761007222801866e-06,
      "loss": 1.1607,
      "step": 1255
    },
    {
      "epoch": 0.8061420345489443,
      "grad_norm": 5.16885210872782,
      "learning_rate": 2.2056248257744383e-06,
      "loss": 1.1408,
      "step": 1260
    },
    {
      "epoch": 0.8093410108765196,
      "grad_norm": 5.432012432577872,
      "learning_rate": 2.136121779310132e-06,
      "loss": 1.2092,
      "step": 1265
    },
    {
      "epoch": 0.8125399872040947,
      "grad_norm": 5.406927709627139,
      "learning_rate": 2.067600257866358e-06,
      "loss": 1.1709,
      "step": 1270
    },
    {
      "epoch": 0.8157389635316699,
      "grad_norm": 5.141882407100849,
      "learning_rate": 2.000068813913777e-06,
      "loss": 1.0669,
      "step": 1275
    },
    {
      "epoch": 0.818937939859245,
      "grad_norm": 5.499076743202356,
      "learning_rate": 1.933535876347141e-06,
      "loss": 1.1749,
      "step": 1280
    },
    {
      "epoch": 0.8221369161868202,
      "grad_norm": 5.475323870607504,
      "learning_rate": 1.8680097494332682e-06,
      "loss": 1.0934,
      "step": 1285
    },
    {
      "epoch": 0.8253358925143954,
      "grad_norm": 5.391123200327997,
      "learning_rate": 1.8034986117745245e-06,
      "loss": 1.1402,
      "step": 1290
    },
    {
      "epoch": 0.8285348688419706,
      "grad_norm": 5.348163069842104,
      "learning_rate": 1.7400105152880331e-06,
      "loss": 1.1806,
      "step": 1295
    },
    {
      "epoch": 0.8317338451695457,
      "grad_norm": 5.699253380097164,
      "learning_rate": 1.6775533842006853e-06,
      "loss": 1.0805,
      "step": 1300
    },
    {
      "epoch": 0.8349328214971209,
      "grad_norm": 6.0098839122086005,
      "learning_rate": 1.6161350140600606e-06,
      "loss": 1.0664,
      "step": 1305
    },
    {
      "epoch": 0.838131797824696,
      "grad_norm": 5.119178144887819,
      "learning_rate": 1.555763070761448e-06,
      "loss": 1.16,
      "step": 1310
    },
    {
      "epoch": 0.8413307741522713,
      "grad_norm": 5.82644856476954,
      "learning_rate": 1.496445089591031e-06,
      "loss": 1.2315,
      "step": 1315
    },
    {
      "epoch": 0.8445297504798465,
      "grad_norm": 5.272508975962535,
      "learning_rate": 1.4381884742853723e-06,
      "loss": 1.1552,
      "step": 1320
    },
    {
      "epoch": 0.8477287268074216,
      "grad_norm": 5.636426941111563,
      "learning_rate": 1.381000496107313e-06,
      "loss": 1.1839,
      "step": 1325
    },
    {
      "epoch": 0.8509277031349968,
      "grad_norm": 5.298864794368035,
      "learning_rate": 1.3248882929384321e-06,
      "loss": 1.1189,
      "step": 1330
    },
    {
      "epoch": 0.8541266794625719,
      "grad_norm": 6.334207459793715,
      "learning_rate": 1.2698588683881185e-06,
      "loss": 1.1156,
      "step": 1335
    },
    {
      "epoch": 0.8573256557901472,
      "grad_norm": 5.085204432451678,
      "learning_rate": 1.215919090919434e-06,
      "loss": 1.1514,
      "step": 1340
    },
    {
      "epoch": 0.8605246321177223,
      "grad_norm": 5.519657129601866,
      "learning_rate": 1.1630756929918218e-06,
      "loss": 1.1383,
      "step": 1345
    },
    {
      "epoch": 0.8637236084452975,
      "grad_norm": 5.64198104430077,
      "learning_rate": 1.111335270220798e-06,
      "loss": 1.2014,
      "step": 1350
    },
    {
      "epoch": 0.8669225847728727,
      "grad_norm": 5.639338014653528,
      "learning_rate": 1.060704280554733e-06,
      "loss": 1.1162,
      "step": 1355
    },
    {
      "epoch": 0.8701215611004478,
      "grad_norm": 6.2590061059224755,
      "learning_rate": 1.0111890434688009e-06,
      "loss": 1.1873,
      "step": 1360
    },
    {
      "epoch": 0.8733205374280231,
      "grad_norm": 5.764812667567324,
      "learning_rate": 9.62795739176212e-07,
      "loss": 1.1753,
      "step": 1365
    },
    {
      "epoch": 0.8765195137555982,
      "grad_norm": 5.468694265932519,
      "learning_rate": 9.155304078568495e-07,
      "loss": 1.1844,
      "step": 1370
    },
    {
      "epoch": 0.8797184900831734,
      "grad_norm": 6.144081430233088,
      "learning_rate": 8.693989489033494e-07,
      "loss": 1.223,
      "step": 1375
    },
    {
      "epoch": 0.8829174664107485,
      "grad_norm": 5.329648133115913,
      "learning_rate": 8.244071201847826e-07,
      "loss": 1.0919,
      "step": 1380
    },
    {
      "epoch": 0.8861164427383237,
      "grad_norm": 5.392201352239966,
      "learning_rate": 7.805605373279934e-07,
      "loss": 1.1712,
      "step": 1385
    },
    {
      "epoch": 0.889315419065899,
      "grad_norm": 6.36606850342023,
      "learning_rate": 7.378646730166783e-07,
      "loss": 1.0977,
      "step": 1390
    },
    {
      "epoch": 0.8925143953934741,
      "grad_norm": 5.577850513924324,
      "learning_rate": 6.963248563083203e-07,
      "loss": 1.1191,
      "step": 1395
    },
    {
      "epoch": 0.8957133717210493,
      "grad_norm": 5.78598403542479,
      "learning_rate": 6.559462719690501e-07,
      "loss": 1.1134,
      "step": 1400
    },
    {
      "epoch": 0.8989123480486244,
      "grad_norm": 5.3934506662058626,
      "learning_rate": 6.167339598265109e-07,
      "loss": 1.1181,
      "step": 1405
    },
    {
      "epoch": 0.9021113243761996,
      "grad_norm": 5.236856333816423,
      "learning_rate": 5.78692814140811e-07,
      "loss": 1.1359,
      "step": 1410
    },
    {
      "epoch": 0.9053103007037748,
      "grad_norm": 5.224339666624607,
      "learning_rate": 5.418275829936537e-07,
      "loss": 1.079,
      "step": 1415
    },
    {
      "epoch": 0.90850927703135,
      "grad_norm": 5.997958861887249,
      "learning_rate": 5.06142867695708e-07,
      "loss": 1.1306,
      "step": 1420
    },
    {
      "epoch": 0.9117082533589251,
      "grad_norm": 7.257431339690822,
      "learning_rate": 4.716431222122997e-07,
      "loss": 1.202,
      "step": 1425
    },
    {
      "epoch": 0.9149072296865003,
      "grad_norm": 5.409318506244401,
      "learning_rate": 4.3833265260749157e-07,
      "loss": 1.1247,
      "step": 1430
    },
    {
      "epoch": 0.9181062060140754,
      "grad_norm": 5.66599799044067,
      "learning_rate": 4.062156165066211e-07,
      "loss": 1.2072,
      "step": 1435
    },
    {
      "epoch": 0.9213051823416507,
      "grad_norm": 5.1071218474615785,
      "learning_rate": 3.752960225773772e-07,
      "loss": 1.1526,
      "step": 1440
    },
    {
      "epoch": 0.9245041586692259,
      "grad_norm": 5.612505718218031,
      "learning_rate": 3.4557773002945607e-07,
      "loss": 1.1799,
      "step": 1445
    },
    {
      "epoch": 0.927703134996801,
      "grad_norm": 5.569758076359398,
      "learning_rate": 3.170644481328711e-07,
      "loss": 1.1338,
      "step": 1450
    },
    {
      "epoch": 0.9309021113243762,
      "grad_norm": 5.150978620117567,
      "learning_rate": 2.8975973575499526e-07,
      "loss": 1.0794,
      "step": 1455
    },
    {
      "epoch": 0.9341010876519513,
      "grad_norm": 5.318105176276369,
      "learning_rate": 2.636670009163522e-07,
      "loss": 1.1089,
      "step": 1460
    },
    {
      "epoch": 0.9373000639795266,
      "grad_norm": 5.272467269890987,
      "learning_rate": 2.3878950036524963e-07,
      "loss": 1.0252,
      "step": 1465
    },
    {
      "epoch": 0.9404990403071017,
      "grad_norm": 5.374330955578694,
      "learning_rate": 2.1513033917129334e-07,
      "loss": 1.1165,
      "step": 1470
    },
    {
      "epoch": 0.9436980166346769,
      "grad_norm": 5.901734563944482,
      "learning_rate": 1.9269247033782744e-07,
      "loss": 1.0798,
      "step": 1475
    },
    {
      "epoch": 0.946896992962252,
      "grad_norm": 5.725259152435928,
      "learning_rate": 1.7147869443335463e-07,
      "loss": 1.0879,
      "step": 1480
    },
    {
      "epoch": 0.9500959692898272,
      "grad_norm": 6.531843733416842,
      "learning_rate": 1.5149165924199016e-07,
      "loss": 1.1457,
      "step": 1485
    },
    {
      "epoch": 0.9532949456174025,
      "grad_norm": 5.482485131721133,
      "learning_rate": 1.3273385943297746e-07,
      "loss": 1.0818,
      "step": 1490
    },
    {
      "epoch": 0.9564939219449776,
      "grad_norm": 5.710529768741672,
      "learning_rate": 1.1520763624931597e-07,
      "loss": 1.0723,
      "step": 1495
    },
    {
      "epoch": 0.9596928982725528,
      "grad_norm": 6.2511499196111195,
      "learning_rate": 9.891517721554499e-08,
      "loss": 1.2099,
      "step": 1500
    },
    {
      "epoch": 0.9628918746001279,
      "grad_norm": 6.188073654950074,
      "learning_rate": 8.385851586470318e-08,
      "loss": 1.1173,
      "step": 1505
    },
    {
      "epoch": 0.9660908509277031,
      "grad_norm": 5.170348272609539,
      "learning_rate": 7.003953148452036e-08,
      "loss": 1.1465,
      "step": 1510
    },
    {
      "epoch": 0.9692898272552783,
      "grad_norm": 5.49895966704058,
      "learning_rate": 5.745994888285311e-08,
      "loss": 1.0866,
      "step": 1515
    },
    {
      "epoch": 0.9724888035828535,
      "grad_norm": 5.862183299505906,
      "learning_rate": 4.612133817239905e-08,
      "loss": 1.1478,
      "step": 1520
    },
    {
      "epoch": 0.9756877799104287,
      "grad_norm": 5.522072674954621,
      "learning_rate": 3.602511457473479e-08,
      "loss": 1.0727,
      "step": 1525
    },
    {
      "epoch": 0.9788867562380038,
      "grad_norm": 5.9700737220751146,
      "learning_rate": 2.7172538243666057e-08,
      "loss": 1.2276,
      "step": 1530
    },
    {
      "epoch": 0.982085732565579,
      "grad_norm": 5.812591223408153,
      "learning_rate": 1.9564714107945804e-08,
      "loss": 1.2139,
      "step": 1535
    },
    {
      "epoch": 0.9852847088931542,
      "grad_norm": 5.401482512458415,
      "learning_rate": 1.3202591733365577e-08,
      "loss": 1.1062,
      "step": 1540
    },
    {
      "epoch": 0.9884836852207294,
      "grad_norm": 5.685685800297117,
      "learning_rate": 8.086965204233688e-09,
      "loss": 1.1616,
      "step": 1545
    },
    {
      "epoch": 0.9916826615483045,
      "grad_norm": 5.825452202788891,
      "learning_rate": 4.218473024261149e-09,
      "loss": 1.1341,
      "step": 1550
    },
    {
      "epoch": 0.9948816378758797,
      "grad_norm": 5.673198190032165,
      "learning_rate": 1.5975980368709843e-09,
      "loss": 1.1724,
      "step": 1555
    },
    {
      "epoch": 0.9980806142034548,
      "grad_norm": 5.554210422341384,
      "learning_rate": 2.2466736492643416e-10,
      "loss": 1.0668,
      "step": 1560
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.142279028892517,
      "eval_runtime": 20.0059,
      "eval_samples_per_second": 24.993,
      "eval_steps_per_second": 0.8,
      "step": 1563
    },
    {
      "epoch": 1.0,
      "step": 1563,
      "total_flos": 38150611599360.0,
      "train_loss": 1.3537356438578816,
      "train_runtime": 6160.8214,
      "train_samples_per_second": 8.116,
      "train_steps_per_second": 0.254
    }
  ],
  "logging_steps": 5,
  "max_steps": 1563,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 38150611599360.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}