|
{ |
|
"best_metric": 0.46183693408966064, |
|
"best_model_checkpoint": "Action_all_10_class/checkpoint-1600", |
|
"epoch": 10.0, |
|
"eval_steps": 100, |
|
"global_step": 2890, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.9832468032836914, |
|
"learning_rate": 9.965397923875432e-05, |
|
"loss": 2.2618, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.9726794958114624, |
|
"learning_rate": 9.930795847750865e-05, |
|
"loss": 2.1513, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.8990365266799927, |
|
"learning_rate": 9.896193771626297e-05, |
|
"loss": 2.0562, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.9971903562545776, |
|
"learning_rate": 9.86159169550173e-05, |
|
"loss": 1.9375, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.781921148300171, |
|
"learning_rate": 9.826989619377162e-05, |
|
"loss": 1.7386, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.6308515071868896, |
|
"learning_rate": 9.792387543252595e-05, |
|
"loss": 1.5669, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.349245309829712, |
|
"learning_rate": 9.757785467128027e-05, |
|
"loss": 1.4557, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 3.6303634643554688, |
|
"learning_rate": 9.72318339100346e-05, |
|
"loss": 1.3707, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 4.0813469886779785, |
|
"learning_rate": 9.688581314878892e-05, |
|
"loss": 1.3667, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 4.506680488586426, |
|
"learning_rate": 9.653979238754325e-05, |
|
"loss": 1.1996, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"eval_accuracy": 0.7730061349693251, |
|
"eval_loss": 1.0634636878967285, |
|
"eval_runtime": 16.9399, |
|
"eval_samples_per_second": 48.111, |
|
"eval_steps_per_second": 6.021, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 2.7484147548675537, |
|
"learning_rate": 9.619377162629759e-05, |
|
"loss": 1.1942, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 3.1658833026885986, |
|
"learning_rate": 9.584775086505192e-05, |
|
"loss": 1.2623, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 3.366626024246216, |
|
"learning_rate": 9.550173010380624e-05, |
|
"loss": 1.0979, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.570044994354248, |
|
"learning_rate": 9.515570934256057e-05, |
|
"loss": 1.0803, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 3.996645927429199, |
|
"learning_rate": 9.480968858131488e-05, |
|
"loss": 1.069, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 4.534286975860596, |
|
"learning_rate": 9.446366782006922e-05, |
|
"loss": 1.0673, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.0312581062316895, |
|
"learning_rate": 9.411764705882353e-05, |
|
"loss": 1.0314, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.871572494506836, |
|
"learning_rate": 9.380622837370243e-05, |
|
"loss": 1.056, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 4.05521821975708, |
|
"learning_rate": 9.346020761245676e-05, |
|
"loss": 0.9677, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 5.704447269439697, |
|
"learning_rate": 9.311418685121109e-05, |
|
"loss": 1.0335, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"eval_accuracy": 0.7717791411042945, |
|
"eval_loss": 0.8391550779342651, |
|
"eval_runtime": 11.6297, |
|
"eval_samples_per_second": 70.079, |
|
"eval_steps_per_second": 8.771, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 6.939559459686279, |
|
"learning_rate": 9.27681660899654e-05, |
|
"loss": 0.9873, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.459825038909912, |
|
"learning_rate": 9.242214532871972e-05, |
|
"loss": 0.9642, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 5.480250835418701, |
|
"learning_rate": 9.207612456747406e-05, |
|
"loss": 0.9012, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 3.0692737102508545, |
|
"learning_rate": 9.173010380622837e-05, |
|
"loss": 0.9618, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.740853786468506, |
|
"learning_rate": 9.13840830449827e-05, |
|
"loss": 0.8414, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 3.637920379638672, |
|
"learning_rate": 9.103806228373702e-05, |
|
"loss": 0.908, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 3.488283634185791, |
|
"learning_rate": 9.069204152249136e-05, |
|
"loss": 0.9001, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.264199256896973, |
|
"learning_rate": 9.034602076124567e-05, |
|
"loss": 0.8283, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 3.3874425888061523, |
|
"learning_rate": 9e-05, |
|
"loss": 0.7862, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 3.339252233505249, |
|
"learning_rate": 8.965397923875432e-05, |
|
"loss": 0.6279, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"eval_accuracy": 0.8294478527607362, |
|
"eval_loss": 0.6463050842285156, |
|
"eval_runtime": 12.9239, |
|
"eval_samples_per_second": 63.061, |
|
"eval_steps_per_second": 7.892, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 5.215779781341553, |
|
"learning_rate": 8.930795847750866e-05, |
|
"loss": 0.7314, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 5.00214958190918, |
|
"learning_rate": 8.896193771626297e-05, |
|
"loss": 0.6857, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 4.8398542404174805, |
|
"learning_rate": 8.86159169550173e-05, |
|
"loss": 0.767, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 4.89760160446167, |
|
"learning_rate": 8.826989619377162e-05, |
|
"loss": 0.6085, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 3.459028720855713, |
|
"learning_rate": 8.792387543252596e-05, |
|
"loss": 0.6735, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 5.60736608505249, |
|
"learning_rate": 8.757785467128027e-05, |
|
"loss": 0.8843, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 4.042627811431885, |
|
"learning_rate": 8.72318339100346e-05, |
|
"loss": 0.7819, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 3.9558098316192627, |
|
"learning_rate": 8.688581314878894e-05, |
|
"loss": 0.8151, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 2.589348554611206, |
|
"learning_rate": 8.653979238754327e-05, |
|
"loss": 0.7008, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 5.536679744720459, |
|
"learning_rate": 8.619377162629759e-05, |
|
"loss": 0.8633, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"eval_accuracy": 0.7926380368098159, |
|
"eval_loss": 0.7172089219093323, |
|
"eval_runtime": 11.6484, |
|
"eval_samples_per_second": 69.967, |
|
"eval_steps_per_second": 8.757, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 3.1925909519195557, |
|
"learning_rate": 8.584775086505192e-05, |
|
"loss": 0.7146, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.3583850860595703, |
|
"learning_rate": 8.550173010380624e-05, |
|
"loss": 0.6679, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 2.476952075958252, |
|
"learning_rate": 8.515570934256055e-05, |
|
"loss": 0.7093, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 4.362217426300049, |
|
"learning_rate": 8.480968858131489e-05, |
|
"loss": 0.5813, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 6.683523178100586, |
|
"learning_rate": 8.44636678200692e-05, |
|
"loss": 0.6022, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 5.650263786315918, |
|
"learning_rate": 8.411764705882354e-05, |
|
"loss": 0.6538, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 2.9296507835388184, |
|
"learning_rate": 8.377162629757785e-05, |
|
"loss": 0.7312, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 4.2093048095703125, |
|
"learning_rate": 8.342560553633219e-05, |
|
"loss": 0.5509, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 4.481240749359131, |
|
"learning_rate": 8.30795847750865e-05, |
|
"loss": 0.7108, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 6.15458869934082, |
|
"learning_rate": 8.273356401384084e-05, |
|
"loss": 0.5851, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"eval_accuracy": 0.838036809815951, |
|
"eval_loss": 0.5857788324356079, |
|
"eval_runtime": 11.1755, |
|
"eval_samples_per_second": 72.927, |
|
"eval_steps_per_second": 9.127, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 5.6646409034729, |
|
"learning_rate": 8.238754325259515e-05, |
|
"loss": 0.7466, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 3.708621025085449, |
|
"learning_rate": 8.204152249134948e-05, |
|
"loss": 0.6417, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 4.252438545227051, |
|
"learning_rate": 8.16955017301038e-05, |
|
"loss": 0.5843, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 6.532431602478027, |
|
"learning_rate": 8.134948096885813e-05, |
|
"loss": 0.7773, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 2.0311672687530518, |
|
"learning_rate": 8.100346020761245e-05, |
|
"loss": 0.6273, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 4.033968448638916, |
|
"learning_rate": 8.065743944636678e-05, |
|
"loss": 0.6618, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 3.268759250640869, |
|
"learning_rate": 8.03114186851211e-05, |
|
"loss": 0.5404, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 5.866196632385254, |
|
"learning_rate": 7.996539792387543e-05, |
|
"loss": 0.6579, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.0091450214385986, |
|
"learning_rate": 7.961937716262977e-05, |
|
"loss": 0.4575, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 4.265220642089844, |
|
"learning_rate": 7.92733564013841e-05, |
|
"loss": 0.5305, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"eval_accuracy": 0.8355828220858895, |
|
"eval_loss": 0.5779993534088135, |
|
"eval_runtime": 12.1337, |
|
"eval_samples_per_second": 67.168, |
|
"eval_steps_per_second": 8.406, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 3.7668848037719727, |
|
"learning_rate": 7.892733564013842e-05, |
|
"loss": 0.5928, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 3.6394691467285156, |
|
"learning_rate": 7.858131487889275e-05, |
|
"loss": 0.533, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 4.226925373077393, |
|
"learning_rate": 7.823529411764707e-05, |
|
"loss": 0.4776, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 6.159184455871582, |
|
"learning_rate": 7.78892733564014e-05, |
|
"loss": 0.5517, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 7.021705150604248, |
|
"learning_rate": 7.754325259515572e-05, |
|
"loss": 0.4969, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 5.880521297454834, |
|
"learning_rate": 7.719723183391003e-05, |
|
"loss": 0.5172, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 4.049355506896973, |
|
"learning_rate": 7.685121107266437e-05, |
|
"loss": 0.5188, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 6.603725433349609, |
|
"learning_rate": 7.650519031141868e-05, |
|
"loss": 0.5329, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 6.908223628997803, |
|
"learning_rate": 7.615916955017301e-05, |
|
"loss": 0.5938, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 5.728999614715576, |
|
"learning_rate": 7.581314878892733e-05, |
|
"loss": 0.5511, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"eval_accuracy": 0.8392638036809816, |
|
"eval_loss": 0.5313203930854797, |
|
"eval_runtime": 11.9712, |
|
"eval_samples_per_second": 68.08, |
|
"eval_steps_per_second": 8.52, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 3.414422035217285, |
|
"learning_rate": 7.546712802768166e-05, |
|
"loss": 0.4702, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 7.404133319854736, |
|
"learning_rate": 7.512110726643598e-05, |
|
"loss": 0.5877, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 4.851989269256592, |
|
"learning_rate": 7.477508650519031e-05, |
|
"loss": 0.5855, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 6.651949882507324, |
|
"learning_rate": 7.442906574394463e-05, |
|
"loss": 0.5618, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 9.513969421386719, |
|
"learning_rate": 7.408304498269896e-05, |
|
"loss": 0.5871, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 11.049631118774414, |
|
"learning_rate": 7.373702422145328e-05, |
|
"loss": 0.4098, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 4.604189872741699, |
|
"learning_rate": 7.339100346020761e-05, |
|
"loss": 0.5752, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 5.650027275085449, |
|
"learning_rate": 7.304498269896195e-05, |
|
"loss": 0.6265, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 7.081886291503906, |
|
"learning_rate": 7.269896193771626e-05, |
|
"loss": 0.4814, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 9.59929084777832, |
|
"learning_rate": 7.23529411764706e-05, |
|
"loss": 0.4657, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"eval_accuracy": 0.8368098159509203, |
|
"eval_loss": 0.5443019866943359, |
|
"eval_runtime": 14.7103, |
|
"eval_samples_per_second": 55.403, |
|
"eval_steps_per_second": 6.934, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 3.963488817214966, |
|
"learning_rate": 7.200692041522493e-05, |
|
"loss": 0.4586, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 5.124209403991699, |
|
"learning_rate": 7.166089965397925e-05, |
|
"loss": 0.4099, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 2.2323999404907227, |
|
"learning_rate": 7.131487889273358e-05, |
|
"loss": 0.5683, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 6.542498588562012, |
|
"learning_rate": 7.09688581314879e-05, |
|
"loss": 0.4908, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 5.042673110961914, |
|
"learning_rate": 7.062283737024223e-05, |
|
"loss": 0.5441, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 7.417181491851807, |
|
"learning_rate": 7.027681660899654e-05, |
|
"loss": 0.4688, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 3.395101547241211, |
|
"learning_rate": 6.993079584775086e-05, |
|
"loss": 0.6159, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 4.752573013305664, |
|
"learning_rate": 6.95847750865052e-05, |
|
"loss": 0.4304, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"grad_norm": 6.356733322143555, |
|
"learning_rate": 6.923875432525951e-05, |
|
"loss": 0.4116, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 3.9821412563323975, |
|
"learning_rate": 6.889273356401384e-05, |
|
"loss": 0.3615, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"eval_accuracy": 0.8429447852760736, |
|
"eval_loss": 0.5038174986839294, |
|
"eval_runtime": 11.7579, |
|
"eval_samples_per_second": 69.315, |
|
"eval_steps_per_second": 8.675, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"grad_norm": 4.009409427642822, |
|
"learning_rate": 6.854671280276816e-05, |
|
"loss": 0.3844, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"grad_norm": 6.364121437072754, |
|
"learning_rate": 6.82006920415225e-05, |
|
"loss": 0.3863, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 5.382229804992676, |
|
"learning_rate": 6.785467128027681e-05, |
|
"loss": 0.4159, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"grad_norm": 3.8384909629821777, |
|
"learning_rate": 6.750865051903114e-05, |
|
"loss": 0.4783, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"grad_norm": 0.2637701630592346, |
|
"learning_rate": 6.716262975778546e-05, |
|
"loss": 0.4072, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"grad_norm": 3.8876795768737793, |
|
"learning_rate": 6.68166089965398e-05, |
|
"loss": 0.3698, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 2.9388973712921143, |
|
"learning_rate": 6.647058823529411e-05, |
|
"loss": 0.4197, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"grad_norm": 1.423226237297058, |
|
"learning_rate": 6.612456747404844e-05, |
|
"loss": 0.344, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"grad_norm": 5.080854892730713, |
|
"learning_rate": 6.577854671280278e-05, |
|
"loss": 0.3596, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"grad_norm": 4.027952671051025, |
|
"learning_rate": 6.54325259515571e-05, |
|
"loss": 0.5301, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"eval_accuracy": 0.8503067484662576, |
|
"eval_loss": 0.5100666284561157, |
|
"eval_runtime": 11.1502, |
|
"eval_samples_per_second": 73.093, |
|
"eval_steps_per_second": 9.148, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"grad_norm": 7.976325988769531, |
|
"learning_rate": 6.508650519031142e-05, |
|
"loss": 0.3365, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"grad_norm": 5.674125671386719, |
|
"learning_rate": 6.474048442906576e-05, |
|
"loss": 0.3745, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"grad_norm": 2.311941623687744, |
|
"learning_rate": 6.439446366782007e-05, |
|
"loss": 0.4887, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 2.7413113117218018, |
|
"learning_rate": 6.40484429065744e-05, |
|
"loss": 0.4548, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"grad_norm": 4.1170830726623535, |
|
"learning_rate": 6.370242214532872e-05, |
|
"loss": 0.5191, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"grad_norm": 3.560255765914917, |
|
"learning_rate": 6.335640138408306e-05, |
|
"loss": 0.496, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 7.074652671813965, |
|
"learning_rate": 6.301038062283737e-05, |
|
"loss": 0.5216, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"grad_norm": 2.202094793319702, |
|
"learning_rate": 6.26643598615917e-05, |
|
"loss": 0.3693, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"grad_norm": 5.584536075592041, |
|
"learning_rate": 6.231833910034602e-05, |
|
"loss": 0.3557, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 4.547154903411865, |
|
"learning_rate": 6.197231833910034e-05, |
|
"loss": 0.4108, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"eval_accuracy": 0.8478527607361963, |
|
"eval_loss": 0.5211547613143921, |
|
"eval_runtime": 11.576, |
|
"eval_samples_per_second": 70.404, |
|
"eval_steps_per_second": 8.811, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 2.4568958282470703, |
|
"learning_rate": 6.162629757785467e-05, |
|
"loss": 0.4367, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 3.632456064224243, |
|
"learning_rate": 6.128027681660899e-05, |
|
"loss": 0.4811, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"grad_norm": 5.1356329917907715, |
|
"learning_rate": 6.0934256055363324e-05, |
|
"loss": 0.3121, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"grad_norm": 2.870601177215576, |
|
"learning_rate": 6.058823529411765e-05, |
|
"loss": 0.42, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"grad_norm": 6.305639743804932, |
|
"learning_rate": 6.0242214532871973e-05, |
|
"loss": 0.4733, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"grad_norm": 5.95077657699585, |
|
"learning_rate": 5.98961937716263e-05, |
|
"loss": 0.3604, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"grad_norm": 4.88993501663208, |
|
"learning_rate": 5.955017301038063e-05, |
|
"loss": 0.3475, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 2.9184603691101074, |
|
"learning_rate": 5.920415224913495e-05, |
|
"loss": 0.395, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"grad_norm": 3.779115915298462, |
|
"learning_rate": 5.885813148788928e-05, |
|
"loss": 0.4199, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"grad_norm": 4.5922322273254395, |
|
"learning_rate": 5.854671280276816e-05, |
|
"loss": 0.4223, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"eval_accuracy": 0.8429447852760736, |
|
"eval_loss": 0.5328340530395508, |
|
"eval_runtime": 11.5883, |
|
"eval_samples_per_second": 70.33, |
|
"eval_steps_per_second": 8.802, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"grad_norm": 5.457287311553955, |
|
"learning_rate": 5.8200692041522495e-05, |
|
"loss": 0.2775, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"grad_norm": 5.398082733154297, |
|
"learning_rate": 5.785467128027682e-05, |
|
"loss": 0.3019, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"grad_norm": 5.261845111846924, |
|
"learning_rate": 5.750865051903115e-05, |
|
"loss": 0.4365, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"grad_norm": 5.616456508636475, |
|
"learning_rate": 5.716262975778547e-05, |
|
"loss": 0.412, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"grad_norm": 3.715630292892456, |
|
"learning_rate": 5.68166089965398e-05, |
|
"loss": 0.3644, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"grad_norm": 5.539583683013916, |
|
"learning_rate": 5.647058823529412e-05, |
|
"loss": 0.2519, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"grad_norm": 7.531044960021973, |
|
"learning_rate": 5.612456747404845e-05, |
|
"loss": 0.4028, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"grad_norm": 4.245101451873779, |
|
"learning_rate": 5.577854671280277e-05, |
|
"loss": 0.483, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"grad_norm": 4.674008846282959, |
|
"learning_rate": 5.54325259515571e-05, |
|
"loss": 0.4101, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 5.6747331619262695, |
|
"learning_rate": 5.508650519031142e-05, |
|
"loss": 0.3877, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"eval_accuracy": 0.8294478527607362, |
|
"eval_loss": 0.5815134048461914, |
|
"eval_runtime": 11.4501, |
|
"eval_samples_per_second": 71.178, |
|
"eval_steps_per_second": 8.908, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"grad_norm": 4.670008659362793, |
|
"learning_rate": 5.4740484429065744e-05, |
|
"loss": 0.4169, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"grad_norm": 6.947169780731201, |
|
"learning_rate": 5.4394463667820076e-05, |
|
"loss": 0.3955, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"grad_norm": 0.6032439470291138, |
|
"learning_rate": 5.4048442906574394e-05, |
|
"loss": 0.3438, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 2.0391006469726562, |
|
"learning_rate": 5.3702422145328725e-05, |
|
"loss": 0.3032, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"grad_norm": 1.2689828872680664, |
|
"learning_rate": 5.3356401384083043e-05, |
|
"loss": 0.3091, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"grad_norm": 9.588872909545898, |
|
"learning_rate": 5.3010380622837375e-05, |
|
"loss": 0.3301, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"grad_norm": 9.207768440246582, |
|
"learning_rate": 5.266435986159169e-05, |
|
"loss": 0.4184, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"grad_norm": 3.842557191848755, |
|
"learning_rate": 5.2318339100346025e-05, |
|
"loss": 0.3254, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"grad_norm": 5.358198642730713, |
|
"learning_rate": 5.197231833910034e-05, |
|
"loss": 0.2876, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"grad_norm": 4.02155065536499, |
|
"learning_rate": 5.1626297577854675e-05, |
|
"loss": 0.3879, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"eval_accuracy": 0.8503067484662576, |
|
"eval_loss": 0.5150614380836487, |
|
"eval_runtime": 11.4927, |
|
"eval_samples_per_second": 70.914, |
|
"eval_steps_per_second": 8.875, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 4.721061706542969, |
|
"learning_rate": 5.1280276816609e-05, |
|
"loss": 0.4051, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"grad_norm": 6.063509941101074, |
|
"learning_rate": 5.0934256055363325e-05, |
|
"loss": 0.436, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"grad_norm": 4.278838634490967, |
|
"learning_rate": 5.058823529411765e-05, |
|
"loss": 0.3533, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"grad_norm": 5.166705131530762, |
|
"learning_rate": 5.024221453287198e-05, |
|
"loss": 0.3281, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"grad_norm": 4.790309906005859, |
|
"learning_rate": 4.98961937716263e-05, |
|
"loss": 0.3324, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"grad_norm": 6.665502548217773, |
|
"learning_rate": 4.9550173010380624e-05, |
|
"loss": 0.4309, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"grad_norm": 2.508721113204956, |
|
"learning_rate": 4.920415224913495e-05, |
|
"loss": 0.4029, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"grad_norm": 3.7062671184539795, |
|
"learning_rate": 4.8858131487889274e-05, |
|
"loss": 0.3016, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"grad_norm": 1.6657217741012573, |
|
"learning_rate": 4.85121107266436e-05, |
|
"loss": 0.2906, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"grad_norm": 5.122851848602295, |
|
"learning_rate": 4.8166089965397924e-05, |
|
"loss": 0.2797, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"eval_accuracy": 0.8564417177914111, |
|
"eval_loss": 0.5160326361656189, |
|
"eval_runtime": 11.658, |
|
"eval_samples_per_second": 69.909, |
|
"eval_steps_per_second": 8.749, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"grad_norm": 1.0947901010513306, |
|
"learning_rate": 4.782006920415225e-05, |
|
"loss": 0.2272, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"grad_norm": 6.223177909851074, |
|
"learning_rate": 4.747404844290658e-05, |
|
"loss": 0.4336, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"grad_norm": 4.786419868469238, |
|
"learning_rate": 4.7128027681660905e-05, |
|
"loss": 0.3644, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"grad_norm": 2.7506916522979736, |
|
"learning_rate": 4.678200692041523e-05, |
|
"loss": 0.3686, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"grad_norm": 5.9919352531433105, |
|
"learning_rate": 4.6435986159169555e-05, |
|
"loss": 0.2422, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"grad_norm": 2.702258825302124, |
|
"learning_rate": 4.608996539792388e-05, |
|
"loss": 0.4158, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"grad_norm": 0.17089949548244476, |
|
"learning_rate": 4.5743944636678205e-05, |
|
"loss": 0.2326, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"grad_norm": 1.4725955724716187, |
|
"learning_rate": 4.539792387543253e-05, |
|
"loss": 0.2891, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"grad_norm": 3.754875659942627, |
|
"learning_rate": 4.5051903114186854e-05, |
|
"loss": 0.3169, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"grad_norm": 1.3562734127044678, |
|
"learning_rate": 4.470588235294118e-05, |
|
"loss": 0.2628, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"eval_accuracy": 0.8699386503067484, |
|
"eval_loss": 0.46183693408966064, |
|
"eval_runtime": 12.3863, |
|
"eval_samples_per_second": 65.799, |
|
"eval_steps_per_second": 8.235, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"grad_norm": 1.3551547527313232, |
|
"learning_rate": 4.4359861591695504e-05, |
|
"loss": 0.2924, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"grad_norm": 2.6148874759674072, |
|
"learning_rate": 4.401384083044983e-05, |
|
"loss": 0.2792, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"grad_norm": 3.0203983783721924, |
|
"learning_rate": 4.3667820069204154e-05, |
|
"loss": 0.2955, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"grad_norm": 10.279276847839355, |
|
"learning_rate": 4.332179930795848e-05, |
|
"loss": 0.4448, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"grad_norm": 0.37972888350486755, |
|
"learning_rate": 4.2975778546712804e-05, |
|
"loss": 0.2687, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"grad_norm": 5.29146671295166, |
|
"learning_rate": 4.262975778546713e-05, |
|
"loss": 0.2522, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"grad_norm": 1.9747039079666138, |
|
"learning_rate": 4.2283737024221454e-05, |
|
"loss": 0.2579, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"grad_norm": 1.1432113647460938, |
|
"learning_rate": 4.193771626297578e-05, |
|
"loss": 0.2522, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"grad_norm": 0.5862625241279602, |
|
"learning_rate": 4.15916955017301e-05, |
|
"loss": 0.265, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"grad_norm": 5.743435859680176, |
|
"learning_rate": 4.124567474048443e-05, |
|
"loss": 0.3404, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"eval_accuracy": 0.8674846625766871, |
|
"eval_loss": 0.4903254508972168, |
|
"eval_runtime": 11.4951, |
|
"eval_samples_per_second": 70.9, |
|
"eval_steps_per_second": 8.873, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"grad_norm": 2.558229446411133, |
|
"learning_rate": 4.089965397923875e-05, |
|
"loss": 0.3082, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"grad_norm": 1.225205659866333, |
|
"learning_rate": 4.055363321799308e-05, |
|
"loss": 0.2875, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"grad_norm": 0.44406113028526306, |
|
"learning_rate": 4.020761245674741e-05, |
|
"loss": 0.2987, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"grad_norm": 6.770223617553711, |
|
"learning_rate": 3.9861591695501735e-05, |
|
"loss": 0.381, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"grad_norm": 5.378862380981445, |
|
"learning_rate": 3.951557093425606e-05, |
|
"loss": 0.3109, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"grad_norm": 5.086582183837891, |
|
"learning_rate": 3.9169550173010384e-05, |
|
"loss": 0.3031, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"grad_norm": 3.3848397731781006, |
|
"learning_rate": 3.882352941176471e-05, |
|
"loss": 0.3092, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"grad_norm": 1.4123233556747437, |
|
"learning_rate": 3.8477508650519034e-05, |
|
"loss": 0.352, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"grad_norm": 1.3674020767211914, |
|
"learning_rate": 3.813148788927336e-05, |
|
"loss": 0.1128, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"grad_norm": 3.944838047027588, |
|
"learning_rate": 3.7785467128027684e-05, |
|
"loss": 0.3033, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"eval_accuracy": 0.8662576687116564, |
|
"eval_loss": 0.4860667288303375, |
|
"eval_runtime": 11.4194, |
|
"eval_samples_per_second": 71.37, |
|
"eval_steps_per_second": 8.932, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"grad_norm": 4.455410957336426, |
|
"learning_rate": 3.743944636678201e-05, |
|
"loss": 0.2315, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"grad_norm": 3.5710418224334717, |
|
"learning_rate": 3.7093425605536334e-05, |
|
"loss": 0.2175, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"grad_norm": 6.50364875793457, |
|
"learning_rate": 3.674740484429066e-05, |
|
"loss": 0.2477, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"grad_norm": 2.710531711578369, |
|
"learning_rate": 3.640138408304499e-05, |
|
"loss": 0.221, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"grad_norm": 2.1464736461639404, |
|
"learning_rate": 3.605536332179931e-05, |
|
"loss": 0.2387, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"grad_norm": 7.897326469421387, |
|
"learning_rate": 3.570934256055363e-05, |
|
"loss": 0.2723, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"grad_norm": 2.9161367416381836, |
|
"learning_rate": 3.536332179930796e-05, |
|
"loss": 0.2621, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"grad_norm": 7.048449516296387, |
|
"learning_rate": 3.501730103806228e-05, |
|
"loss": 0.2955, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"grad_norm": 5.2737250328063965, |
|
"learning_rate": 3.467128027681661e-05, |
|
"loss": 0.3076, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"grad_norm": 4.985352039337158, |
|
"learning_rate": 3.432525951557093e-05, |
|
"loss": 0.214, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"eval_accuracy": 0.8687116564417178, |
|
"eval_loss": 0.4853067100048065, |
|
"eval_runtime": 11.47, |
|
"eval_samples_per_second": 71.055, |
|
"eval_steps_per_second": 8.893, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"grad_norm": 6.846111297607422, |
|
"learning_rate": 3.397923875432526e-05, |
|
"loss": 0.2693, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"grad_norm": 2.8362717628479004, |
|
"learning_rate": 3.363321799307958e-05, |
|
"loss": 0.2582, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"grad_norm": 1.0130655765533447, |
|
"learning_rate": 3.3287197231833914e-05, |
|
"loss": 0.278, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"grad_norm": 1.3184999227523804, |
|
"learning_rate": 3.294117647058824e-05, |
|
"loss": 0.3776, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"grad_norm": 3.637080430984497, |
|
"learning_rate": 3.2595155709342564e-05, |
|
"loss": 0.1598, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"grad_norm": 4.093498706817627, |
|
"learning_rate": 3.224913494809689e-05, |
|
"loss": 0.2604, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"grad_norm": 6.110915184020996, |
|
"learning_rate": 3.1903114186851214e-05, |
|
"loss": 0.3471, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"grad_norm": 6.045133590698242, |
|
"learning_rate": 3.155709342560554e-05, |
|
"loss": 0.1886, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"grad_norm": 0.4137258231639862, |
|
"learning_rate": 3.1211072664359864e-05, |
|
"loss": 0.183, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"grad_norm": 3.405696392059326, |
|
"learning_rate": 3.086505190311419e-05, |
|
"loss": 0.2763, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"eval_accuracy": 0.8736196319018404, |
|
"eval_loss": 0.4705219566822052, |
|
"eval_runtime": 11.4342, |
|
"eval_samples_per_second": 71.277, |
|
"eval_steps_per_second": 8.921, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"grad_norm": 6.817298889160156, |
|
"learning_rate": 3.0519031141868513e-05, |
|
"loss": 0.2825, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"grad_norm": 5.460371494293213, |
|
"learning_rate": 3.0173010380622842e-05, |
|
"loss": 0.2853, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"grad_norm": 6.517573833465576, |
|
"learning_rate": 2.9826989619377167e-05, |
|
"loss": 0.2913, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"grad_norm": 2.988943099975586, |
|
"learning_rate": 2.948096885813149e-05, |
|
"loss": 0.3647, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"grad_norm": 3.221747636795044, |
|
"learning_rate": 2.9134948096885816e-05, |
|
"loss": 0.2344, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"grad_norm": 3.342573404312134, |
|
"learning_rate": 2.878892733564014e-05, |
|
"loss": 0.2741, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"grad_norm": 4.326902866363525, |
|
"learning_rate": 2.8442906574394463e-05, |
|
"loss": 0.2503, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"grad_norm": 7.965173244476318, |
|
"learning_rate": 2.8096885813148788e-05, |
|
"loss": 0.2061, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"grad_norm": 0.314100056886673, |
|
"learning_rate": 2.7750865051903113e-05, |
|
"loss": 0.2205, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"grad_norm": 4.415227890014648, |
|
"learning_rate": 2.7404844290657437e-05, |
|
"loss": 0.3009, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"eval_accuracy": 0.8625766871165644, |
|
"eval_loss": 0.47227197885513306, |
|
"eval_runtime": 11.4055, |
|
"eval_samples_per_second": 71.457, |
|
"eval_steps_per_second": 8.943, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"grad_norm": 4.705826759338379, |
|
"learning_rate": 2.7058823529411766e-05, |
|
"loss": 0.2255, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"grad_norm": 1.2122201919555664, |
|
"learning_rate": 2.671280276816609e-05, |
|
"loss": 0.2103, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"grad_norm": 11.807892799377441, |
|
"learning_rate": 2.6366782006920416e-05, |
|
"loss": 0.2001, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"grad_norm": 5.092467784881592, |
|
"learning_rate": 2.602076124567474e-05, |
|
"loss": 0.2208, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"grad_norm": 2.7508227825164795, |
|
"learning_rate": 2.5674740484429065e-05, |
|
"loss": 0.2397, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"grad_norm": 5.095257759094238, |
|
"learning_rate": 2.532871972318339e-05, |
|
"loss": 0.1861, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"grad_norm": 2.8208508491516113, |
|
"learning_rate": 2.498269896193772e-05, |
|
"loss": 0.1776, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"grad_norm": 16.025461196899414, |
|
"learning_rate": 2.4636678200692043e-05, |
|
"loss": 0.2899, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"grad_norm": 8.661275863647461, |
|
"learning_rate": 2.4290657439446368e-05, |
|
"loss": 0.3018, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"grad_norm": 4.188193321228027, |
|
"learning_rate": 2.3944636678200693e-05, |
|
"loss": 0.1543, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"eval_accuracy": 0.8638036809815951, |
|
"eval_loss": 0.4983120858669281, |
|
"eval_runtime": 11.4805, |
|
"eval_samples_per_second": 70.99, |
|
"eval_steps_per_second": 8.885, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"grad_norm": 2.8907430171966553, |
|
"learning_rate": 2.3598615916955018e-05, |
|
"loss": 0.2289, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"grad_norm": 6.132763385772705, |
|
"learning_rate": 2.3252595155709346e-05, |
|
"loss": 0.1687, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"grad_norm": 8.132895469665527, |
|
"learning_rate": 2.290657439446367e-05, |
|
"loss": 0.1936, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"grad_norm": 2.7748658657073975, |
|
"learning_rate": 2.2560553633217993e-05, |
|
"loss": 0.2018, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"grad_norm": 3.723444938659668, |
|
"learning_rate": 2.2214532871972318e-05, |
|
"loss": 0.331, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"grad_norm": 0.9698171615600586, |
|
"learning_rate": 2.1868512110726642e-05, |
|
"loss": 0.2071, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"grad_norm": 5.589043140411377, |
|
"learning_rate": 2.152249134948097e-05, |
|
"loss": 0.1308, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"grad_norm": 1.0597747564315796, |
|
"learning_rate": 2.1176470588235296e-05, |
|
"loss": 0.2911, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"grad_norm": 0.17304980754852295, |
|
"learning_rate": 2.083044982698962e-05, |
|
"loss": 0.3127, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"grad_norm": 2.3576455116271973, |
|
"learning_rate": 2.0484429065743945e-05, |
|
"loss": 0.2407, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"eval_accuracy": 0.8650306748466258, |
|
"eval_loss": 0.47417759895324707, |
|
"eval_runtime": 11.7355, |
|
"eval_samples_per_second": 69.447, |
|
"eval_steps_per_second": 8.692, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"grad_norm": 6.587134838104248, |
|
"learning_rate": 2.013840830449827e-05, |
|
"loss": 0.1954, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"grad_norm": 5.971607685089111, |
|
"learning_rate": 1.9792387543252595e-05, |
|
"loss": 0.139, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"grad_norm": 2.352544069290161, |
|
"learning_rate": 1.9446366782006923e-05, |
|
"loss": 0.1648, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"grad_norm": 1.0423003435134888, |
|
"learning_rate": 1.910034602076125e-05, |
|
"loss": 0.3657, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"grad_norm": 3.612190008163452, |
|
"learning_rate": 1.875432525951557e-05, |
|
"loss": 0.1846, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"grad_norm": 3.1284472942352295, |
|
"learning_rate": 1.8408304498269895e-05, |
|
"loss": 0.2876, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"grad_norm": 6.02339506149292, |
|
"learning_rate": 1.8062283737024223e-05, |
|
"loss": 0.3388, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"grad_norm": 7.388739109039307, |
|
"learning_rate": 1.7716262975778548e-05, |
|
"loss": 0.3095, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"grad_norm": 6.4854912757873535, |
|
"learning_rate": 1.7370242214532873e-05, |
|
"loss": 0.1786, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"grad_norm": 3.4759864807128906, |
|
"learning_rate": 1.7024221453287198e-05, |
|
"loss": 0.2679, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"eval_accuracy": 0.8723926380368098, |
|
"eval_loss": 0.4934893846511841, |
|
"eval_runtime": 11.4149, |
|
"eval_samples_per_second": 71.398, |
|
"eval_steps_per_second": 8.936, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"grad_norm": 0.9941917657852173, |
|
"learning_rate": 1.6678200692041523e-05, |
|
"loss": 0.1666, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"grad_norm": 2.615389108657837, |
|
"learning_rate": 1.6332179930795848e-05, |
|
"loss": 0.2039, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"grad_norm": 0.12697245180606842, |
|
"learning_rate": 1.5986159169550176e-05, |
|
"loss": 0.2026, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"grad_norm": 2.1617543697357178, |
|
"learning_rate": 1.56401384083045e-05, |
|
"loss": 0.2141, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"grad_norm": 6.248876094818115, |
|
"learning_rate": 1.5294117647058826e-05, |
|
"loss": 0.1039, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"grad_norm": 3.2735066413879395, |
|
"learning_rate": 1.4948096885813149e-05, |
|
"loss": 0.269, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"grad_norm": 0.2865382730960846, |
|
"learning_rate": 1.4602076124567474e-05, |
|
"loss": 0.1724, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"grad_norm": 0.17031227052211761, |
|
"learning_rate": 1.4256055363321799e-05, |
|
"loss": 0.1477, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"grad_norm": 5.686491966247559, |
|
"learning_rate": 1.3910034602076125e-05, |
|
"loss": 0.2774, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"grad_norm": 0.2951521873474121, |
|
"learning_rate": 1.356401384083045e-05, |
|
"loss": 0.1508, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"eval_accuracy": 0.8674846625766871, |
|
"eval_loss": 0.4825909435749054, |
|
"eval_runtime": 11.5579, |
|
"eval_samples_per_second": 70.515, |
|
"eval_steps_per_second": 8.825, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"grad_norm": 4.706648826599121, |
|
"learning_rate": 1.3217993079584775e-05, |
|
"loss": 0.1022, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"grad_norm": 7.546770095825195, |
|
"learning_rate": 1.2871972318339102e-05, |
|
"loss": 0.1778, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"grad_norm": 8.35574722290039, |
|
"learning_rate": 1.2525951557093426e-05, |
|
"loss": 0.242, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"grad_norm": 2.115328788757324, |
|
"learning_rate": 1.2179930795847751e-05, |
|
"loss": 0.1634, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"grad_norm": 7.35970401763916, |
|
"learning_rate": 1.1833910034602076e-05, |
|
"loss": 0.1618, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"grad_norm": 0.3500753939151764, |
|
"learning_rate": 1.1487889273356401e-05, |
|
"loss": 0.2685, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"grad_norm": 4.974752426147461, |
|
"learning_rate": 1.1141868512110728e-05, |
|
"loss": 0.1975, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"grad_norm": 2.2218804359436035, |
|
"learning_rate": 1.0795847750865053e-05, |
|
"loss": 0.1887, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"grad_norm": 2.4438514709472656, |
|
"learning_rate": 1.0449826989619377e-05, |
|
"loss": 0.2348, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"grad_norm": 7.3225998878479, |
|
"learning_rate": 1.0103806228373702e-05, |
|
"loss": 0.2129, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.8711656441717791, |
|
"eval_loss": 0.49807000160217285, |
|
"eval_runtime": 11.7207, |
|
"eval_samples_per_second": 69.535, |
|
"eval_steps_per_second": 8.703, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"grad_norm": 3.7790167331695557, |
|
"learning_rate": 9.757785467128027e-06, |
|
"loss": 0.1668, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"grad_norm": 0.14809288084506989, |
|
"learning_rate": 9.411764705882354e-06, |
|
"loss": 0.1686, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"grad_norm": 0.21380960941314697, |
|
"learning_rate": 9.065743944636679e-06, |
|
"loss": 0.2266, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"grad_norm": 1.157197117805481, |
|
"learning_rate": 8.719723183391004e-06, |
|
"loss": 0.1608, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"grad_norm": 1.3275336027145386, |
|
"learning_rate": 8.37370242214533e-06, |
|
"loss": 0.1909, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"grad_norm": 0.963979959487915, |
|
"learning_rate": 8.027681660899653e-06, |
|
"loss": 0.1803, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"grad_norm": null, |
|
"learning_rate": 7.716262975778547e-06, |
|
"loss": 0.1407, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"grad_norm": 2.7852437496185303, |
|
"learning_rate": 7.370242214532873e-06, |
|
"loss": 0.2366, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"grad_norm": 0.2830531895160675, |
|
"learning_rate": 7.024221453287197e-06, |
|
"loss": 0.2925, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"grad_norm": 2.379260301589966, |
|
"learning_rate": 6.678200692041523e-06, |
|
"loss": 0.1131, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"eval_accuracy": 0.8711656441717791, |
|
"eval_loss": 0.47180497646331787, |
|
"eval_runtime": 11.6646, |
|
"eval_samples_per_second": 69.87, |
|
"eval_steps_per_second": 8.744, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"grad_norm": 7.354953289031982, |
|
"learning_rate": 6.3321799307958475e-06, |
|
"loss": 0.1997, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"grad_norm": 0.7862864136695862, |
|
"learning_rate": 5.986159169550173e-06, |
|
"loss": 0.1147, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"grad_norm": 2.8488690853118896, |
|
"learning_rate": 5.640138408304498e-06, |
|
"loss": 0.1345, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"grad_norm": 4.022710800170898, |
|
"learning_rate": 5.294117647058824e-06, |
|
"loss": 0.1466, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"grad_norm": 1.0064830780029297, |
|
"learning_rate": 4.948096885813149e-06, |
|
"loss": 0.2983, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"grad_norm": 2.4876391887664795, |
|
"learning_rate": 4.602076124567474e-06, |
|
"loss": 0.2516, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"grad_norm": 8.971895217895508, |
|
"learning_rate": 4.2560553633217994e-06, |
|
"loss": 0.2621, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"grad_norm": 6.835991859436035, |
|
"learning_rate": 3.910034602076125e-06, |
|
"loss": 0.1548, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"grad_norm": 3.8968513011932373, |
|
"learning_rate": 3.5640138408304496e-06, |
|
"loss": 0.2505, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"grad_norm": 2.595637083053589, |
|
"learning_rate": 3.2179930795847754e-06, |
|
"loss": 0.2144, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"eval_accuracy": 0.8711656441717791, |
|
"eval_loss": 0.4744942784309387, |
|
"eval_runtime": 11.7899, |
|
"eval_samples_per_second": 69.127, |
|
"eval_steps_per_second": 8.651, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"grad_norm": 8.595541954040527, |
|
"learning_rate": 2.8719723183391003e-06, |
|
"loss": 0.1468, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"grad_norm": 3.5496714115142822, |
|
"learning_rate": 2.5259515570934256e-06, |
|
"loss": 0.2275, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"grad_norm": 6.138185977935791, |
|
"learning_rate": 2.179930795847751e-06, |
|
"loss": 0.3254, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"grad_norm": 1.1723095178604126, |
|
"learning_rate": 1.8339100346020762e-06, |
|
"loss": 0.109, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"grad_norm": 5.896970272064209, |
|
"learning_rate": 1.4878892733564015e-06, |
|
"loss": 0.2334, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"grad_norm": 2.945650577545166, |
|
"learning_rate": 1.1764705882352942e-06, |
|
"loss": 0.3131, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"grad_norm": 5.7923583984375, |
|
"learning_rate": 8.304498269896194e-07, |
|
"loss": 0.3139, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"grad_norm": 2.053051471710205, |
|
"learning_rate": 4.844290657439446e-07, |
|
"loss": 0.2643, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.1167406514286995, |
|
"learning_rate": 1.384083044982699e-07, |
|
"loss": 0.1631, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 2890, |
|
"total_flos": 3.57806169439359e+18, |
|
"train_loss": 0.4476649462557994, |
|
"train_runtime": 1596.8664, |
|
"train_samples_per_second": 28.913, |
|
"train_steps_per_second": 1.81 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2890, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"total_flos": 3.57806169439359e+18, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|