{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 2179, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0004589261128958238, |
|
"grad_norm": 8.542048983401715, |
|
"learning_rate": 9.174311926605506e-08, |
|
"loss": 1.1206, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002294630564479119, |
|
"grad_norm": 8.730820381996422, |
|
"learning_rate": 4.587155963302753e-07, |
|
"loss": 1.1225, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004589261128958238, |
|
"grad_norm": 5.026231881904144, |
|
"learning_rate": 9.174311926605506e-07, |
|
"loss": 1.1138, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.006883891693437357, |
|
"grad_norm": 3.214436315778165, |
|
"learning_rate": 1.3761467889908258e-06, |
|
"loss": 1.0308, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.009178522257916476, |
|
"grad_norm": 2.9262839132141174, |
|
"learning_rate": 1.8348623853211011e-06, |
|
"loss": 0.9803, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.011473152822395595, |
|
"grad_norm": 2.5816500702829965, |
|
"learning_rate": 2.2935779816513764e-06, |
|
"loss": 0.9955, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.013767783386874713, |
|
"grad_norm": 2.62089651849134, |
|
"learning_rate": 2.7522935779816517e-06, |
|
"loss": 1.0214, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.016062413951353834, |
|
"grad_norm": 2.522275443963982, |
|
"learning_rate": 3.211009174311927e-06, |
|
"loss": 0.9751, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.018357044515832952, |
|
"grad_norm": 2.415617593047094, |
|
"learning_rate": 3.6697247706422022e-06, |
|
"loss": 0.995, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02065167508031207, |
|
"grad_norm": 2.525030642664337, |
|
"learning_rate": 4.128440366972478e-06, |
|
"loss": 1.0113, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02294630564479119, |
|
"grad_norm": 2.6665037719940954, |
|
"learning_rate": 4.587155963302753e-06, |
|
"loss": 0.9655, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.025240936209270308, |
|
"grad_norm": 3.3232412426313385, |
|
"learning_rate": 5.045871559633028e-06, |
|
"loss": 0.9874, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.027535566773749427, |
|
"grad_norm": 2.394624410644964, |
|
"learning_rate": 5.504587155963303e-06, |
|
"loss": 0.9921, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.029830197338228545, |
|
"grad_norm": 2.6803374112332796, |
|
"learning_rate": 5.963302752293578e-06, |
|
"loss": 0.9968, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.03212482790270767, |
|
"grad_norm": 2.695577014057411, |
|
"learning_rate": 6.422018348623854e-06, |
|
"loss": 0.9819, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03441945846718678, |
|
"grad_norm": 2.5003353553530308, |
|
"learning_rate": 6.880733944954129e-06, |
|
"loss": 1.0135, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.036714089031665904, |
|
"grad_norm": 2.449197716174392, |
|
"learning_rate": 7.3394495412844045e-06, |
|
"loss": 0.9861, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03900871959614502, |
|
"grad_norm": 2.547680048482618, |
|
"learning_rate": 7.79816513761468e-06, |
|
"loss": 0.9886, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.04130335016062414, |
|
"grad_norm": 2.738205329791908, |
|
"learning_rate": 8.256880733944956e-06, |
|
"loss": 0.9849, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04359798072510326, |
|
"grad_norm": 2.6040796770347256, |
|
"learning_rate": 8.71559633027523e-06, |
|
"loss": 1.0033, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.04589261128958238, |
|
"grad_norm": 2.6163342737972495, |
|
"learning_rate": 9.174311926605506e-06, |
|
"loss": 0.9906, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.048187241854061494, |
|
"grad_norm": 2.4805649474435536, |
|
"learning_rate": 9.633027522935781e-06, |
|
"loss": 0.9953, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.050481872418540616, |
|
"grad_norm": 2.5415673257044604, |
|
"learning_rate": 1.0091743119266055e-05, |
|
"loss": 1.0171, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.05277650298301973, |
|
"grad_norm": 2.4798532274771303, |
|
"learning_rate": 1.055045871559633e-05, |
|
"loss": 1.0314, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.05507113354749885, |
|
"grad_norm": 2.8712967477215217, |
|
"learning_rate": 1.1009174311926607e-05, |
|
"loss": 0.9966, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05736576411197797, |
|
"grad_norm": 2.40307484944342, |
|
"learning_rate": 1.1467889908256882e-05, |
|
"loss": 0.9862, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.05966039467645709, |
|
"grad_norm": 2.531947293972582, |
|
"learning_rate": 1.1926605504587156e-05, |
|
"loss": 1.0111, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.06195502524093621, |
|
"grad_norm": 2.5630237672720355, |
|
"learning_rate": 1.238532110091743e-05, |
|
"loss": 1.0111, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.06424965580541533, |
|
"grad_norm": 3.0796021235291504, |
|
"learning_rate": 1.2844036697247708e-05, |
|
"loss": 0.996, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.06654428636989444, |
|
"grad_norm": 2.532220737324273, |
|
"learning_rate": 1.3302752293577984e-05, |
|
"loss": 1.0361, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.06883891693437356, |
|
"grad_norm": 2.656655575725622, |
|
"learning_rate": 1.3761467889908258e-05, |
|
"loss": 1.0061, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.07113354749885269, |
|
"grad_norm": 2.4257637087990194, |
|
"learning_rate": 1.4220183486238533e-05, |
|
"loss": 0.9896, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.07342817806333181, |
|
"grad_norm": 2.482384770473286, |
|
"learning_rate": 1.4678899082568809e-05, |
|
"loss": 1.0321, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.07572280862781092, |
|
"grad_norm": 2.9466970045401957, |
|
"learning_rate": 1.5137614678899085e-05, |
|
"loss": 1.0113, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.07801743919229004, |
|
"grad_norm": 2.5190350812742364, |
|
"learning_rate": 1.559633027522936e-05, |
|
"loss": 1.0271, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.08031206975676916, |
|
"grad_norm": 2.478301477100018, |
|
"learning_rate": 1.6055045871559634e-05, |
|
"loss": 1.0165, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.08260670032124828, |
|
"grad_norm": 2.4464230838695826, |
|
"learning_rate": 1.6513761467889912e-05, |
|
"loss": 1.0646, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.08490133088572739, |
|
"grad_norm": 2.5240574285299315, |
|
"learning_rate": 1.6972477064220186e-05, |
|
"loss": 1.0555, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.08719596145020651, |
|
"grad_norm": 2.4185989752236505, |
|
"learning_rate": 1.743119266055046e-05, |
|
"loss": 1.0451, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.08949059201468564, |
|
"grad_norm": 2.6482287747026954, |
|
"learning_rate": 1.7889908256880734e-05, |
|
"loss": 1.0325, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.09178522257916476, |
|
"grad_norm": 2.4286821504213814, |
|
"learning_rate": 1.834862385321101e-05, |
|
"loss": 1.0269, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.09407985314364388, |
|
"grad_norm": 2.193912154365014, |
|
"learning_rate": 1.8807339449541285e-05, |
|
"loss": 1.0551, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.09637448370812299, |
|
"grad_norm": 2.695401581193139, |
|
"learning_rate": 1.9266055045871563e-05, |
|
"loss": 1.0456, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.09866911427260211, |
|
"grad_norm": 2.6583940178739827, |
|
"learning_rate": 1.9724770642201837e-05, |
|
"loss": 1.0664, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.10096374483708123, |
|
"grad_norm": 2.371077606257049, |
|
"learning_rate": 1.9999948669655127e-05, |
|
"loss": 1.0578, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.10325837540156035, |
|
"grad_norm": 2.159798506956808, |
|
"learning_rate": 1.999937120932709e-05, |
|
"loss": 1.0421, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.10555300596603946, |
|
"grad_norm": 2.7274671704918516, |
|
"learning_rate": 1.9998152162914807e-05, |
|
"loss": 1.0513, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.10784763653051858, |
|
"grad_norm": 2.171023592361791, |
|
"learning_rate": 1.9996291608635527e-05, |
|
"loss": 1.0566, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.1101422670949977, |
|
"grad_norm": 2.1701858629565445, |
|
"learning_rate": 1.9993789665867316e-05, |
|
"loss": 1.0371, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.11243689765947683, |
|
"grad_norm": 2.2165028686751254, |
|
"learning_rate": 1.9990646495141445e-05, |
|
"loss": 1.0491, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.11473152822395594, |
|
"grad_norm": 2.6072713574574466, |
|
"learning_rate": 1.998686229813205e-05, |
|
"loss": 1.0636, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.11702615878843506, |
|
"grad_norm": 2.135919480089347, |
|
"learning_rate": 1.9982437317643218e-05, |
|
"loss": 1.05, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.11932078935291418, |
|
"grad_norm": 2.4909036218466993, |
|
"learning_rate": 1.9977371837593382e-05, |
|
"loss": 1.0611, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1216154199173933, |
|
"grad_norm": 5.355161964355881, |
|
"learning_rate": 1.997166618299714e-05, |
|
"loss": 1.0253, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.12391005048187242, |
|
"grad_norm": 2.374369183096664, |
|
"learning_rate": 1.9965320719944366e-05, |
|
"loss": 1.044, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.12620468104635155, |
|
"grad_norm": 2.5178752868507144, |
|
"learning_rate": 1.995833585557674e-05, |
|
"loss": 1.0692, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.12849931161083067, |
|
"grad_norm": 2.560582169592424, |
|
"learning_rate": 1.9950712038061617e-05, |
|
"loss": 1.0518, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.13079394217530976, |
|
"grad_norm": 2.279796271646809, |
|
"learning_rate": 1.994244975656328e-05, |
|
"loss": 1.0627, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.13308857273978889, |
|
"grad_norm": 2.428851471685207, |
|
"learning_rate": 1.993354954121155e-05, |
|
"loss": 1.0672, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.135383203304268, |
|
"grad_norm": 2.253932739513235, |
|
"learning_rate": 1.9924011963067765e-05, |
|
"loss": 1.0572, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.13767783386874713, |
|
"grad_norm": 2.2863725772380232, |
|
"learning_rate": 1.9913837634088143e-05, |
|
"loss": 1.0612, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13997246443322625, |
|
"grad_norm": 2.3216601188965433, |
|
"learning_rate": 1.9903027207084525e-05, |
|
"loss": 1.0711, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.14226709499770537, |
|
"grad_norm": 2.4619963294994305, |
|
"learning_rate": 1.9891581375682472e-05, |
|
"loss": 1.091, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.1445617255621845, |
|
"grad_norm": 2.6730364673716216, |
|
"learning_rate": 1.9879500874276788e-05, |
|
"loss": 1.0739, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.14685635612666362, |
|
"grad_norm": 2.240223628972246, |
|
"learning_rate": 1.9866786477984357e-05, |
|
"loss": 1.0675, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.14915098669114274, |
|
"grad_norm": 2.3152538012542436, |
|
"learning_rate": 1.985343900259446e-05, |
|
"loss": 1.0723, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.15144561725562183, |
|
"grad_norm": 2.4361030726660364, |
|
"learning_rate": 1.983945930451639e-05, |
|
"loss": 1.0895, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.15374024782010096, |
|
"grad_norm": 2.1416955206141086, |
|
"learning_rate": 1.982484828072452e-05, |
|
"loss": 1.0606, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.15603487838458008, |
|
"grad_norm": 2.1520402841348054, |
|
"learning_rate": 1.9809606868700755e-05, |
|
"loss": 1.0751, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.1583295089490592, |
|
"grad_norm": 2.198043038406304, |
|
"learning_rate": 1.9793736046374375e-05, |
|
"loss": 1.0642, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.16062413951353832, |
|
"grad_norm": 2.310801939427329, |
|
"learning_rate": 1.977723683205928e-05, |
|
"loss": 1.0922, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.16291877007801744, |
|
"grad_norm": 2.108829610277771, |
|
"learning_rate": 1.9760110284388667e-05, |
|
"loss": 1.0738, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.16521340064249657, |
|
"grad_norm": 2.2185687059059602, |
|
"learning_rate": 1.9742357502247104e-05, |
|
"loss": 1.0636, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.1675080312069757, |
|
"grad_norm": 2.3820797912728433, |
|
"learning_rate": 1.9723979624700004e-05, |
|
"loss": 1.0646, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.16980266177145478, |
|
"grad_norm": 2.39166890987706, |
|
"learning_rate": 1.970497783092057e-05, |
|
"loss": 1.0826, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.1720972923359339, |
|
"grad_norm": 2.067234800314795, |
|
"learning_rate": 1.9685353340114104e-05, |
|
"loss": 1.0675, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.17439192290041303, |
|
"grad_norm": 2.16248649556658, |
|
"learning_rate": 1.9665107411439805e-05, |
|
"loss": 1.0955, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.17668655346489215, |
|
"grad_norm": 2.290019980298114, |
|
"learning_rate": 1.9644241343929966e-05, |
|
"loss": 1.0591, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.17898118402937127, |
|
"grad_norm": 2.2129439226232503, |
|
"learning_rate": 1.9622756476406625e-05, |
|
"loss": 1.0631, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.1812758145938504, |
|
"grad_norm": 2.027387747335278, |
|
"learning_rate": 1.9600654187395666e-05, |
|
"loss": 1.0497, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.18357044515832951, |
|
"grad_norm": 2.4021963976761995, |
|
"learning_rate": 1.9577935895038363e-05, |
|
"loss": 1.0738, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.18586507572280864, |
|
"grad_norm": 2.066698083420903, |
|
"learning_rate": 1.9554603057000397e-05, |
|
"loss": 1.0708, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.18815970628728776, |
|
"grad_norm": 2.2419541719123823, |
|
"learning_rate": 1.953065717037832e-05, |
|
"loss": 1.058, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.19045433685176685, |
|
"grad_norm": 2.2301364115878823, |
|
"learning_rate": 1.9506099771603515e-05, |
|
"loss": 1.0654, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.19274896741624598, |
|
"grad_norm": 2.259635192704883, |
|
"learning_rate": 1.9480932436343584e-05, |
|
"loss": 1.0608, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.1950435979807251, |
|
"grad_norm": 2.1694738965454743, |
|
"learning_rate": 1.945515677940127e-05, |
|
"loss": 1.0795, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.19733822854520422, |
|
"grad_norm": 24.332124680220836, |
|
"learning_rate": 1.9428774454610845e-05, |
|
"loss": 1.09, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.19963285910968334, |
|
"grad_norm": 13.049567795778453, |
|
"learning_rate": 1.9401787154731993e-05, |
|
"loss": 1.6953, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.20192748967416246, |
|
"grad_norm": 7.723175796818905, |
|
"learning_rate": 1.9374196611341212e-05, |
|
"loss": 1.1141, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.20422212023864159, |
|
"grad_norm": 87.6420124445034, |
|
"learning_rate": 1.934600459472067e-05, |
|
"loss": 1.1117, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.2065167508031207, |
|
"grad_norm": 2.9869822864961053, |
|
"learning_rate": 1.931721291374467e-05, |
|
"loss": 1.0953, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.20881138136759983, |
|
"grad_norm": 2.4448207284406775, |
|
"learning_rate": 1.9287823415763552e-05, |
|
"loss": 1.0806, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.21110601193207892, |
|
"grad_norm": 2.566404692048504, |
|
"learning_rate": 1.9257837986485187e-05, |
|
"loss": 1.0969, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.21340064249655805, |
|
"grad_norm": 4.385856228808105, |
|
"learning_rate": 1.922725854985396e-05, |
|
"loss": 1.055, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.21569527306103717, |
|
"grad_norm": 2.3775711238334507, |
|
"learning_rate": 1.9196087067927348e-05, |
|
"loss": 1.0698, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.2179899036255163, |
|
"grad_norm": 2.2620590555710915, |
|
"learning_rate": 1.916432554075002e-05, |
|
"loss": 1.0825, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.2202845341899954, |
|
"grad_norm": 2.7304207516254997, |
|
"learning_rate": 1.913197600622549e-05, |
|
"loss": 1.035, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.22257916475447453, |
|
"grad_norm": 2.3652995101192382, |
|
"learning_rate": 1.9099040539985395e-05, |
|
"loss": 1.0598, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.22487379531895366, |
|
"grad_norm": 2.3400675787952627, |
|
"learning_rate": 1.90655212552563e-05, |
|
"loss": 1.0507, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.22716842588343278, |
|
"grad_norm": 2.1219643393114493, |
|
"learning_rate": 1.9031420302724093e-05, |
|
"loss": 1.0549, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.22946305644791187, |
|
"grad_norm": 2.0597546067330454, |
|
"learning_rate": 1.8996739870396027e-05, |
|
"loss": 1.0545, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.231757687012391, |
|
"grad_norm": 2.0948012744577467, |
|
"learning_rate": 1.896148218346028e-05, |
|
"loss": 1.052, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.23405231757687012, |
|
"grad_norm": 2.1526817626887227, |
|
"learning_rate": 1.8925649504143244e-05, |
|
"loss": 1.0462, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.23634694814134924, |
|
"grad_norm": 2.038989673045541, |
|
"learning_rate": 1.888924413156432e-05, |
|
"loss": 1.0651, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.23864157870582836, |
|
"grad_norm": 2.1006533572356183, |
|
"learning_rate": 1.885226840158843e-05, |
|
"loss": 1.0644, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.24093620927030748, |
|
"grad_norm": 2.3332747317371596, |
|
"learning_rate": 1.8814724686676133e-05, |
|
"loss": 1.0722, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.2432308398347866, |
|
"grad_norm": 1.9851117275060848, |
|
"learning_rate": 1.8776615395731398e-05, |
|
"loss": 1.0596, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.24552547039926573, |
|
"grad_norm": 2.030081193927863, |
|
"learning_rate": 1.8737942973947062e-05, |
|
"loss": 1.0759, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.24782010096374485, |
|
"grad_norm": 2.2516155399349866, |
|
"learning_rate": 1.8698709902647903e-05, |
|
"loss": 1.0453, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.25011473152822394, |
|
"grad_norm": 2.5068515840745067, |
|
"learning_rate": 1.865891869913147e-05, |
|
"loss": 1.0908, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.2524093620927031, |
|
"grad_norm": 2.4364345139539307, |
|
"learning_rate": 1.8618571916506548e-05, |
|
"loss": 1.0747, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.2547039926571822, |
|
"grad_norm": 2.174192040006664, |
|
"learning_rate": 1.8577672143529337e-05, |
|
"loss": 1.0695, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.25699862322166134, |
|
"grad_norm": 2.1082629086312306, |
|
"learning_rate": 1.853622200443737e-05, |
|
"loss": 1.0678, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.25929325378614043, |
|
"grad_norm": 2.026795919074908, |
|
"learning_rate": 1.849422415878112e-05, |
|
"loss": 1.0615, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.2615878843506195, |
|
"grad_norm": 4.391522020875447, |
|
"learning_rate": 1.8451681301253363e-05, |
|
"loss": 1.0748, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.2638825149150987, |
|
"grad_norm": 2.2545875042843218, |
|
"learning_rate": 1.840859616151627e-05, |
|
"loss": 1.0841, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.26617714547957777, |
|
"grad_norm": 2.4383336966875806, |
|
"learning_rate": 1.8364971504026273e-05, |
|
"loss": 1.0683, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.2684717760440569, |
|
"grad_norm": 2.032234747949604, |
|
"learning_rate": 1.8320810127856706e-05, |
|
"loss": 1.0503, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.270766406608536, |
|
"grad_norm": 2.135201384180716, |
|
"learning_rate": 1.827611486651817e-05, |
|
"loss": 1.0702, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.27306103717301516, |
|
"grad_norm": 3.134318242625564, |
|
"learning_rate": 1.8230888587776758e-05, |
|
"loss": 1.0808, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.27535566773749426, |
|
"grad_norm": 2.098719263576858, |
|
"learning_rate": 1.8185134193470043e-05, |
|
"loss": 1.0639, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.2776502983019734, |
|
"grad_norm": 2.167620655991022, |
|
"learning_rate": 1.8138854619320893e-05, |
|
"loss": 1.0679, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.2799449288664525, |
|
"grad_norm": 2.110572139729922, |
|
"learning_rate": 1.8092052834749094e-05, |
|
"loss": 1.0481, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.2822395594309316, |
|
"grad_norm": 2.111753085065974, |
|
"learning_rate": 1.804473184268084e-05, |
|
"loss": 1.0929, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.28453418999541075, |
|
"grad_norm": 2.047982339354027, |
|
"learning_rate": 1.799689467935604e-05, |
|
"loss": 1.0675, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.28682882055988984, |
|
"grad_norm": 1.9901275601162307, |
|
"learning_rate": 1.7948544414133534e-05, |
|
"loss": 1.0671, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.289123451124369, |
|
"grad_norm": 2.355283701088896, |
|
"learning_rate": 1.7899684149294118e-05, |
|
"loss": 1.0831, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.2914180816888481, |
|
"grad_norm": 2.034042330707897, |
|
"learning_rate": 1.7850317019841514e-05, |
|
"loss": 1.0551, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.29371271225332723, |
|
"grad_norm": 2.0459885339908337, |
|
"learning_rate": 1.7800446193301225e-05, |
|
"loss": 1.0656, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.29600734281780633, |
|
"grad_norm": 1.9809660700340832, |
|
"learning_rate": 1.7750074869517285e-05, |
|
"loss": 1.0547, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.2983019733822855, |
|
"grad_norm": 2.1222169648334037, |
|
"learning_rate": 1.7699206280446955e-05, |
|
"loss": 1.0856, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.3005966039467646, |
|
"grad_norm": 2.033320981208692, |
|
"learning_rate": 1.7647843689953352e-05, |
|
"loss": 1.0496, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.30289123451124367, |
|
"grad_norm": 1.9724823263811897, |
|
"learning_rate": 1.759599039359603e-05, |
|
"loss": 1.058, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.3051858650757228, |
|
"grad_norm": 2.081504542418749, |
|
"learning_rate": 1.754364971841952e-05, |
|
"loss": 1.0652, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.3074804956402019, |
|
"grad_norm": 1.9226506132795518, |
|
"learning_rate": 1.749082502273988e-05, |
|
"loss": 1.0498, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.30977512620468106, |
|
"grad_norm": 2.093061922783108, |
|
"learning_rate": 1.7437519695929194e-05, |
|
"loss": 1.0469, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.31206975676916016, |
|
"grad_norm": 1.9436746042499786, |
|
"learning_rate": 1.738373715819811e-05, |
|
"loss": 1.0935, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.3143643873336393, |
|
"grad_norm": 2.141922458996327, |
|
"learning_rate": 1.7329480860376392e-05, |
|
"loss": 1.0708, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.3166590178981184, |
|
"grad_norm": 1.9342237159056526, |
|
"learning_rate": 1.7274754283691507e-05, |
|
"loss": 1.0576, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.3189536484625975, |
|
"grad_norm": 2.3671090315479724, |
|
"learning_rate": 1.7219560939545246e-05, |
|
"loss": 1.0314, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.32124827902707664, |
|
"grad_norm": 2.804036651572821, |
|
"learning_rate": 1.7163904369288443e-05, |
|
"loss": 1.037, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.32354290959155574, |
|
"grad_norm": 2.130875477845555, |
|
"learning_rate": 1.710778814399374e-05, |
|
"loss": 1.0676, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.3258375401560349, |
|
"grad_norm": 2.2917073480609145, |
|
"learning_rate": 1.705121586422647e-05, |
|
"loss": 1.0603, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.328132170720514, |
|
"grad_norm": 1.9344196026563947, |
|
"learning_rate": 1.699419115981361e-05, |
|
"loss": 1.0563, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.33042680128499313, |
|
"grad_norm": 2.588695773647949, |
|
"learning_rate": 1.69367176896109e-05, |
|
"loss": 1.0383, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.3327214318494722, |
|
"grad_norm": 1.8338278514413, |
|
"learning_rate": 1.6878799141268107e-05, |
|
"loss": 1.0367, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.3350160624139514, |
|
"grad_norm": 1.9232017037694178, |
|
"learning_rate": 1.682043923099234e-05, |
|
"loss": 1.0609, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.33731069297843047, |
|
"grad_norm": 2.0707722390620638, |
|
"learning_rate": 1.6761641703309702e-05, |
|
"loss": 1.0544, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.33960532354290957, |
|
"grad_norm": 1.863894286464851, |
|
"learning_rate": 1.6702410330824962e-05, |
|
"loss": 1.0459, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.3418999541073887, |
|
"grad_norm": 2.295179291530668, |
|
"learning_rate": 1.6642748913979515e-05, |
|
"loss": 1.0664, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.3441945846718678, |
|
"grad_norm": 1.9748901739443767, |
|
"learning_rate": 1.6582661280807553e-05, |
|
"loss": 1.0824, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.34648921523634696, |
|
"grad_norm": 2.0941843827334976, |
|
"learning_rate": 1.652215128669042e-05, |
|
"loss": 1.0663, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.34878384580082605, |
|
"grad_norm": 2.026330695493712, |
|
"learning_rate": 1.646122281410927e-05, |
|
"loss": 1.0582, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.3510784763653052, |
|
"grad_norm": 1.814204512009467, |
|
"learning_rate": 1.6399879772395915e-05, |
|
"loss": 1.0664, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.3533731069297843, |
|
"grad_norm": 2.0517724058062794, |
|
"learning_rate": 1.633812609748206e-05, |
|
"loss": 1.0479, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.35566773749426345, |
|
"grad_norm": 1.917088322296075, |
|
"learning_rate": 1.6275965751646682e-05, |
|
"loss": 1.049, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.35796236805874254, |
|
"grad_norm": 1.8734674623090466, |
|
"learning_rate": 1.6213402723261852e-05, |
|
"loss": 1.0544, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.36025699862322164, |
|
"grad_norm": 2.1022753225906268, |
|
"learning_rate": 1.6150441026536827e-05, |
|
"loss": 1.0487, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.3625516291877008, |
|
"grad_norm": 1.8722897359622224, |
|
"learning_rate": 1.6087084701260468e-05, |
|
"loss": 1.0349, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.3648462597521799, |
|
"grad_norm": 4.170483033000324, |
|
"learning_rate": 1.6023337812542048e-05, |
|
"loss": 1.0416, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.36714089031665903, |
|
"grad_norm": 1.9386838285813406, |
|
"learning_rate": 1.5959204450550427e-05, |
|
"loss": 1.0588, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.3694355208811381, |
|
"grad_norm": 2.0117934404635136, |
|
"learning_rate": 1.5894688730251613e-05, |
|
"loss": 1.0454, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.3717301514456173, |
|
"grad_norm": 2.121087436021138, |
|
"learning_rate": 1.5829794791144723e-05, |
|
"loss": 1.0388, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.37402478201009637, |
|
"grad_norm": 2.0084458470640394, |
|
"learning_rate": 1.57645267969964e-05, |
|
"loss": 1.0425, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.3763194125745755, |
|
"grad_norm": 1.9502413688336317, |
|
"learning_rate": 1.569888893557365e-05, |
|
"loss": 1.0352, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.3786140431390546, |
|
"grad_norm": 1.9753852329094403, |
|
"learning_rate": 1.5632885418375136e-05, |
|
"loss": 1.0562, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.3809086737035337, |
|
"grad_norm": 2.0333895336233745, |
|
"learning_rate": 1.556652048036096e-05, |
|
"loss": 1.0571, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.38320330426801286, |
|
"grad_norm": 2.2000920597322464, |
|
"learning_rate": 1.549979837968094e-05, |
|
"loss": 1.0504, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.38549793483249195, |
|
"grad_norm": 2.0041860947851777, |
|
"learning_rate": 1.5432723397401406e-05, |
|
"loss": 1.0483, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3877925653969711, |
|
"grad_norm": 2.063158568485873, |
|
"learning_rate": 1.5365299837230483e-05, |
|
"loss": 1.0479, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.3900871959614502, |
|
"grad_norm": 2.0282677973594017, |
|
"learning_rate": 1.5297532025241993e-05, |
|
"loss": 1.03, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.39238182652592934, |
|
"grad_norm": 1.8181342854668368, |
|
"learning_rate": 1.5229424309597853e-05, |
|
"loss": 1.0461, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.39467645709040844, |
|
"grad_norm": 1.9188404459309296, |
|
"learning_rate": 1.5160981060269107e-05, |
|
"loss": 1.0926, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.3969710876548876, |
|
"grad_norm": 1.8587214491979347, |
|
"learning_rate": 1.5092206668755518e-05, |
|
"loss": 1.023, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.3992657182193667, |
|
"grad_norm": 2.0702564004428736, |
|
"learning_rate": 1.5023105547803807e-05, |
|
"loss": 1.0497, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.4015603487838458, |
|
"grad_norm": 1.866283622621383, |
|
"learning_rate": 1.4953682131124527e-05, |
|
"loss": 1.0548, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.4038549793483249, |
|
"grad_norm": 2.3748260414549613, |
|
"learning_rate": 1.4883940873107572e-05, |
|
"loss": 1.0354, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.406149609912804, |
|
"grad_norm": 1.8622352558527133, |
|
"learning_rate": 1.4813886248536376e-05, |
|
"loss": 1.0272, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.40844424047728317, |
|
"grad_norm": 1.8451716042859445, |
|
"learning_rate": 1.4743522752300793e-05, |
|
"loss": 1.0346, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.41073887104176227, |
|
"grad_norm": 1.7791032929700201, |
|
"learning_rate": 1.467285489910872e-05, |
|
"loss": 1.0445, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.4130335016062414, |
|
"grad_norm": 1.9142844645885038, |
|
"learning_rate": 1.4601887223196374e-05, |
|
"loss": 1.0367, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.4153281321707205, |
|
"grad_norm": 1.9821960797296039, |
|
"learning_rate": 1.4530624278037406e-05, |
|
"loss": 1.0459, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.41762276273519966, |
|
"grad_norm": 1.8030416883238183, |
|
"learning_rate": 1.4459070636050721e-05, |
|
"loss": 1.0395, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.41991739329967875, |
|
"grad_norm": 1.8541049154816385, |
|
"learning_rate": 1.4387230888307098e-05, |
|
"loss": 1.0297, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.42221202386415785, |
|
"grad_norm": 1.882570932670042, |
|
"learning_rate": 1.4315109644234619e-05, |
|
"loss": 1.0526, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.424506654428637, |
|
"grad_norm": 2.104659835262144, |
|
"learning_rate": 1.4242711531322912e-05, |
|
"loss": 1.0602, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.4268012849931161, |
|
"grad_norm": 1.9520034799103085, |
|
"learning_rate": 1.4170041194826247e-05, |
|
"loss": 1.0513, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.42909591555759524, |
|
"grad_norm": 1.8636499197241878, |
|
"learning_rate": 1.4097103297465471e-05, |
|
"loss": 1.0313, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.43139054612207434, |
|
"grad_norm": 1.866901038371879, |
|
"learning_rate": 1.402390251912885e-05, |
|
"loss": 1.0492, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.4336851766865535, |
|
"grad_norm": 1.9095505177623704, |
|
"learning_rate": 1.395044355657178e-05, |
|
"loss": 1.0427, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.4359798072510326, |
|
"grad_norm": 1.8833835923493178, |
|
"learning_rate": 1.387673112311545e-05, |
|
"loss": 1.0062, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.4382744378155117, |
|
"grad_norm": 1.8503750464774162, |
|
"learning_rate": 1.3802769948344406e-05, |
|
"loss": 1.0584, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.4405690683799908, |
|
"grad_norm": 1.9850425042063173, |
|
"learning_rate": 1.3728564777803089e-05, |
|
"loss": 1.0299, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.4428636989444699, |
|
"grad_norm": 2.0470542749046308, |
|
"learning_rate": 1.3654120372691361e-05, |
|
"loss": 1.0211, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.44515832950894907, |
|
"grad_norm": 1.963916772949296, |
|
"learning_rate": 1.3579441509559007e-05, |
|
"loss": 1.0453, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.44745296007342816, |
|
"grad_norm": 1.889454130597838, |
|
"learning_rate": 1.350453297999925e-05, |
|
"loss": 1.0441, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.4497475906379073, |
|
"grad_norm": 1.8300661545935728, |
|
"learning_rate": 1.3429399590341325e-05, |
|
"loss": 1.0109, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.4520422212023864, |
|
"grad_norm": 1.8927209237187554, |
|
"learning_rate": 1.3354046161342087e-05, |
|
"loss": 1.0153, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.45433685176686556, |
|
"grad_norm": 1.8419785511286724, |
|
"learning_rate": 1.327847752787669e-05, |
|
"loss": 1.0234, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.45663148233134465, |
|
"grad_norm": 1.8479631910892635, |
|
"learning_rate": 1.3202698538628376e-05, |
|
"loss": 1.0515, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.45892611289582375, |
|
"grad_norm": 1.8454383680403843, |
|
"learning_rate": 1.3126714055777378e-05, |
|
"loss": 1.0283, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.4612207434603029, |
|
"grad_norm": 1.8614938060407424, |
|
"learning_rate": 1.3050528954688932e-05, |
|
"loss": 1.028, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.463515374024782, |
|
"grad_norm": 2.138120924499351, |
|
"learning_rate": 1.2974148123600477e-05, |
|
"loss": 1.0154, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.46581000458926114, |
|
"grad_norm": 1.8576229088325686, |
|
"learning_rate": 1.2897576463307999e-05, |
|
"loss": 1.036, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.46810463515374023, |
|
"grad_norm": 2.0045877966342776, |
|
"learning_rate": 1.2820818886851599e-05, |
|
"loss": 1.0382, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.4703992657182194, |
|
"grad_norm": 1.8120186612041058, |
|
"learning_rate": 1.2743880319200241e-05, |
|
"loss": 1.0532, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.4726938962826985, |
|
"grad_norm": 1.8202058192088733, |
|
"learning_rate": 1.2666765696935773e-05, |
|
"loss": 1.0223, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.47498852684717763, |
|
"grad_norm": 1.8341249835092734, |
|
"learning_rate": 1.2589479967936163e-05, |
|
"loss": 1.0327, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.4772831574116567, |
|
"grad_norm": 1.890017754125909, |
|
"learning_rate": 1.2512028091058044e-05, |
|
"loss": 1.0282, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.4795777879761358, |
|
"grad_norm": 1.7963761077816227, |
|
"learning_rate": 1.2434415035818535e-05, |
|
"loss": 1.015, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.48187241854061497, |
|
"grad_norm": 1.89045217328483, |
|
"learning_rate": 1.2356645782076384e-05, |
|
"loss": 1.0189, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.48416704910509406, |
|
"grad_norm": 1.8184075309069563, |
|
"learning_rate": 1.2278725319712449e-05, |
|
"loss": 1.0388, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.4864616796695732, |
|
"grad_norm": 1.7960523305438247, |
|
"learning_rate": 1.2200658648309531e-05, |
|
"loss": 1.0447, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.4887563102340523, |
|
"grad_norm": 1.8467823592474473, |
|
"learning_rate": 1.2122450776831593e-05, |
|
"loss": 1.0122, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.49105094079853145, |
|
"grad_norm": 1.7814611078259526, |
|
"learning_rate": 1.2044106723302364e-05, |
|
"loss": 0.998, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.49334557136301055, |
|
"grad_norm": 1.8655763524159674, |
|
"learning_rate": 1.1965631514483376e-05, |
|
"loss": 1.0051, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.4956402019274897, |
|
"grad_norm": 1.8504586712684936, |
|
"learning_rate": 1.1887030185551427e-05, |
|
"loss": 1.0173, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.4979348324919688, |
|
"grad_norm": 1.8553598830430822, |
|
"learning_rate": 1.1808307779775518e-05, |
|
"loss": 1.0276, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.5002294630564479, |
|
"grad_norm": 1.7545364104117196, |
|
"learning_rate": 1.1729469348193263e-05, |
|
"loss": 1.0079, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.502524093620927, |
|
"grad_norm": 1.7459754103129477, |
|
"learning_rate": 1.1650519949286797e-05, |
|
"loss": 1.0049, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.5048187241854062, |
|
"grad_norm": 1.8061298693213486, |
|
"learning_rate": 1.1571464648658201e-05, |
|
"loss": 0.9806, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.5071133547498853, |
|
"grad_norm": 1.7062918430841572, |
|
"learning_rate": 1.1492308518704507e-05, |
|
"loss": 1.0133, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.5094079853143644, |
|
"grad_norm": 1.744436514313244, |
|
"learning_rate": 1.1413056638292215e-05, |
|
"loss": 0.9962, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.5117026158788435, |
|
"grad_norm": 1.7695374210335713, |
|
"learning_rate": 1.1333714092431423e-05, |
|
"loss": 1.0085, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.5139972464433227, |
|
"grad_norm": 1.9055267822840887, |
|
"learning_rate": 1.1254285971949574e-05, |
|
"loss": 1.0198, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.5162918770078018, |
|
"grad_norm": 1.7470563158478891, |
|
"learning_rate": 1.1174777373164797e-05, |
|
"loss": 1.005, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.5185865075722809, |
|
"grad_norm": 1.8402758989282078, |
|
"learning_rate": 1.109519339755893e-05, |
|
"loss": 1.0225, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.52088113813676, |
|
"grad_norm": 1.9185683165429528, |
|
"learning_rate": 1.1015539151450172e-05, |
|
"loss": 1.0202, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.523175768701239, |
|
"grad_norm": 1.7862218928745999, |
|
"learning_rate": 1.0935819745665477e-05, |
|
"loss": 0.9938, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.5254703992657183, |
|
"grad_norm": 1.7946257670444443, |
|
"learning_rate": 1.0856040295212614e-05, |
|
"loss": 1.0203, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.5277650298301974, |
|
"grad_norm": 1.8084692996883154, |
|
"learning_rate": 1.077620591895197e-05, |
|
"loss": 1.0132, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.5300596603946764, |
|
"grad_norm": 1.8496851492898159, |
|
"learning_rate": 1.069632173926812e-05, |
|
"loss": 1.009, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.5323542909591555, |
|
"grad_norm": 1.7965406450823929, |
|
"learning_rate": 1.0616392881741166e-05, |
|
"loss": 0.9898, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.5346489215236347, |
|
"grad_norm": 1.8696271364197514, |
|
"learning_rate": 1.0536424474817848e-05, |
|
"loss": 0.9949, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.5369435520881138, |
|
"grad_norm": 1.8512029165438884, |
|
"learning_rate": 1.0456421649482502e-05, |
|
"loss": 1.0065, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.5392381826525929, |
|
"grad_norm": 1.833426494602823, |
|
"learning_rate": 1.0376389538927841e-05, |
|
"loss": 1.0158, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.541532813217072, |
|
"grad_norm": 1.8287651297802663, |
|
"learning_rate": 1.0296333278225599e-05, |
|
"loss": 1.0064, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.5438274437815511, |
|
"grad_norm": 1.8964046455895252, |
|
"learning_rate": 1.0216258003997044e-05, |
|
"loss": 1.0198, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.5461220743460303, |
|
"grad_norm": 1.777279291853422, |
|
"learning_rate": 1.0136168854083401e-05, |
|
"loss": 0.9712, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.5484167049105094, |
|
"grad_norm": 1.8585809331304046, |
|
"learning_rate": 1.0056070967216199e-05, |
|
"loss": 0.9954, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.5507113354749885, |
|
"grad_norm": 1.9725460354320736, |
|
"learning_rate": 9.975969482687547e-06, |
|
"loss": 0.9835, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.5530059660394676, |
|
"grad_norm": 1.7245123642288822, |
|
"learning_rate": 9.8958695400204e-06, |
|
"loss": 1.0004, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.5553005966039468, |
|
"grad_norm": 1.8366846045184153, |
|
"learning_rate": 9.815776278638772e-06, |
|
"loss": 1.0265, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.5575952271684259, |
|
"grad_norm": 1.8649330489030282, |
|
"learning_rate": 9.735694837537993e-06, |
|
"loss": 0.9572, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.559889857732905, |
|
"grad_norm": 1.7668993960491821, |
|
"learning_rate": 9.655630354954974e-06, |
|
"loss": 0.9922, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.5621844882973841, |
|
"grad_norm": 1.9573923418916903, |
|
"learning_rate": 9.57558796803852e-06, |
|
"loss": 1.0109, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.5644791188618632, |
|
"grad_norm": 1.711437620336517, |
|
"learning_rate": 9.495572812519718e-06, |
|
"loss": 0.9946, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.5667737494263424, |
|
"grad_norm": 1.7691621774728759, |
|
"learning_rate": 9.41559002238242e-06, |
|
"loss": 1.0061, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.5690683799908215, |
|
"grad_norm": 1.8386891074186076, |
|
"learning_rate": 9.33564472953383e-06, |
|
"loss": 0.9886, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.5713630105553006, |
|
"grad_norm": 1.8757816038935415, |
|
"learning_rate": 9.255742063475228e-06, |
|
"loss": 0.9952, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.5736576411197797, |
|
"grad_norm": 1.7204727826291393, |
|
"learning_rate": 9.175887150972841e-06, |
|
"loss": 0.9922, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.5759522716842589, |
|
"grad_norm": 1.7936118442403097, |
|
"learning_rate": 9.096085115728902e-06, |
|
"loss": 0.982, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.578246902248738, |
|
"grad_norm": 1.6703981680215847, |
|
"learning_rate": 9.016341078052908e-06, |
|
"loss": 0.9828, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.5805415328132171, |
|
"grad_norm": 1.836996443217227, |
|
"learning_rate": 8.93666015453307e-06, |
|
"loss": 0.9882, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.5828361633776962, |
|
"grad_norm": 2.001392851330759, |
|
"learning_rate": 8.857047457708023e-06, |
|
"loss": 0.9923, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.5851307939421753, |
|
"grad_norm": 1.779792680703507, |
|
"learning_rate": 8.777508095738818e-06, |
|
"loss": 0.9821, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.5874254245066545, |
|
"grad_norm": 1.7551844873505347, |
|
"learning_rate": 8.698047172081129e-06, |
|
"loss": 0.9905, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.5897200550711336, |
|
"grad_norm": 1.934345593929013, |
|
"learning_rate": 8.618669785157825e-06, |
|
"loss": 0.9796, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.5920146856356127, |
|
"grad_norm": 1.8648278829036111, |
|
"learning_rate": 8.539381028031838e-06, |
|
"loss": 1.0053, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.5943093162000918, |
|
"grad_norm": 1.7236526799565135, |
|
"learning_rate": 8.46018598807938e-06, |
|
"loss": 1.0005, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.596603946764571, |
|
"grad_norm": 1.8020519118969505, |
|
"learning_rate": 8.381089746663517e-06, |
|
"loss": 0.9855, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.59889857732905, |
|
"grad_norm": 1.8759050818870677, |
|
"learning_rate": 8.302097378808147e-06, |
|
"loss": 0.9884, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.6011932078935291, |
|
"grad_norm": 1.7101070470722577, |
|
"learning_rate": 8.223213952872353e-06, |
|
"loss": 0.9939, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.6034878384580082, |
|
"grad_norm": 1.8363414696856302, |
|
"learning_rate": 8.144444530225237e-06, |
|
"loss": 1.006, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.6057824690224873, |
|
"grad_norm": 1.8189880317647966, |
|
"learning_rate": 8.065794164921128e-06, |
|
"loss": 1.0103, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.6080770995869665, |
|
"grad_norm": 1.7184329357048536, |
|
"learning_rate": 7.987267903375331e-06, |
|
"loss": 0.9802, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.6103717301514456, |
|
"grad_norm": 1.6760569997350725, |
|
"learning_rate": 7.90887078404033e-06, |
|
"loss": 0.9864, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.6126663607159247, |
|
"grad_norm": 1.759298284823385, |
|
"learning_rate": 7.830607837082494e-06, |
|
"loss": 0.979, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.6149609912804038, |
|
"grad_norm": 1.693614807322877, |
|
"learning_rate": 7.75248408405934e-06, |
|
"loss": 0.9865, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.617255621844883, |
|
"grad_norm": 1.7256883752598309, |
|
"learning_rate": 7.674504537597336e-06, |
|
"loss": 0.9883, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.6195502524093621, |
|
"grad_norm": 1.7555572901812775, |
|
"learning_rate": 7.596674201070282e-06, |
|
"loss": 0.9953, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.6218448829738412, |
|
"grad_norm": 1.7549420037924819, |
|
"learning_rate": 7.518998068278266e-06, |
|
"loss": 0.9761, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.6241395135383203, |
|
"grad_norm": 1.7638758955771752, |
|
"learning_rate": 7.441481123127257e-06, |
|
"loss": 0.9663, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.6264341441027994, |
|
"grad_norm": 1.9316469572623007, |
|
"learning_rate": 7.364128339309326e-06, |
|
"loss": 0.9912, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.6287287746672786, |
|
"grad_norm": 1.7094849210033014, |
|
"learning_rate": 7.286944679983521e-06, |
|
"loss": 0.9759, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.6310234052317577, |
|
"grad_norm": 1.6887590729536133, |
|
"learning_rate": 7.209935097457412e-06, |
|
"loss": 0.9899, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.6333180357962368, |
|
"grad_norm": 1.7727590940094575, |
|
"learning_rate": 7.133104532869342e-06, |
|
"loss": 1.0, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.6356126663607159, |
|
"grad_norm": 1.8172481765220854, |
|
"learning_rate": 7.056457915871399e-06, |
|
"loss": 0.9663, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.637907296925195, |
|
"grad_norm": 1.7974312188733144, |
|
"learning_rate": 6.980000164313093e-06, |
|
"loss": 0.9586, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.6402019274896742, |
|
"grad_norm": 1.7508328675764726, |
|
"learning_rate": 6.903736183925835e-06, |
|
"loss": 0.9719, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.6424965580541533, |
|
"grad_norm": 1.7326843284737148, |
|
"learning_rate": 6.82767086800817e-06, |
|
"loss": 0.9961, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.6447911886186324, |
|
"grad_norm": 1.7352259185492422, |
|
"learning_rate": 6.751809097111799e-06, |
|
"loss": 0.997, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.6470858191831115, |
|
"grad_norm": 1.8933368018227825, |
|
"learning_rate": 6.676155738728438e-06, |
|
"loss": 0.9779, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.6493804497475907, |
|
"grad_norm": 1.6974498786941927, |
|
"learning_rate": 6.600715646977503e-06, |
|
"loss": 0.9799, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.6516750803120698, |
|
"grad_norm": 1.7806592546745614, |
|
"learning_rate": 6.525493662294669e-06, |
|
"loss": 0.9718, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.6539697108765489, |
|
"grad_norm": 1.7658701508098122, |
|
"learning_rate": 6.450494611121274e-06, |
|
"loss": 0.9852, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.656264341441028, |
|
"grad_norm": 1.721774620040715, |
|
"learning_rate": 6.375723305594658e-06, |
|
"loss": 0.9766, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.6585589720055071, |
|
"grad_norm": 1.8456782805007963, |
|
"learning_rate": 6.301184543239398e-06, |
|
"loss": 0.9604, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.6608536025699863, |
|
"grad_norm": 1.6517747886851708, |
|
"learning_rate": 6.2268831066594846e-06, |
|
"loss": 0.9978, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.6631482331344654, |
|
"grad_norm": 1.7868240633431423, |
|
"learning_rate": 6.152823763231463e-06, |
|
"loss": 0.9659, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.6654428636989445, |
|
"grad_norm": 1.6864295399871028, |
|
"learning_rate": 6.079011264798534e-06, |
|
"loss": 0.97, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.6677374942634235, |
|
"grad_norm": 1.7298790044434966, |
|
"learning_rate": 6.005450347365687e-06, |
|
"loss": 0.9807, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.6700321248279028, |
|
"grad_norm": 1.7628376564735946, |
|
"learning_rate": 5.932145730795793e-06, |
|
"loss": 0.9686, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.6723267553923818, |
|
"grad_norm": 1.7343262868098874, |
|
"learning_rate": 5.859102118506787e-06, |
|
"loss": 0.9524, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.6746213859568609, |
|
"grad_norm": 1.9084160680148732, |
|
"learning_rate": 5.786324197169887e-06, |
|
"loss": 0.9818, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.67691601652134, |
|
"grad_norm": 1.735061984384823, |
|
"learning_rate": 5.713816636408871e-06, |
|
"loss": 0.9673, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.6792106470858191, |
|
"grad_norm": 1.7982234436842786, |
|
"learning_rate": 5.641584088500461e-06, |
|
"loss": 0.9694, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.6815052776502983, |
|
"grad_norm": 1.7104886881206862, |
|
"learning_rate": 5.569631188075842e-06, |
|
"loss": 0.9638, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.6837999082147774, |
|
"grad_norm": 1.7069667971132039, |
|
"learning_rate": 5.497962551823266e-06, |
|
"loss": 0.9746, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.6860945387792565, |
|
"grad_norm": 2.380253232983682, |
|
"learning_rate": 5.426582778191858e-06, |
|
"loss": 0.9898, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.6883891693437356, |
|
"grad_norm": 1.789361116223533, |
|
"learning_rate": 5.355496447096533e-06, |
|
"loss": 0.9813, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.6906837999082148, |
|
"grad_norm": 1.7257448134853217, |
|
"learning_rate": 5.284708119624173e-06, |
|
"loss": 0.9897, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.6929784304726939, |
|
"grad_norm": 1.695066389007114, |
|
"learning_rate": 5.2142223377409616e-06, |
|
"loss": 0.9662, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.695273061037173, |
|
"grad_norm": 1.8265038779545326, |
|
"learning_rate": 5.144043624000944e-06, |
|
"loss": 0.9221, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.6975676916016521, |
|
"grad_norm": 1.682425505945564, |
|
"learning_rate": 5.074176481255873e-06, |
|
"loss": 0.9665, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.6998623221661312, |
|
"grad_norm": 1.6947722881421843, |
|
"learning_rate": 5.00462539236628e-06, |
|
"loss": 0.9735, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.7021569527306104, |
|
"grad_norm": 2.0862442184881926, |
|
"learning_rate": 4.935394819913849e-06, |
|
"loss": 0.9462, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.7044515832950895, |
|
"grad_norm": 1.8473532390280518, |
|
"learning_rate": 4.866489205915072e-06, |
|
"loss": 0.9699, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.7067462138595686, |
|
"grad_norm": 1.748002794088265, |
|
"learning_rate": 4.7979129715362625e-06, |
|
"loss": 1.0002, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.7090408444240477, |
|
"grad_norm": 1.70600748304509, |
|
"learning_rate": 4.72967051680985e-06, |
|
"loss": 0.9638, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.7113354749885269, |
|
"grad_norm": 1.6249584663789558, |
|
"learning_rate": 4.661766220352098e-06, |
|
"loss": 0.9414, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.713630105553006, |
|
"grad_norm": 1.80174378572434, |
|
"learning_rate": 4.594204439082122e-06, |
|
"loss": 0.9727, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.7159247361174851, |
|
"grad_norm": 1.8374638729274564, |
|
"learning_rate": 4.526989507942374e-06, |
|
"loss": 0.9565, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.7182193666819642, |
|
"grad_norm": 1.6726191265269426, |
|
"learning_rate": 4.460125739620479e-06, |
|
"loss": 0.9487, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.7205139972464433, |
|
"grad_norm": 1.8264345398924386, |
|
"learning_rate": 4.393617424272527e-06, |
|
"loss": 0.9636, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.7228086278109225, |
|
"grad_norm": 1.6887334363300377, |
|
"learning_rate": 4.3274688292478105e-06, |
|
"loss": 0.961, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.7251032583754016, |
|
"grad_norm": 1.8470657094730811, |
|
"learning_rate": 4.261684198815004e-06, |
|
"loss": 0.9694, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.7273978889398807, |
|
"grad_norm": 1.7132408618736092, |
|
"learning_rate": 4.196267753889864e-06, |
|
"loss": 0.9581, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.7296925195043598, |
|
"grad_norm": 1.6900030303741553, |
|
"learning_rate": 4.131223691764384e-06, |
|
"loss": 0.9325, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.731987150068839, |
|
"grad_norm": 1.7522033167073814, |
|
"learning_rate": 4.066556185837494e-06, |
|
"loss": 0.9347, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.7342817806333181, |
|
"grad_norm": 1.755668968211341, |
|
"learning_rate": 4.002269385347289e-06, |
|
"loss": 0.9256, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.7365764111977972, |
|
"grad_norm": 1.713678331254988, |
|
"learning_rate": 3.9383674151047936e-06, |
|
"loss": 0.975, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.7388710417622762, |
|
"grad_norm": 1.6865904408801047, |
|
"learning_rate": 3.8748543752293e-06, |
|
"loss": 0.9729, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.7411656723267553, |
|
"grad_norm": 1.6694830205166722, |
|
"learning_rate": 3.8117343408853124e-06, |
|
"loss": 0.9554, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.7434603028912345, |
|
"grad_norm": 1.6703972771704265, |
|
"learning_rate": 3.7490113620210487e-06, |
|
"loss": 0.9297, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.7457549334557136, |
|
"grad_norm": 1.7865359308285997, |
|
"learning_rate": 3.686689463108608e-06, |
|
"loss": 0.952, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.7480495640201927, |
|
"grad_norm": 1.7448768340578766, |
|
"learning_rate": 3.6247726428857344e-06, |
|
"loss": 0.9286, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.7503441945846718, |
|
"grad_norm": 1.7530030106253895, |
|
"learning_rate": 3.563264874099258e-06, |
|
"loss": 0.9524, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.752638825149151, |
|
"grad_norm": 1.653777443656037, |
|
"learning_rate": 3.5021701032501777e-06, |
|
"loss": 0.9218, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.7549334557136301, |
|
"grad_norm": 3.331790675502453, |
|
"learning_rate": 3.441492250340461e-06, |
|
"loss": 0.9453, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.7572280862781092, |
|
"grad_norm": 1.7244365474975045, |
|
"learning_rate": 3.3812352086215216e-06, |
|
"loss": 0.9374, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.7595227168425883, |
|
"grad_norm": 1.766304020059291, |
|
"learning_rate": 3.3214028443444034e-06, |
|
"loss": 0.9394, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.7618173474070674, |
|
"grad_norm": 1.7263700470016823, |
|
"learning_rate": 3.261998996511736e-06, |
|
"loss": 0.9371, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.7641119779715466, |
|
"grad_norm": 1.7511251400040453, |
|
"learning_rate": 3.203027476631386e-06, |
|
"loss": 0.9535, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.7664066085360257, |
|
"grad_norm": 1.7194345747840845, |
|
"learning_rate": 3.1444920684719394e-06, |
|
"loss": 0.9399, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.7687012391005048, |
|
"grad_norm": 1.7100670269940494, |
|
"learning_rate": 3.086396527819876e-06, |
|
"loss": 0.9311, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.7709958696649839, |
|
"grad_norm": 1.7008517044769462, |
|
"learning_rate": 3.028744582238633e-06, |
|
"loss": 0.9418, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.7732905002294631, |
|
"grad_norm": 1.7289960064005703, |
|
"learning_rate": 2.9715399308294003e-06, |
|
"loss": 0.9748, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.7755851307939422, |
|
"grad_norm": 1.7377650207700148, |
|
"learning_rate": 2.914786243993808e-06, |
|
"loss": 0.936, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.7778797613584213, |
|
"grad_norm": 1.8858822369634092, |
|
"learning_rate": 2.858487163198389e-06, |
|
"loss": 0.9358, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.7801743919229004, |
|
"grad_norm": 1.7588532610850607, |
|
"learning_rate": 2.8026463007409665e-06, |
|
"loss": 0.9399, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.7824690224873795, |
|
"grad_norm": 1.8795956958488933, |
|
"learning_rate": 2.747267239518857e-06, |
|
"loss": 0.9642, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.7847636530518587, |
|
"grad_norm": 1.733996923587813, |
|
"learning_rate": 2.6923535327989925e-06, |
|
"loss": 0.9447, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.7870582836163378, |
|
"grad_norm": 1.7231127716770163, |
|
"learning_rate": 2.637908703989924e-06, |
|
"loss": 0.9615, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.7893529141808169, |
|
"grad_norm": 1.7382322874229845, |
|
"learning_rate": 2.5839362464157635e-06, |
|
"loss": 0.9523, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.791647544745296, |
|
"grad_norm": 1.76018582887562, |
|
"learning_rate": 2.5304396230920346e-06, |
|
"loss": 0.9661, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.7939421753097752, |
|
"grad_norm": 1.6962974121939103, |
|
"learning_rate": 2.477422266503473e-06, |
|
"loss": 0.9491, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.7962368058742543, |
|
"grad_norm": 1.6864869043430553, |
|
"learning_rate": 2.424887578383799e-06, |
|
"loss": 0.9495, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.7985314364387334, |
|
"grad_norm": 1.7487461110618132, |
|
"learning_rate": 2.3728389294974472e-06, |
|
"loss": 0.9497, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.8008260670032125, |
|
"grad_norm": 3.804635705666014, |
|
"learning_rate": 2.3212796594232947e-06, |
|
"loss": 0.9633, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.8031206975676916, |
|
"grad_norm": 1.7104227286473583, |
|
"learning_rate": 2.2702130763403674e-06, |
|
"loss": 0.9402, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.8054153281321708, |
|
"grad_norm": 1.703649935060647, |
|
"learning_rate": 2.2196424568156073e-06, |
|
"loss": 0.9493, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.8077099586966499, |
|
"grad_norm": 1.731876264748493, |
|
"learning_rate": 2.1695710455936115e-06, |
|
"loss": 0.949, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.810004589261129, |
|
"grad_norm": 1.6008487931711857, |
|
"learning_rate": 2.1200020553884603e-06, |
|
"loss": 0.931, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.812299219825608, |
|
"grad_norm": 1.7485750666616973, |
|
"learning_rate": 2.0709386666775732e-06, |
|
"loss": 0.9473, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.8145938503900872, |
|
"grad_norm": 1.907625017544001, |
|
"learning_rate": 2.0223840274976413e-06, |
|
"loss": 0.9921, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.8168884809545663, |
|
"grad_norm": 1.7187402463869024, |
|
"learning_rate": 1.9743412532426355e-06, |
|
"loss": 0.9147, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.8191831115190454, |
|
"grad_norm": 1.6617657428032502, |
|
"learning_rate": 1.9268134264639273e-06, |
|
"loss": 0.9457, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.8214777420835245, |
|
"grad_norm": 1.7007750210249686, |
|
"learning_rate": 1.879803596672497e-06, |
|
"loss": 0.9665, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.8237723726480036, |
|
"grad_norm": 1.7101753911626782, |
|
"learning_rate": 1.8333147801432616e-06, |
|
"loss": 0.929, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.8260670032124828, |
|
"grad_norm": 1.774895067675658, |
|
"learning_rate": 1.7873499597215604e-06, |
|
"loss": 0.9205, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.8283616337769619, |
|
"grad_norm": 1.7257248036119928, |
|
"learning_rate": 1.7419120846317462e-06, |
|
"loss": 0.9482, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.830656264341441, |
|
"grad_norm": 1.7250389140740945, |
|
"learning_rate": 1.697004070287982e-06, |
|
"loss": 0.907, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.8329508949059201, |
|
"grad_norm": 1.6576292112238928, |
|
"learning_rate": 1.6526287981071477e-06, |
|
"loss": 0.9169, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.8352455254703993, |
|
"grad_norm": 1.6244943528548752, |
|
"learning_rate": 1.6087891153239932e-06, |
|
"loss": 0.9707, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.8375401560348784, |
|
"grad_norm": 1.7684196725742007, |
|
"learning_rate": 1.5654878348084246e-06, |
|
"loss": 0.9597, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.8398347865993575, |
|
"grad_norm": 1.7498743654009963, |
|
"learning_rate": 1.5227277348850466e-06, |
|
"loss": 0.9438, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.8421294171638366, |
|
"grad_norm": 1.666533412682978, |
|
"learning_rate": 1.4805115591548746e-06, |
|
"loss": 0.9234, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.8444240477283157, |
|
"grad_norm": 1.6784896812471775, |
|
"learning_rate": 1.4388420163193217e-06, |
|
"loss": 0.9228, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.8467186782927949, |
|
"grad_norm": 1.782913468726149, |
|
"learning_rate": 1.3977217800063847e-06, |
|
"loss": 0.9525, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.849013308857274, |
|
"grad_norm": 1.8714100640537485, |
|
"learning_rate": 1.3571534885991044e-06, |
|
"loss": 0.9202, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.8513079394217531, |
|
"grad_norm": 1.6341170338063304, |
|
"learning_rate": 1.3171397450662716e-06, |
|
"loss": 0.9324, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.8536025699862322, |
|
"grad_norm": 1.7569629604181838, |
|
"learning_rate": 1.2776831167954252e-06, |
|
"loss": 0.92, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.8558972005507114, |
|
"grad_norm": 1.7383544701565197, |
|
"learning_rate": 1.2387861354281194e-06, |
|
"loss": 0.9415, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.8581918311151905, |
|
"grad_norm": 1.6322549841517964, |
|
"learning_rate": 1.2004512966974746e-06, |
|
"loss": 0.9377, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.8604864616796696, |
|
"grad_norm": 1.6131611632532987, |
|
"learning_rate": 1.162681060268065e-06, |
|
"loss": 0.9339, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.8627810922441487, |
|
"grad_norm": 1.825918080886602, |
|
"learning_rate": 1.1254778495780749e-06, |
|
"loss": 0.9435, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.8650757228086278, |
|
"grad_norm": 1.7362620117149066, |
|
"learning_rate": 1.0888440516838373e-06, |
|
"loss": 0.9375, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.867370353373107, |
|
"grad_norm": 1.6924752721498857, |
|
"learning_rate": 1.0527820171066372e-06, |
|
"loss": 0.9518, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.8696649839375861, |
|
"grad_norm": 1.7645375725424164, |
|
"learning_rate": 1.0172940596819258e-06, |
|
"loss": 0.9105, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.8719596145020652, |
|
"grad_norm": 1.7445241378126046, |
|
"learning_rate": 9.823824564108408e-07, |
|
"loss": 0.9534, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.8742542450665443, |
|
"grad_norm": 1.6812050382224737, |
|
"learning_rate": 9.480494473141189e-07, |
|
"loss": 0.9513, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.8765488756310233, |
|
"grad_norm": 1.7180889119146323, |
|
"learning_rate": 9.142972352883595e-07, |
|
"loss": 0.9352, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.8788435061955026, |
|
"grad_norm": 1.593136362005407, |
|
"learning_rate": 8.811279859646915e-07, |
|
"loss": 0.9135, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.8811381367599816, |
|
"grad_norm": 1.732265054877238, |
|
"learning_rate": 8.485438275698154e-07, |
|
"loss": 0.9325, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.8834327673244607, |
|
"grad_norm": 1.6465162040573431, |
|
"learning_rate": 8.165468507894514e-07, |
|
"loss": 0.9363, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.8857273978889398, |
|
"grad_norm": 1.5926308744021644, |
|
"learning_rate": 7.851391086341953e-07, |
|
"loss": 0.9251, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.888022028453419, |
|
"grad_norm": 1.7096508943146629, |
|
"learning_rate": 7.543226163077899e-07, |
|
"loss": 0.9471, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.8903166590178981, |
|
"grad_norm": 1.803335599460816, |
|
"learning_rate": 7.240993510778304e-07, |
|
"loss": 0.9328, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.8926112895823772, |
|
"grad_norm": 1.7212998187307902, |
|
"learning_rate": 6.944712521488884e-07, |
|
"loss": 0.9479, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.8949059201468563, |
|
"grad_norm": 1.735482839392802, |
|
"learning_rate": 6.654402205380961e-07, |
|
"loss": 0.9541, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.8972005507113354, |
|
"grad_norm": 1.65097416177555, |
|
"learning_rate": 6.370081189531707e-07, |
|
"loss": 0.9132, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.8994951812758146, |
|
"grad_norm": 1.7196384780834688, |
|
"learning_rate": 6.091767716728924e-07, |
|
"loss": 0.9341, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.9017898118402937, |
|
"grad_norm": 1.8341416005690085, |
|
"learning_rate": 5.819479644300563e-07, |
|
"loss": 0.928, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.9040844424047728, |
|
"grad_norm": 1.7371545663059045, |
|
"learning_rate": 5.553234442969014e-07, |
|
"loss": 0.9311, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.9063790729692519, |
|
"grad_norm": 1.6781199455045805, |
|
"learning_rate": 5.293049195730038e-07, |
|
"loss": 0.9394, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.9086737035337311, |
|
"grad_norm": 1.7582513777384394, |
|
"learning_rate": 5.038940596756747e-07, |
|
"loss": 0.9495, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.9109683340982102, |
|
"grad_norm": 1.6554350150475086, |
|
"learning_rate": 4.790924950328435e-07, |
|
"loss": 0.9491, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.9132629646626893, |
|
"grad_norm": 1.5696628957382233, |
|
"learning_rate": 4.5490181697844916e-07, |
|
"loss": 0.9047, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.9155575952271684, |
|
"grad_norm": 1.660006052531741, |
|
"learning_rate": 4.313235776503244e-07, |
|
"loss": 0.9266, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.9178522257916475, |
|
"grad_norm": 1.6789601308304245, |
|
"learning_rate": 4.08359289890623e-07, |
|
"loss": 0.9356, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.9201468563561267, |
|
"grad_norm": 1.6938274156881534, |
|
"learning_rate": 3.860104271487397e-07, |
|
"loss": 0.9501, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.9224414869206058, |
|
"grad_norm": 1.6613669924815342, |
|
"learning_rate": 3.6427842338677353e-07, |
|
"loss": 0.9178, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.9247361174850849, |
|
"grad_norm": 1.6863633601389076, |
|
"learning_rate": 3.4316467298752264e-07, |
|
"loss": 0.9283, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.927030748049564, |
|
"grad_norm": 1.6704738495679314, |
|
"learning_rate": 3.226705306650113e-07, |
|
"loss": 0.9397, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.9293253786140432, |
|
"grad_norm": 1.6915649797171424, |
|
"learning_rate": 3.027973113775795e-07, |
|
"loss": 0.9162, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.9316200091785223, |
|
"grad_norm": 1.624516748205876, |
|
"learning_rate": 2.835462902434971e-07, |
|
"loss": 0.9088, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.9339146397430014, |
|
"grad_norm": 1.7321228286245276, |
|
"learning_rate": 2.649187024591604e-07, |
|
"loss": 0.9291, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.9362092703074805, |
|
"grad_norm": 1.6981737432031856, |
|
"learning_rate": 2.4691574321983216e-07, |
|
"loss": 0.9455, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.9385039008719596, |
|
"grad_norm": 1.6209611667412176, |
|
"learning_rate": 2.2953856764295623e-07, |
|
"loss": 0.9427, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.9407985314364388, |
|
"grad_norm": 1.6075798817603826, |
|
"learning_rate": 2.1278829069404483e-07, |
|
"loss": 0.8938, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.9430931620009179, |
|
"grad_norm": 1.6694215956258427, |
|
"learning_rate": 1.9666598711513663e-07, |
|
"loss": 0.9425, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.945387792565397, |
|
"grad_norm": 1.6380060844501796, |
|
"learning_rate": 1.811726913558387e-07, |
|
"loss": 0.9344, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.947682423129876, |
|
"grad_norm": 1.7361661814337723, |
|
"learning_rate": 1.663093975069552e-07, |
|
"loss": 0.9264, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.9499770536943553, |
|
"grad_norm": 1.72352480478006, |
|
"learning_rate": 1.5207705923670158e-07, |
|
"loss": 0.9274, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.9522716842588343, |
|
"grad_norm": 1.7260384890222744, |
|
"learning_rate": 1.3847658972951482e-07, |
|
"loss": 0.9416, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.9545663148233134, |
|
"grad_norm": 1.678840542944468, |
|
"learning_rate": 1.2550886162746468e-07, |
|
"loss": 0.9476, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.9568609453877925, |
|
"grad_norm": 1.7327761448558552, |
|
"learning_rate": 1.1317470697425837e-07, |
|
"loss": 0.9433, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.9591555759522716, |
|
"grad_norm": 1.6188397369323666, |
|
"learning_rate": 1.0147491716185675e-07, |
|
"loss": 0.9306, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.9614502065167508, |
|
"grad_norm": 1.7303211630716537, |
|
"learning_rate": 9.041024287969491e-08, |
|
"loss": 0.951, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.9637448370812299, |
|
"grad_norm": 1.6406456484197742, |
|
"learning_rate": 7.99813940665195e-08, |
|
"loss": 0.9414, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.966039467645709, |
|
"grad_norm": 1.742920472563737, |
|
"learning_rate": 7.018903986483083e-08, |
|
"loss": 0.9331, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.9683340982101881, |
|
"grad_norm": 1.7699514892789145, |
|
"learning_rate": 6.103380857795604e-08, |
|
"loss": 0.9458, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.9706287287746673, |
|
"grad_norm": 1.6207392453935818, |
|
"learning_rate": 5.251628762972916e-08, |
|
"loss": 0.9286, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.9729233593391464, |
|
"grad_norm": 1.6654831370842644, |
|
"learning_rate": 4.4637023526807875e-08, |
|
"loss": 0.9207, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.9752179899036255, |
|
"grad_norm": 1.6652739914078956, |
|
"learning_rate": 3.739652182360054e-08, |
|
"loss": 0.9153, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.9775126204681046, |
|
"grad_norm": 1.7380457873464283, |
|
"learning_rate": 3.079524708983095e-08, |
|
"loss": 0.9193, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.9798072510325837, |
|
"grad_norm": 1.7928242828367038, |
|
"learning_rate": 2.483362288073443e-08, |
|
"loss": 0.9258, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.9821018815970629, |
|
"grad_norm": 1.6588826041799296, |
|
"learning_rate": 1.9512031709874037e-08, |
|
"loss": 0.9275, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.984396512161542, |
|
"grad_norm": 1.763891394623808, |
|
"learning_rate": 1.4830815024606815e-08, |
|
"loss": 0.9211, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.9866911427260211, |
|
"grad_norm": 1.782131811620528, |
|
"learning_rate": 1.0790273184164701e-08, |
|
"loss": 0.8983, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.9889857732905002, |
|
"grad_norm": 2.0241090268732402, |
|
"learning_rate": 7.390665440393241e-09, |
|
"loss": 0.9335, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.9912804038549794, |
|
"grad_norm": 1.6899996297251758, |
|
"learning_rate": 4.632209921107133e-09, |
|
"loss": 0.9444, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.9935750344194585, |
|
"grad_norm": 1.6477330125103407, |
|
"learning_rate": 2.5150836161058624e-09, |
|
"loss": 0.9399, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.9958696649839376, |
|
"grad_norm": 1.7301015675243123, |
|
"learning_rate": 1.03942236580723e-09, |
|
"loss": 0.9332, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.9981642955484167, |
|
"grad_norm": 1.7014766333026237, |
|
"learning_rate": 2.053208525365502e-10, |
|
"loss": 0.9409, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.9490520358085632, |
|
"eval_runtime": 758.2182, |
|
"eval_samples_per_second": 20.352, |
|
"eval_steps_per_second": 0.637, |
|
"step": 2179 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 2179, |
|
"total_flos": 456238269726720.0, |
|
"train_loss": 1.0045825210718344, |
|
"train_runtime": 27286.1988, |
|
"train_samples_per_second": 5.11, |
|
"train_steps_per_second": 0.08 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2179, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 456238269726720.0, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|