{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 3710,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.026954177897574125,
      "grad_norm": 93.09680938720703,
      "learning_rate": 7.692307692307694e-06,
      "loss": 5.4416,
      "step": 10
    },
    {
      "epoch": 0.05390835579514825,
      "grad_norm": 1869.5386962890625,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 4.4275,
      "step": 20
    },
    {
      "epoch": 0.08086253369272237,
      "grad_norm": 29.11223602294922,
      "learning_rate": 2.307692307692308e-05,
      "loss": 2.2122,
      "step": 30
    },
    {
      "epoch": 0.1078167115902965,
      "grad_norm": 45.186805725097656,
      "learning_rate": 3.0769230769230774e-05,
      "loss": 1.9827,
      "step": 40
    },
    {
      "epoch": 0.1347708894878706,
      "grad_norm": 88.7030029296875,
      "learning_rate": 3.846153846153846e-05,
      "loss": 1.9525,
      "step": 50
    },
    {
      "epoch": 0.16172506738544473,
      "grad_norm": 11.193861961364746,
      "learning_rate": 4.615384615384616e-05,
      "loss": 1.401,
      "step": 60
    },
    {
      "epoch": 0.18867924528301888,
      "grad_norm": 1.629717469215393,
      "learning_rate": 4.993141289437586e-05,
      "loss": 1.4541,
      "step": 70
    },
    {
      "epoch": 0.215633423180593,
      "grad_norm": 6.375326156616211,
      "learning_rate": 4.9794238683127575e-05,
      "loss": 1.2295,
      "step": 80
    },
    {
      "epoch": 0.24258760107816713,
      "grad_norm": 2.1516671180725098,
      "learning_rate": 4.965706447187929e-05,
      "loss": 1.8626,
      "step": 90
    },
    {
      "epoch": 0.2695417789757412,
      "grad_norm": 2.126923084259033,
      "learning_rate": 4.9519890260631e-05,
      "loss": 1.5149,
      "step": 100
    },
    {
      "epoch": 0.29649595687331537,
      "grad_norm": 3.0875656604766846,
      "learning_rate": 4.938271604938271e-05,
      "loss": 1.7612,
      "step": 110
    },
    {
      "epoch": 0.32345013477088946,
      "grad_norm": 3.2731056213378906,
      "learning_rate": 4.924554183813443e-05,
      "loss": 1.1813,
      "step": 120
    },
    {
      "epoch": 0.3504043126684636,
      "grad_norm": 1.5794568061828613,
      "learning_rate": 4.9108367626886145e-05,
      "loss": 1.2228,
      "step": 130
    },
    {
      "epoch": 0.37735849056603776,
      "grad_norm": 1.0991185903549194,
      "learning_rate": 4.8971193415637865e-05,
      "loss": 2.1294,
      "step": 140
    },
    {
      "epoch": 0.40431266846361186,
      "grad_norm": 0.7588101029396057,
      "learning_rate": 4.883401920438958e-05,
      "loss": 1.6584,
      "step": 150
    },
    {
      "epoch": 0.431266846361186,
      "grad_norm": 0.6242086887359619,
      "learning_rate": 4.86968449931413e-05,
      "loss": 1.1689,
      "step": 160
    },
    {
      "epoch": 0.4582210242587601,
      "grad_norm": 5.792896270751953,
      "learning_rate": 4.855967078189301e-05,
      "loss": 1.3696,
      "step": 170
    },
    {
      "epoch": 0.48517520215633425,
      "grad_norm": 2.379605770111084,
      "learning_rate": 4.842249657064472e-05,
      "loss": 1.2535,
      "step": 180
    },
    {
      "epoch": 0.5121293800539084,
      "grad_norm": 0.7756444811820984,
      "learning_rate": 4.8285322359396435e-05,
      "loss": 1.2733,
      "step": 190
    },
    {
      "epoch": 0.5390835579514824,
      "grad_norm": 0.9577376246452332,
      "learning_rate": 4.814814814814815e-05,
      "loss": 1.366,
      "step": 200
    },
    {
      "epoch": 0.5660377358490566,
      "grad_norm": 0.7913112640380859,
      "learning_rate": 4.801097393689987e-05,
      "loss": 0.8251,
      "step": 210
    },
    {
      "epoch": 0.5929919137466307,
      "grad_norm": 1.1061924695968628,
      "learning_rate": 4.787379972565158e-05,
      "loss": 1.0164,
      "step": 220
    },
    {
      "epoch": 0.6199460916442049,
      "grad_norm": 0.854999840259552,
      "learning_rate": 4.773662551440329e-05,
      "loss": 1.2698,
      "step": 230
    },
    {
      "epoch": 0.6469002695417789,
      "grad_norm": 0.9977573156356812,
      "learning_rate": 4.7599451303155006e-05,
      "loss": 1.2353,
      "step": 240
    },
    {
      "epoch": 0.6738544474393531,
      "grad_norm": 1.3502521514892578,
      "learning_rate": 4.7462277091906725e-05,
      "loss": 1.3206,
      "step": 250
    },
    {
      "epoch": 0.7008086253369272,
      "grad_norm": 0.7636290192604065,
      "learning_rate": 4.732510288065844e-05,
      "loss": 1.2825,
      "step": 260
    },
    {
      "epoch": 0.7277628032345014,
      "grad_norm": 1.700835108757019,
      "learning_rate": 4.718792866941015e-05,
      "loss": 1.0656,
      "step": 270
    },
    {
      "epoch": 0.7547169811320755,
      "grad_norm": 0.7844202518463135,
      "learning_rate": 4.7050754458161864e-05,
      "loss": 1.4337,
      "step": 280
    },
    {
      "epoch": 0.7816711590296496,
      "grad_norm": 2.4619789123535156,
      "learning_rate": 4.691358024691358e-05,
      "loss": 1.2912,
      "step": 290
    },
    {
      "epoch": 0.8086253369272237,
      "grad_norm": 2.110649347305298,
      "learning_rate": 4.6776406035665296e-05,
      "loss": 1.1539,
      "step": 300
    },
    {
      "epoch": 0.8355795148247979,
      "grad_norm": 1.690564751625061,
      "learning_rate": 4.6639231824417016e-05,
      "loss": 1.8889,
      "step": 310
    },
    {
      "epoch": 0.862533692722372,
      "grad_norm": 2.4102346897125244,
      "learning_rate": 4.650205761316873e-05,
      "loss": 0.7576,
      "step": 320
    },
    {
      "epoch": 0.889487870619946,
      "grad_norm": 1.0781276226043701,
      "learning_rate": 4.636488340192044e-05,
      "loss": 1.167,
      "step": 330
    },
    {
      "epoch": 0.9164420485175202,
      "grad_norm": 1.4452636241912842,
      "learning_rate": 4.622770919067216e-05,
      "loss": 1.4974,
      "step": 340
    },
    {
      "epoch": 0.9433962264150944,
      "grad_norm": 0.7777872681617737,
      "learning_rate": 4.609053497942387e-05,
      "loss": 0.7603,
      "step": 350
    },
    {
      "epoch": 0.9703504043126685,
      "grad_norm": 1.9176746606826782,
      "learning_rate": 4.5953360768175586e-05,
      "loss": 1.2179,
      "step": 360
    },
    {
      "epoch": 0.9973045822102425,
      "grad_norm": 0.7132334113121033,
      "learning_rate": 4.58161865569273e-05,
      "loss": 1.3606,
      "step": 370
    },
    {
      "epoch": 1.0242587601078168,
      "grad_norm": 2.0499353408813477,
      "learning_rate": 4.567901234567901e-05,
      "loss": 1.5535,
      "step": 380
    },
    {
      "epoch": 1.0512129380053907,
      "grad_norm": 1.237725019454956,
      "learning_rate": 4.554183813443073e-05,
      "loss": 1.2777,
      "step": 390
    },
    {
      "epoch": 1.0781671159029649,
      "grad_norm": 0.8941486477851868,
      "learning_rate": 4.5404663923182444e-05,
      "loss": 1.098,
      "step": 400
    },
    {
      "epoch": 1.105121293800539,
      "grad_norm": 3.052698850631714,
      "learning_rate": 4.5267489711934157e-05,
      "loss": 0.8203,
      "step": 410
    },
    {
      "epoch": 1.1320754716981132,
      "grad_norm": 1.9386261701583862,
      "learning_rate": 4.513031550068587e-05,
      "loss": 1.3465,
      "step": 420
    },
    {
      "epoch": 1.1590296495956873,
      "grad_norm": 1.5312304496765137,
      "learning_rate": 4.499314128943759e-05,
      "loss": 1.5988,
      "step": 430
    },
    {
      "epoch": 1.1859838274932615,
      "grad_norm": 1.5553970336914062,
      "learning_rate": 4.48559670781893e-05,
      "loss": 1.3158,
      "step": 440
    },
    {
      "epoch": 1.2129380053908356,
      "grad_norm": 0.9215840101242065,
      "learning_rate": 4.4718792866941014e-05,
      "loss": 1.1357,
      "step": 450
    },
    {
      "epoch": 1.2398921832884098,
      "grad_norm": 0.8981826901435852,
      "learning_rate": 4.4581618655692734e-05,
      "loss": 1.4697,
      "step": 460
    },
    {
      "epoch": 1.266846361185984,
      "grad_norm": 1.3505191802978516,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 1.3321,
      "step": 470
    },
    {
      "epoch": 1.2938005390835579,
      "grad_norm": 1.836127758026123,
      "learning_rate": 4.4307270233196166e-05,
      "loss": 0.7458,
      "step": 480
    },
    {
      "epoch": 1.320754716981132,
      "grad_norm": 1.3519006967544556,
      "learning_rate": 4.417009602194788e-05,
      "loss": 0.9444,
      "step": 490
    },
    {
      "epoch": 1.3477088948787062,
      "grad_norm": 0.8336719870567322,
      "learning_rate": 4.403292181069959e-05,
      "loss": 1.2264,
      "step": 500
    },
    {
      "epoch": 1.3746630727762803,
      "grad_norm": 2.3419370651245117,
      "learning_rate": 4.3895747599451304e-05,
      "loss": 1.4784,
      "step": 510
    },
    {
      "epoch": 1.4016172506738545,
      "grad_norm": 1.679734468460083,
      "learning_rate": 4.3758573388203024e-05,
      "loss": 0.9167,
      "step": 520
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.7462900876998901,
      "learning_rate": 4.3621399176954737e-05,
      "loss": 0.8118,
      "step": 530
    },
    {
      "epoch": 1.4555256064690028,
      "grad_norm": 1.11771559715271,
      "learning_rate": 4.348422496570645e-05,
      "loss": 1.2103,
      "step": 540
    },
    {
      "epoch": 1.482479784366577,
      "grad_norm": 1.0246716737747192,
      "learning_rate": 4.334705075445816e-05,
      "loss": 0.9964,
      "step": 550
    },
    {
      "epoch": 1.509433962264151,
      "grad_norm": 1.9072849750518799,
      "learning_rate": 4.3209876543209875e-05,
      "loss": 0.9468,
      "step": 560
    },
    {
      "epoch": 1.536388140161725,
      "grad_norm": 2.465437889099121,
      "learning_rate": 4.3072702331961594e-05,
      "loss": 1.0798,
      "step": 570
    },
    {
      "epoch": 1.5633423180592994,
      "grad_norm": 2.021115779876709,
      "learning_rate": 4.293552812071331e-05,
      "loss": 1.5825,
      "step": 580
    },
    {
      "epoch": 1.5902964959568733,
      "grad_norm": 1.403136968612671,
      "learning_rate": 4.279835390946502e-05,
      "loss": 1.0247,
      "step": 590
    },
    {
      "epoch": 1.6172506738544474,
      "grad_norm": 2.3880553245544434,
      "learning_rate": 4.266117969821673e-05,
      "loss": 0.7598,
      "step": 600
    },
    {
      "epoch": 1.6442048517520216,
      "grad_norm": 1.278221607208252,
      "learning_rate": 4.252400548696845e-05,
      "loss": 1.2538,
      "step": 610
    },
    {
      "epoch": 1.6711590296495957,
      "grad_norm": 1.4277830123901367,
      "learning_rate": 4.2386831275720165e-05,
      "loss": 1.0456,
      "step": 620
    },
    {
      "epoch": 1.6981132075471699,
      "grad_norm": 1.370888113975525,
      "learning_rate": 4.2249657064471884e-05,
      "loss": 1.1437,
      "step": 630
    },
    {
      "epoch": 1.7250673854447438,
      "grad_norm": 1.2015599012374878,
      "learning_rate": 4.21124828532236e-05,
      "loss": 1.309,
      "step": 640
    },
    {
      "epoch": 1.7520215633423182,
      "grad_norm": 0.6258445978164673,
      "learning_rate": 4.197530864197531e-05,
      "loss": 0.7159,
      "step": 650
    },
    {
      "epoch": 1.778975741239892,
      "grad_norm": 1.4056499004364014,
      "learning_rate": 4.183813443072703e-05,
      "loss": 0.9217,
      "step": 660
    },
    {
      "epoch": 1.8059299191374663,
      "grad_norm": 2.2113542556762695,
      "learning_rate": 4.170096021947874e-05,
      "loss": 0.9003,
      "step": 670
    },
    {
      "epoch": 1.8328840970350404,
      "grad_norm": 1.0116914510726929,
      "learning_rate": 4.1563786008230455e-05,
      "loss": 1.0246,
      "step": 680
    },
    {
      "epoch": 1.8598382749326146,
      "grad_norm": 1.2756783962249756,
      "learning_rate": 4.142661179698217e-05,
      "loss": 1.2909,
      "step": 690
    },
    {
      "epoch": 1.8867924528301887,
      "grad_norm": 1.2654672861099243,
      "learning_rate": 4.128943758573389e-05,
      "loss": 1.2136,
      "step": 700
    },
    {
      "epoch": 1.9137466307277629,
      "grad_norm": 0.7074457406997681,
      "learning_rate": 4.11522633744856e-05,
      "loss": 1.4202,
      "step": 710
    },
    {
      "epoch": 1.940700808625337,
      "grad_norm": 2.5393640995025635,
      "learning_rate": 4.101508916323731e-05,
      "loss": 0.959,
      "step": 720
    },
    {
      "epoch": 1.967654986522911,
      "grad_norm": 1.194204330444336,
      "learning_rate": 4.0877914951989025e-05,
      "loss": 0.7979,
      "step": 730
    },
    {
      "epoch": 1.9946091644204853,
      "grad_norm": 2.2830913066864014,
      "learning_rate": 4.074074074074074e-05,
      "loss": 0.9024,
      "step": 740
    },
    {
      "epoch": 2.0215633423180592,
      "grad_norm": 1.4763576984405518,
      "learning_rate": 4.060356652949246e-05,
      "loss": 0.8169,
      "step": 750
    },
    {
      "epoch": 2.0485175202156336,
      "grad_norm": 2.3150315284729004,
      "learning_rate": 4.046639231824417e-05,
      "loss": 1.1729,
      "step": 760
    },
    {
      "epoch": 2.0754716981132075,
      "grad_norm": 1.5322928428649902,
      "learning_rate": 4.032921810699588e-05,
      "loss": 0.8163,
      "step": 770
    },
    {
      "epoch": 2.1024258760107815,
      "grad_norm": 1.3889187574386597,
      "learning_rate": 4.01920438957476e-05,
      "loss": 1.1553,
      "step": 780
    },
    {
      "epoch": 2.129380053908356,
      "grad_norm": 1.2974004745483398,
      "learning_rate": 4.0054869684499315e-05,
      "loss": 1.5599,
      "step": 790
    },
    {
      "epoch": 2.1563342318059298,
      "grad_norm": 1.3070930242538452,
      "learning_rate": 3.9917695473251035e-05,
      "loss": 0.8604,
      "step": 800
    },
    {
      "epoch": 2.183288409703504,
      "grad_norm": 2.8242597579956055,
      "learning_rate": 3.978052126200275e-05,
      "loss": 1.1088,
      "step": 810
    },
    {
      "epoch": 2.210242587601078,
      "grad_norm": 1.1547167301177979,
      "learning_rate": 3.964334705075446e-05,
      "loss": 0.9212,
      "step": 820
    },
    {
      "epoch": 2.2371967654986524,
      "grad_norm": 1.5743651390075684,
      "learning_rate": 3.950617283950617e-05,
      "loss": 1.0367,
      "step": 830
    },
    {
      "epoch": 2.2641509433962264,
      "grad_norm": 1.0381134748458862,
      "learning_rate": 3.936899862825789e-05,
      "loss": 1.1694,
      "step": 840
    },
    {
      "epoch": 2.2911051212938007,
      "grad_norm": 1.475040316581726,
      "learning_rate": 3.9231824417009605e-05,
      "loss": 0.7031,
      "step": 850
    },
    {
      "epoch": 2.3180592991913747,
      "grad_norm": 1.6957072019577026,
      "learning_rate": 3.909465020576132e-05,
      "loss": 0.6501,
      "step": 860
    },
    {
      "epoch": 2.3450134770889486,
      "grad_norm": 1.2005444765090942,
      "learning_rate": 3.895747599451303e-05,
      "loss": 1.1483,
      "step": 870
    },
    {
      "epoch": 2.371967654986523,
      "grad_norm": 0.9146741628646851,
      "learning_rate": 3.8820301783264744e-05,
      "loss": 0.6192,
      "step": 880
    },
    {
      "epoch": 2.398921832884097,
      "grad_norm": 2.0229978561401367,
      "learning_rate": 3.868312757201646e-05,
      "loss": 1.0959,
      "step": 890
    },
    {
      "epoch": 2.4258760107816713,
      "grad_norm": 1.864313006401062,
      "learning_rate": 3.8545953360768176e-05,
      "loss": 1.1319,
      "step": 900
    },
    {
      "epoch": 2.452830188679245,
      "grad_norm": 2.050306558609009,
      "learning_rate": 3.840877914951989e-05,
      "loss": 0.8544,
      "step": 910
    },
    {
      "epoch": 2.4797843665768196,
      "grad_norm": 0.8470428586006165,
      "learning_rate": 3.82716049382716e-05,
      "loss": 0.7248,
      "step": 920
    },
    {
      "epoch": 2.5067385444743935,
      "grad_norm": 1.329987645149231,
      "learning_rate": 3.813443072702332e-05,
      "loss": 0.95,
      "step": 930
    },
    {
      "epoch": 2.533692722371968,
      "grad_norm": 1.2895963191986084,
      "learning_rate": 3.7997256515775034e-05,
      "loss": 0.8183,
      "step": 940
    },
    {
      "epoch": 2.560646900269542,
      "grad_norm": 1.2707000970840454,
      "learning_rate": 3.786008230452675e-05,
      "loss": 1.0259,
      "step": 950
    },
    {
      "epoch": 2.5876010781671157,
      "grad_norm": 1.5575352907180786,
      "learning_rate": 3.7722908093278466e-05,
      "loss": 0.8121,
      "step": 960
    },
    {
      "epoch": 2.61455525606469,
      "grad_norm": 1.4395464658737183,
      "learning_rate": 3.758573388203018e-05,
      "loss": 0.9093,
      "step": 970
    },
    {
      "epoch": 2.641509433962264,
      "grad_norm": 1.8523131608963013,
      "learning_rate": 3.74485596707819e-05,
      "loss": 1.4859,
      "step": 980
    },
    {
      "epoch": 2.6684636118598384,
      "grad_norm": 1.0986195802688599,
      "learning_rate": 3.731138545953361e-05,
      "loss": 0.9775,
      "step": 990
    },
    {
      "epoch": 2.6954177897574123,
      "grad_norm": 1.1339648962020874,
      "learning_rate": 3.7174211248285324e-05,
      "loss": 1.1358,
      "step": 1000
    },
    {
      "epoch": 2.7223719676549867,
      "grad_norm": 2.267329216003418,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.8947,
      "step": 1010
    },
    {
      "epoch": 2.7493261455525606,
      "grad_norm": 1.501046061515808,
      "learning_rate": 3.6899862825788756e-05,
      "loss": 1.3137,
      "step": 1020
    },
    {
      "epoch": 2.776280323450135,
      "grad_norm": 3.182382345199585,
      "learning_rate": 3.676268861454047e-05,
      "loss": 1.4114,
      "step": 1030
    },
    {
      "epoch": 2.803234501347709,
      "grad_norm": 1.9981257915496826,
      "learning_rate": 3.662551440329218e-05,
      "loss": 1.3174,
      "step": 1040
    },
    {
      "epoch": 2.830188679245283,
      "grad_norm": 2.4771640300750732,
      "learning_rate": 3.6488340192043894e-05,
      "loss": 1.7184,
      "step": 1050
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 1.307492971420288,
      "learning_rate": 3.635116598079561e-05,
      "loss": 1.0643,
      "step": 1060
    },
    {
      "epoch": 2.884097035040431,
      "grad_norm": 0.7365540862083435,
      "learning_rate": 3.6213991769547327e-05,
      "loss": 0.9895,
      "step": 1070
    },
    {
      "epoch": 2.9110512129380055,
      "grad_norm": 1.7630441188812256,
      "learning_rate": 3.607681755829904e-05,
      "loss": 1.0862,
      "step": 1080
    },
    {
      "epoch": 2.9380053908355794,
      "grad_norm": 1.731484055519104,
      "learning_rate": 3.593964334705075e-05,
      "loss": 0.7582,
      "step": 1090
    },
    {
      "epoch": 2.964959568733154,
      "grad_norm": 1.7666038274765015,
      "learning_rate": 3.580246913580247e-05,
      "loss": 0.9193,
      "step": 1100
    },
    {
      "epoch": 2.9919137466307277,
      "grad_norm": 1.3342130184173584,
      "learning_rate": 3.566529492455419e-05,
      "loss": 0.9391,
      "step": 1110
    },
    {
      "epoch": 3.018867924528302,
      "grad_norm": 1.7891361713409424,
      "learning_rate": 3.5528120713305904e-05,
      "loss": 0.4177,
      "step": 1120
    },
    {
      "epoch": 3.045822102425876,
      "grad_norm": 1.730653166770935,
      "learning_rate": 3.539094650205762e-05,
      "loss": 1.4771,
      "step": 1130
    },
    {
      "epoch": 3.07277628032345,
      "grad_norm": 1.5244202613830566,
      "learning_rate": 3.525377229080933e-05,
      "loss": 0.6919,
      "step": 1140
    },
    {
      "epoch": 3.0997304582210243,
      "grad_norm": 1.608145833015442,
      "learning_rate": 3.511659807956104e-05,
      "loss": 1.0934,
      "step": 1150
    },
    {
      "epoch": 3.1266846361185983,
      "grad_norm": 2.0203192234039307,
      "learning_rate": 3.497942386831276e-05,
      "loss": 0.8308,
      "step": 1160
    },
    {
      "epoch": 3.1536388140161726,
      "grad_norm": 1.7629802227020264,
      "learning_rate": 3.4842249657064474e-05,
      "loss": 0.8283,
      "step": 1170
    },
    {
      "epoch": 3.1805929919137466,
      "grad_norm": 1.6114568710327148,
      "learning_rate": 3.470507544581619e-05,
      "loss": 1.1672,
      "step": 1180
    },
    {
      "epoch": 3.207547169811321,
      "grad_norm": 1.5010507106781006,
      "learning_rate": 3.45679012345679e-05,
      "loss": 1.5866,
      "step": 1190
    },
    {
      "epoch": 3.234501347708895,
      "grad_norm": 1.4426625967025757,
      "learning_rate": 3.443072702331962e-05,
      "loss": 0.8659,
      "step": 1200
    },
    {
      "epoch": 3.2614555256064692,
      "grad_norm": 1.3204811811447144,
      "learning_rate": 3.429355281207133e-05,
      "loss": 0.643,
      "step": 1210
    },
    {
      "epoch": 3.288409703504043,
      "grad_norm": 0.9203002452850342,
      "learning_rate": 3.4156378600823045e-05,
      "loss": 0.6825,
      "step": 1220
    },
    {
      "epoch": 3.315363881401617,
      "grad_norm": 1.3211963176727295,
      "learning_rate": 3.401920438957476e-05,
      "loss": 1.1164,
      "step": 1230
    },
    {
      "epoch": 3.3423180592991915,
      "grad_norm": 1.4665788412094116,
      "learning_rate": 3.388203017832647e-05,
      "loss": 0.7564,
      "step": 1240
    },
    {
      "epoch": 3.3692722371967654,
      "grad_norm": 1.3217601776123047,
      "learning_rate": 3.374485596707819e-05,
      "loss": 0.8292,
      "step": 1250
    },
    {
      "epoch": 3.3962264150943398,
      "grad_norm": 1.1328483819961548,
      "learning_rate": 3.360768175582991e-05,
      "loss": 0.9806,
      "step": 1260
    },
    {
      "epoch": 3.4231805929919137,
      "grad_norm": 1.3456640243530273,
      "learning_rate": 3.347050754458162e-05,
      "loss": 0.8297,
      "step": 1270
    },
    {
      "epoch": 3.450134770889488,
      "grad_norm": 0.9857800006866455,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.6432,
      "step": 1280
    },
    {
      "epoch": 3.477088948787062,
      "grad_norm": 1.5837385654449463,
      "learning_rate": 3.3196159122085054e-05,
      "loss": 0.6766,
      "step": 1290
    },
    {
      "epoch": 3.5040431266846364,
      "grad_norm": 0.9640551805496216,
      "learning_rate": 3.305898491083677e-05,
      "loss": 0.9636,
      "step": 1300
    },
    {
      "epoch": 3.5309973045822103,
      "grad_norm": 1.6151617765426636,
      "learning_rate": 3.292181069958848e-05,
      "loss": 0.5541,
      "step": 1310
    },
    {
      "epoch": 3.557951482479784,
      "grad_norm": 1.0056880712509155,
      "learning_rate": 3.278463648834019e-05,
      "loss": 0.8266,
      "step": 1320
    },
    {
      "epoch": 3.5849056603773586,
      "grad_norm": 2.169956684112549,
      "learning_rate": 3.2647462277091905e-05,
      "loss": 1.4843,
      "step": 1330
    },
    {
      "epoch": 3.6118598382749325,
      "grad_norm": 1.395251989364624,
      "learning_rate": 3.2510288065843625e-05,
      "loss": 0.4211,
      "step": 1340
    },
    {
      "epoch": 3.638814016172507,
      "grad_norm": 2.2781965732574463,
      "learning_rate": 3.237311385459534e-05,
      "loss": 0.799,
      "step": 1350
    },
    {
      "epoch": 3.665768194070081,
      "grad_norm": 2.2836267948150635,
      "learning_rate": 3.223593964334705e-05,
      "loss": 0.7618,
      "step": 1360
    },
    {
      "epoch": 3.6927223719676547,
      "grad_norm": 2.4063193798065186,
      "learning_rate": 3.209876543209876e-05,
      "loss": 1.3887,
      "step": 1370
    },
    {
      "epoch": 3.719676549865229,
      "grad_norm": 1.7423830032348633,
      "learning_rate": 3.196159122085048e-05,
      "loss": 0.8476,
      "step": 1380
    },
    {
      "epoch": 3.7466307277628035,
      "grad_norm": 2.047558307647705,
      "learning_rate": 3.1824417009602195e-05,
      "loss": 1.0701,
      "step": 1390
    },
    {
      "epoch": 3.7735849056603774,
      "grad_norm": 1.421557068824768,
      "learning_rate": 3.168724279835391e-05,
      "loss": 1.6139,
      "step": 1400
    },
    {
      "epoch": 3.8005390835579513,
      "grad_norm": 1.6829692125320435,
      "learning_rate": 3.155006858710563e-05,
      "loss": 0.8253,
      "step": 1410
    },
    {
      "epoch": 3.8274932614555257,
      "grad_norm": 0.9380494356155396,
      "learning_rate": 3.141289437585734e-05,
      "loss": 1.0122,
      "step": 1420
    },
    {
      "epoch": 3.8544474393530996,
      "grad_norm": 1.209517478942871,
      "learning_rate": 3.127572016460906e-05,
      "loss": 1.6921,
      "step": 1430
    },
    {
      "epoch": 3.881401617250674,
      "grad_norm": 1.2071969509124756,
      "learning_rate": 3.113854595336077e-05,
      "loss": 1.3358,
      "step": 1440
    },
    {
      "epoch": 3.908355795148248,
      "grad_norm": 1.6536906957626343,
      "learning_rate": 3.1001371742112486e-05,
      "loss": 0.7539,
      "step": 1450
    },
    {
      "epoch": 3.935309973045822,
      "grad_norm": 3.013587713241577,
      "learning_rate": 3.08641975308642e-05,
      "loss": 1.5292,
      "step": 1460
    },
    {
      "epoch": 3.9622641509433962,
      "grad_norm": 1.5310529470443726,
      "learning_rate": 3.072702331961592e-05,
      "loss": 0.8488,
      "step": 1470
    },
    {
      "epoch": 3.9892183288409706,
      "grad_norm": 1.6104665994644165,
      "learning_rate": 3.058984910836763e-05,
      "loss": 0.5511,
      "step": 1480
    },
    {
      "epoch": 4.0161725067385445,
      "grad_norm": 1.7617496252059937,
      "learning_rate": 3.0452674897119343e-05,
      "loss": 1.5566,
      "step": 1490
    },
    {
      "epoch": 4.0431266846361185,
      "grad_norm": 1.6060303449630737,
      "learning_rate": 3.0315500685871056e-05,
      "loss": 0.6705,
      "step": 1500
    },
    {
      "epoch": 4.070080862533692,
      "grad_norm": 1.2487995624542236,
      "learning_rate": 3.017832647462277e-05,
      "loss": 0.8386,
      "step": 1510
    },
    {
      "epoch": 4.097035040431267,
      "grad_norm": 1.5747429132461548,
      "learning_rate": 3.0041152263374488e-05,
      "loss": 1.5059,
      "step": 1520
    },
    {
      "epoch": 4.123989218328841,
      "grad_norm": 2.865582227706909,
      "learning_rate": 2.99039780521262e-05,
      "loss": 0.801,
      "step": 1530
    },
    {
      "epoch": 4.150943396226415,
      "grad_norm": 2.48677659034729,
      "learning_rate": 2.9766803840877917e-05,
      "loss": 1.1271,
      "step": 1540
    },
    {
      "epoch": 4.177897574123989,
      "grad_norm": 2.485933303833008,
      "learning_rate": 2.962962962962963e-05,
      "loss": 1.0503,
      "step": 1550
    },
    {
      "epoch": 4.204851752021563,
      "grad_norm": 2.0622434616088867,
      "learning_rate": 2.949245541838135e-05,
      "loss": 0.5068,
      "step": 1560
    },
    {
      "epoch": 4.231805929919138,
      "grad_norm": 0.7956051826477051,
      "learning_rate": 2.9355281207133062e-05,
      "loss": 1.0424,
      "step": 1570
    },
    {
      "epoch": 4.258760107816712,
      "grad_norm": 1.3042256832122803,
      "learning_rate": 2.9218106995884775e-05,
      "loss": 0.5118,
      "step": 1580
    },
    {
      "epoch": 4.285714285714286,
      "grad_norm": 2.2554943561553955,
      "learning_rate": 2.9080932784636488e-05,
      "loss": 0.9746,
      "step": 1590
    },
    {
      "epoch": 4.3126684636118595,
      "grad_norm": 2.549921751022339,
      "learning_rate": 2.8943758573388204e-05,
      "loss": 0.7228,
      "step": 1600
    },
    {
      "epoch": 4.339622641509434,
      "grad_norm": 1.333503246307373,
      "learning_rate": 2.880658436213992e-05,
      "loss": 0.9424,
      "step": 1610
    },
    {
      "epoch": 4.366576819407008,
      "grad_norm": 1.5245575904846191,
      "learning_rate": 2.8669410150891636e-05,
      "loss": 1.0232,
      "step": 1620
    },
    {
      "epoch": 4.393530997304582,
      "grad_norm": 5.889776229858398,
      "learning_rate": 2.853223593964335e-05,
      "loss": 0.9108,
      "step": 1630
    },
    {
      "epoch": 4.420485175202156,
      "grad_norm": 1.444460153579712,
      "learning_rate": 2.839506172839506e-05,
      "loss": 0.7664,
      "step": 1640
    },
    {
      "epoch": 4.44743935309973,
      "grad_norm": 2.633338689804077,
      "learning_rate": 2.825788751714678e-05,
      "loss": 0.9278,
      "step": 1650
    },
    {
      "epoch": 4.474393530997305,
      "grad_norm": 0.8140472769737244,
      "learning_rate": 2.8120713305898494e-05,
      "loss": 0.7608,
      "step": 1660
    },
    {
      "epoch": 4.501347708894879,
      "grad_norm": 1.0764524936676025,
      "learning_rate": 2.7983539094650207e-05,
      "loss": 0.8768,
      "step": 1670
    },
    {
      "epoch": 4.528301886792453,
      "grad_norm": 1.2656837701797485,
      "learning_rate": 2.784636488340192e-05,
      "loss": 1.2142,
      "step": 1680
    },
    {
      "epoch": 4.555256064690027,
      "grad_norm": 1.1997019052505493,
      "learning_rate": 2.7709190672153635e-05,
      "loss": 1.1219,
      "step": 1690
    },
    {
      "epoch": 4.5822102425876015,
      "grad_norm": 1.1328818798065186,
      "learning_rate": 2.757201646090535e-05,
      "loss": 0.7957,
      "step": 1700
    },
    {
      "epoch": 4.609164420485175,
      "grad_norm": 0.9673919677734375,
      "learning_rate": 2.7434842249657068e-05,
      "loss": 0.833,
      "step": 1710
    },
    {
      "epoch": 4.636118598382749,
      "grad_norm": 1.556420087814331,
      "learning_rate": 2.729766803840878e-05,
      "loss": 0.644,
      "step": 1720
    },
    {
      "epoch": 4.663072776280323,
      "grad_norm": 1.8422091007232666,
      "learning_rate": 2.7160493827160493e-05,
      "loss": 1.0444,
      "step": 1730
    },
    {
      "epoch": 4.690026954177897,
      "grad_norm": 1.5853588581085205,
      "learning_rate": 2.7023319615912206e-05,
      "loss": 0.8302,
      "step": 1740
    },
    {
      "epoch": 4.716981132075472,
      "grad_norm": 2.221689462661743,
      "learning_rate": 2.6886145404663926e-05,
      "loss": 0.8505,
      "step": 1750
    },
    {
      "epoch": 4.743935309973046,
      "grad_norm": 1.9763739109039307,
      "learning_rate": 2.6748971193415638e-05,
      "loss": 0.7795,
      "step": 1760
    },
    {
      "epoch": 4.77088948787062,
      "grad_norm": 2.094904661178589,
      "learning_rate": 2.6611796982167354e-05,
      "loss": 0.6406,
      "step": 1770
    },
    {
      "epoch": 4.797843665768194,
      "grad_norm": 2.045687437057495,
      "learning_rate": 2.6474622770919067e-05,
      "loss": 0.7323,
      "step": 1780
    },
    {
      "epoch": 4.824797843665769,
      "grad_norm": 2.2809979915618896,
      "learning_rate": 2.6337448559670787e-05,
      "loss": 0.783,
      "step": 1790
    },
    {
      "epoch": 4.8517520215633425,
      "grad_norm": 1.7300888299942017,
      "learning_rate": 2.62002743484225e-05,
      "loss": 0.6659,
      "step": 1800
    },
    {
      "epoch": 4.878706199460916,
      "grad_norm": 3.9680426120758057,
      "learning_rate": 2.6063100137174212e-05,
      "loss": 0.6866,
      "step": 1810
    },
    {
      "epoch": 4.90566037735849,
      "grad_norm": 1.8751779794692993,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 0.9165,
      "step": 1820
    },
    {
      "epoch": 4.932614555256064,
      "grad_norm": 1.671337366104126,
      "learning_rate": 2.5788751714677638e-05,
      "loss": 0.846,
      "step": 1830
    },
    {
      "epoch": 4.959568733153639,
      "grad_norm": 2.150132179260254,
      "learning_rate": 2.5651577503429357e-05,
      "loss": 1.2381,
      "step": 1840
    },
    {
      "epoch": 4.986522911051213,
      "grad_norm": 1.2727786302566528,
      "learning_rate": 2.551440329218107e-05,
      "loss": 0.7399,
      "step": 1850
    },
    {
      "epoch": 5.013477088948787,
      "grad_norm": 1.6058365106582642,
      "learning_rate": 2.5377229080932786e-05,
      "loss": 1.3821,
      "step": 1860
    },
    {
      "epoch": 5.040431266846361,
      "grad_norm": 0.873196542263031,
      "learning_rate": 2.52400548696845e-05,
      "loss": 1.0094,
      "step": 1870
    },
    {
      "epoch": 5.067385444743936,
      "grad_norm": 2.6777074337005615,
      "learning_rate": 2.510288065843622e-05,
      "loss": 0.956,
      "step": 1880
    },
    {
      "epoch": 5.09433962264151,
      "grad_norm": 2.2035608291625977,
      "learning_rate": 2.496570644718793e-05,
      "loss": 1.0327,
      "step": 1890
    },
    {
      "epoch": 5.121293800539084,
      "grad_norm": 1.9999809265136719,
      "learning_rate": 2.4828532235939644e-05,
      "loss": 0.9534,
      "step": 1900
    },
    {
      "epoch": 5.1482479784366575,
      "grad_norm": 1.1194771528244019,
      "learning_rate": 2.4691358024691357e-05,
      "loss": 0.6969,
      "step": 1910
    },
    {
      "epoch": 5.175202156334231,
      "grad_norm": 2.1058928966522217,
      "learning_rate": 2.4554183813443073e-05,
      "loss": 1.077,
      "step": 1920
    },
    {
      "epoch": 5.202156334231806,
      "grad_norm": 1.584088921546936,
      "learning_rate": 2.441700960219479e-05,
      "loss": 1.0808,
      "step": 1930
    },
    {
      "epoch": 5.22911051212938,
      "grad_norm": 2.4530153274536133,
      "learning_rate": 2.4279835390946505e-05,
      "loss": 1.1974,
      "step": 1940
    },
    {
      "epoch": 5.256064690026954,
      "grad_norm": 1.3323501348495483,
      "learning_rate": 2.4142661179698218e-05,
      "loss": 1.0003,
      "step": 1950
    },
    {
      "epoch": 5.283018867924528,
      "grad_norm": 2.1293039321899414,
      "learning_rate": 2.4005486968449934e-05,
      "loss": 1.2501,
      "step": 1960
    },
    {
      "epoch": 5.309973045822103,
      "grad_norm": 2.1004316806793213,
      "learning_rate": 2.3868312757201647e-05,
      "loss": 1.0852,
      "step": 1970
    },
    {
      "epoch": 5.336927223719677,
      "grad_norm": 2.6623051166534424,
      "learning_rate": 2.3731138545953363e-05,
      "loss": 0.6813,
      "step": 1980
    },
    {
      "epoch": 5.363881401617251,
      "grad_norm": 1.0680763721466064,
      "learning_rate": 2.3593964334705075e-05,
      "loss": 0.6413,
      "step": 1990
    },
    {
      "epoch": 5.390835579514825,
      "grad_norm": 1.4404453039169312,
      "learning_rate": 2.345679012345679e-05,
      "loss": 0.8624,
      "step": 2000
    },
    {
      "epoch": 5.4177897574123985,
      "grad_norm": 2.0535359382629395,
      "learning_rate": 2.3319615912208508e-05,
      "loss": 0.8049,
      "step": 2010
    },
    {
      "epoch": 5.444743935309973,
      "grad_norm": 1.0573314428329468,
      "learning_rate": 2.318244170096022e-05,
      "loss": 0.8271,
      "step": 2020
    },
    {
      "epoch": 5.471698113207547,
      "grad_norm": 3.0816051959991455,
      "learning_rate": 2.3045267489711937e-05,
      "loss": 1.2149,
      "step": 2030
    },
    {
      "epoch": 5.498652291105121,
      "grad_norm": 0.8514117002487183,
      "learning_rate": 2.290809327846365e-05,
      "loss": 0.6105,
      "step": 2040
    },
    {
      "epoch": 5.525606469002695,
      "grad_norm": 2.2031140327453613,
      "learning_rate": 2.2770919067215366e-05,
      "loss": 0.7386,
      "step": 2050
    },
    {
      "epoch": 5.55256064690027,
      "grad_norm": 0.6656814217567444,
      "learning_rate": 2.2633744855967078e-05,
      "loss": 0.64,
      "step": 2060
    },
    {
      "epoch": 5.579514824797844,
      "grad_norm": 1.3257490396499634,
      "learning_rate": 2.2496570644718794e-05,
      "loss": 0.5246,
      "step": 2070
    },
    {
      "epoch": 5.606469002695418,
      "grad_norm": 1.9381606578826904,
      "learning_rate": 2.2359396433470507e-05,
      "loss": 0.8002,
      "step": 2080
    },
    {
      "epoch": 5.633423180592992,
      "grad_norm": 1.5016483068466187,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.7496,
      "step": 2090
    },
    {
      "epoch": 5.660377358490566,
      "grad_norm": 1.4559197425842285,
      "learning_rate": 2.208504801097394e-05,
      "loss": 0.4904,
      "step": 2100
    },
    {
      "epoch": 5.6873315363881405,
      "grad_norm": 1.7636557817459106,
      "learning_rate": 2.1947873799725652e-05,
      "loss": 0.6664,
      "step": 2110
    },
    {
      "epoch": 5.714285714285714,
      "grad_norm": 1.5143455266952515,
      "learning_rate": 2.1810699588477368e-05,
      "loss": 0.9208,
      "step": 2120
    },
    {
      "epoch": 5.741239892183288,
      "grad_norm": 2.0838751792907715,
      "learning_rate": 2.167352537722908e-05,
      "loss": 0.3976,
      "step": 2130
    },
    {
      "epoch": 5.768194070080862,
      "grad_norm": 1.7315948009490967,
      "learning_rate": 2.1536351165980797e-05,
      "loss": 0.6964,
      "step": 2140
    },
    {
      "epoch": 5.795148247978437,
      "grad_norm": 2.732347249984741,
      "learning_rate": 2.139917695473251e-05,
      "loss": 1.4496,
      "step": 2150
    },
    {
      "epoch": 5.822102425876011,
      "grad_norm": 1.661921501159668,
      "learning_rate": 2.1262002743484226e-05,
      "loss": 0.8408,
      "step": 2160
    },
    {
      "epoch": 5.849056603773585,
      "grad_norm": 1.123995065689087,
      "learning_rate": 2.1124828532235942e-05,
      "loss": 0.5594,
      "step": 2170
    },
    {
      "epoch": 5.876010781671159,
      "grad_norm": 1.3693888187408447,
      "learning_rate": 2.0987654320987655e-05,
      "loss": 1.0366,
      "step": 2180
    },
    {
      "epoch": 5.902964959568733,
      "grad_norm": 1.589400053024292,
      "learning_rate": 2.085048010973937e-05,
      "loss": 1.1677,
      "step": 2190
    },
    {
      "epoch": 5.929919137466308,
      "grad_norm": 1.3596928119659424,
      "learning_rate": 2.0713305898491084e-05,
      "loss": 0.6217,
      "step": 2200
    },
    {
      "epoch": 5.9568733153638815,
      "grad_norm": 1.5818856954574585,
      "learning_rate": 2.05761316872428e-05,
      "loss": 1.0682,
      "step": 2210
    },
    {
      "epoch": 5.9838274932614555,
      "grad_norm": 2.694117546081543,
      "learning_rate": 2.0438957475994513e-05,
      "loss": 0.9281,
      "step": 2220
    },
    {
      "epoch": 6.010781671159029,
      "grad_norm": 1.463112473487854,
      "learning_rate": 2.030178326474623e-05,
      "loss": 0.7868,
      "step": 2230
    },
    {
      "epoch": 6.037735849056604,
      "grad_norm": 1.8323993682861328,
      "learning_rate": 2.016460905349794e-05,
      "loss": 0.6472,
      "step": 2240
    },
    {
      "epoch": 6.064690026954178,
      "grad_norm": 2.3480756282806396,
      "learning_rate": 2.0027434842249658e-05,
      "loss": 0.6044,
      "step": 2250
    },
    {
      "epoch": 6.091644204851752,
      "grad_norm": 3.0445172786712646,
      "learning_rate": 1.9890260631001374e-05,
      "loss": 0.5516,
      "step": 2260
    },
    {
      "epoch": 6.118598382749326,
      "grad_norm": 2.489713430404663,
      "learning_rate": 1.9753086419753087e-05,
      "loss": 0.5861,
      "step": 2270
    },
    {
      "epoch": 6.1455525606469,
      "grad_norm": 2.1993355751037598,
      "learning_rate": 1.9615912208504803e-05,
      "loss": 0.6938,
      "step": 2280
    },
    {
      "epoch": 6.172506738544475,
      "grad_norm": 1.914015769958496,
      "learning_rate": 1.9478737997256515e-05,
      "loss": 0.8575,
      "step": 2290
    },
    {
      "epoch": 6.199460916442049,
      "grad_norm": 2.359715223312378,
      "learning_rate": 1.934156378600823e-05,
      "loss": 0.6444,
      "step": 2300
    },
    {
      "epoch": 6.226415094339623,
      "grad_norm": 0.865803062915802,
      "learning_rate": 1.9204389574759944e-05,
      "loss": 0.6543,
      "step": 2310
    },
    {
      "epoch": 6.2533692722371965,
      "grad_norm": 1.363655686378479,
      "learning_rate": 1.906721536351166e-05,
      "loss": 0.9914,
      "step": 2320
    },
    {
      "epoch": 6.280323450134771,
      "grad_norm": 2.174215316772461,
      "learning_rate": 1.8930041152263377e-05,
      "loss": 0.6777,
      "step": 2330
    },
    {
      "epoch": 6.307277628032345,
      "grad_norm": 1.1655148267745972,
      "learning_rate": 1.879286694101509e-05,
      "loss": 0.6198,
      "step": 2340
    },
    {
      "epoch": 6.334231805929919,
      "grad_norm": 2.751349449157715,
      "learning_rate": 1.8655692729766806e-05,
      "loss": 1.3854,
      "step": 2350
    },
    {
      "epoch": 6.361185983827493,
      "grad_norm": 1.7437028884887695,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.6498,
      "step": 2360
    },
    {
      "epoch": 6.388140161725067,
      "grad_norm": 2.6876518726348877,
      "learning_rate": 1.8381344307270234e-05,
      "loss": 0.8044,
      "step": 2370
    },
    {
      "epoch": 6.415094339622642,
      "grad_norm": 2.60074520111084,
      "learning_rate": 1.8244170096021947e-05,
      "loss": 1.155,
      "step": 2380
    },
    {
      "epoch": 6.442048517520216,
      "grad_norm": 2.644638776779175,
      "learning_rate": 1.8106995884773663e-05,
      "loss": 0.8515,
      "step": 2390
    },
    {
      "epoch": 6.46900269541779,
      "grad_norm": 3.353593587875366,
      "learning_rate": 1.7969821673525376e-05,
      "loss": 0.5867,
      "step": 2400
    },
    {
      "epoch": 6.495956873315364,
      "grad_norm": 1.9784125089645386,
      "learning_rate": 1.7832647462277096e-05,
      "loss": 0.6024,
      "step": 2410
    },
    {
      "epoch": 6.5229110512129385,
      "grad_norm": 1.7953509092330933,
      "learning_rate": 1.769547325102881e-05,
      "loss": 1.2158,
      "step": 2420
    },
    {
      "epoch": 6.549865229110512,
      "grad_norm": 2.232206344604492,
      "learning_rate": 1.755829903978052e-05,
      "loss": 0.8265,
      "step": 2430
    },
    {
      "epoch": 6.576819407008086,
      "grad_norm": 1.054999828338623,
      "learning_rate": 1.7421124828532237e-05,
      "loss": 0.7372,
      "step": 2440
    },
    {
      "epoch": 6.60377358490566,
      "grad_norm": 1.8623944520950317,
      "learning_rate": 1.728395061728395e-05,
      "loss": 1.4106,
      "step": 2450
    },
    {
      "epoch": 6.630727762803234,
      "grad_norm": 2.689140796661377,
      "learning_rate": 1.7146776406035666e-05,
      "loss": 0.9934,
      "step": 2460
    },
    {
      "epoch": 6.657681940700809,
      "grad_norm": 1.7552543878555298,
      "learning_rate": 1.700960219478738e-05,
      "loss": 0.8231,
      "step": 2470
    },
    {
      "epoch": 6.684636118598383,
      "grad_norm": 3.4416518211364746,
      "learning_rate": 1.6872427983539095e-05,
      "loss": 0.7788,
      "step": 2480
    },
    {
      "epoch": 6.711590296495957,
      "grad_norm": 3.726334571838379,
      "learning_rate": 1.673525377229081e-05,
      "loss": 0.7407,
      "step": 2490
    },
    {
      "epoch": 6.738544474393531,
      "grad_norm": 2.932966470718384,
      "learning_rate": 1.6598079561042527e-05,
      "loss": 1.0143,
      "step": 2500
    },
    {
      "epoch": 6.765498652291106,
      "grad_norm": 1.8736401796340942,
      "learning_rate": 1.646090534979424e-05,
      "loss": 0.8365,
      "step": 2510
    },
    {
      "epoch": 6.7924528301886795,
      "grad_norm": 2.0926668643951416,
      "learning_rate": 1.6323731138545953e-05,
      "loss": 0.7736,
      "step": 2520
    },
    {
      "epoch": 6.819407008086253,
      "grad_norm": 3.3576009273529053,
      "learning_rate": 1.618655692729767e-05,
      "loss": 1.1465,
      "step": 2530
    },
    {
      "epoch": 6.846361185983827,
      "grad_norm": 2.4661567211151123,
      "learning_rate": 1.604938271604938e-05,
      "loss": 0.7863,
      "step": 2540
    },
    {
      "epoch": 6.873315363881401,
      "grad_norm": 1.88754141330719,
      "learning_rate": 1.5912208504801098e-05,
      "loss": 0.9081,
      "step": 2550
    },
    {
      "epoch": 6.900269541778976,
      "grad_norm": 47.142337799072266,
      "learning_rate": 1.5775034293552814e-05,
      "loss": 0.8261,
      "step": 2560
    },
    {
      "epoch": 6.92722371967655,
      "grad_norm": 2.473158359527588,
      "learning_rate": 1.563786008230453e-05,
      "loss": 0.4822,
      "step": 2570
    },
    {
      "epoch": 6.954177897574124,
      "grad_norm": 1.6194536685943604,
      "learning_rate": 1.5500685871056243e-05,
      "loss": 0.856,
      "step": 2580
    },
    {
      "epoch": 6.981132075471698,
      "grad_norm": 5.963684558868408,
      "learning_rate": 1.536351165980796e-05,
      "loss": 0.7661,
      "step": 2590
    },
    {
      "epoch": 7.008086253369272,
      "grad_norm": 1.8639130592346191,
      "learning_rate": 1.5226337448559672e-05,
      "loss": 1.388,
      "step": 2600
    },
    {
      "epoch": 7.035040431266847,
      "grad_norm": 3.447125196456909,
      "learning_rate": 1.5089163237311384e-05,
      "loss": 0.7474,
      "step": 2610
    },
    {
      "epoch": 7.061994609164421,
      "grad_norm": 2.3289992809295654,
      "learning_rate": 1.49519890260631e-05,
      "loss": 0.5907,
      "step": 2620
    },
    {
      "epoch": 7.0889487870619945,
      "grad_norm": 1.342872977256775,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.8798,
      "step": 2630
    },
    {
      "epoch": 7.115902964959568,
      "grad_norm": 1.3832533359527588,
      "learning_rate": 1.4677640603566531e-05,
      "loss": 1.0841,
      "step": 2640
    },
    {
      "epoch": 7.142857142857143,
      "grad_norm": 1.0350087881088257,
      "learning_rate": 1.4540466392318244e-05,
      "loss": 1.2334,
      "step": 2650
    },
    {
      "epoch": 7.169811320754717,
      "grad_norm": 4.020228862762451,
      "learning_rate": 1.440329218106996e-05,
      "loss": 0.8947,
      "step": 2660
    },
    {
      "epoch": 7.196765498652291,
      "grad_norm": 2.982022523880005,
      "learning_rate": 1.4266117969821674e-05,
      "loss": 0.9667,
      "step": 2670
    },
    {
      "epoch": 7.223719676549865,
      "grad_norm": 2.171691417694092,
      "learning_rate": 1.412894375857339e-05,
      "loss": 0.7494,
      "step": 2680
    },
    {
      "epoch": 7.250673854447439,
      "grad_norm": 2.717907667160034,
      "learning_rate": 1.3991769547325103e-05,
      "loss": 0.6898,
      "step": 2690
    },
    {
      "epoch": 7.277628032345014,
      "grad_norm": 2.5439579486846924,
      "learning_rate": 1.3854595336076818e-05,
      "loss": 0.4311,
      "step": 2700
    },
    {
      "epoch": 7.304582210242588,
      "grad_norm": 0.7530654072761536,
      "learning_rate": 1.3717421124828534e-05,
      "loss": 0.9008,
      "step": 2710
    },
    {
      "epoch": 7.331536388140162,
      "grad_norm": 0.637667179107666,
      "learning_rate": 1.3580246913580247e-05,
      "loss": 1.087,
      "step": 2720
    },
    {
      "epoch": 7.3584905660377355,
      "grad_norm": 1.1592116355895996,
      "learning_rate": 1.3443072702331963e-05,
      "loss": 0.6196,
      "step": 2730
    },
    {
      "epoch": 7.38544474393531,
      "grad_norm": 2.054795503616333,
      "learning_rate": 1.3305898491083677e-05,
      "loss": 0.7915,
      "step": 2740
    },
    {
      "epoch": 7.412398921832884,
      "grad_norm": 1.4349403381347656,
      "learning_rate": 1.3168724279835393e-05,
      "loss": 0.6572,
      "step": 2750
    },
    {
      "epoch": 7.439353099730458,
      "grad_norm": 1.0293610095977783,
      "learning_rate": 1.3031550068587106e-05,
      "loss": 1.0812,
      "step": 2760
    },
    {
      "epoch": 7.466307277628032,
      "grad_norm": 2.4686598777770996,
      "learning_rate": 1.2894375857338819e-05,
      "loss": 0.9702,
      "step": 2770
    },
    {
      "epoch": 7.493261455525606,
      "grad_norm": 1.4416760206222534,
      "learning_rate": 1.2757201646090535e-05,
      "loss": 1.0412,
      "step": 2780
    },
    {
      "epoch": 7.520215633423181,
      "grad_norm": 3.614410400390625,
      "learning_rate": 1.262002743484225e-05,
      "loss": 1.2559,
      "step": 2790
    },
    {
      "epoch": 7.547169811320755,
      "grad_norm": 0.8973541855812073,
      "learning_rate": 1.2482853223593966e-05,
      "loss": 0.4049,
      "step": 2800
    },
    {
      "epoch": 7.574123989218329,
      "grad_norm": 1.4669396877288818,
      "learning_rate": 1.2345679012345678e-05,
      "loss": 0.764,
      "step": 2810
    },
    {
      "epoch": 7.601078167115903,
      "grad_norm": 2.6035633087158203,
      "learning_rate": 1.2208504801097394e-05,
      "loss": 0.7421,
      "step": 2820
    },
    {
      "epoch": 7.628032345013477,
      "grad_norm": 3.8187856674194336,
      "learning_rate": 1.2071330589849109e-05,
      "loss": 0.8026,
      "step": 2830
    },
    {
      "epoch": 7.654986522911051,
      "grad_norm": 1.5178192853927612,
      "learning_rate": 1.1934156378600823e-05,
      "loss": 0.8348,
      "step": 2840
    },
    {
      "epoch": 7.681940700808625,
      "grad_norm": 1.0527846813201904,
      "learning_rate": 1.1796982167352538e-05,
      "loss": 0.7379,
      "step": 2850
    },
    {
      "epoch": 7.708894878706199,
      "grad_norm": 2.7532355785369873,
      "learning_rate": 1.1659807956104254e-05,
      "loss": 1.4819,
      "step": 2860
    },
    {
      "epoch": 7.735849056603773,
      "grad_norm": 1.9591217041015625,
      "learning_rate": 1.1522633744855968e-05,
      "loss": 0.4296,
      "step": 2870
    },
    {
      "epoch": 7.762803234501348,
      "grad_norm": 2.7292425632476807,
      "learning_rate": 1.1385459533607683e-05,
      "loss": 0.6324,
      "step": 2880
    },
    {
      "epoch": 7.789757412398922,
      "grad_norm": 2.3577399253845215,
      "learning_rate": 1.1248285322359397e-05,
      "loss": 0.5892,
      "step": 2890
    },
    {
      "epoch": 7.816711590296496,
      "grad_norm": 1.4059489965438843,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.5519,
      "step": 2900
    },
    {
      "epoch": 7.84366576819407,
      "grad_norm": 1.9480534791946411,
      "learning_rate": 1.0973936899862826e-05,
      "loss": 0.9436,
      "step": 2910
    },
    {
      "epoch": 7.870619946091644,
      "grad_norm": 2.3746042251586914,
      "learning_rate": 1.083676268861454e-05,
      "loss": 0.3955,
      "step": 2920
    },
    {
      "epoch": 7.8975741239892185,
      "grad_norm": 1.2576045989990234,
      "learning_rate": 1.0699588477366255e-05,
      "loss": 0.604,
      "step": 2930
    },
    {
      "epoch": 7.9245283018867925,
      "grad_norm": 2.509427309036255,
      "learning_rate": 1.0562414266117971e-05,
      "loss": 0.7358,
      "step": 2940
    },
    {
      "epoch": 7.951482479784366,
      "grad_norm": 1.8080178499221802,
      "learning_rate": 1.0425240054869686e-05,
      "loss": 0.6726,
      "step": 2950
    },
    {
      "epoch": 7.97843665768194,
      "grad_norm": 3.254493474960327,
      "learning_rate": 1.02880658436214e-05,
      "loss": 0.6716,
      "step": 2960
    },
    {
      "epoch": 8.005390835579515,
      "grad_norm": 1.1409560441970825,
      "learning_rate": 1.0150891632373114e-05,
      "loss": 0.8359,
      "step": 2970
    },
    {
      "epoch": 8.032345013477089,
      "grad_norm": 2.8074634075164795,
      "learning_rate": 1.0013717421124829e-05,
      "loss": 0.8532,
      "step": 2980
    },
    {
      "epoch": 8.059299191374663,
      "grad_norm": 1.6796783208847046,
      "learning_rate": 9.876543209876543e-06,
      "loss": 1.1366,
      "step": 2990
    },
    {
      "epoch": 8.086253369272237,
      "grad_norm": 0.6373213529586792,
      "learning_rate": 9.739368998628258e-06,
      "loss": 0.5121,
      "step": 3000
    },
    {
      "epoch": 8.11320754716981,
      "grad_norm": 1.404941201210022,
      "learning_rate": 9.602194787379972e-06,
      "loss": 0.7168,
      "step": 3010
    },
    {
      "epoch": 8.140161725067385,
      "grad_norm": 3.0173532962799072,
      "learning_rate": 9.465020576131688e-06,
      "loss": 0.6495,
      "step": 3020
    },
    {
      "epoch": 8.167115902964959,
      "grad_norm": 0.9373369812965393,
      "learning_rate": 9.327846364883403e-06,
      "loss": 1.1245,
      "step": 3030
    },
    {
      "epoch": 8.194070080862534,
      "grad_norm": 1.8687936067581177,
      "learning_rate": 9.190672153635117e-06,
      "loss": 0.7253,
      "step": 3040
    },
    {
      "epoch": 8.221024258760108,
      "grad_norm": 0.9592246413230896,
      "learning_rate": 9.053497942386832e-06,
      "loss": 1.2584,
      "step": 3050
    },
    {
      "epoch": 8.247978436657682,
      "grad_norm": 2.668527126312256,
      "learning_rate": 8.916323731138548e-06,
      "loss": 0.6645,
      "step": 3060
    },
    {
      "epoch": 8.274932614555256,
      "grad_norm": 0.8130900859832764,
      "learning_rate": 8.77914951989026e-06,
      "loss": 0.5244,
      "step": 3070
    },
    {
      "epoch": 8.30188679245283,
      "grad_norm": 1.980900764465332,
      "learning_rate": 8.641975308641975e-06,
      "loss": 0.5843,
      "step": 3080
    },
    {
      "epoch": 8.328840970350404,
      "grad_norm": 1.7426378726959229,
      "learning_rate": 8.50480109739369e-06,
      "loss": 0.9521,
      "step": 3090
    },
    {
      "epoch": 8.355795148247978,
      "grad_norm": 2.6671996116638184,
      "learning_rate": 8.367626886145406e-06,
      "loss": 0.8797,
      "step": 3100
    },
    {
      "epoch": 8.382749326145552,
      "grad_norm": 2.629798650741577,
      "learning_rate": 8.23045267489712e-06,
      "loss": 0.5629,
      "step": 3110
    },
    {
      "epoch": 8.409703504043126,
      "grad_norm": 1.723059058189392,
      "learning_rate": 8.093278463648834e-06,
      "loss": 0.5738,
      "step": 3120
    },
    {
      "epoch": 8.436657681940702,
      "grad_norm": 0.5404053926467896,
      "learning_rate": 7.956104252400549e-06,
      "loss": 0.72,
      "step": 3130
    },
    {
      "epoch": 8.463611859838275,
      "grad_norm": 2.8987836837768555,
      "learning_rate": 7.818930041152265e-06,
      "loss": 0.7432,
      "step": 3140
    },
    {
      "epoch": 8.49056603773585,
      "grad_norm": 1.2646089792251587,
      "learning_rate": 7.68175582990398e-06,
      "loss": 0.5356,
      "step": 3150
    },
    {
      "epoch": 8.517520215633423,
      "grad_norm": 3.3997156620025635,
      "learning_rate": 7.544581618655692e-06,
      "loss": 0.8056,
      "step": 3160
    },
    {
      "epoch": 8.544474393530997,
      "grad_norm": 2.264604330062866,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 1.0019,
      "step": 3170
    },
    {
      "epoch": 8.571428571428571,
      "grad_norm": 1.962279200553894,
      "learning_rate": 7.270233196159122e-06,
      "loss": 0.6352,
      "step": 3180
    },
    {
      "epoch": 8.598382749326145,
      "grad_norm": 2.4050302505493164,
      "learning_rate": 7.133058984910837e-06,
      "loss": 0.6712,
      "step": 3190
    },
    {
      "epoch": 8.625336927223719,
      "grad_norm": 1.2828004360198975,
      "learning_rate": 6.995884773662552e-06,
      "loss": 0.625,
      "step": 3200
    },
    {
      "epoch": 8.652291105121293,
      "grad_norm": 3.306525230407715,
      "learning_rate": 6.858710562414267e-06,
      "loss": 0.7727,
      "step": 3210
    },
    {
      "epoch": 8.679245283018869,
      "grad_norm": 1.3902239799499512,
      "learning_rate": 6.721536351165981e-06,
      "loss": 0.4857,
      "step": 3220
    },
    {
      "epoch": 8.706199460916443,
      "grad_norm": 3.665642261505127,
      "learning_rate": 6.584362139917697e-06,
      "loss": 0.9572,
      "step": 3230
    },
    {
      "epoch": 8.733153638814017,
      "grad_norm": 2.6966567039489746,
      "learning_rate": 6.447187928669409e-06,
      "loss": 0.739,
      "step": 3240
    },
    {
      "epoch": 8.76010781671159,
      "grad_norm": 2.127206325531006,
      "learning_rate": 6.310013717421125e-06,
      "loss": 0.8983,
      "step": 3250
    },
    {
      "epoch": 8.787061994609164,
      "grad_norm": 2.1118884086608887,
      "learning_rate": 6.172839506172839e-06,
      "loss": 1.0385,
      "step": 3260
    },
    {
      "epoch": 8.814016172506738,
      "grad_norm": 1.6121476888656616,
      "learning_rate": 6.0356652949245544e-06,
      "loss": 0.4564,
      "step": 3270
    },
    {
      "epoch": 8.840970350404312,
      "grad_norm": 3.346813440322876,
      "learning_rate": 5.898491083676269e-06,
      "loss": 0.9239,
      "step": 3280
    },
    {
      "epoch": 8.867924528301886,
      "grad_norm": 2.808685541152954,
      "learning_rate": 5.761316872427984e-06,
      "loss": 1.0246,
      "step": 3290
    },
    {
      "epoch": 8.89487870619946,
      "grad_norm": 0.6503840684890747,
      "learning_rate": 5.624142661179699e-06,
      "loss": 0.4066,
      "step": 3300
    },
    {
      "epoch": 8.921832884097036,
      "grad_norm": 2.2090868949890137,
      "learning_rate": 5.486968449931413e-06,
      "loss": 0.8856,
      "step": 3310
    },
    {
      "epoch": 8.94878706199461,
      "grad_norm": 2.4998106956481934,
      "learning_rate": 5.3497942386831275e-06,
      "loss": 1.0539,
      "step": 3320
    },
    {
      "epoch": 8.975741239892184,
      "grad_norm": 1.3785419464111328,
      "learning_rate": 5.212620027434843e-06,
      "loss": 1.0206,
      "step": 3330
    },
    {
      "epoch": 9.002695417789758,
      "grad_norm": 2.4248452186584473,
      "learning_rate": 5.075445816186557e-06,
      "loss": 0.6985,
      "step": 3340
    },
    {
      "epoch": 9.029649595687331,
      "grad_norm": 2.76381254196167,
      "learning_rate": 4.938271604938272e-06,
      "loss": 0.8552,
      "step": 3350
    },
    {
      "epoch": 9.056603773584905,
      "grad_norm": 0.44387710094451904,
      "learning_rate": 4.801097393689986e-06,
      "loss": 0.7421,
      "step": 3360
    },
    {
      "epoch": 9.08355795148248,
      "grad_norm": 1.8279584646224976,
      "learning_rate": 4.663923182441701e-06,
      "loss": 0.6471,
      "step": 3370
    },
    {
      "epoch": 9.110512129380053,
      "grad_norm": 4.515883922576904,
      "learning_rate": 4.526748971193416e-06,
      "loss": 0.8336,
      "step": 3380
    },
    {
      "epoch": 9.137466307277627,
      "grad_norm": 2.7579915523529053,
      "learning_rate": 4.38957475994513e-06,
      "loss": 1.0069,
      "step": 3390
    },
    {
      "epoch": 9.164420485175203,
      "grad_norm": 2.561863660812378,
      "learning_rate": 4.252400548696845e-06,
      "loss": 0.8613,
      "step": 3400
    },
    {
      "epoch": 9.191374663072777,
      "grad_norm": 3.5827927589416504,
      "learning_rate": 4.11522633744856e-06,
      "loss": 0.7177,
      "step": 3410
    },
    {
      "epoch": 9.21832884097035,
      "grad_norm": 1.109462022781372,
      "learning_rate": 3.9780521262002744e-06,
      "loss": 0.6787,
      "step": 3420
    },
    {
      "epoch": 9.245283018867925,
      "grad_norm": 2.3508827686309814,
      "learning_rate": 3.84087791495199e-06,
      "loss": 0.7854,
      "step": 3430
    },
    {
      "epoch": 9.272237196765499,
      "grad_norm": 3.206239938735962,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.6083,
      "step": 3440
    },
    {
      "epoch": 9.299191374663073,
      "grad_norm": 5.175552845001221,
      "learning_rate": 3.5665294924554186e-06,
      "loss": 0.6398,
      "step": 3450
    },
    {
      "epoch": 9.326145552560646,
      "grad_norm": 1.54671311378479,
      "learning_rate": 3.4293552812071335e-06,
      "loss": 1.3045,
      "step": 3460
    },
    {
      "epoch": 9.35309973045822,
      "grad_norm": 1.7365717887878418,
      "learning_rate": 3.2921810699588483e-06,
      "loss": 0.6332,
      "step": 3470
    },
    {
      "epoch": 9.380053908355794,
      "grad_norm": 1.5034387111663818,
      "learning_rate": 3.1550068587105624e-06,
      "loss": 0.6763,
      "step": 3480
    },
    {
      "epoch": 9.40700808625337,
      "grad_norm": 3.266697645187378,
      "learning_rate": 3.0178326474622772e-06,
      "loss": 0.8033,
      "step": 3490
    },
    {
      "epoch": 9.433962264150944,
      "grad_norm": 1.4214359521865845,
      "learning_rate": 2.880658436213992e-06,
      "loss": 0.748,
      "step": 3500
    },
    {
      "epoch": 9.460916442048518,
      "grad_norm": 2.0403311252593994,
      "learning_rate": 2.7434842249657065e-06,
      "loss": 0.862,
      "step": 3510
    },
    {
      "epoch": 9.487870619946092,
      "grad_norm": 1.678673505783081,
      "learning_rate": 2.6063100137174214e-06,
      "loss": 0.6221,
      "step": 3520
    },
    {
      "epoch": 9.514824797843666,
      "grad_norm": 2.317265033721924,
      "learning_rate": 2.469135802469136e-06,
      "loss": 0.697,
      "step": 3530
    },
    {
      "epoch": 9.54177897574124,
      "grad_norm": 2.5591745376586914,
      "learning_rate": 2.3319615912208507e-06,
      "loss": 0.6209,
      "step": 3540
    },
    {
      "epoch": 9.568733153638814,
      "grad_norm": 4.573774337768555,
      "learning_rate": 2.194787379972565e-06,
      "loss": 0.6689,
      "step": 3550
    },
    {
      "epoch": 9.595687331536388,
      "grad_norm": 1.756940484046936,
      "learning_rate": 2.05761316872428e-06,
      "loss": 0.9016,
      "step": 3560
    },
    {
      "epoch": 9.622641509433961,
      "grad_norm": 1.913479208946228,
      "learning_rate": 1.920438957475995e-06,
      "loss": 0.7732,
      "step": 3570
    },
    {
      "epoch": 9.649595687331537,
      "grad_norm": 3.303154230117798,
      "learning_rate": 1.7832647462277093e-06,
      "loss": 1.0092,
      "step": 3580
    },
    {
      "epoch": 9.676549865229111,
      "grad_norm": 2.583827018737793,
      "learning_rate": 1.6460905349794242e-06,
      "loss": 1.1913,
      "step": 3590
    },
    {
      "epoch": 9.703504043126685,
      "grad_norm": 0.6483349800109863,
      "learning_rate": 1.5089163237311386e-06,
      "loss": 0.8234,
      "step": 3600
    },
    {
      "epoch": 9.730458221024259,
      "grad_norm": 3.4046685695648193,
      "learning_rate": 1.3717421124828533e-06,
      "loss": 1.1062,
      "step": 3610
    },
    {
      "epoch": 9.757412398921833,
      "grad_norm": 1.3275524377822876,
      "learning_rate": 1.234567901234568e-06,
      "loss": 0.5139,
      "step": 3620
    },
    {
      "epoch": 9.784366576819407,
      "grad_norm": 2.8111588954925537,
      "learning_rate": 1.0973936899862826e-06,
      "loss": 0.7716,
      "step": 3630
    },
    {
      "epoch": 9.81132075471698,
      "grad_norm": 1.3000844717025757,
      "learning_rate": 9.602194787379974e-07,
      "loss": 0.4894,
      "step": 3640
    },
    {
      "epoch": 9.838274932614555,
      "grad_norm": 2.7694694995880127,
      "learning_rate": 8.230452674897121e-07,
      "loss": 0.5647,
      "step": 3650
    },
    {
      "epoch": 9.865229110512129,
      "grad_norm": 1.6062043905258179,
      "learning_rate": 6.858710562414266e-07,
      "loss": 0.9145,
      "step": 3660
    },
    {
      "epoch": 9.892183288409704,
      "grad_norm": 1.323443055152893,
      "learning_rate": 5.486968449931413e-07,
      "loss": 0.7909,
      "step": 3670
    },
    {
      "epoch": 9.919137466307278,
      "grad_norm": 3.0170159339904785,
      "learning_rate": 4.1152263374485604e-07,
      "loss": 0.7927,
      "step": 3680
    },
    {
      "epoch": 9.946091644204852,
      "grad_norm": 3.5468873977661133,
      "learning_rate": 2.7434842249657064e-07,
      "loss": 0.7568,
      "step": 3690
    },
    {
      "epoch": 9.973045822102426,
      "grad_norm": 1.8293559551239014,
      "learning_rate": 1.3717421124828532e-07,
      "loss": 0.6616,
      "step": 3700
    },
    {
      "epoch": 10.0,
      "grad_norm": 2.379469633102417,
      "learning_rate": 0.0,
      "loss": 0.3192,
      "step": 3710
    }
  ],
  "logging_steps": 10,
  "max_steps": 3710,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 10000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3062070293667840.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}