|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 1950, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0005128205128205128, |
|
"grad_norm": 27.855213353292697, |
|
"learning_rate": 5.1282051282051286e-08, |
|
"loss": 1.4714, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002564102564102564, |
|
"grad_norm": 24.297444748053664, |
|
"learning_rate": 2.564102564102564e-07, |
|
"loss": 1.3999, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005128205128205128, |
|
"grad_norm": 16.726484469297354, |
|
"learning_rate": 5.128205128205128e-07, |
|
"loss": 1.3624, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.007692307692307693, |
|
"grad_norm": 12.476091472369891, |
|
"learning_rate": 7.692307692307694e-07, |
|
"loss": 1.2347, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.010256410256410256, |
|
"grad_norm": 8.35097021976234, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 1.121, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01282051282051282, |
|
"grad_norm": 3.6024304396596554, |
|
"learning_rate": 1.282051282051282e-06, |
|
"loss": 0.9839, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.015384615384615385, |
|
"grad_norm": 3.480763645073715, |
|
"learning_rate": 1.5384615384615387e-06, |
|
"loss": 0.9315, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.017948717948717947, |
|
"grad_norm": 3.0807184674746546, |
|
"learning_rate": 1.794871794871795e-06, |
|
"loss": 0.9081, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.020512820512820513, |
|
"grad_norm": 2.915980172201366, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 0.8695, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.023076923076923078, |
|
"grad_norm": 2.9407435214657136, |
|
"learning_rate": 2.307692307692308e-06, |
|
"loss": 0.8613, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02564102564102564, |
|
"grad_norm": 2.9974488091505522, |
|
"learning_rate": 2.564102564102564e-06, |
|
"loss": 0.8461, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.028205128205128206, |
|
"grad_norm": 3.2196932971809713, |
|
"learning_rate": 2.8205128205128207e-06, |
|
"loss": 0.8273, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03076923076923077, |
|
"grad_norm": 3.148550996118887, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 0.8436, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03333333333333333, |
|
"grad_norm": 3.220886247551075, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.8211, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.035897435897435895, |
|
"grad_norm": 3.0507943848526597, |
|
"learning_rate": 3.58974358974359e-06, |
|
"loss": 0.8178, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.038461538461538464, |
|
"grad_norm": 2.9585065948581484, |
|
"learning_rate": 3.846153846153847e-06, |
|
"loss": 0.8077, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.041025641025641026, |
|
"grad_norm": 3.0972764668895576, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 0.7975, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.04358974358974359, |
|
"grad_norm": 3.042612736519298, |
|
"learning_rate": 4.358974358974359e-06, |
|
"loss": 0.802, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.046153846153846156, |
|
"grad_norm": 3.249820635668479, |
|
"learning_rate": 4.615384615384616e-06, |
|
"loss": 0.7839, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04871794871794872, |
|
"grad_norm": 2.9967690432516747, |
|
"learning_rate": 4.871794871794872e-06, |
|
"loss": 0.7686, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05128205128205128, |
|
"grad_norm": 3.3554959103069737, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 0.7628, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05384615384615385, |
|
"grad_norm": 3.0970153076575975, |
|
"learning_rate": 5.384615384615385e-06, |
|
"loss": 0.7564, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.05641025641025641, |
|
"grad_norm": 3.19251255800907, |
|
"learning_rate": 5.641025641025641e-06, |
|
"loss": 0.7521, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.05897435897435897, |
|
"grad_norm": 2.989469084400876, |
|
"learning_rate": 5.897435897435898e-06, |
|
"loss": 0.7497, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06153846153846154, |
|
"grad_norm": 2.9701683904826743, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 0.7482, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.0641025641025641, |
|
"grad_norm": 2.806869258124627, |
|
"learning_rate": 6.410256410256412e-06, |
|
"loss": 0.7584, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.06666666666666667, |
|
"grad_norm": 2.951227277607944, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.744, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.06923076923076923, |
|
"grad_norm": 3.0730318127191385, |
|
"learning_rate": 6.923076923076923e-06, |
|
"loss": 0.7413, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.07179487179487179, |
|
"grad_norm": 2.8953064927208025, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 0.738, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07435897435897436, |
|
"grad_norm": 2.9241997829974147, |
|
"learning_rate": 7.435897435897437e-06, |
|
"loss": 0.7274, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.07692307692307693, |
|
"grad_norm": 2.885045926032792, |
|
"learning_rate": 7.692307692307694e-06, |
|
"loss": 0.7296, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.07948717948717948, |
|
"grad_norm": 3.0783343062947184, |
|
"learning_rate": 7.948717948717949e-06, |
|
"loss": 0.7154, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.08205128205128205, |
|
"grad_norm": 2.9470190275773542, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 0.7215, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.08461538461538462, |
|
"grad_norm": 3.28788682771588, |
|
"learning_rate": 8.461538461538462e-06, |
|
"loss": 0.7314, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.08717948717948718, |
|
"grad_norm": 2.839890562503017, |
|
"learning_rate": 8.717948717948719e-06, |
|
"loss": 0.7119, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.08974358974358974, |
|
"grad_norm": 2.908179397354329, |
|
"learning_rate": 8.974358974358976e-06, |
|
"loss": 0.7201, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09230769230769231, |
|
"grad_norm": 2.775686236070062, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 0.732, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.09487179487179487, |
|
"grad_norm": 2.8091812680053803, |
|
"learning_rate": 9.487179487179487e-06, |
|
"loss": 0.7192, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.09743589743589744, |
|
"grad_norm": 3.043641292876537, |
|
"learning_rate": 9.743589743589744e-06, |
|
"loss": 0.7289, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.806409321185608, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7211, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.10256410256410256, |
|
"grad_norm": 2.71943069020764, |
|
"learning_rate": 9.999799726899261e-06, |
|
"loss": 0.7212, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.10512820512820513, |
|
"grad_norm": 2.783096760671244, |
|
"learning_rate": 9.999198923640774e-06, |
|
"loss": 0.7203, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.1076923076923077, |
|
"grad_norm": 2.9232797812449123, |
|
"learning_rate": 9.998197638354428e-06, |
|
"loss": 0.7247, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11025641025641025, |
|
"grad_norm": 2.6455216659425345, |
|
"learning_rate": 9.996795951252427e-06, |
|
"loss": 0.7047, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.11282051282051282, |
|
"grad_norm": 2.770746104512415, |
|
"learning_rate": 9.994993974622863e-06, |
|
"loss": 0.7237, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.11538461538461539, |
|
"grad_norm": 2.942090186870347, |
|
"learning_rate": 9.992791852820709e-06, |
|
"loss": 0.7318, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.11794871794871795, |
|
"grad_norm": 2.8360683350136324, |
|
"learning_rate": 9.990189762256275e-06, |
|
"loss": 0.7223, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.12051282051282051, |
|
"grad_norm": 2.7028129721998035, |
|
"learning_rate": 9.987187911381059e-06, |
|
"loss": 0.6966, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.12307692307692308, |
|
"grad_norm": 2.8576067757026955, |
|
"learning_rate": 9.983786540671052e-06, |
|
"loss": 0.6921, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.12564102564102564, |
|
"grad_norm": 3.1181897606257025, |
|
"learning_rate": 9.979985922607476e-06, |
|
"loss": 0.6892, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1282051282051282, |
|
"grad_norm": 2.6050383237898274, |
|
"learning_rate": 9.975786361654959e-06, |
|
"loss": 0.6907, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.13076923076923078, |
|
"grad_norm": 2.588587494074759, |
|
"learning_rate": 9.971188194237141e-06, |
|
"loss": 0.7072, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.13333333333333333, |
|
"grad_norm": 2.583793235527538, |
|
"learning_rate": 9.966191788709716e-06, |
|
"loss": 0.7059, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1358974358974359, |
|
"grad_norm": 2.585476026019637, |
|
"learning_rate": 9.960797545330936e-06, |
|
"loss": 0.7125, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.13846153846153847, |
|
"grad_norm": 2.7111790603577877, |
|
"learning_rate": 9.955005896229543e-06, |
|
"loss": 0.6997, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.14102564102564102, |
|
"grad_norm": 2.546803585442374, |
|
"learning_rate": 9.948817305370145e-06, |
|
"loss": 0.6886, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.14358974358974358, |
|
"grad_norm": 2.504186693121665, |
|
"learning_rate": 9.942232268516051e-06, |
|
"loss": 0.6904, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.14615384615384616, |
|
"grad_norm": 2.423059673415451, |
|
"learning_rate": 9.935251313189564e-06, |
|
"loss": 0.7086, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.14871794871794872, |
|
"grad_norm": 2.5596326656143806, |
|
"learning_rate": 9.927874998629714e-06, |
|
"loss": 0.6893, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.15128205128205127, |
|
"grad_norm": 2.658800164746175, |
|
"learning_rate": 9.920103915747452e-06, |
|
"loss": 0.6819, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.15384615384615385, |
|
"grad_norm": 2.5618934622112106, |
|
"learning_rate": 9.911938687078324e-06, |
|
"loss": 0.6823, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1564102564102564, |
|
"grad_norm": 2.5974964155912903, |
|
"learning_rate": 9.9033799667326e-06, |
|
"loss": 0.7085, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.15897435897435896, |
|
"grad_norm": 2.607874540525578, |
|
"learning_rate": 9.89442844034286e-06, |
|
"loss": 0.6766, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.16153846153846155, |
|
"grad_norm": 2.732487640942877, |
|
"learning_rate": 9.885084825009085e-06, |
|
"loss": 0.6863, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.1641025641025641, |
|
"grad_norm": 2.5827249383659865, |
|
"learning_rate": 9.875349869241202e-06, |
|
"loss": 0.6994, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.16666666666666666, |
|
"grad_norm": 2.5074970797423, |
|
"learning_rate": 9.86522435289912e-06, |
|
"loss": 0.672, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.16923076923076924, |
|
"grad_norm": 2.535326853580366, |
|
"learning_rate": 9.854709087130261e-06, |
|
"loss": 0.6842, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.1717948717948718, |
|
"grad_norm": 2.5728898533459663, |
|
"learning_rate": 9.843804914304578e-06, |
|
"loss": 0.6826, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.17435897435897435, |
|
"grad_norm": 2.3552532454072503, |
|
"learning_rate": 9.83251270794707e-06, |
|
"loss": 0.6947, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.17692307692307693, |
|
"grad_norm": 2.37727229081133, |
|
"learning_rate": 9.820833372667813e-06, |
|
"loss": 0.676, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.1794871794871795, |
|
"grad_norm": 2.4920852746047966, |
|
"learning_rate": 9.80876784408948e-06, |
|
"loss": 0.678, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.18205128205128204, |
|
"grad_norm": 2.6448068047121156, |
|
"learning_rate": 9.796317088772402e-06, |
|
"loss": 0.6708, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.18461538461538463, |
|
"grad_norm": 2.6210733196063174, |
|
"learning_rate": 9.783482104137127e-06, |
|
"loss": 0.6641, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.18717948717948718, |
|
"grad_norm": 2.4774458867373337, |
|
"learning_rate": 9.770263918384523e-06, |
|
"loss": 0.6678, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.18974358974358974, |
|
"grad_norm": 2.4109738214433567, |
|
"learning_rate": 9.75666359041341e-06, |
|
"loss": 0.6664, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.19230769230769232, |
|
"grad_norm": 2.6223339803254118, |
|
"learning_rate": 9.742682209735727e-06, |
|
"loss": 0.6865, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.19487179487179487, |
|
"grad_norm": 2.5414701027854787, |
|
"learning_rate": 9.728320896389263e-06, |
|
"loss": 0.6951, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.19743589743589743, |
|
"grad_norm": 2.379668261870325, |
|
"learning_rate": 9.713580800847917e-06, |
|
"loss": 0.6672, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.444662137369538, |
|
"learning_rate": 9.698463103929542e-06, |
|
"loss": 0.6622, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.20256410256410257, |
|
"grad_norm": 2.449846598434235, |
|
"learning_rate": 9.682969016701357e-06, |
|
"loss": 0.65, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.20512820512820512, |
|
"grad_norm": 2.4410258623541763, |
|
"learning_rate": 9.66709978038292e-06, |
|
"loss": 0.6608, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.2076923076923077, |
|
"grad_norm": 2.3782584039118015, |
|
"learning_rate": 9.650856666246693e-06, |
|
"loss": 0.6566, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.21025641025641026, |
|
"grad_norm": 2.4939928679664773, |
|
"learning_rate": 9.63424097551621e-06, |
|
"loss": 0.645, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.2128205128205128, |
|
"grad_norm": 2.431913499962849, |
|
"learning_rate": 9.617254039261835e-06, |
|
"loss": 0.6333, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.2153846153846154, |
|
"grad_norm": 2.4550799694984318, |
|
"learning_rate": 9.599897218294122e-06, |
|
"loss": 0.6716, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.21794871794871795, |
|
"grad_norm": 2.454256850201106, |
|
"learning_rate": 9.582171903054815e-06, |
|
"loss": 0.643, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2205128205128205, |
|
"grad_norm": 2.38325690019972, |
|
"learning_rate": 9.564079513505455e-06, |
|
"loss": 0.6351, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.2230769230769231, |
|
"grad_norm": 2.4210446447570533, |
|
"learning_rate": 9.54562149901362e-06, |
|
"loss": 0.6627, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.22564102564102564, |
|
"grad_norm": 2.479279392696381, |
|
"learning_rate": 9.526799338236828e-06, |
|
"loss": 0.6506, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.2282051282051282, |
|
"grad_norm": 2.577651511733567, |
|
"learning_rate": 9.507614539004082e-06, |
|
"loss": 0.6306, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.23076923076923078, |
|
"grad_norm": 2.416794598688409, |
|
"learning_rate": 9.488068638195072e-06, |
|
"loss": 0.6401, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.23333333333333334, |
|
"grad_norm": 2.746257367697141, |
|
"learning_rate": 9.468163201617063e-06, |
|
"loss": 0.6299, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2358974358974359, |
|
"grad_norm": 2.443879358348906, |
|
"learning_rate": 9.447899823879456e-06, |
|
"loss": 0.6453, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.23846153846153847, |
|
"grad_norm": 2.3287172810254773, |
|
"learning_rate": 9.427280128266049e-06, |
|
"loss": 0.628, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.24102564102564103, |
|
"grad_norm": 2.462832532756568, |
|
"learning_rate": 9.406305766604996e-06, |
|
"loss": 0.6266, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.24358974358974358, |
|
"grad_norm": 2.6197702096400937, |
|
"learning_rate": 9.384978419136469e-06, |
|
"loss": 0.632, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.24615384615384617, |
|
"grad_norm": 2.457574610754138, |
|
"learning_rate": 9.363299794378072e-06, |
|
"loss": 0.6218, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.24871794871794872, |
|
"grad_norm": 2.3841136661317552, |
|
"learning_rate": 9.34127162898797e-06, |
|
"loss": 0.6192, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.2512820512820513, |
|
"grad_norm": 2.3066096761948383, |
|
"learning_rate": 9.318895687625752e-06, |
|
"loss": 0.6285, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.25384615384615383, |
|
"grad_norm": 2.4121900484386805, |
|
"learning_rate": 9.296173762811084e-06, |
|
"loss": 0.6214, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.2564102564102564, |
|
"grad_norm": 2.401649957120604, |
|
"learning_rate": 9.273107674780102e-06, |
|
"loss": 0.6295, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.258974358974359, |
|
"grad_norm": 2.3261987507248825, |
|
"learning_rate": 9.249699271339594e-06, |
|
"loss": 0.6165, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.26153846153846155, |
|
"grad_norm": 2.4189305418813336, |
|
"learning_rate": 9.225950427718974e-06, |
|
"loss": 0.6148, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.2641025641025641, |
|
"grad_norm": 2.3669436725839543, |
|
"learning_rate": 9.201863046420065e-06, |
|
"loss": 0.6374, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 2.323600220845717, |
|
"learning_rate": 9.177439057064684e-06, |
|
"loss": 0.6168, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.2692307692307692, |
|
"grad_norm": 2.48316209601843, |
|
"learning_rate": 9.152680416240059e-06, |
|
"loss": 0.6163, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.2717948717948718, |
|
"grad_norm": 2.491614167030535, |
|
"learning_rate": 9.1275891073421e-06, |
|
"loss": 0.61, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.2743589743589744, |
|
"grad_norm": 2.644322477647823, |
|
"learning_rate": 9.102167140416503e-06, |
|
"loss": 0.6274, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.27692307692307694, |
|
"grad_norm": 2.3420947737074957, |
|
"learning_rate": 9.076416551997721e-06, |
|
"loss": 0.5981, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.2794871794871795, |
|
"grad_norm": 2.4856053771378086, |
|
"learning_rate": 9.050339404945834e-06, |
|
"loss": 0.6134, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.28205128205128205, |
|
"grad_norm": 2.3446213277401995, |
|
"learning_rate": 9.023937788281278e-06, |
|
"loss": 0.6014, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.2846153846153846, |
|
"grad_norm": 3.401791255832871, |
|
"learning_rate": 8.997213817017508e-06, |
|
"loss": 0.5976, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.28717948717948716, |
|
"grad_norm": 2.511130997702265, |
|
"learning_rate": 8.970169631991556e-06, |
|
"loss": 0.5913, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.28974358974358977, |
|
"grad_norm": 2.475069745994417, |
|
"learning_rate": 8.942807399692543e-06, |
|
"loss": 0.6244, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.2923076923076923, |
|
"grad_norm": 2.379901357997744, |
|
"learning_rate": 8.915129312088112e-06, |
|
"loss": 0.6126, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.2948717948717949, |
|
"grad_norm": 2.579387941858795, |
|
"learning_rate": 8.88713758644883e-06, |
|
"loss": 0.6094, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.29743589743589743, |
|
"grad_norm": 2.509534211853439, |
|
"learning_rate": 8.858834465170576e-06, |
|
"loss": 0.6073, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 2.4610011089524164, |
|
"learning_rate": 8.83022221559489e-06, |
|
"loss": 0.6018, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.30256410256410254, |
|
"grad_norm": 2.3560790069702304, |
|
"learning_rate": 8.801303129827352e-06, |
|
"loss": 0.5972, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.30512820512820515, |
|
"grad_norm": 2.564068568417491, |
|
"learning_rate": 8.772079524553951e-06, |
|
"loss": 0.6027, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.3076923076923077, |
|
"grad_norm": 2.2846318016556513, |
|
"learning_rate": 8.742553740855507e-06, |
|
"loss": 0.5968, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.31025641025641026, |
|
"grad_norm": 2.3284611207841217, |
|
"learning_rate": 8.712728144020118e-06, |
|
"loss": 0.6043, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.3128205128205128, |
|
"grad_norm": 2.3902221577154528, |
|
"learning_rate": 8.682605123353685e-06, |
|
"loss": 0.5961, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3153846153846154, |
|
"grad_norm": 2.425567977054307, |
|
"learning_rate": 8.652187091988516e-06, |
|
"loss": 0.5936, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.31794871794871793, |
|
"grad_norm": 2.4720501112801068, |
|
"learning_rate": 8.621476486689991e-06, |
|
"loss": 0.5914, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.32051282051282054, |
|
"grad_norm": 2.5269541650197063, |
|
"learning_rate": 8.590475767661371e-06, |
|
"loss": 0.5825, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.3230769230769231, |
|
"grad_norm": 2.441311110280109, |
|
"learning_rate": 8.559187418346703e-06, |
|
"loss": 0.5847, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.32564102564102565, |
|
"grad_norm": 2.4920344440939575, |
|
"learning_rate": 8.527613945231886e-06, |
|
"loss": 0.5849, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.3282051282051282, |
|
"grad_norm": 2.2770162283475615, |
|
"learning_rate": 8.495757877643857e-06, |
|
"loss": 0.5775, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.33076923076923076, |
|
"grad_norm": 2.45173559412692, |
|
"learning_rate": 8.463621767547998e-06, |
|
"loss": 0.5714, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 2.37188665141408, |
|
"learning_rate": 8.43120818934367e-06, |
|
"loss": 0.582, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.33589743589743587, |
|
"grad_norm": 2.28633165733625, |
|
"learning_rate": 8.398519739657997e-06, |
|
"loss": 0.5766, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.3384615384615385, |
|
"grad_norm": 2.390940218026475, |
|
"learning_rate": 8.36555903713785e-06, |
|
"loss": 0.5824, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.34102564102564104, |
|
"grad_norm": 2.436136269822359, |
|
"learning_rate": 8.332328722240072e-06, |
|
"loss": 0.5801, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.3435897435897436, |
|
"grad_norm": 2.3346149193120183, |
|
"learning_rate": 8.298831457019943e-06, |
|
"loss": 0.5781, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.34615384615384615, |
|
"grad_norm": 2.343996317953977, |
|
"learning_rate": 8.265069924917925e-06, |
|
"loss": 0.573, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.3487179487179487, |
|
"grad_norm": 2.362387018042547, |
|
"learning_rate": 8.231046830544716e-06, |
|
"loss": 0.5459, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.35128205128205126, |
|
"grad_norm": 2.3640353741297178, |
|
"learning_rate": 8.196764899464552e-06, |
|
"loss": 0.577, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.35384615384615387, |
|
"grad_norm": 2.393950482821057, |
|
"learning_rate": 8.162226877976886e-06, |
|
"loss": 0.571, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.3564102564102564, |
|
"grad_norm": 2.5266665484244717, |
|
"learning_rate": 8.127435532896388e-06, |
|
"loss": 0.5698, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.358974358974359, |
|
"grad_norm": 2.4236369019301987, |
|
"learning_rate": 8.092393651331275e-06, |
|
"loss": 0.5574, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.36153846153846153, |
|
"grad_norm": 2.366524077967537, |
|
"learning_rate": 8.057104040460062e-06, |
|
"loss": 0.5656, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.3641025641025641, |
|
"grad_norm": 2.51901241992306, |
|
"learning_rate": 8.021569527306663e-06, |
|
"loss": 0.5564, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.36666666666666664, |
|
"grad_norm": 2.3012788394311388, |
|
"learning_rate": 7.985792958513932e-06, |
|
"loss": 0.5452, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.36923076923076925, |
|
"grad_norm": 2.256394871040715, |
|
"learning_rate": 7.949777200115617e-06, |
|
"loss": 0.5573, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.3717948717948718, |
|
"grad_norm": 2.345080301865664, |
|
"learning_rate": 7.913525137306756e-06, |
|
"loss": 0.562, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.37435897435897436, |
|
"grad_norm": 2.356034194528926, |
|
"learning_rate": 7.877039674212569e-06, |
|
"loss": 0.5502, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.3769230769230769, |
|
"grad_norm": 2.258668319004814, |
|
"learning_rate": 7.84032373365578e-06, |
|
"loss": 0.5583, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.37948717948717947, |
|
"grad_norm": 2.406002737003363, |
|
"learning_rate": 7.803380256922495e-06, |
|
"loss": 0.5532, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.382051282051282, |
|
"grad_norm": 2.4436486647408557, |
|
"learning_rate": 7.76621220352657e-06, |
|
"loss": 0.5625, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.38461538461538464, |
|
"grad_norm": 2.2622529339051116, |
|
"learning_rate": 7.728822550972523e-06, |
|
"loss": 0.544, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.3871794871794872, |
|
"grad_norm": 2.360201684948899, |
|
"learning_rate": 7.69121429451702e-06, |
|
"loss": 0.5419, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.38974358974358975, |
|
"grad_norm": 2.523997978763824, |
|
"learning_rate": 7.65339044692891e-06, |
|
"loss": 0.5447, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.3923076923076923, |
|
"grad_norm": 2.3781942951542905, |
|
"learning_rate": 7.615354038247889e-06, |
|
"loss": 0.5455, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.39487179487179486, |
|
"grad_norm": 2.3551793216491252, |
|
"learning_rate": 7.577108115541761e-06, |
|
"loss": 0.5522, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.3974358974358974, |
|
"grad_norm": 2.2869590132059687, |
|
"learning_rate": 7.53865574266234e-06, |
|
"loss": 0.5425, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 2.3895152812865135, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.5546, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4025641025641026, |
|
"grad_norm": 2.326678141379568, |
|
"learning_rate": 7.461143984236925e-06, |
|
"loss": 0.5336, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.40512820512820513, |
|
"grad_norm": 2.332360549526255, |
|
"learning_rate": 7.422090808099014e-06, |
|
"loss": 0.5582, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.4076923076923077, |
|
"grad_norm": 2.73441064133198, |
|
"learning_rate": 7.382843600106539e-06, |
|
"loss": 0.5446, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.41025641025641024, |
|
"grad_norm": 2.4297671403954126, |
|
"learning_rate": 7.343405504323519e-06, |
|
"loss": 0.5352, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4128205128205128, |
|
"grad_norm": 2.407219494603257, |
|
"learning_rate": 7.303779680105844e-06, |
|
"loss": 0.5369, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.4153846153846154, |
|
"grad_norm": 2.409663247487236, |
|
"learning_rate": 7.263969301848188e-06, |
|
"loss": 0.5235, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.41794871794871796, |
|
"grad_norm": 2.4425251774533114, |
|
"learning_rate": 7.223977558729707e-06, |
|
"loss": 0.5465, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.4205128205128205, |
|
"grad_norm": 2.288578248006796, |
|
"learning_rate": 7.183807654458565e-06, |
|
"loss": 0.5143, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.4230769230769231, |
|
"grad_norm": 2.3832843352346846, |
|
"learning_rate": 7.143462807015271e-06, |
|
"loss": 0.531, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.4256410256410256, |
|
"grad_norm": 2.353471391032929, |
|
"learning_rate": 7.102946248394908e-06, |
|
"loss": 0.5163, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.4282051282051282, |
|
"grad_norm": 2.328206416879441, |
|
"learning_rate": 7.0622612243482035e-06, |
|
"loss": 0.5263, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.4307692307692308, |
|
"grad_norm": 2.3182487758013672, |
|
"learning_rate": 7.021410994121525e-06, |
|
"loss": 0.5244, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.43333333333333335, |
|
"grad_norm": 2.4570735364008627, |
|
"learning_rate": 6.980398830195785e-06, |
|
"loss": 0.5268, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.4358974358974359, |
|
"grad_norm": 2.363628640743217, |
|
"learning_rate": 6.939228018024275e-06, |
|
"loss": 0.5327, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.43846153846153846, |
|
"grad_norm": 2.532584427272012, |
|
"learning_rate": 6.897901855769483e-06, |
|
"loss": 0.5043, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.441025641025641, |
|
"grad_norm": 2.32923251434474, |
|
"learning_rate": 6.856423654038868e-06, |
|
"loss": 0.5274, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.44358974358974357, |
|
"grad_norm": 2.280683467658302, |
|
"learning_rate": 6.814796735619664e-06, |
|
"loss": 0.5215, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.4461538461538462, |
|
"grad_norm": 2.41025108576172, |
|
"learning_rate": 6.773024435212678e-06, |
|
"loss": 0.5166, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.44871794871794873, |
|
"grad_norm": 2.315707168471926, |
|
"learning_rate": 6.731110099165165e-06, |
|
"loss": 0.515, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.4512820512820513, |
|
"grad_norm": 276.06259122484, |
|
"learning_rate": 6.689057085202737e-06, |
|
"loss": 0.5416, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.45384615384615384, |
|
"grad_norm": 2.4156201482273016, |
|
"learning_rate": 6.646868762160399e-06, |
|
"loss": 0.5159, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.4564102564102564, |
|
"grad_norm": 2.2786600416058924, |
|
"learning_rate": 6.6045485097126585e-06, |
|
"loss": 0.5101, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.45897435897435895, |
|
"grad_norm": 2.353965318631809, |
|
"learning_rate": 6.562099718102788e-06, |
|
"loss": 0.5155, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.46153846153846156, |
|
"grad_norm": 2.4114551311265204, |
|
"learning_rate": 6.519525787871235e-06, |
|
"loss": 0.517, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.4641025641025641, |
|
"grad_norm": 2.3985660530296062, |
|
"learning_rate": 6.476830129583207e-06, |
|
"loss": 0.5004, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.4666666666666667, |
|
"grad_norm": 2.360463974629649, |
|
"learning_rate": 6.434016163555452e-06, |
|
"loss": 0.5096, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.46923076923076923, |
|
"grad_norm": 2.4536941202621745, |
|
"learning_rate": 6.391087319582264e-06, |
|
"loss": 0.5134, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.4717948717948718, |
|
"grad_norm": 2.2791112676567744, |
|
"learning_rate": 6.34804703666072e-06, |
|
"loss": 0.5146, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.47435897435897434, |
|
"grad_norm": 2.3821675561579236, |
|
"learning_rate": 6.304898762715187e-06, |
|
"loss": 0.508, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.47692307692307695, |
|
"grad_norm": 2.4529360596394265, |
|
"learning_rate": 6.261645954321109e-06, |
|
"loss": 0.4985, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.4794871794871795, |
|
"grad_norm": 2.256298131213456, |
|
"learning_rate": 6.21829207642811e-06, |
|
"loss": 0.4921, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.48205128205128206, |
|
"grad_norm": 2.448767917095956, |
|
"learning_rate": 6.1748406020824115e-06, |
|
"loss": 0.4978, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.4846153846153846, |
|
"grad_norm": 2.315987919198238, |
|
"learning_rate": 6.131295012148613e-06, |
|
"loss": 0.5003, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.48717948717948717, |
|
"grad_norm": 2.4549261729386482, |
|
"learning_rate": 6.087658795030838e-06, |
|
"loss": 0.5119, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.4897435897435897, |
|
"grad_norm": 2.3677954185891728, |
|
"learning_rate": 6.043935446393294e-06, |
|
"loss": 0.4872, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.49230769230769234, |
|
"grad_norm": 2.264324311811878, |
|
"learning_rate": 6.000128468880223e-06, |
|
"loss": 0.5122, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.4948717948717949, |
|
"grad_norm": 2.3791395718772006, |
|
"learning_rate": 5.956241371835312e-06, |
|
"loss": 0.4903, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.49743589743589745, |
|
"grad_norm": 2.3183613178906843, |
|
"learning_rate": 5.912277671020564e-06, |
|
"loss": 0.4977, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 2.29335823998158, |
|
"learning_rate": 5.8682408883346535e-06, |
|
"loss": 0.4869, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.5025641025641026, |
|
"grad_norm": 2.3281002741187646, |
|
"learning_rate": 5.824134551530783e-06, |
|
"loss": 0.4876, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.5051282051282051, |
|
"grad_norm": 2.5742883235915617, |
|
"learning_rate": 5.77996219393409e-06, |
|
"loss": 0.4915, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.5076923076923077, |
|
"grad_norm": 2.365622183252692, |
|
"learning_rate": 5.735727354158581e-06, |
|
"loss": 0.4805, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.5102564102564102, |
|
"grad_norm": 2.3390404631244186, |
|
"learning_rate": 5.6914335758236665e-06, |
|
"loss": 0.4847, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.5128205128205128, |
|
"grad_norm": 2.409479555334482, |
|
"learning_rate": 5.647084407270277e-06, |
|
"loss": 0.4803, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.5153846153846153, |
|
"grad_norm": 2.1772689630729722, |
|
"learning_rate": 5.6026834012766155e-06, |
|
"loss": 0.4639, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.517948717948718, |
|
"grad_norm": 2.344704474447901, |
|
"learning_rate": 5.5582341147735396e-06, |
|
"loss": 0.4908, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.5205128205128206, |
|
"grad_norm": 2.377431610687667, |
|
"learning_rate": 5.5137401085596224e-06, |
|
"loss": 0.4726, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.5230769230769231, |
|
"grad_norm": 2.232621441545563, |
|
"learning_rate": 5.469204947015897e-06, |
|
"loss": 0.4781, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.5256410256410257, |
|
"grad_norm": 2.2830639259821326, |
|
"learning_rate": 5.424632197820325e-06, |
|
"loss": 0.481, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.5282051282051282, |
|
"grad_norm": 2.278086265992867, |
|
"learning_rate": 5.380025431661981e-06, |
|
"loss": 0.4638, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.5307692307692308, |
|
"grad_norm": 2.269191869325939, |
|
"learning_rate": 5.335388221955012e-06, |
|
"loss": 0.4832, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 2.454836078007937, |
|
"learning_rate": 5.290724144552379e-06, |
|
"loss": 0.4724, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.5358974358974359, |
|
"grad_norm": 2.3451732210243286, |
|
"learning_rate": 5.246036777459391e-06, |
|
"loss": 0.4728, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.5384615384615384, |
|
"grad_norm": 2.3588173670427595, |
|
"learning_rate": 5.201329700547077e-06, |
|
"loss": 0.4696, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.541025641025641, |
|
"grad_norm": 2.3165391366093684, |
|
"learning_rate": 5.156606495265402e-06, |
|
"loss": 0.4778, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.5435897435897435, |
|
"grad_norm": 2.421604099533384, |
|
"learning_rate": 5.111870744356366e-06, |
|
"loss": 0.4802, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.5461538461538461, |
|
"grad_norm": 2.437578551376967, |
|
"learning_rate": 5.067126031566988e-06, |
|
"loss": 0.471, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.5487179487179488, |
|
"grad_norm": 2.2866015786243197, |
|
"learning_rate": 5.022375941362218e-06, |
|
"loss": 0.4634, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.5512820512820513, |
|
"grad_norm": 2.3760034162224057, |
|
"learning_rate": 4.977624058637783e-06, |
|
"loss": 0.461, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.5538461538461539, |
|
"grad_norm": 2.196491886505775, |
|
"learning_rate": 4.932873968433014e-06, |
|
"loss": 0.4613, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.5564102564102564, |
|
"grad_norm": 2.3547607515039726, |
|
"learning_rate": 4.8881292556436355e-06, |
|
"loss": 0.4681, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.558974358974359, |
|
"grad_norm": 2.3142668747054422, |
|
"learning_rate": 4.8433935047346e-06, |
|
"loss": 0.4646, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.5615384615384615, |
|
"grad_norm": 2.2294897374308724, |
|
"learning_rate": 4.798670299452926e-06, |
|
"loss": 0.4621, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.5641025641025641, |
|
"grad_norm": 2.201796451540025, |
|
"learning_rate": 4.75396322254061e-06, |
|
"loss": 0.4643, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.5666666666666667, |
|
"grad_norm": 2.3004102141787706, |
|
"learning_rate": 4.7092758554476215e-06, |
|
"loss": 0.4473, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.5692307692307692, |
|
"grad_norm": 2.4629293661391642, |
|
"learning_rate": 4.664611778044988e-06, |
|
"loss": 0.4595, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.5717948717948718, |
|
"grad_norm": 2.3152580137062198, |
|
"learning_rate": 4.619974568338021e-06, |
|
"loss": 0.4532, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.5743589743589743, |
|
"grad_norm": 2.30027584904841, |
|
"learning_rate": 4.575367802179675e-06, |
|
"loss": 0.4455, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.5769230769230769, |
|
"grad_norm": 2.183997017733211, |
|
"learning_rate": 4.530795052984104e-06, |
|
"loss": 0.4535, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.5794871794871795, |
|
"grad_norm": 2.233588195344829, |
|
"learning_rate": 4.48625989144038e-06, |
|
"loss": 0.4568, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.5820512820512821, |
|
"grad_norm": 2.3225007847891375, |
|
"learning_rate": 4.441765885226462e-06, |
|
"loss": 0.4454, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.5846153846153846, |
|
"grad_norm": 2.1432323382255887, |
|
"learning_rate": 4.397316598723385e-06, |
|
"loss": 0.452, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.5871794871794872, |
|
"grad_norm": 2.249098192197577, |
|
"learning_rate": 4.352915592729723e-06, |
|
"loss": 0.4406, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.5897435897435898, |
|
"grad_norm": 2.3668720077166663, |
|
"learning_rate": 4.308566424176336e-06, |
|
"loss": 0.4612, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.5923076923076923, |
|
"grad_norm": 2.3280371739332106, |
|
"learning_rate": 4.264272645841419e-06, |
|
"loss": 0.4563, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.5948717948717949, |
|
"grad_norm": 2.2992039199150343, |
|
"learning_rate": 4.220037806065911e-06, |
|
"loss": 0.4404, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.5974358974358974, |
|
"grad_norm": 2.352728324858546, |
|
"learning_rate": 4.175865448469219e-06, |
|
"loss": 0.447, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.2680523365737466, |
|
"learning_rate": 4.131759111665349e-06, |
|
"loss": 0.4374, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.6025641025641025, |
|
"grad_norm": 2.339304021822968, |
|
"learning_rate": 4.087722328979437e-06, |
|
"loss": 0.4576, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.6051282051282051, |
|
"grad_norm": 2.360150826251379, |
|
"learning_rate": 4.043758628164688e-06, |
|
"loss": 0.437, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.6076923076923076, |
|
"grad_norm": 2.3068618660691937, |
|
"learning_rate": 3.999871531119779e-06, |
|
"loss": 0.4336, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.6102564102564103, |
|
"grad_norm": 2.2504790874563785, |
|
"learning_rate": 3.956064553606708e-06, |
|
"loss": 0.4303, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.6128205128205129, |
|
"grad_norm": 2.3568871454086784, |
|
"learning_rate": 3.912341204969164e-06, |
|
"loss": 0.4505, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.6153846153846154, |
|
"grad_norm": 2.29775545667767, |
|
"learning_rate": 3.86870498785139e-06, |
|
"loss": 0.4349, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.617948717948718, |
|
"grad_norm": 2.249325620803784, |
|
"learning_rate": 3.825159397917589e-06, |
|
"loss": 0.4245, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.6205128205128205, |
|
"grad_norm": 2.217056799875781, |
|
"learning_rate": 3.781707923571891e-06, |
|
"loss": 0.4409, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.6230769230769231, |
|
"grad_norm": 2.162077945290354, |
|
"learning_rate": 3.7383540456788915e-06, |
|
"loss": 0.4321, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.6256410256410256, |
|
"grad_norm": 2.269939213889014, |
|
"learning_rate": 3.695101237284815e-06, |
|
"loss": 0.4323, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.6282051282051282, |
|
"grad_norm": 2.321141003845967, |
|
"learning_rate": 3.6519529633392825e-06, |
|
"loss": 0.4307, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.6307692307692307, |
|
"grad_norm": 2.290163674183007, |
|
"learning_rate": 3.6089126804177373e-06, |
|
"loss": 0.43, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.6333333333333333, |
|
"grad_norm": 2.2796933735948235, |
|
"learning_rate": 3.5659838364445505e-06, |
|
"loss": 0.4257, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.6358974358974359, |
|
"grad_norm": 2.3515817559831516, |
|
"learning_rate": 3.523169870416795e-06, |
|
"loss": 0.421, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.6384615384615384, |
|
"grad_norm": 2.224007354395459, |
|
"learning_rate": 3.480474212128766e-06, |
|
"loss": 0.4371, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.6410256410256411, |
|
"grad_norm": 2.2228662307484472, |
|
"learning_rate": 3.4379002818972122e-06, |
|
"loss": 0.4251, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.6435897435897436, |
|
"grad_norm": 2.354244944899084, |
|
"learning_rate": 3.3954514902873427e-06, |
|
"loss": 0.4329, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.6461538461538462, |
|
"grad_norm": 2.2750993825918093, |
|
"learning_rate": 3.3531312378396026e-06, |
|
"loss": 0.4151, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.6487179487179487, |
|
"grad_norm": 2.2325391828770274, |
|
"learning_rate": 3.310942914797265e-06, |
|
"loss": 0.4327, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.6512820512820513, |
|
"grad_norm": 2.3345273581362935, |
|
"learning_rate": 3.2688899008348386e-06, |
|
"loss": 0.4111, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.6538461538461539, |
|
"grad_norm": 2.3549983976092284, |
|
"learning_rate": 3.226975564787322e-06, |
|
"loss": 0.4182, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.6564102564102564, |
|
"grad_norm": 2.2210755036476924, |
|
"learning_rate": 3.1852032643803377e-06, |
|
"loss": 0.4292, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.658974358974359, |
|
"grad_norm": 2.3446968695245376, |
|
"learning_rate": 3.143576345961132e-06, |
|
"loss": 0.4173, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.6615384615384615, |
|
"grad_norm": 2.2100916363822765, |
|
"learning_rate": 3.1020981442305187e-06, |
|
"loss": 0.4103, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.6641025641025641, |
|
"grad_norm": 2.3879357481967207, |
|
"learning_rate": 3.0607719819757264e-06, |
|
"loss": 0.4249, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 2.321287867020548, |
|
"learning_rate": 3.019601169804216e-06, |
|
"loss": 0.4291, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.6692307692307692, |
|
"grad_norm": 2.1709645290877475, |
|
"learning_rate": 2.978589005878476e-06, |
|
"loss": 0.4043, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.6717948717948717, |
|
"grad_norm": 2.1963788275980867, |
|
"learning_rate": 2.937738775651798e-06, |
|
"loss": 0.4195, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.6743589743589744, |
|
"grad_norm": 2.3134447029337277, |
|
"learning_rate": 2.8970537516050935e-06, |
|
"loss": 0.415, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.676923076923077, |
|
"grad_norm": 2.215720987796974, |
|
"learning_rate": 2.8565371929847286e-06, |
|
"loss": 0.4168, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.6794871794871795, |
|
"grad_norm": 2.4752032392325716, |
|
"learning_rate": 2.816192345541437e-06, |
|
"loss": 0.4181, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.6820512820512821, |
|
"grad_norm": 2.201152450487114, |
|
"learning_rate": 2.776022441270295e-06, |
|
"loss": 0.4156, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.6846153846153846, |
|
"grad_norm": 2.2072519693672272, |
|
"learning_rate": 2.736030698151815e-06, |
|
"loss": 0.4166, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.6871794871794872, |
|
"grad_norm": 2.3733944621945464, |
|
"learning_rate": 2.6962203198941587e-06, |
|
"loss": 0.4191, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.6897435897435897, |
|
"grad_norm": 2.3531096957130746, |
|
"learning_rate": 2.656594495676482e-06, |
|
"loss": 0.4082, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.6923076923076923, |
|
"grad_norm": 2.180269591424075, |
|
"learning_rate": 2.6171563998934605e-06, |
|
"loss": 0.3952, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.6948717948717948, |
|
"grad_norm": 2.066368640585333, |
|
"learning_rate": 2.577909191900988e-06, |
|
"loss": 0.4192, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.6974358974358974, |
|
"grad_norm": 2.2737966608576534, |
|
"learning_rate": 2.5388560157630765e-06, |
|
"loss": 0.3935, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.17705872571539, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 0.4125, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.7025641025641025, |
|
"grad_norm": 2.218745613533214, |
|
"learning_rate": 2.4613442573376625e-06, |
|
"loss": 0.4008, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.7051282051282052, |
|
"grad_norm": 2.1239348156972016, |
|
"learning_rate": 2.422891884458241e-06, |
|
"loss": 0.4094, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.7076923076923077, |
|
"grad_norm": 2.179223646983379, |
|
"learning_rate": 2.384645961752113e-06, |
|
"loss": 0.4092, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.7102564102564103, |
|
"grad_norm": 2.3185213442449406, |
|
"learning_rate": 2.346609553071093e-06, |
|
"loss": 0.4088, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.7128205128205128, |
|
"grad_norm": 2.294944820481849, |
|
"learning_rate": 2.308785705482982e-06, |
|
"loss": 0.3894, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.7153846153846154, |
|
"grad_norm": 2.1876725669932, |
|
"learning_rate": 2.2711774490274767e-06, |
|
"loss": 0.3913, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.717948717948718, |
|
"grad_norm": 2.3670833638473607, |
|
"learning_rate": 2.2337877964734324e-06, |
|
"loss": 0.4004, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.7205128205128205, |
|
"grad_norm": 2.213035848099544, |
|
"learning_rate": 2.1966197430775056e-06, |
|
"loss": 0.4051, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.7230769230769231, |
|
"grad_norm": 2.1456079138282216, |
|
"learning_rate": 2.159676266344222e-06, |
|
"loss": 0.3998, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.7256410256410256, |
|
"grad_norm": 2.368556775998427, |
|
"learning_rate": 2.122960325787432e-06, |
|
"loss": 0.3999, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.7282051282051282, |
|
"grad_norm": 2.2866531656271714, |
|
"learning_rate": 2.086474862693244e-06, |
|
"loss": 0.3954, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.7307692307692307, |
|
"grad_norm": 2.2147361106092247, |
|
"learning_rate": 2.050222799884387e-06, |
|
"loss": 0.3973, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.7333333333333333, |
|
"grad_norm": 2.262377192608549, |
|
"learning_rate": 2.0142070414860704e-06, |
|
"loss": 0.3858, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.735897435897436, |
|
"grad_norm": 2.254959040727779, |
|
"learning_rate": 1.9784304726933384e-06, |
|
"loss": 0.3957, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.7384615384615385, |
|
"grad_norm": 2.443844940991985, |
|
"learning_rate": 1.942895959539939e-06, |
|
"loss": 0.4015, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.7410256410256411, |
|
"grad_norm": 2.1875651185476577, |
|
"learning_rate": 1.9076063486687256e-06, |
|
"loss": 0.3822, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.7435897435897436, |
|
"grad_norm": 2.2266048608412574, |
|
"learning_rate": 1.8725644671036125e-06, |
|
"loss": 0.4001, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.7461538461538462, |
|
"grad_norm": 2.142836559910304, |
|
"learning_rate": 1.8377731220231144e-06, |
|
"loss": 0.3904, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.7487179487179487, |
|
"grad_norm": 2.2609455403429557, |
|
"learning_rate": 1.803235100535452e-06, |
|
"loss": 0.3948, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.7512820512820513, |
|
"grad_norm": 2.264696810926414, |
|
"learning_rate": 1.7689531694552863e-06, |
|
"loss": 0.3976, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.7538461538461538, |
|
"grad_norm": 2.230112130398212, |
|
"learning_rate": 1.7349300750820758e-06, |
|
"loss": 0.3859, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.7564102564102564, |
|
"grad_norm": 2.2268561992720173, |
|
"learning_rate": 1.7011685429800596e-06, |
|
"loss": 0.3849, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.7589743589743589, |
|
"grad_norm": 2.1244858256821306, |
|
"learning_rate": 1.6676712777599275e-06, |
|
"loss": 0.3835, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.7615384615384615, |
|
"grad_norm": 2.3007192861817534, |
|
"learning_rate": 1.6344409628621482e-06, |
|
"loss": 0.3818, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.764102564102564, |
|
"grad_norm": 2.1550992793361896, |
|
"learning_rate": 1.6014802603420044e-06, |
|
"loss": 0.3788, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.7666666666666667, |
|
"grad_norm": 2.285182407331258, |
|
"learning_rate": 1.5687918106563326e-06, |
|
"loss": 0.3707, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.7692307692307693, |
|
"grad_norm": 2.184234648354467, |
|
"learning_rate": 1.5363782324520033e-06, |
|
"loss": 0.3804, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.7717948717948718, |
|
"grad_norm": 2.3833866092139546, |
|
"learning_rate": 1.504242122356143e-06, |
|
"loss": 0.3917, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.7743589743589744, |
|
"grad_norm": 2.213841954806635, |
|
"learning_rate": 1.4723860547681163e-06, |
|
"loss": 0.3948, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.7769230769230769, |
|
"grad_norm": 2.1343451977199335, |
|
"learning_rate": 1.4408125816532981e-06, |
|
"loss": 0.383, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.7794871794871795, |
|
"grad_norm": 2.3514768336870824, |
|
"learning_rate": 1.4095242323386305e-06, |
|
"loss": 0.3989, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.782051282051282, |
|
"grad_norm": 2.316285751451528, |
|
"learning_rate": 1.3785235133100088e-06, |
|
"loss": 0.3836, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.7846153846153846, |
|
"grad_norm": 2.136192342183207, |
|
"learning_rate": 1.347812908011485e-06, |
|
"loss": 0.3737, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.7871794871794872, |
|
"grad_norm": 2.240858366870099, |
|
"learning_rate": 1.3173948766463146e-06, |
|
"loss": 0.3832, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.7897435897435897, |
|
"grad_norm": 2.130447099646043, |
|
"learning_rate": 1.2872718559798852e-06, |
|
"loss": 0.3809, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.7923076923076923, |
|
"grad_norm": 2.2629722022590615, |
|
"learning_rate": 1.257446259144494e-06, |
|
"loss": 0.3827, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.7948717948717948, |
|
"grad_norm": 2.1714635036854792, |
|
"learning_rate": 1.2279204754460494e-06, |
|
"loss": 0.3773, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.7974358974358975, |
|
"grad_norm": 2.375860318741745, |
|
"learning_rate": 1.1986968701726492e-06, |
|
"loss": 0.4044, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.160587779645344, |
|
"learning_rate": 1.1697777844051105e-06, |
|
"loss": 0.3875, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.8025641025641026, |
|
"grad_norm": 2.198147969238877, |
|
"learning_rate": 1.141165534829425e-06, |
|
"loss": 0.3981, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.8051282051282052, |
|
"grad_norm": 2.1389541240218373, |
|
"learning_rate": 1.1128624135511712e-06, |
|
"loss": 0.3861, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.8076923076923077, |
|
"grad_norm": 2.0625001013798068, |
|
"learning_rate": 1.0848706879118893e-06, |
|
"loss": 0.3744, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.8102564102564103, |
|
"grad_norm": 2.255946436278033, |
|
"learning_rate": 1.057192600307456e-06, |
|
"loss": 0.3663, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.8128205128205128, |
|
"grad_norm": 2.057286421099507, |
|
"learning_rate": 1.0298303680084448e-06, |
|
"loss": 0.3806, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.8153846153846154, |
|
"grad_norm": 2.1422638338742725, |
|
"learning_rate": 1.0027861829824953e-06, |
|
"loss": 0.3704, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.8179487179487179, |
|
"grad_norm": 2.018897710780144, |
|
"learning_rate": 9.760622117187234e-07, |
|
"loss": 0.3759, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.8205128205128205, |
|
"grad_norm": 2.248733412879705, |
|
"learning_rate": 9.496605950541676e-07, |
|
"loss": 0.3736, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.823076923076923, |
|
"grad_norm": 2.2165304029183894, |
|
"learning_rate": 9.235834480022788e-07, |
|
"loss": 0.3664, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.8256410256410256, |
|
"grad_norm": 2.092232769500133, |
|
"learning_rate": 8.978328595834984e-07, |
|
"loss": 0.3946, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.8282051282051283, |
|
"grad_norm": 2.051244434233085, |
|
"learning_rate": 8.724108926579e-07, |
|
"loss": 0.3711, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.8307692307692308, |
|
"grad_norm": 2.1357191883411146, |
|
"learning_rate": 8.473195837599419e-07, |
|
"loss": 0.375, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 2.3476361797899727, |
|
"learning_rate": 8.225609429353187e-07, |
|
"loss": 0.379, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.8358974358974359, |
|
"grad_norm": 2.1434969249566267, |
|
"learning_rate": 7.981369535799354e-07, |
|
"loss": 0.3718, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.8384615384615385, |
|
"grad_norm": 2.1291787139915734, |
|
"learning_rate": 7.740495722810271e-07, |
|
"loss": 0.3619, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.841025641025641, |
|
"grad_norm": 2.1371175381872067, |
|
"learning_rate": 7.50300728660407e-07, |
|
"loss": 0.3836, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.8435897435897436, |
|
"grad_norm": 2.0182192104108205, |
|
"learning_rate": 7.26892325219899e-07, |
|
"loss": 0.3696, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.8461538461538461, |
|
"grad_norm": 2.182781125364085, |
|
"learning_rate": 7.03826237188916e-07, |
|
"loss": 0.3798, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.8487179487179487, |
|
"grad_norm": 2.1819095950658967, |
|
"learning_rate": 6.811043123742494e-07, |
|
"loss": 0.3626, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.8512820512820513, |
|
"grad_norm": 1.9947387983712035, |
|
"learning_rate": 6.587283710120324e-07, |
|
"loss": 0.3654, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.8538461538461538, |
|
"grad_norm": 2.3863133588522447, |
|
"learning_rate": 6.367002056219285e-07, |
|
"loss": 0.3769, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.8564102564102564, |
|
"grad_norm": 2.131117707340598, |
|
"learning_rate": 6.150215808635334e-07, |
|
"loss": 0.3695, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.8589743589743589, |
|
"grad_norm": 2.127014385149758, |
|
"learning_rate": 5.936942333950063e-07, |
|
"loss": 0.3689, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.8615384615384616, |
|
"grad_norm": 2.048791526125599, |
|
"learning_rate": 5.727198717339511e-07, |
|
"loss": 0.3557, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.8641025641025641, |
|
"grad_norm": 2.1611061050109286, |
|
"learning_rate": 5.521001761205441e-07, |
|
"loss": 0.3744, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.8666666666666667, |
|
"grad_norm": 2.253709876904671, |
|
"learning_rate": 5.318367983829393e-07, |
|
"loss": 0.3652, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.8692307692307693, |
|
"grad_norm": 2.0719950658447823, |
|
"learning_rate": 5.119313618049309e-07, |
|
"loss": 0.3665, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.8717948717948718, |
|
"grad_norm": 2.0316305277614775, |
|
"learning_rate": 4.9238546099592e-07, |
|
"loss": 0.3657, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.8743589743589744, |
|
"grad_norm": 2.5087707696707326, |
|
"learning_rate": 4.732006617631729e-07, |
|
"loss": 0.3795, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.8769230769230769, |
|
"grad_norm": 2.1498909379980287, |
|
"learning_rate": 4.54378500986381e-07, |
|
"loss": 0.3663, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.8794871794871795, |
|
"grad_norm": 1.9681404281106394, |
|
"learning_rate": 4.35920486494546e-07, |
|
"loss": 0.3754, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.882051282051282, |
|
"grad_norm": 2.1003321632219643, |
|
"learning_rate": 4.1782809694518533e-07, |
|
"loss": 0.3553, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.8846153846153846, |
|
"grad_norm": 2.0527201502010235, |
|
"learning_rate": 4.001027817058789e-07, |
|
"loss": 0.371, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.8871794871794871, |
|
"grad_norm": 2.201330552650921, |
|
"learning_rate": 3.8274596073816784e-07, |
|
"loss": 0.3667, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.8897435897435897, |
|
"grad_norm": 2.047245403408523, |
|
"learning_rate": 3.657590244837911e-07, |
|
"loss": 0.3557, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.8923076923076924, |
|
"grad_norm": 2.227196360471304, |
|
"learning_rate": 3.49143333753309e-07, |
|
"loss": 0.3699, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.8948717948717949, |
|
"grad_norm": 2.1749366287892804, |
|
"learning_rate": 3.3290021961708163e-07, |
|
"loss": 0.3583, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.8974358974358975, |
|
"grad_norm": 2.2245295170216397, |
|
"learning_rate": 3.1703098329864237e-07, |
|
"loss": 0.3613, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.0951082558511125, |
|
"learning_rate": 3.015368960704584e-07, |
|
"loss": 0.3502, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.9025641025641026, |
|
"grad_norm": 2.3736518735276277, |
|
"learning_rate": 2.864191991520848e-07, |
|
"loss": 0.3648, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.9051282051282051, |
|
"grad_norm": 1.9061212020960874, |
|
"learning_rate": 2.71679103610738e-07, |
|
"loss": 0.3616, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.9076923076923077, |
|
"grad_norm": 2.2330643798467213, |
|
"learning_rate": 2.573177902642726e-07, |
|
"loss": 0.3612, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.9102564102564102, |
|
"grad_norm": 2.3165257034110054, |
|
"learning_rate": 2.4333640958659144e-07, |
|
"loss": 0.3636, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.9128205128205128, |
|
"grad_norm": 2.0684872184860614, |
|
"learning_rate": 2.2973608161547755e-07, |
|
"loss": 0.3613, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.9153846153846154, |
|
"grad_norm": 2.247620069960479, |
|
"learning_rate": 2.1651789586287442e-07, |
|
"loss": 0.3732, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.9179487179487179, |
|
"grad_norm": 2.0738095357541653, |
|
"learning_rate": 2.0368291122759898e-07, |
|
"loss": 0.3544, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.9205128205128205, |
|
"grad_norm": 2.3019916845092947, |
|
"learning_rate": 1.9123215591052014e-07, |
|
"loss": 0.3618, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.9230769230769231, |
|
"grad_norm": 2.038724564899476, |
|
"learning_rate": 1.7916662733218848e-07, |
|
"loss": 0.3801, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.9256410256410257, |
|
"grad_norm": 2.2889296400333654, |
|
"learning_rate": 1.6748729205293024e-07, |
|
"loss": 0.3712, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.9282051282051282, |
|
"grad_norm": 1.9287399651858075, |
|
"learning_rate": 1.5619508569542363e-07, |
|
"loss": 0.3616, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.9307692307692308, |
|
"grad_norm": 1.9456575180320899, |
|
"learning_rate": 1.4529091286973994e-07, |
|
"loss": 0.369, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.9333333333333333, |
|
"grad_norm": 2.0665497215433226, |
|
"learning_rate": 1.3477564710088097e-07, |
|
"loss": 0.3497, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.9358974358974359, |
|
"grad_norm": 2.0871154036132604, |
|
"learning_rate": 1.2465013075879884e-07, |
|
"loss": 0.3559, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.9384615384615385, |
|
"grad_norm": 1.9712698998583276, |
|
"learning_rate": 1.1491517499091498e-07, |
|
"loss": 0.3541, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.941025641025641, |
|
"grad_norm": 2.163801366341123, |
|
"learning_rate": 1.055715596571405e-07, |
|
"loss": 0.3661, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.9435897435897436, |
|
"grad_norm": 2.2509599324394984, |
|
"learning_rate": 9.662003326740166e-08, |
|
"loss": 0.3621, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.9461538461538461, |
|
"grad_norm": 2.1871133765282345, |
|
"learning_rate": 8.80613129216762e-08, |
|
"loss": 0.361, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.9487179487179487, |
|
"grad_norm": 2.1099575248965152, |
|
"learning_rate": 7.989608425254924e-08, |
|
"loss": 0.3533, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.9512820512820512, |
|
"grad_norm": 2.176020883382461, |
|
"learning_rate": 7.212500137028789e-08, |
|
"loss": 0.3678, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.9538461538461539, |
|
"grad_norm": 2.1204394793153067, |
|
"learning_rate": 6.474868681043578e-08, |
|
"loss": 0.3526, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.9564102564102565, |
|
"grad_norm": 2.1621021084689507, |
|
"learning_rate": 5.776773148394976e-08, |
|
"loss": 0.3651, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.958974358974359, |
|
"grad_norm": 2.031947785174627, |
|
"learning_rate": 5.1182694629857145e-08, |
|
"loss": 0.3734, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.9615384615384616, |
|
"grad_norm": 2.096096759539924, |
|
"learning_rate": 4.499410377045765e-08, |
|
"loss": 0.3703, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.9641025641025641, |
|
"grad_norm": 2.3222972214818283, |
|
"learning_rate": 3.9202454669063915e-08, |
|
"loss": 0.368, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.9666666666666667, |
|
"grad_norm": 1.9780233715193638, |
|
"learning_rate": 3.3808211290284886e-08, |
|
"loss": 0.3479, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.9692307692307692, |
|
"grad_norm": 1.9718006540884354, |
|
"learning_rate": 2.8811805762860578e-08, |
|
"loss": 0.3555, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.9717948717948718, |
|
"grad_norm": 2.2122182201885394, |
|
"learning_rate": 2.4213638345040868e-08, |
|
"loss": 0.3633, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.9743589743589743, |
|
"grad_norm": 2.0210334835222876, |
|
"learning_rate": 2.0014077392525035e-08, |
|
"loss": 0.3664, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.9769230769230769, |
|
"grad_norm": 1.9777551247351057, |
|
"learning_rate": 1.6213459328950355e-08, |
|
"loss": 0.3637, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.9794871794871794, |
|
"grad_norm": 1.9849050081166892, |
|
"learning_rate": 1.2812088618942009e-08, |
|
"loss": 0.3442, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.982051282051282, |
|
"grad_norm": 2.0028405839895775, |
|
"learning_rate": 9.810237743724805e-09, |
|
"loss": 0.3502, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.9846153846153847, |
|
"grad_norm": 1.982479484802511, |
|
"learning_rate": 7.2081471792911914e-09, |
|
"loss": 0.3563, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.9871794871794872, |
|
"grad_norm": 2.108119859172211, |
|
"learning_rate": 5.006025377138901e-09, |
|
"loss": 0.3593, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.9897435897435898, |
|
"grad_norm": 2.1025186070928763, |
|
"learning_rate": 3.204048747573185e-09, |
|
"loss": 0.36, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.9923076923076923, |
|
"grad_norm": 2.1697555086596823, |
|
"learning_rate": 1.8023616455731253e-09, |
|
"loss": 0.3606, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.9948717948717949, |
|
"grad_norm": 2.0730870494656424, |
|
"learning_rate": 8.010763592264381e-10, |
|
"loss": 0.3671, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.9974358974358974, |
|
"grad_norm": 2.131587781280676, |
|
"learning_rate": 2.0027310073833516e-10, |
|
"loss": 0.3723, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.9834898868136892, |
|
"learning_rate": 0.0, |
|
"loss": 0.3656, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.3188657760620117, |
|
"eval_runtime": 1.193, |
|
"eval_samples_per_second": 1.676, |
|
"eval_steps_per_second": 0.838, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1950, |
|
"total_flos": 204145164288000.0, |
|
"train_loss": 0.5252774189068721, |
|
"train_runtime": 19779.9981, |
|
"train_samples_per_second": 1.577, |
|
"train_steps_per_second": 0.099 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1950, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 204145164288000.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|