{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 534,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 307.2925252001398,
      "learning_rate": 1.111111111111111e-06,
      "loss": 2.14,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 348.5579299962218,
      "learning_rate": 2.222222222222222e-06,
      "loss": 2.1592,
      "step": 2
    },
    {
      "epoch": 0.02,
      "grad_norm": 295.8217735171242,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 2.1112,
      "step": 3
    },
    {
      "epoch": 0.02,
      "grad_norm": 320.95278420676283,
      "learning_rate": 4.444444444444444e-06,
      "loss": 2.0727,
      "step": 4
    },
    {
      "epoch": 0.03,
      "grad_norm": 273.6994356101882,
      "learning_rate": 5.555555555555555e-06,
      "loss": 1.8522,
      "step": 5
    },
    {
      "epoch": 0.03,
      "grad_norm": 137.18652764811333,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.5844,
      "step": 6
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.105539597979915,
      "learning_rate": 7.777777777777777e-06,
      "loss": 1.4424,
      "step": 7
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.0951798632526275,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.4244,
      "step": 8
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.6078027918916717,
      "learning_rate": 9.999999999999999e-06,
      "loss": 1.3763,
      "step": 9
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.9798839986563381,
      "learning_rate": 1.111111111111111e-05,
      "loss": 1.3583,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.065159602417454,
      "learning_rate": 1.2222222222222222e-05,
      "loss": 1.3335,
      "step": 11
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.3002391180584008,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.309,
      "step": 12
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.2825778137111357,
      "learning_rate": 1.4444444444444444e-05,
      "loss": 1.2932,
      "step": 13
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.0205928394685457,
      "learning_rate": 1.5555555555555555e-05,
      "loss": 1.2689,
      "step": 14
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.3373431587246087,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.2489,
      "step": 15
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.8043774075647928,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 1.2302,
      "step": 16
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.8741531962350515,
      "learning_rate": 1.888888888888889e-05,
      "loss": 1.2195,
      "step": 17
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.7028376256413013,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 1.2027,
      "step": 18
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.6648050831010063,
      "learning_rate": 2.111111111111111e-05,
      "loss": 1.1887,
      "step": 19
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.7354468182477213,
      "learning_rate": 2.222222222222222e-05,
      "loss": 1.1704,
      "step": 20
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7839624362161648,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.1654,
      "step": 21
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7752033041911668,
      "learning_rate": 2.4444444444444445e-05,
      "loss": 1.1544,
      "step": 22
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.7396236066452132,
      "learning_rate": 2.5555555555555557e-05,
      "loss": 1.138,
      "step": 23
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.748517600591021,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 1.133,
      "step": 24
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.7065074963471533,
      "learning_rate": 2.777777777777778e-05,
      "loss": 1.1119,
      "step": 25
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.7289352652301433,
      "learning_rate": 2.8888888888888888e-05,
      "loss": 1.1132,
      "step": 26
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.7324972388581718,
      "learning_rate": 3e-05,
      "loss": 1.1008,
      "step": 27
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.7020306466156484,
      "learning_rate": 3.111111111111111e-05,
      "loss": 1.1067,
      "step": 28
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.7080095311543003,
      "learning_rate": 3.222222222222223e-05,
      "loss": 1.1028,
      "step": 29
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.7149252394051553,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.0953,
      "step": 30
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.7285982439983161,
      "learning_rate": 3.444444444444445e-05,
      "loss": 1.0859,
      "step": 31
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.7057181936211316,
      "learning_rate": 3.555555555555555e-05,
      "loss": 1.0829,
      "step": 32
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.718290068719298,
      "learning_rate": 3.666666666666667e-05,
      "loss": 1.0658,
      "step": 33
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.6998775916382852,
      "learning_rate": 3.777777777777778e-05,
      "loss": 1.0687,
      "step": 34
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.6851057008807085,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.0701,
      "step": 35
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.6724489509428345,
      "learning_rate": 3.9999999999999996e-05,
      "loss": 1.0675,
      "step": 36
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.6480552435697329,
      "learning_rate": 4.1111111111111116e-05,
      "loss": 1.0537,
      "step": 37
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.6383559954105056,
      "learning_rate": 4.222222222222222e-05,
      "loss": 1.0463,
      "step": 38
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.6029067517028285,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 1.0422,
      "step": 39
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.5709974683191396,
      "learning_rate": 4.444444444444444e-05,
      "loss": 1.037,
      "step": 40
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.5363586512263623,
      "learning_rate": 4.555555555555556e-05,
      "loss": 1.0249,
      "step": 41
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.5064701225673895,
      "learning_rate": 4.666666666666667e-05,
      "loss": 1.0269,
      "step": 42
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.4492710010511058,
      "learning_rate": 4.777777777777778e-05,
      "loss": 1.014,
      "step": 43
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.4154607764986574,
      "learning_rate": 4.888888888888889e-05,
      "loss": 1.0209,
      "step": 44
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.3647804068160132,
      "learning_rate": 5e-05,
      "loss": 1.019,
      "step": 45
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.3247880218369176,
      "learning_rate": 5.1111111111111115e-05,
      "loss": 1.0142,
      "step": 46
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.3054048294238843,
      "learning_rate": 5.222222222222222e-05,
      "loss": 1.005,
      "step": 47
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.2911566588563454,
      "learning_rate": 5.333333333333333e-05,
      "loss": 1.0182,
      "step": 48
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.30175247842711633,
      "learning_rate": 5.4444444444444446e-05,
      "loss": 1.003,
      "step": 49
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.2957438299690764,
      "learning_rate": 5.555555555555556e-05,
      "loss": 1.0018,
      "step": 50
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.222811420448816,
      "learning_rate": 5.6666666666666664e-05,
      "loss": 0.9982,
      "step": 51
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.19282721337866324,
      "learning_rate": 5.7777777777777776e-05,
      "loss": 1.003,
      "step": 52
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.2271139316205582,
      "learning_rate": 5.888888888888889e-05,
      "loss": 0.9884,
      "step": 53
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.22400303611020278,
      "learning_rate": 6e-05,
      "loss": 0.9933,
      "step": 54
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.2640000653332148,
      "learning_rate": 5.999935744992388e-05,
      "loss": 0.9898,
      "step": 55
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.29683962789566454,
      "learning_rate": 5.999742982722021e-05,
      "loss": 0.9894,
      "step": 56
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.23953312072588218,
      "learning_rate": 5.999421721446195e-05,
      "loss": 0.9891,
      "step": 57
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.22760431232110737,
      "learning_rate": 5.9989719749266715e-05,
      "loss": 0.9794,
      "step": 58
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.28069530624064654,
      "learning_rate": 5.998393762429097e-05,
      "loss": 0.9827,
      "step": 59
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.33248489828390104,
      "learning_rate": 5.997687108722169e-05,
      "loss": 0.9829,
      "step": 60
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.37460157299926583,
      "learning_rate": 5.9968520440765807e-05,
      "loss": 0.9865,
      "step": 61
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.40204606775328766,
      "learning_rate": 5.9958886042637214e-05,
      "loss": 0.9872,
      "step": 62
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.2760468657673376,
      "learning_rate": 5.994796830554148e-05,
      "loss": 0.9825,
      "step": 63
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.25200297636338487,
      "learning_rate": 5.9935767697158103e-05,
      "loss": 0.9761,
      "step": 64
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.3387334098621161,
      "learning_rate": 5.992228474012056e-05,
      "loss": 0.9724,
      "step": 65
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.3352356569193807,
      "learning_rate": 5.990752001199384e-05,
      "loss": 0.9694,
      "step": 66
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.25307126239627026,
      "learning_rate": 5.989147414524976e-05,
      "loss": 0.9751,
      "step": 67
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.28603448445681706,
      "learning_rate": 5.987414782723985e-05,
      "loss": 0.9675,
      "step": 68
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.2512510471162242,
      "learning_rate": 5.985554180016591e-05,
      "loss": 0.9713,
      "step": 69
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.23308838498745504,
      "learning_rate": 5.98356568610482e-05,
      "loss": 0.9675,
      "step": 70
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.2507676754613168,
      "learning_rate": 5.981449386169134e-05,
      "loss": 0.9768,
      "step": 71
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.17834215895890057,
      "learning_rate": 5.979205370864779e-05,
      "loss": 0.9736,
      "step": 72
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.22157097251518248,
      "learning_rate": 5.976833736317901e-05,
      "loss": 0.9761,
      "step": 73
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.19478083716793773,
      "learning_rate": 5.9743345841214316e-05,
      "loss": 0.9578,
      "step": 74
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.23929984172471444,
      "learning_rate": 5.9717080213307314e-05,
      "loss": 0.9637,
      "step": 75
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.1792504123152509,
      "learning_rate": 5.968954160459011e-05,
      "loss": 0.9694,
      "step": 76
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.1942820411854134,
      "learning_rate": 5.966073119472502e-05,
      "loss": 0.9654,
      "step": 77
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.20225817833116058,
      "learning_rate": 5.963065021785414e-05,
      "loss": 0.9568,
      "step": 78
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.17871245685588816,
      "learning_rate": 5.9599299962546375e-05,
      "loss": 0.9672,
      "step": 79
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.19357967707631799,
      "learning_rate": 5.956668177174234e-05,
      "loss": 0.9581,
      "step": 80
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.17703904963665285,
      "learning_rate": 5.953279704269675e-05,
      "loss": 0.9399,
      "step": 81
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.18397738069475075,
      "learning_rate": 5.949764722691864e-05,
      "loss": 0.9582,
      "step": 82
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.1667458999382299,
      "learning_rate": 5.9461233830109117e-05,
      "loss": 0.9574,
      "step": 83
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.15246629979305598,
      "learning_rate": 5.9423558412096914e-05,
      "loss": 0.9624,
      "step": 84
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.147474418280348,
      "learning_rate": 5.938462258677154e-05,
      "loss": 0.9574,
      "step": 85
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.18908685471550463,
      "learning_rate": 5.934442802201417e-05,
      "loss": 0.9559,
      "step": 86
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.15962484297812873,
      "learning_rate": 5.930297643962617e-05,
      "loss": 0.9565,
      "step": 87
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.15626218745541218,
      "learning_rate": 5.926026961525538e-05,
      "loss": 0.9669,
      "step": 88
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.16363056001381104,
      "learning_rate": 5.921630937832001e-05,
      "loss": 0.9575,
      "step": 89
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.17266359267085402,
      "learning_rate": 5.91710976119303e-05,
      "loss": 0.9482,
      "step": 90
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.16154285700465842,
      "learning_rate": 5.9124636252807844e-05,
      "loss": 0.9486,
      "step": 91
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.1505238744935054,
      "learning_rate": 5.907692729120263e-05,
      "loss": 0.9465,
      "step": 92
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.1665515957602505,
      "learning_rate": 5.9027972770807796e-05,
      "loss": 0.9458,
      "step": 93
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.15288744931626277,
      "learning_rate": 5.897777478867205e-05,
      "loss": 0.9513,
      "step": 94
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.13893588088386338,
      "learning_rate": 5.892633549510988e-05,
      "loss": 0.9517,
      "step": 95
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.15518080707211024,
      "learning_rate": 5.887365709360941e-05,
      "loss": 0.956,
      "step": 96
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.16735904200015367,
      "learning_rate": 5.881974184073806e-05,
      "loss": 0.9644,
      "step": 97
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.16211582659946666,
      "learning_rate": 5.876459204604579e-05,
      "loss": 0.947,
      "step": 98
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.16067404496452142,
      "learning_rate": 5.8708210071966266e-05,
      "loss": 0.9493,
      "step": 99
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.16466229984590008,
      "learning_rate": 5.8650598333715604e-05,
      "loss": 0.9525,
      "step": 100
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.15635651077742227,
      "learning_rate": 5.8591759299188915e-05,
      "loss": 0.9462,
      "step": 101
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.1445739463712597,
      "learning_rate": 5.853169548885461e-05,
      "loss": 0.9557,
      "step": 102
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.16706069442335733,
      "learning_rate": 5.847040947564642e-05,
      "loss": 0.9571,
      "step": 103
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.14383289999902457,
      "learning_rate": 5.8407903884853173e-05,
      "loss": 0.9452,
      "step": 104
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.1409356084289515,
      "learning_rate": 5.8344181394006345e-05,
      "loss": 0.9452,
      "step": 105
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.1697312533943109,
      "learning_rate": 5.827924473276536e-05,
      "loss": 0.9567,
      "step": 106
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.17531082504359882,
      "learning_rate": 5.821309668280065e-05,
      "loss": 0.9462,
      "step": 107
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.1639474813042821,
      "learning_rate": 5.814574007767453e-05,
      "loss": 0.9485,
      "step": 108
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.17850681384081132,
      "learning_rate": 5.807717780271977e-05,
      "loss": 0.9366,
      "step": 109
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.16125613154173873,
      "learning_rate": 5.800741279491605e-05,
      "loss": 0.9451,
      "step": 110
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.197900179672024,
      "learning_rate": 5.7936448042764106e-05,
      "loss": 0.9495,
      "step": 111
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.21076815721637063,
      "learning_rate": 5.7864286586157726e-05,
      "loss": 0.9435,
      "step": 112
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.19782059920889028,
      "learning_rate": 5.7790931516253545e-05,
      "loss": 0.9416,
      "step": 113
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.1836244550876009,
      "learning_rate": 5.7716385975338605e-05,
      "loss": 0.9466,
      "step": 114
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.1613256822257701,
      "learning_rate": 5.764065315669578e-05,
      "loss": 0.9513,
      "step": 115
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.16741350820768655,
      "learning_rate": 5.756373630446695e-05,
      "loss": 0.9418,
      "step": 116
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.18843157688751017,
      "learning_rate": 5.748563871351408e-05,
      "loss": 0.945,
      "step": 117
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.15069085597597218,
      "learning_rate": 5.7406363729278026e-05,
      "loss": 0.9466,
      "step": 118
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.16662142102625724,
      "learning_rate": 5.7325914747635275e-05,
      "loss": 0.9486,
      "step": 119
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.19590155354881233,
      "learning_rate": 5.724429521475244e-05,
      "loss": 0.9435,
      "step": 120
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.15881882311241918,
      "learning_rate": 5.716150862693866e-05,
      "loss": 0.9466,
      "step": 121
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.14283264252478434,
      "learning_rate": 5.707755853049582e-05,
      "loss": 0.9412,
      "step": 122
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.16782472517291772,
      "learning_rate": 5.699244852156665e-05,
      "loss": 0.9382,
      "step": 123
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.16548141172284359,
      "learning_rate": 5.690618224598065e-05,
      "loss": 0.9479,
      "step": 124
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.13809707490800946,
      "learning_rate": 5.681876339909797e-05,
      "loss": 0.9429,
      "step": 125
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.18656335425800988,
      "learning_rate": 5.673019572565103e-05,
      "loss": 0.9381,
      "step": 126
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.1732296780937527,
      "learning_rate": 5.664048301958422e-05,
      "loss": 0.9431,
      "step": 127
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.14221356678314365,
      "learning_rate": 5.654962912389126e-05,
      "loss": 0.9523,
      "step": 128
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.15119656927035072,
      "learning_rate": 5.645763793045065e-05,
      "loss": 0.9392,
      "step": 129
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.14464956279442504,
      "learning_rate": 5.636451337985896e-05,
      "loss": 0.9384,
      "step": 130
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.13140580588464784,
      "learning_rate": 5.627025946126199e-05,
      "loss": 0.9372,
      "step": 131
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.14493562139149813,
      "learning_rate": 5.617488021218392e-05,
      "loss": 0.9358,
      "step": 132
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.14893216335321577,
      "learning_rate": 5.6078379718354315e-05,
      "loss": 0.9419,
      "step": 133
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.14722396099871643,
      "learning_rate": 5.5980762113533166e-05,
      "loss": 0.944,
      "step": 134
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.1484388680331261,
      "learning_rate": 5.588203157933376e-05,
      "loss": 0.946,
      "step": 135
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.14453486799975523,
      "learning_rate": 5.578219234504359e-05,
      "loss": 0.9502,
      "step": 136
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.13849264435281988,
      "learning_rate": 5.568124868744315e-05,
      "loss": 0.9339,
      "step": 137
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.16199659629172095,
      "learning_rate": 5.557920493062277e-05,
      "loss": 0.9238,
      "step": 138
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.16192446153254933,
      "learning_rate": 5.547606544579737e-05,
      "loss": 0.9336,
      "step": 139
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.14648464155352814,
      "learning_rate": 5.5371834651119204e-05,
      "loss": 0.9305,
      "step": 140
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.1383162638141149,
      "learning_rate": 5.5266517011488596e-05,
      "loss": 0.9391,
      "step": 141
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.1543998232523172,
      "learning_rate": 5.5160117038362726e-05,
      "loss": 0.9366,
      "step": 142
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.15730672921508043,
      "learning_rate": 5.5052639289562294e-05,
      "loss": 0.9346,
      "step": 143
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.13879299352222021,
      "learning_rate": 5.494408836907636e-05,
      "loss": 0.9364,
      "step": 144
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.15487093688296455,
      "learning_rate": 5.483446892686507e-05,
      "loss": 0.9246,
      "step": 145
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.14401066674945762,
      "learning_rate": 5.472378565866047e-05,
      "loss": 0.9361,
      "step": 146
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.14277478926628612,
      "learning_rate": 5.461204330576541e-05,
      "loss": 0.9389,
      "step": 147
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.16131149816932222,
      "learning_rate": 5.4499246654850374e-05,
      "loss": 0.9371,
      "step": 148
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.17407874387998404,
      "learning_rate": 5.4385400537748465e-05,
      "loss": 0.9372,
      "step": 149
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.17636478629943686,
      "learning_rate": 5.427050983124843e-05,
      "loss": 0.9343,
      "step": 150
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.16977563161518314,
      "learning_rate": 5.4154579456885744e-05,
      "loss": 0.9281,
      "step": 151
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.14318477872489785,
      "learning_rate": 5.403761438073182e-05,
      "loss": 0.9365,
      "step": 152
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.1397942734256781,
      "learning_rate": 5.3919619613181215e-05,
      "loss": 0.9469,
      "step": 153
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.137963649770748,
      "learning_rate": 5.3800600208737054e-05,
      "loss": 0.9359,
      "step": 154
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.15171364959388206,
      "learning_rate": 5.3680561265794496e-05,
      "loss": 0.9269,
      "step": 155
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.14709787438935637,
      "learning_rate": 5.3559507926422344e-05,
      "loss": 0.9383,
      "step": 156
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.12890500807018168,
      "learning_rate": 5.343744537614276e-05,
      "loss": 0.924,
      "step": 157
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.11549683107184264,
      "learning_rate": 5.331437884370913e-05,
      "loss": 0.9283,
      "step": 158
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.15022761689747757,
      "learning_rate": 5.319031360088211e-05,
      "loss": 0.9307,
      "step": 159
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.15714839886122223,
      "learning_rate": 5.306525496220379e-05,
      "loss": 0.935,
      "step": 160
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.16859132275494298,
      "learning_rate": 5.293920828477001e-05,
      "loss": 0.9239,
      "step": 161
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.13633581062447336,
      "learning_rate": 5.281217896800093e-05,
      "loss": 0.9414,
      "step": 162
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.1513556776118746,
      "learning_rate": 5.268417245340968e-05,
      "loss": 0.9338,
      "step": 163
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.1757338104436492,
      "learning_rate": 5.255519422436932e-05,
      "loss": 0.9351,
      "step": 164
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.19112425290854476,
      "learning_rate": 5.242524980587791e-05,
      "loss": 0.9333,
      "step": 165
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.17979027309488244,
      "learning_rate": 5.2294344764321825e-05,
      "loss": 0.9179,
      "step": 166
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.16529910589891425,
      "learning_rate": 5.2162484707237387e-05,
      "loss": 0.9356,
      "step": 167
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.20692577816446356,
      "learning_rate": 5.202967528307057e-05,
      "loss": 0.9276,
      "step": 168
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.197154695709334,
      "learning_rate": 5.1895922180935066e-05,
      "loss": 0.9303,
      "step": 169
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.14150974456692325,
      "learning_rate": 5.176123113036863e-05,
      "loss": 0.9364,
      "step": 170
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.15101923044037124,
      "learning_rate": 5.162560790108756e-05,
      "loss": 0.9219,
      "step": 171
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.19585100899813354,
      "learning_rate": 5.148905830273964e-05,
      "loss": 0.9282,
      "step": 172
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.16466476745138203,
      "learning_rate": 5.135158818465514e-05,
      "loss": 0.9267,
      "step": 173
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.13928826936678446,
      "learning_rate": 5.1213203435596425e-05,
      "loss": 0.9204,
      "step": 174
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.16777686357665667,
      "learning_rate": 5.107390998350555e-05,
      "loss": 0.9209,
      "step": 175
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.13941558577566035,
      "learning_rate": 5.093371379525041e-05,
      "loss": 0.933,
      "step": 176
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.1312376870568112,
      "learning_rate": 5.079262087636908e-05,
      "loss": 0.9273,
      "step": 177
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.13304976321730042,
      "learning_rate": 5.0650637270812615e-05,
      "loss": 0.9325,
      "step": 178
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.9060415029525757,
      "eval_runtime": 367.2491,
      "eval_samples_per_second": 35.657,
      "eval_steps_per_second": 0.054,
      "step": 178
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.14350856865519188,
      "learning_rate": 5.0507769060686136e-05,
      "loss": 0.8991,
      "step": 179
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.14242795375317008,
      "learning_rate": 5.036402236598826e-05,
      "loss": 0.8819,
      "step": 180
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.1481380625280409,
      "learning_rate": 5.021940334434894e-05,
      "loss": 0.9013,
      "step": 181
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.14135102829732513,
      "learning_rate": 5.007391819076575e-05,
      "loss": 0.8876,
      "step": 182
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.14410031273084303,
      "learning_rate": 4.9927573137338456e-05,
      "loss": 0.8962,
      "step": 183
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.1356425817573695,
      "learning_rate": 4.978037445300207e-05,
      "loss": 0.8984,
      "step": 184
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.15138222667915374,
      "learning_rate": 4.963232844325832e-05,
      "loss": 0.8934,
      "step": 185
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.1476182931802454,
      "learning_rate": 4.948344144990551e-05,
      "loss": 0.8875,
      "step": 186
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.1542666076621437,
      "learning_rate": 4.933371985076692e-05,
      "loss": 0.8916,
      "step": 187
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.1522457621047058,
      "learning_rate": 4.9183170059417543e-05,
      "loss": 0.8924,
      "step": 188
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.1401929471695145,
      "learning_rate": 4.903179852490937e-05,
      "loss": 0.8961,
      "step": 189
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.1397403483022461,
      "learning_rate": 4.887961173149513e-05,
      "loss": 0.8841,
      "step": 190
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.16199935662086337,
      "learning_rate": 4.872661619835054e-05,
      "loss": 0.8934,
      "step": 191
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.17579997757407093,
      "learning_rate": 4.857281847929503e-05,
      "loss": 0.8912,
      "step": 192
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.12918417902945037,
      "learning_rate": 4.8418225162510994e-05,
      "loss": 0.8955,
      "step": 193
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.1357691544652963,
      "learning_rate": 4.826284287026162e-05,
      "loss": 0.8876,
      "step": 194
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.13562412043146593,
      "learning_rate": 4.8106678258607146e-05,
      "loss": 0.8925,
      "step": 195
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.14459127567703792,
      "learning_rate": 4.794973801711977e-05,
      "loss": 0.9046,
      "step": 196
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.13612859027230298,
      "learning_rate": 4.7792028868597114e-05,
      "loss": 0.9008,
      "step": 197
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.13139758263957305,
      "learning_rate": 4.7633557568774194e-05,
      "loss": 0.8883,
      "step": 198
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.1332017950455885,
      "learning_rate": 4.7474330906034067e-05,
      "loss": 0.8872,
      "step": 199
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.14003547669435232,
      "learning_rate": 4.731435570111701e-05,
      "loss": 0.8852,
      "step": 200
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.12548622314931585,
      "learning_rate": 4.7153638806828365e-05,
      "loss": 0.8918,
      "step": 201
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.1316324333822756,
      "learning_rate": 4.699218710774499e-05,
      "loss": 0.8911,
      "step": 202
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.14152097644548858,
      "learning_rate": 4.68300075199203e-05,
      "loss": 0.8989,
      "step": 203
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.1270200473456502,
      "learning_rate": 4.6667106990588066e-05,
      "loss": 0.8855,
      "step": 204
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.1498866663693573,
      "learning_rate": 4.650349249786481e-05,
      "loss": 0.8915,
      "step": 205
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.13575618100316017,
      "learning_rate": 4.633917105045082e-05,
      "loss": 0.8868,
      "step": 206
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.12252093912010241,
      "learning_rate": 4.617414968733002e-05,
      "loss": 0.8993,
      "step": 207
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.17129363052080313,
      "learning_rate": 4.6008435477468346e-05,
      "loss": 0.8979,
      "step": 208
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.13086986947624585,
      "learning_rate": 4.584203551951104e-05,
      "loss": 0.9042,
      "step": 209
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.1289023381093015,
      "learning_rate": 4.567495694147847e-05,
      "loss": 0.9079,
      "step": 210
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.12646977417389443,
      "learning_rate": 4.5507206900460824e-05,
      "loss": 0.8805,
      "step": 211
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.135408603034375,
      "learning_rate": 4.533879258231156e-05,
      "loss": 0.9036,
      "step": 212
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.13151933660920634,
      "learning_rate": 4.516972120133954e-05,
      "loss": 0.8963,
      "step": 213
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.14642201202966043,
      "learning_rate": 4.5e-05,
      "loss": 0.8884,
      "step": 214
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.13215103394112293,
      "learning_rate": 4.4829636248584336e-05,
      "loss": 0.8954,
      "step": 215
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.14081499259661354,
      "learning_rate": 4.4658637244908654e-05,
      "loss": 0.9078,
      "step": 216
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.13325013129319507,
      "learning_rate": 4.448701031400112e-05,
      "loss": 0.889,
      "step": 217
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.14567267671389802,
      "learning_rate": 4.431476280778825e-05,
      "loss": 0.8896,
      "step": 218
    },
    {
      "epoch": 1.23,
      "grad_norm": 0.1530402998595256,
      "learning_rate": 4.414190210477994e-05,
      "loss": 0.8863,
      "step": 219
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.11883142231327087,
      "learning_rate": 4.396843560975334e-05,
      "loss": 0.8956,
      "step": 220
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.1441333083283775,
      "learning_rate": 4.37943707534358e-05,
      "loss": 0.8886,
      "step": 221
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.14323097711463215,
      "learning_rate": 4.3619714992186405e-05,
      "loss": 0.8934,
      "step": 222
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.14847369219388526,
      "learning_rate": 4.344447580767668e-05,
      "loss": 0.8993,
      "step": 223
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.12999113661604003,
      "learning_rate": 4.326866070657004e-05,
      "loss": 0.8837,
      "step": 224
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.1590097564167615,
      "learning_rate": 4.309227722020026e-05,
      "loss": 0.893,
      "step": 225
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.13787186379941546,
      "learning_rate": 4.291533290424886e-05,
      "loss": 0.8873,
      "step": 226
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.13561372365659452,
      "learning_rate": 4.27378353384214e-05,
      "loss": 0.8909,
      "step": 227
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.12883018074018207,
      "learning_rate": 4.2559792126122843e-05,
      "loss": 0.8859,
      "step": 228
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.13365029333191705,
      "learning_rate": 4.238121089413184e-05,
      "loss": 0.9062,
      "step": 229
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.14359777080953676,
      "learning_rate": 4.2202099292274015e-05,
      "loss": 0.8991,
      "step": 230
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.127263628452024,
      "learning_rate": 4.2022464993094226e-05,
      "loss": 0.8945,
      "step": 231
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.15508442928638333,
      "learning_rate": 4.184231569152802e-05,
      "loss": 0.8971,
      "step": 232
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.1498552324218163,
      "learning_rate": 4.166165910457187e-05,
      "loss": 0.891,
      "step": 233
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.13461134175675027,
      "learning_rate": 4.14805029709527e-05,
      "loss": 0.8877,
      "step": 234
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.14946029731016597,
      "learning_rate": 4.1298855050796324e-05,
      "loss": 0.8903,
      "step": 235
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.13552355920453768,
      "learning_rate": 4.1116723125295094e-05,
      "loss": 0.8973,
      "step": 236
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.14705105872966423,
      "learning_rate": 4.09341149963745e-05,
      "loss": 0.8982,
      "step": 237
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.16411157831669457,
      "learning_rate": 4.0751038486359e-05,
      "loss": 0.8982,
      "step": 238
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.13269150518487338,
      "learning_rate": 4.056750143763701e-05,
      "loss": 0.8895,
      "step": 239
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.14962049546760073,
      "learning_rate": 4.038351171232479e-05,
      "loss": 0.8854,
      "step": 240
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.16086784383385563,
      "learning_rate": 4.019907719192982e-05,
      "loss": 0.8964,
      "step": 241
    },
    {
      "epoch": 1.36,
      "grad_norm": 0.14444863678847175,
      "learning_rate": 4.0014205777013125e-05,
      "loss": 0.8847,
      "step": 242
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.12452999678280204,
      "learning_rate": 3.982890538685081e-05,
      "loss": 0.8896,
      "step": 243
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.15069697551107591,
      "learning_rate": 3.964318395909485e-05,
      "loss": 0.8986,
      "step": 244
    },
    {
      "epoch": 1.38,
      "grad_norm": 0.12577792786818992,
      "learning_rate": 3.945704944943309e-05,
      "loss": 0.884,
      "step": 245
    },
    {
      "epoch": 1.38,
      "grad_norm": 0.12286285953881854,
      "learning_rate": 3.927050983124842e-05,
      "loss": 0.8818,
      "step": 246
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.13929979955363045,
      "learning_rate": 3.908357309527724e-05,
      "loss": 0.8866,
      "step": 247
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.12285830808074812,
      "learning_rate": 3.889624724926713e-05,
      "loss": 0.8812,
      "step": 248
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.12424668641448619,
      "learning_rate": 3.870854031763387e-05,
      "loss": 0.8928,
      "step": 249
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.12954214774856612,
      "learning_rate": 3.852046034111769e-05,
      "loss": 0.8853,
      "step": 250
    },
    {
      "epoch": 1.41,
      "grad_norm": 0.1266013073117186,
      "learning_rate": 3.8332015376438775e-05,
      "loss": 0.8844,
      "step": 251
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.12067596509699888,
      "learning_rate": 3.8143213495952224e-05,
      "loss": 0.8916,
      "step": 252
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.11623622829833351,
      "learning_rate": 3.795406278730224e-05,
      "loss": 0.8859,
      "step": 253
    },
    {
      "epoch": 1.43,
      "grad_norm": 0.11551209984961364,
      "learning_rate": 3.776457135307562e-05,
      "loss": 0.8868,
      "step": 254
    },
    {
      "epoch": 1.43,
      "grad_norm": 0.11536856636572211,
      "learning_rate": 3.757474731045474e-05,
      "loss": 0.8828,
      "step": 255
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.12375743852295125,
      "learning_rate": 3.738459879086979e-05,
      "loss": 0.8902,
      "step": 256
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.12571384064192775,
      "learning_rate": 3.71941339396505e-05,
      "loss": 0.8885,
      "step": 257
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.13081609687043957,
      "learning_rate": 3.7003360915677164e-05,
      "loss": 0.8954,
      "step": 258
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.12420083605022122,
      "learning_rate": 3.68122878910312e-05,
      "loss": 0.8914,
      "step": 259
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.13533339586556395,
      "learning_rate": 3.6620923050645045e-05,
      "loss": 0.8901,
      "step": 260
    },
    {
      "epoch": 1.47,
      "grad_norm": 0.11714010649732222,
      "learning_rate": 3.6429274591951526e-05,
      "loss": 0.899,
      "step": 261
    },
    {
      "epoch": 1.47,
      "grad_norm": 0.13060719113804842,
      "learning_rate": 3.6237350724532775e-05,
      "loss": 0.8987,
      "step": 262
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.11664362421142428,
      "learning_rate": 3.6045159669768514e-05,
      "loss": 0.8835,
      "step": 263
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.12473979951909206,
      "learning_rate": 3.5852709660483855e-05,
      "loss": 0.8824,
      "step": 264
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.12297968050330141,
      "learning_rate": 3.566000894059666e-05,
      "loss": 0.8863,
      "step": 265
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.13406322104382662,
      "learning_rate": 3.5467065764764434e-05,
      "loss": 0.8884,
      "step": 266
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.12098972585519244,
      "learning_rate": 3.527388839803064e-05,
      "loss": 0.8857,
      "step": 267
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.12310957119462022,
      "learning_rate": 3.508048511547073e-05,
      "loss": 0.8835,
      "step": 268
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.11925523073706316,
      "learning_rate": 3.4886864201837666e-05,
      "loss": 0.8814,
      "step": 269
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.13519867806471875,
      "learning_rate": 3.469303395120693e-05,
      "loss": 0.8783,
      "step": 270
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.13530142559876104,
      "learning_rate": 3.449900266662135e-05,
      "loss": 0.8914,
      "step": 271
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.14035599743211918,
      "learning_rate": 3.430477865973538e-05,
      "loss": 0.8884,
      "step": 272
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.1405423762523139,
      "learning_rate": 3.4110370250459046e-05,
      "loss": 0.8803,
      "step": 273
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.13704705253451768,
      "learning_rate": 3.3915785766601555e-05,
      "loss": 0.8773,
      "step": 274
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.13457615586156477,
      "learning_rate": 3.372103354351456e-05,
      "loss": 0.8749,
      "step": 275
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.11095858575701167,
      "learning_rate": 3.3526121923735136e-05,
      "loss": 0.8845,
      "step": 276
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.13597531439993377,
      "learning_rate": 3.333105925662833e-05,
      "loss": 0.8928,
      "step": 277
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.10509488873902952,
      "learning_rate": 3.313585389802961e-05,
      "loss": 0.8949,
      "step": 278
    },
    {
      "epoch": 1.57,
      "grad_norm": 0.13788943155574296,
      "learning_rate": 3.294051420988683e-05,
      "loss": 0.8848,
      "step": 279
    },
    {
      "epoch": 1.57,
      "grad_norm": 0.1128347052438993,
      "learning_rate": 3.274504855990208e-05,
      "loss": 0.89,
      "step": 280
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.129080918181206,
      "learning_rate": 3.254946532117325e-05,
      "loss": 0.889,
      "step": 281
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.13286996791565725,
      "learning_rate": 3.235377287183535e-05,
      "loss": 0.8852,
      "step": 282
    },
    {
      "epoch": 1.59,
      "grad_norm": 0.13354457708434664,
      "learning_rate": 3.2157979594701584e-05,
      "loss": 0.8788,
      "step": 283
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.1336273953738305,
      "learning_rate": 3.1962093876904294e-05,
      "loss": 0.8878,
      "step": 284
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.13151737304049846,
      "learning_rate": 3.176612410953567e-05,
      "loss": 0.8844,
      "step": 285
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.1313340614662638,
      "learning_rate": 3.157007868728832e-05,
      "loss": 0.8882,
      "step": 286
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.12675708891920084,
      "learning_rate": 3.1373966008095624e-05,
      "loss": 0.876,
      "step": 287
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.13203447973990534,
      "learning_rate": 3.117779447277206e-05,
      "loss": 0.8866,
      "step": 288
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.11627365297753998,
      "learning_rate": 3.098157248465329e-05,
      "loss": 0.8797,
      "step": 289
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.12182727484026588,
      "learning_rate": 3.07853084492362e-05,
      "loss": 0.8867,
      "step": 290
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.11050534350252515,
      "learning_rate": 3.0589010773818843e-05,
      "loss": 0.8714,
      "step": 291
    },
    {
      "epoch": 1.64,
      "grad_norm": 0.11908194959446168,
      "learning_rate": 3.0392687867140333e-05,
      "loss": 0.8805,
      "step": 292
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.13824206533219605,
      "learning_rate": 3.019634813902056e-05,
      "loss": 0.8919,
      "step": 293
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.10624499343917188,
      "learning_rate": 3e-05,
      "loss": 0.8872,
      "step": 294
    },
    {
      "epoch": 1.66,
      "grad_norm": 0.13416938377497178,
      "learning_rate": 2.9803651860979446e-05,
      "loss": 0.8882,
      "step": 295
    },
    {
      "epoch": 1.66,
      "grad_norm": 0.10669414306939697,
      "learning_rate": 2.9607312132859672e-05,
      "loss": 0.8886,
      "step": 296
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.13304203015528449,
      "learning_rate": 2.9410989226181155e-05,
      "loss": 0.8858,
      "step": 297
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.10943791939152962,
      "learning_rate": 2.9214691550763813e-05,
      "loss": 0.8862,
      "step": 298
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.12414652355477009,
      "learning_rate": 2.901842751534672e-05,
      "loss": 0.8771,
      "step": 299
    },
    {
      "epoch": 1.69,
      "grad_norm": 0.12513686726484888,
      "learning_rate": 2.882220552722795e-05,
      "loss": 0.8884,
      "step": 300
    },
    {
      "epoch": 1.69,
      "grad_norm": 0.12991814913620842,
      "learning_rate": 2.8626033991904384e-05,
      "loss": 0.8912,
      "step": 301
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.12469358939363877,
      "learning_rate": 2.8429921312711687e-05,
      "loss": 0.8755,
      "step": 302
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.12431368690357643,
      "learning_rate": 2.8233875890464327e-05,
      "loss": 0.8758,
      "step": 303
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.13987522452787626,
      "learning_rate": 2.8037906123095708e-05,
      "loss": 0.8869,
      "step": 304
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.1238266615524893,
      "learning_rate": 2.7842020405298415e-05,
      "loss": 0.8783,
      "step": 305
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.13322189651231403,
      "learning_rate": 2.7646227128164657e-05,
      "loss": 0.885,
      "step": 306
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.11424790429145636,
      "learning_rate": 2.7450534678826753e-05,
      "loss": 0.8763,
      "step": 307
    },
    {
      "epoch": 1.73,
      "grad_norm": 0.13119287845211466,
      "learning_rate": 2.725495144009793e-05,
      "loss": 0.8767,
      "step": 308
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.11575683203124575,
      "learning_rate": 2.705948579011318e-05,
      "loss": 0.8729,
      "step": 309
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.14050128592489014,
      "learning_rate": 2.6864146101970402e-05,
      "loss": 0.8798,
      "step": 310
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.11824436396680635,
      "learning_rate": 2.6668940743371674e-05,
      "loss": 0.8835,
      "step": 311
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.12959459755274583,
      "learning_rate": 2.6473878076264875e-05,
      "loss": 0.8751,
      "step": 312
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.11622350222131687,
      "learning_rate": 2.627896645648545e-05,
      "loss": 0.8931,
      "step": 313
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.12314437146185572,
      "learning_rate": 2.608421423339846e-05,
      "loss": 0.8865,
      "step": 314
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.11421490206501421,
      "learning_rate": 2.5889629749540966e-05,
      "loss": 0.8824,
      "step": 315
    },
    {
      "epoch": 1.78,
      "grad_norm": 0.1116631859004068,
      "learning_rate": 2.5695221340264626e-05,
      "loss": 0.8803,
      "step": 316
    },
    {
      "epoch": 1.78,
      "grad_norm": 0.10817788865943528,
      "learning_rate": 2.5500997333378646e-05,
      "loss": 0.8794,
      "step": 317
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.11523773392162422,
      "learning_rate": 2.530696604879307e-05,
      "loss": 0.8832,
      "step": 318
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.10502311886881133,
      "learning_rate": 2.5113135798162342e-05,
      "loss": 0.8816,
      "step": 319
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.11059154509363399,
      "learning_rate": 2.4919514884529262e-05,
      "loss": 0.8787,
      "step": 320
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.10388990188288608,
      "learning_rate": 2.4726111601969365e-05,
      "loss": 0.8824,
      "step": 321
    },
    {
      "epoch": 1.81,
      "grad_norm": 0.112930784152048,
      "learning_rate": 2.4532934235235574e-05,
      "loss": 0.8744,
      "step": 322
    },
    {
      "epoch": 1.81,
      "grad_norm": 0.11158721627694267,
      "learning_rate": 2.433999105940335e-05,
      "loss": 0.8847,
      "step": 323
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.10338686368057544,
      "learning_rate": 2.4147290339516156e-05,
      "loss": 0.877,
      "step": 324
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.1071678174366947,
      "learning_rate": 2.3954840330231487e-05,
      "loss": 0.884,
      "step": 325
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.11226132071651221,
      "learning_rate": 2.3762649275467226e-05,
      "loss": 0.8747,
      "step": 326
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.10930430786464529,
      "learning_rate": 2.3570725408048483e-05,
      "loss": 0.883,
      "step": 327
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.10447714161398361,
      "learning_rate": 2.337907694935497e-05,
      "loss": 0.8797,
      "step": 328
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.10888020274939628,
      "learning_rate": 2.3187712108968808e-05,
      "loss": 0.8779,
      "step": 329
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.10873276266965934,
      "learning_rate": 2.2996639084322848e-05,
      "loss": 0.8716,
      "step": 330
    },
    {
      "epoch": 1.86,
      "grad_norm": 0.1051804371547165,
      "learning_rate": 2.2805866060349513e-05,
      "loss": 0.8925,
      "step": 331
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.1073547759858754,
      "learning_rate": 2.261540120913021e-05,
      "loss": 0.8763,
      "step": 332
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.10582013545010713,
      "learning_rate": 2.242525268954526e-05,
      "loss": 0.8801,
      "step": 333
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.11360898822883193,
      "learning_rate": 2.2235428646924375e-05,
      "loss": 0.8861,
      "step": 334
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.10517104031304181,
      "learning_rate": 2.2045937212697755e-05,
      "loss": 0.875,
      "step": 335
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.11733374572446251,
      "learning_rate": 2.1856786504047774e-05,
      "loss": 0.8771,
      "step": 336
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.10165485322637723,
      "learning_rate": 2.1667984623561237e-05,
      "loss": 0.8772,
      "step": 337
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.12525581851096768,
      "learning_rate": 2.147953965888232e-05,
      "loss": 0.8893,
      "step": 338
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.0991811666289482,
      "learning_rate": 2.1291459682366136e-05,
      "loss": 0.8777,
      "step": 339
    },
    {
      "epoch": 1.91,
      "grad_norm": 0.10828713812832842,
      "learning_rate": 2.1103752750732875e-05,
      "loss": 0.8834,
      "step": 340
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.11106845149741414,
      "learning_rate": 2.091642690472277e-05,
      "loss": 0.874,
      "step": 341
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.10116959541779709,
      "learning_rate": 2.072949016875158e-05,
      "loss": 0.8677,
      "step": 342
    },
    {
      "epoch": 1.93,
      "grad_norm": 0.11266898014907221,
      "learning_rate": 2.054295055056692e-05,
      "loss": 0.8926,
      "step": 343
    },
    {
      "epoch": 1.93,
      "grad_norm": 0.10220798261268464,
      "learning_rate": 2.035681604090516e-05,
      "loss": 0.8645,
      "step": 344
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.1060144270004459,
      "learning_rate": 2.0171094613149198e-05,
      "loss": 0.8799,
      "step": 345
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.10968581060830482,
      "learning_rate": 1.9985794222986876e-05,
      "loss": 0.8763,
      "step": 346
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.11650138283261792,
      "learning_rate": 1.980092280807017e-05,
      "loss": 0.884,
      "step": 347
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.10120824187747386,
      "learning_rate": 1.9616488287675206e-05,
      "loss": 0.8749,
      "step": 348
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.11604072229024492,
      "learning_rate": 1.9432498562362997e-05,
      "loss": 0.8796,
      "step": 349
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.10278157857112963,
      "learning_rate": 1.924896151364099e-05,
      "loss": 0.8793,
      "step": 350
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.10712761878171682,
      "learning_rate": 1.906588500362551e-05,
      "loss": 0.8801,
      "step": 351
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.10593823683108748,
      "learning_rate": 1.888327687470491e-05,
      "loss": 0.8783,
      "step": 352
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.10348796531289477,
      "learning_rate": 1.8701144949203677e-05,
      "loss": 0.8786,
      "step": 353
    },
    {
      "epoch": 1.99,
      "grad_norm": 0.1074116166391481,
      "learning_rate": 1.8519497029047307e-05,
      "loss": 0.8778,
      "step": 354
    },
    {
      "epoch": 1.99,
      "grad_norm": 0.11058482340970019,
      "learning_rate": 1.833834089542813e-05,
      "loss": 0.8713,
      "step": 355
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.11020651242885636,
      "learning_rate": 1.8157684308471988e-05,
      "loss": 0.8687,
      "step": 356
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.8850164413452148,
      "eval_runtime": 325.5979,
      "eval_samples_per_second": 40.218,
      "eval_steps_per_second": 0.061,
      "step": 356
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.12003832997057942,
      "learning_rate": 1.7977535006905776e-05,
      "loss": 0.8401,
      "step": 357
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.10675891351152236,
      "learning_rate": 1.7797900707726e-05,
      "loss": 0.8486,
      "step": 358
    },
    {
      "epoch": 2.02,
      "grad_norm": 0.13479155868015855,
      "learning_rate": 1.761878910586816e-05,
      "loss": 0.8508,
      "step": 359
    },
    {
      "epoch": 2.02,
      "grad_norm": 0.11372338660989749,
      "learning_rate": 1.7440207873877165e-05,
      "loss": 0.8415,
      "step": 360
    },
    {
      "epoch": 2.03,
      "grad_norm": 0.11106742867861737,
      "learning_rate": 1.7262164661578614e-05,
      "loss": 0.8546,
      "step": 361
    },
    {
      "epoch": 2.03,
      "grad_norm": 0.10946128209473692,
      "learning_rate": 1.708466709575114e-05,
      "loss": 0.8477,
      "step": 362
    },
    {
      "epoch": 2.04,
      "grad_norm": 0.10202238774555161,
      "learning_rate": 1.6907722779799732e-05,
      "loss": 0.8498,
      "step": 363
    },
    {
      "epoch": 2.04,
      "grad_norm": 0.11732343725531236,
      "learning_rate": 1.6731339293429967e-05,
      "loss": 0.8462,
      "step": 364
    },
    {
      "epoch": 2.05,
      "grad_norm": 0.10655979023984713,
      "learning_rate": 1.6555524192323327e-05,
      "loss": 0.8497,
      "step": 365
    },
    {
      "epoch": 2.06,
      "grad_norm": 0.1124168518526382,
      "learning_rate": 1.6380285007813596e-05,
      "loss": 0.8479,
      "step": 366
    },
    {
      "epoch": 2.06,
      "grad_norm": 0.10356449268514911,
      "learning_rate": 1.6205629246564205e-05,
      "loss": 0.852,
      "step": 367
    },
    {
      "epoch": 2.07,
      "grad_norm": 0.11466802705572018,
      "learning_rate": 1.6031564390246658e-05,
      "loss": 0.8469,
      "step": 368
    },
    {
      "epoch": 2.07,
      "grad_norm": 0.10022099638563725,
      "learning_rate": 1.585809789522007e-05,
      "loss": 0.8553,
      "step": 369
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.11768853322439309,
      "learning_rate": 1.5685237192211747e-05,
      "loss": 0.8587,
      "step": 370
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.09746799497107858,
      "learning_rate": 1.551298968599889e-05,
      "loss": 0.8461,
      "step": 371
    },
    {
      "epoch": 2.09,
      "grad_norm": 0.10423547308687264,
      "learning_rate": 1.534136275509136e-05,
      "loss": 0.8485,
      "step": 372
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.1064669106822173,
      "learning_rate": 1.517036375141567e-05,
      "loss": 0.8495,
      "step": 373
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.09616752021785893,
      "learning_rate": 1.5000000000000007e-05,
      "loss": 0.8438,
      "step": 374
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.11068107117718663,
      "learning_rate": 1.4830278798660467e-05,
      "loss": 0.8563,
      "step": 375
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.11633212251432552,
      "learning_rate": 1.4661207417688442e-05,
      "loss": 0.8541,
      "step": 376
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.09985284219816701,
      "learning_rate": 1.4492793099539175e-05,
      "loss": 0.855,
      "step": 377
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.11580112488775175,
      "learning_rate": 1.4325043058521537e-05,
      "loss": 0.8563,
      "step": 378
    },
    {
      "epoch": 2.13,
      "grad_norm": 0.0968466939956641,
      "learning_rate": 1.415796448048896e-05,
      "loss": 0.8526,
      "step": 379
    },
    {
      "epoch": 2.13,
      "grad_norm": 0.10786154660093858,
      "learning_rate": 1.3991564522531655e-05,
      "loss": 0.8485,
      "step": 380
    },
    {
      "epoch": 2.14,
      "grad_norm": 0.09923333146234646,
      "learning_rate": 1.3825850312669992e-05,
      "loss": 0.8513,
      "step": 381
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.10427648708884239,
      "learning_rate": 1.3660828949549189e-05,
      "loss": 0.8486,
      "step": 382
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.12292462282144442,
      "learning_rate": 1.34965075021352e-05,
      "loss": 0.8481,
      "step": 383
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.1028967402644067,
      "learning_rate": 1.3332893009411942e-05,
      "loss": 0.8505,
      "step": 384
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.10410540672946561,
      "learning_rate": 1.3169992480079712e-05,
      "loss": 0.8513,
      "step": 385
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.09846244373078215,
      "learning_rate": 1.3007812892255022e-05,
      "loss": 0.8474,
      "step": 386
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.1030135486730131,
      "learning_rate": 1.2846361193171636e-05,
      "loss": 0.8539,
      "step": 387
    },
    {
      "epoch": 2.18,
      "grad_norm": 0.09722668637542492,
      "learning_rate": 1.2685644298882995e-05,
      "loss": 0.8469,
      "step": 388
    },
    {
      "epoch": 2.19,
      "grad_norm": 0.10002106907707546,
      "learning_rate": 1.2525669093965938e-05,
      "loss": 0.8538,
      "step": 389
    },
    {
      "epoch": 2.19,
      "grad_norm": 0.1013552416026544,
      "learning_rate": 1.2366442431225809e-05,
      "loss": 0.8402,
      "step": 390
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.10045607980672773,
      "learning_rate": 1.2207971131402889e-05,
      "loss": 0.8538,
      "step": 391
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.10092477315351785,
      "learning_rate": 1.2050261982880229e-05,
      "loss": 0.8493,
      "step": 392
    },
    {
      "epoch": 2.21,
      "grad_norm": 0.09631205966968917,
      "learning_rate": 1.1893321741392857e-05,
      "loss": 0.844,
      "step": 393
    },
    {
      "epoch": 2.21,
      "grad_norm": 0.09480564837357011,
      "learning_rate": 1.173715712973838e-05,
      "loss": 0.8516,
      "step": 394
    },
    {
      "epoch": 2.22,
      "grad_norm": 0.10519104968879345,
      "learning_rate": 1.1581774837489004e-05,
      "loss": 0.8489,
      "step": 395
    },
    {
      "epoch": 2.22,
      "grad_norm": 0.09383687719586851,
      "learning_rate": 1.1427181520704977e-05,
      "loss": 0.8423,
      "step": 396
    },
    {
      "epoch": 2.23,
      "grad_norm": 0.09314803679639443,
      "learning_rate": 1.1273383801649465e-05,
      "loss": 0.855,
      "step": 397
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.09785284085871111,
      "learning_rate": 1.1120388268504882e-05,
      "loss": 0.8592,
      "step": 398
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.10014383710533643,
      "learning_rate": 1.0968201475090638e-05,
      "loss": 0.8527,
      "step": 399
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.09342386714963961,
      "learning_rate": 1.081682994058246e-05,
      "loss": 0.8518,
      "step": 400
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.09637941328010605,
|
"learning_rate": 1.0666280149233084e-05, |
|
"loss": 0.8611, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.09382962878187683, |
|
"learning_rate": 1.0516558550094494e-05, |
|
"loss": 0.8534, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.09961841204532747, |
|
"learning_rate": 1.036767155674169e-05, |
|
"loss": 0.8616, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.09436634787644145, |
|
"learning_rate": 1.0219625546997936e-05, |
|
"loss": 0.8484, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.09434782225624004, |
|
"learning_rate": 1.0072426862661559e-05, |
|
"loss": 0.8543, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.09240933321648483, |
|
"learning_rate": 9.926081809234262e-06, |
|
"loss": 0.8521, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.09529963951322644, |
|
"learning_rate": 9.780596655651062e-06, |
|
"loss": 0.8502, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.09779115542409987, |
|
"learning_rate": 9.635977634011746e-06, |
|
"loss": 0.8538, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.0953318034742421, |
|
"learning_rate": 9.492230939313859e-06, |
|
"loss": 0.8462, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.09533554193414427, |
|
"learning_rate": 9.349362729187376e-06, |
|
"loss": 0.8505, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.09108747344016899, |
|
"learning_rate": 9.207379123630928e-06, |
|
"loss": 0.8364, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.09157394780369447, |
|
"learning_rate": 9.066286204749602e-06, |
|
"loss": 0.8542, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.09626988034418861, |
|
"learning_rate": 8.926090016494452e-06, |
|
"loss": 0.8395, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.0932671686945301, |
|
"learning_rate": 8.786796564403577e-06, |
|
"loss": 0.8545, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.09177787384063951, |
|
"learning_rate": 8.648411815344862e-06, |
|
"loss": 0.8478, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.09283856078392008, |
|
"learning_rate": 8.510941697260372e-06, |
|
"loss": 0.8482, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.09505027194291998, |
|
"learning_rate": 8.374392098912435e-06, |
|
"loss": 0.8515, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.09061937534466419, |
|
"learning_rate": 8.238768869631379e-06, |
|
"loss": 0.8419, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.09687410932411479, |
|
"learning_rate": 8.104077819064939e-06, |
|
"loss": 0.8455, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.0919488286995836, |
|
"learning_rate": 7.97032471692944e-06, |
|
"loss": 0.8543, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.0917348716297872, |
|
"learning_rate": 7.837515292762618e-06, |
|
"loss": 0.84, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.0970295444546269, |
|
"learning_rate": 7.70565523567817e-06, |
|
"loss": 0.8483, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.09120283035787119, |
|
"learning_rate": 7.5747501941220924e-06, |
|
"loss": 0.8495, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.09237994609930707, |
|
"learning_rate": 7.444805775630682e-06, |
|
"loss": 0.8426, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.0913397657548416, |
|
"learning_rate": 7.315827546590318e-06, |
|
"loss": 0.8512, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.09498181364054688, |
|
"learning_rate": 7.187821031999073e-06, |
|
"loss": 0.8481, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.10078078724917741, |
|
"learning_rate": 7.0607917152299905e-06, |
|
"loss": 0.8455, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.09145562291129593, |
|
"learning_rate": 6.9347450377962165e-06, |
|
"loss": 0.8491, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.09158338626328179, |
|
"learning_rate": 6.8096863991178906e-06, |
|
"loss": 0.8561, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.09207229087759045, |
|
"learning_rate": 6.685621156290873e-06, |
|
"loss": 0.8467, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.08985403499699009, |
|
"learning_rate": 6.562554623857251e-06, |
|
"loss": 0.8446, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.09232970248222189, |
|
"learning_rate": 6.440492073577659e-06, |
|
"loss": 0.8412, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.08869732511140344, |
|
"learning_rate": 6.319438734205503e-06, |
|
"loss": 0.8533, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.09159355570132911, |
|
"learning_rate": 6.199399791262949e-06, |
|
"loss": 0.8426, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.08776078153410961, |
|
"learning_rate": 6.08038038681879e-06, |
|
"loss": 0.8468, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.08961006632692203, |
|
"learning_rate": 5.962385619268184e-06, |
|
"loss": 0.8443, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.09327133481154146, |
|
"learning_rate": 5.845420543114255e-06, |
|
"loss": 0.8507, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.08783358817348458, |
|
"learning_rate": 5.72949016875158e-06, |
|
"loss": 0.8444, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.09080799921546491, |
|
"learning_rate": 5.614599462251546e-06, |
|
"loss": 0.8504, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.09120455202673097, |
|
"learning_rate": 5.500753345149633e-06, |
|
"loss": 0.8511, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.0883979066402917, |
|
"learning_rate": 5.387956694234592e-06, |
|
"loss": 0.8346, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.08782824261068842, |
|
"learning_rate": 5.2762143413395296e-06, |
|
"loss": 0.8499, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.09088620542561385, |
|
"learning_rate": 5.165531073134936e-06, |
|
"loss": 0.8496, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.08784209719890995, |
|
"learning_rate": 5.05591163092364e-06, |
|
"loss": 0.8406, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.09132953206887262, |
|
"learning_rate": 4.9473607104377105e-06, |
|
"loss": 0.8459, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.08772978666632109, |
|
"learning_rate": 4.839882961637282e-06, |
|
"loss": 0.8505, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.09985768462831238, |
|
"learning_rate": 4.733482988511407e-06, |
|
"loss": 0.8534, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.08761275010174724, |
|
"learning_rate": 4.628165348880804e-06, |
|
"loss": 0.8524, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.09041594806081453, |
|
"learning_rate": 4.523934554202636e-06, |
|
"loss": 0.8529, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.08661829609254855, |
|
"learning_rate": 4.4207950693772345e-06, |
|
"loss": 0.8409, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.0895302020625196, |
|
"learning_rate": 4.3187513125568586e-06, |
|
"loss": 0.8453, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.08974158495627368, |
|
"learning_rate": 4.217807654956419e-06, |
|
"loss": 0.8427, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.0881031632024463, |
|
"learning_rate": 4.117968420666245e-06, |
|
"loss": 0.8518, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.08911231289710217, |
|
"learning_rate": 4.019237886466839e-06, |
|
"loss": 0.8479, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.08758751320687048, |
|
"learning_rate": 3.921620281645688e-06, |
|
"loss": 0.8428, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.08762936227381564, |
|
"learning_rate": 3.825119787816085e-06, |
|
"loss": 0.8541, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.09070004531926247, |
|
"learning_rate": 3.7297405387380066e-06, |
|
"loss": 0.8606, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.0901394728812162, |
|
"learning_rate": 3.635486620141042e-06, |
|
"loss": 0.8439, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.09035508650424855, |
|
"learning_rate": 3.542362069549352e-06, |
|
"loss": 0.8597, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.08987208626466826, |
|
"learning_rate": 3.450370876108747e-06, |
|
"loss": 0.8549, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.08584439292379578, |
|
"learning_rate": 3.3595169804157834e-06, |
|
"loss": 0.8511, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.0885850408197027, |
|
"learning_rate": 3.2698042743489666e-06, |
|
"loss": 0.8538, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.09187965878611364, |
|
"learning_rate": 3.1812366009020366e-06, |
|
"loss": 0.8509, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.08932729014366118, |
|
"learning_rate": 3.0938177540193523e-06, |
|
"loss": 0.8422, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.0850824069799195, |
|
"learning_rate": 3.0075514784333613e-06, |
|
"loss": 0.8388, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.08738926155401434, |
|
"learning_rate": 2.922441469504188e-06, |
|
"loss": 0.8561, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.08920484001133727, |
|
"learning_rate": 2.8384913730613404e-06, |
|
"loss": 0.8557, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.08819170118822395, |
|
"learning_rate": 2.7557047852475594e-06, |
|
"loss": 0.858, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.08869175897009467, |
|
"learning_rate": 2.674085252364723e-06, |
|
"loss": 0.8536, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.09053609127800862, |
|
"learning_rate": 2.5936362707219708e-06, |
|
"loss": 0.8483, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.08867818730722615, |
|
"learning_rate": 2.5143612864859246e-06, |
|
"loss": 0.8532, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.09139340482992017, |
|
"learning_rate": 2.4362636955330543e-06, |
|
"loss": 0.8498, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.09525892817222975, |
|
"learning_rate": 2.3593468433042278e-06, |
|
"loss": 0.8529, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.08789827680089286, |
|
"learning_rate": 2.2836140246613977e-06, |
|
"loss": 0.8521, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.08648818427629963, |
|
"learning_rate": 2.209068483746457e-06, |
|
"loss": 0.8422, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.08960144249428664, |
|
"learning_rate": 2.135713413842273e-06, |
|
"loss": 0.8505, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.08754870519933383, |
|
"learning_rate": 2.063551957235893e-06, |
|
"loss": 0.8402, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.08550557755721046, |
|
"learning_rate": 1.992587205083951e-06, |
|
"loss": 0.8575, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.08826896212264684, |
|
"learning_rate": 1.922822197280234e-06, |
|
"loss": 0.8541, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.08668882825496892, |
|
"learning_rate": 1.8542599223254786e-06, |
|
"loss": 0.846, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.08475522972496816, |
|
"learning_rate": 1.7869033171993575e-06, |
|
"loss": 0.8407, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.08771710800870834, |
|
"learning_rate": 1.7207552672346471e-06, |
|
"loss": 0.8503, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.09111144297342101, |
|
"learning_rate": 1.6558186059936587e-06, |
|
"loss": 0.8479, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.08620883215580287, |
|
"learning_rate": 1.5920961151468327e-06, |
|
"loss": 0.8458, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.08471255274938502, |
|
"learning_rate": 1.5295905243535847e-06, |
|
"loss": 0.8537, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.08449994501906344, |
|
"learning_rate": 1.4683045111453942e-06, |
|
"loss": 0.8492, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.08739419896581681, |
|
"learning_rate": 1.408240700811091e-06, |
|
"loss": 0.8524, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.08352102275911637, |
|
"learning_rate": 1.3494016662844011e-06, |
|
"loss": 0.8476, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.08352827574514726, |
|
"learning_rate": 1.2917899280337354e-06, |
|
"loss": 0.8551, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.08439329456211196, |
|
"learning_rate": 1.2354079539542085e-06, |
|
"loss": 0.8416, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.08562104303556034, |
|
"learning_rate": 1.1802581592619444e-06, |
|
"loss": 0.8501, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.0896431543921245, |
|
"learning_rate": 1.126342906390585e-06, |
|
"loss": 0.8461, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.08543853113029239, |
|
"learning_rate": 1.0736645048901217e-06, |
|
"loss": 0.8586, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.08898225127672299, |
|
"learning_rate": 1.022225211327954e-06, |
|
"loss": 0.846, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.08623358584808383, |
|
"learning_rate": 9.720272291922072e-07, |
|
"loss": 0.8406, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.08391526822617813, |
|
"learning_rate": 9.230727087973712e-07, |
|
"loss": 0.8451, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.08567392350575599, |
|
"learning_rate": 8.753637471921572e-07, |
|
"loss": 0.8408, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.08908923286438868, |
|
"learning_rate": 8.289023880697033e-07, |
|
"loss": 0.8418, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.08472167209293709, |
|
"learning_rate": 7.83690621679991e-07, |
|
"loss": 0.8448, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.0841111177111636, |
|
"learning_rate": 7.397303847446202e-07, |
|
"loss": 0.8482, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.09207138444341484, |
|
"learning_rate": 6.970235603738284e-07, |
|
"loss": 0.8455, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.08300698846765277, |
|
"learning_rate": 6.555719779858294e-07, |
|
"loss": 0.8393, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.08410038120955553, |
|
"learning_rate": 6.153774132284584e-07, |
|
"loss": 0.8426, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.08542102355669302, |
|
"learning_rate": 5.764415879030871e-07, |
|
"loss": 0.8538, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.08431102358812953, |
|
"learning_rate": 5.387661698908852e-07, |
|
"loss": 0.8454, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.08201034861685735, |
|
"learning_rate": 5.023527730813649e-07, |
|
"loss": 0.8492, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.08426514879217514, |
|
"learning_rate": 4.672029573032521e-07, |
|
"loss": 0.8539, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.08363012777621809, |
|
"learning_rate": 4.333182282576675e-07, |
|
"loss": 0.8507, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.08403404035574012, |
|
"learning_rate": 4.0070003745363073e-07, |
|
"loss": 0.8417, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.08556297885057308, |
|
"learning_rate": 3.6934978214587026e-07, |
|
"loss": 0.8542, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.0830271098839072, |
|
"learning_rate": 3.392688052749782e-07, |
|
"loss": 0.8384, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.08332845498822317, |
|
"learning_rate": 3.1045839540989273e-07, |
|
"loss": 0.8497, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.08163756862158367, |
|
"learning_rate": 2.829197866926825e-07, |
|
"loss": 0.8436, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.08239779756966806, |
|
"learning_rate": 2.5665415878568855e-07, |
|
"loss": 0.8419, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.0846233275518859, |
|
"learning_rate": 2.3166263682098844e-07, |
|
"loss": 0.8569, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.08295765011201144, |
|
"learning_rate": 2.0794629135221123e-07, |
|
"loss": 0.852, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.08898478620134848, |
|
"learning_rate": 1.8550613830865758e-07, |
|
"loss": 0.8524, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.08253980942709409, |
|
"learning_rate": 1.6434313895180132e-07, |
|
"loss": 0.8439, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.08658080433487944, |
|
"learning_rate": 1.4445819983409546e-07, |
|
"loss": 0.8478, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.085494836912567, |
|
"learning_rate": 1.2585217276015026e-07, |
|
"loss": 0.854, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.08520015820412867, |
|
"learning_rate": 1.085258547502388e-07, |
|
"loss": 0.8527, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.0835695495145572, |
|
"learning_rate": 9.247998800616108e-08, |
|
"loss": 0.8514, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.08360415935414345, |
|
"learning_rate": 7.771525987944284e-08, |
|
"loss": 0.8497, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.0828425664078141, |
|
"learning_rate": 6.423230284189563e-08, |
|
"loss": 0.8495, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.08252226826743013, |
|
"learning_rate": 5.203169445852529e-08, |
|
"loss": 0.8412, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.08292638372084292, |
|
"learning_rate": 4.1113957362785e-08, |
|
"loss": 0.8575, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.08188311347471468, |
|
"learning_rate": 3.147955923419654e-08, |
|
"loss": 0.841, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.0836376120816663, |
|
"learning_rate": 2.3128912778312972e-08, |
|
"loss": 0.8484, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.08186296368729573, |
|
"learning_rate": 1.6062375709029465e-08, |
|
"loss": 0.8544, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.08278337963328566, |
|
"learning_rate": 1.0280250733282203e-08, |
|
"loss": 0.8478, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.08279493662178963, |
|
"learning_rate": 5.7827855380554465e-09, |
|
"loss": 0.8465, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.09221939825908573, |
|
"learning_rate": 2.570172779789992e-09, |
|
"loss": 0.8473, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.09063451115383812, |
|
"learning_rate": 6.425500761231274e-10, |
|
"loss": 0.8581, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.08387090563875321, |
|
"learning_rate": 0.0, |
|
"loss": 0.8385, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 0.8818458318710327, |
|
"eval_runtime": 352.6305, |
|
"eval_samples_per_second": 37.135, |
|
"eval_steps_per_second": 0.057, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 534, |
|
"total_flos": 4217873142644736.0, |
|
"train_loss": 0.9222131907270196, |
|
"train_runtime": 33956.1835, |
|
"train_samples_per_second": 10.545, |
|
"train_steps_per_second": 0.016 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 534, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 4217873142644736.0, |
|
"train_batch_size": 42, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|