|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0, |
|
"eval_steps": 500, |
|
"global_step": 1052, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0019011406844106464, |
|
"grad_norm": 3.7587154010656363, |
|
"learning_rate": 1.886792452830189e-07, |
|
"loss": 0.6968, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0038022813688212928, |
|
"grad_norm": 3.362741889686214, |
|
"learning_rate": 3.773584905660378e-07, |
|
"loss": 0.7135, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005703422053231939, |
|
"grad_norm": 3.5896130033094114, |
|
"learning_rate": 5.660377358490567e-07, |
|
"loss": 0.6929, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0076045627376425855, |
|
"grad_norm": 3.129115337194726, |
|
"learning_rate": 7.547169811320755e-07, |
|
"loss": 0.6637, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.009505703422053232, |
|
"grad_norm": 3.4664521332726212, |
|
"learning_rate": 9.433962264150944e-07, |
|
"loss": 0.7198, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.011406844106463879, |
|
"grad_norm": 3.148856383737707, |
|
"learning_rate": 1.1320754716981133e-06, |
|
"loss": 0.6665, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.013307984790874524, |
|
"grad_norm": 3.2635539186157323, |
|
"learning_rate": 1.3207547169811322e-06, |
|
"loss": 0.692, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.015209125475285171, |
|
"grad_norm": 3.178967596263942, |
|
"learning_rate": 1.509433962264151e-06, |
|
"loss": 0.7163, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.017110266159695818, |
|
"grad_norm": 2.709915317767805, |
|
"learning_rate": 1.6981132075471698e-06, |
|
"loss": 0.6834, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.019011406844106463, |
|
"grad_norm": 2.6001939523532878, |
|
"learning_rate": 1.8867924528301889e-06, |
|
"loss": 0.6529, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02091254752851711, |
|
"grad_norm": 1.8169707835844755, |
|
"learning_rate": 2.075471698113208e-06, |
|
"loss": 0.627, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.022813688212927757, |
|
"grad_norm": 1.6687313319074908, |
|
"learning_rate": 2.2641509433962266e-06, |
|
"loss": 0.5863, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.024714828897338403, |
|
"grad_norm": 1.612437922136354, |
|
"learning_rate": 2.4528301886792453e-06, |
|
"loss": 0.5872, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.026615969581749048, |
|
"grad_norm": 1.4489343505128192, |
|
"learning_rate": 2.6415094339622644e-06, |
|
"loss": 0.5514, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.028517110266159697, |
|
"grad_norm": 1.8584327601242208, |
|
"learning_rate": 2.830188679245283e-06, |
|
"loss": 0.5739, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.030418250950570342, |
|
"grad_norm": 2.2720710394200436, |
|
"learning_rate": 3.018867924528302e-06, |
|
"loss": 0.5628, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03231939163498099, |
|
"grad_norm": 2.1986920489076756, |
|
"learning_rate": 3.207547169811321e-06, |
|
"loss": 0.5692, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.034220532319391636, |
|
"grad_norm": 1.8282542796474943, |
|
"learning_rate": 3.3962264150943395e-06, |
|
"loss": 0.5516, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03612167300380228, |
|
"grad_norm": 1.5661235507670834, |
|
"learning_rate": 3.5849056603773586e-06, |
|
"loss": 0.5442, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03802281368821293, |
|
"grad_norm": 1.3878179965965174, |
|
"learning_rate": 3.7735849056603777e-06, |
|
"loss": 0.535, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.039923954372623575, |
|
"grad_norm": 1.6041640537056563, |
|
"learning_rate": 3.962264150943396e-06, |
|
"loss": 0.5379, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04182509505703422, |
|
"grad_norm": 1.4871921337178629, |
|
"learning_rate": 4.150943396226416e-06, |
|
"loss": 0.5383, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.043726235741444866, |
|
"grad_norm": 1.4050204633759933, |
|
"learning_rate": 4.339622641509435e-06, |
|
"loss": 0.5366, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.045627376425855515, |
|
"grad_norm": 1.1513347181094025, |
|
"learning_rate": 4.528301886792453e-06, |
|
"loss": 0.5246, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04752851711026616, |
|
"grad_norm": 1.1332551448500334, |
|
"learning_rate": 4.716981132075472e-06, |
|
"loss": 0.5386, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.049429657794676805, |
|
"grad_norm": 1.1517027519762522, |
|
"learning_rate": 4.905660377358491e-06, |
|
"loss": 0.5601, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.051330798479087454, |
|
"grad_norm": 1.0689019420342514, |
|
"learning_rate": 5.09433962264151e-06, |
|
"loss": 0.5327, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.053231939163498096, |
|
"grad_norm": 1.0501053721662716, |
|
"learning_rate": 5.283018867924529e-06, |
|
"loss": 0.5293, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.055133079847908745, |
|
"grad_norm": 1.0515665048262461, |
|
"learning_rate": 5.4716981132075475e-06, |
|
"loss": 0.4955, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.057034220532319393, |
|
"grad_norm": 0.9298091255316036, |
|
"learning_rate": 5.660377358490566e-06, |
|
"loss": 0.5081, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.058935361216730035, |
|
"grad_norm": 0.9408762894041993, |
|
"learning_rate": 5.849056603773585e-06, |
|
"loss": 0.5104, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.060836501901140684, |
|
"grad_norm": 0.8935338868366678, |
|
"learning_rate": 6.037735849056604e-06, |
|
"loss": 0.4639, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06273764258555133, |
|
"grad_norm": 0.9721755899098623, |
|
"learning_rate": 6.226415094339623e-06, |
|
"loss": 0.4911, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06463878326996197, |
|
"grad_norm": 0.9581768289411134, |
|
"learning_rate": 6.415094339622642e-06, |
|
"loss": 0.4936, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06653992395437262, |
|
"grad_norm": 0.9752276454649679, |
|
"learning_rate": 6.60377358490566e-06, |
|
"loss": 0.5007, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06844106463878327, |
|
"grad_norm": 0.8733291259301061, |
|
"learning_rate": 6.792452830188679e-06, |
|
"loss": 0.479, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07034220532319392, |
|
"grad_norm": 0.8904954990051419, |
|
"learning_rate": 6.981132075471699e-06, |
|
"loss": 0.4715, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.07224334600760456, |
|
"grad_norm": 0.967859573018954, |
|
"learning_rate": 7.169811320754717e-06, |
|
"loss": 0.4843, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.0741444866920152, |
|
"grad_norm": 0.9269812233280805, |
|
"learning_rate": 7.358490566037736e-06, |
|
"loss": 0.4892, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07604562737642585, |
|
"grad_norm": 0.9108140699204871, |
|
"learning_rate": 7.5471698113207555e-06, |
|
"loss": 0.4642, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0779467680608365, |
|
"grad_norm": 0.9660717497951608, |
|
"learning_rate": 7.735849056603775e-06, |
|
"loss": 0.4828, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07984790874524715, |
|
"grad_norm": 0.8734941727606541, |
|
"learning_rate": 7.924528301886793e-06, |
|
"loss": 0.4643, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.0817490494296578, |
|
"grad_norm": 1.0123926941455381, |
|
"learning_rate": 8.113207547169812e-06, |
|
"loss": 0.4656, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.08365019011406843, |
|
"grad_norm": 0.8793753916842706, |
|
"learning_rate": 8.301886792452832e-06, |
|
"loss": 0.4568, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08555133079847908, |
|
"grad_norm": 0.8856372795938922, |
|
"learning_rate": 8.49056603773585e-06, |
|
"loss": 0.4658, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08745247148288973, |
|
"grad_norm": 0.8965193046559465, |
|
"learning_rate": 8.67924528301887e-06, |
|
"loss": 0.4674, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08935361216730038, |
|
"grad_norm": 0.8699265702868152, |
|
"learning_rate": 8.867924528301887e-06, |
|
"loss": 0.4699, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.09125475285171103, |
|
"grad_norm": 0.8683493743735327, |
|
"learning_rate": 9.056603773584907e-06, |
|
"loss": 0.471, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.09315589353612168, |
|
"grad_norm": 0.9349939464306016, |
|
"learning_rate": 9.245283018867926e-06, |
|
"loss": 0.4767, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09505703422053231, |
|
"grad_norm": 0.8645137385128104, |
|
"learning_rate": 9.433962264150944e-06, |
|
"loss": 0.4699, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09695817490494296, |
|
"grad_norm": 0.9274273429803022, |
|
"learning_rate": 9.622641509433963e-06, |
|
"loss": 0.4634, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09885931558935361, |
|
"grad_norm": 0.9230697151987387, |
|
"learning_rate": 9.811320754716981e-06, |
|
"loss": 0.4713, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.10076045627376426, |
|
"grad_norm": 1.1764052162479033, |
|
"learning_rate": 1e-05, |
|
"loss": 0.4634, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.10266159695817491, |
|
"grad_norm": 0.9156676260966872, |
|
"learning_rate": 1.018867924528302e-05, |
|
"loss": 0.4793, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.10456273764258556, |
|
"grad_norm": 1.1346300246553258, |
|
"learning_rate": 1.0377358490566038e-05, |
|
"loss": 0.4794, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.10646387832699619, |
|
"grad_norm": 0.9634324140489856, |
|
"learning_rate": 1.0566037735849058e-05, |
|
"loss": 0.4643, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.10836501901140684, |
|
"grad_norm": 0.8630802969654159, |
|
"learning_rate": 1.0754716981132076e-05, |
|
"loss": 0.4409, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.11026615969581749, |
|
"grad_norm": 0.9763312718222703, |
|
"learning_rate": 1.0943396226415095e-05, |
|
"loss": 0.4656, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.11216730038022814, |
|
"grad_norm": 0.9816001471655289, |
|
"learning_rate": 1.1132075471698115e-05, |
|
"loss": 0.4513, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.11406844106463879, |
|
"grad_norm": 0.9695065756889812, |
|
"learning_rate": 1.1320754716981132e-05, |
|
"loss": 0.4777, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.11596958174904944, |
|
"grad_norm": 1.0977384638649899, |
|
"learning_rate": 1.1509433962264152e-05, |
|
"loss": 0.4391, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.11787072243346007, |
|
"grad_norm": 0.9593913599738026, |
|
"learning_rate": 1.169811320754717e-05, |
|
"loss": 0.4652, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11977186311787072, |
|
"grad_norm": 0.8957672419005775, |
|
"learning_rate": 1.188679245283019e-05, |
|
"loss": 0.4409, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.12167300380228137, |
|
"grad_norm": 1.036557734592175, |
|
"learning_rate": 1.2075471698113209e-05, |
|
"loss": 0.4902, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.12357414448669202, |
|
"grad_norm": 0.9159986109184152, |
|
"learning_rate": 1.2264150943396227e-05, |
|
"loss": 0.4711, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.12547528517110265, |
|
"grad_norm": 0.9157436424614411, |
|
"learning_rate": 1.2452830188679246e-05, |
|
"loss": 0.4468, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12737642585551331, |
|
"grad_norm": 0.9531601255168413, |
|
"learning_rate": 1.2641509433962264e-05, |
|
"loss": 0.4563, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12927756653992395, |
|
"grad_norm": 0.8928606504716562, |
|
"learning_rate": 1.2830188679245283e-05, |
|
"loss": 0.4648, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.1311787072243346, |
|
"grad_norm": 1.294929519516281, |
|
"learning_rate": 1.3018867924528303e-05, |
|
"loss": 0.4845, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.13307984790874525, |
|
"grad_norm": 0.9095331465783749, |
|
"learning_rate": 1.320754716981132e-05, |
|
"loss": 0.457, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.13498098859315588, |
|
"grad_norm": 0.9759136641098354, |
|
"learning_rate": 1.339622641509434e-05, |
|
"loss": 0.4535, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.13688212927756654, |
|
"grad_norm": 1.0288586843539438, |
|
"learning_rate": 1.3584905660377358e-05, |
|
"loss": 0.4617, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13878326996197718, |
|
"grad_norm": 0.8657516488562751, |
|
"learning_rate": 1.3773584905660378e-05, |
|
"loss": 0.4704, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.14068441064638784, |
|
"grad_norm": 0.9753927198358316, |
|
"learning_rate": 1.3962264150943397e-05, |
|
"loss": 0.4306, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.14258555133079848, |
|
"grad_norm": 0.9090424913964088, |
|
"learning_rate": 1.4150943396226415e-05, |
|
"loss": 0.4414, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1444866920152091, |
|
"grad_norm": 0.8707480642407023, |
|
"learning_rate": 1.4339622641509435e-05, |
|
"loss": 0.4598, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.14638783269961977, |
|
"grad_norm": 1.0069627349198476, |
|
"learning_rate": 1.4528301886792452e-05, |
|
"loss": 0.4808, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.1482889733840304, |
|
"grad_norm": 0.9105067107270177, |
|
"learning_rate": 1.4716981132075472e-05, |
|
"loss": 0.445, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.15019011406844107, |
|
"grad_norm": 0.9218190841283949, |
|
"learning_rate": 1.4905660377358491e-05, |
|
"loss": 0.4554, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1520912547528517, |
|
"grad_norm": 1.020573291542075, |
|
"learning_rate": 1.5094339622641511e-05, |
|
"loss": 0.4761, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.15399239543726237, |
|
"grad_norm": 0.8435667653721475, |
|
"learning_rate": 1.5283018867924532e-05, |
|
"loss": 0.4659, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.155893536121673, |
|
"grad_norm": 0.9569290276016456, |
|
"learning_rate": 1.547169811320755e-05, |
|
"loss": 0.4407, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.15779467680608364, |
|
"grad_norm": 0.9398905495824446, |
|
"learning_rate": 1.5660377358490568e-05, |
|
"loss": 0.4645, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1596958174904943, |
|
"grad_norm": 1.1018200288551356, |
|
"learning_rate": 1.5849056603773586e-05, |
|
"loss": 0.4708, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.16159695817490494, |
|
"grad_norm": 1.0624840571756866, |
|
"learning_rate": 1.6037735849056607e-05, |
|
"loss": 0.4432, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.1634980988593156, |
|
"grad_norm": 1.0706654234468287, |
|
"learning_rate": 1.6226415094339625e-05, |
|
"loss": 0.4339, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.16539923954372623, |
|
"grad_norm": 0.9832014045257631, |
|
"learning_rate": 1.6415094339622643e-05, |
|
"loss": 0.4598, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.16730038022813687, |
|
"grad_norm": 0.8606511269860765, |
|
"learning_rate": 1.6603773584905664e-05, |
|
"loss": 0.4491, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.16920152091254753, |
|
"grad_norm": 0.9758920091640942, |
|
"learning_rate": 1.679245283018868e-05, |
|
"loss": 0.4734, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.17110266159695817, |
|
"grad_norm": 0.988312178116455, |
|
"learning_rate": 1.69811320754717e-05, |
|
"loss": 0.465, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.17300380228136883, |
|
"grad_norm": 0.9669130125766525, |
|
"learning_rate": 1.716981132075472e-05, |
|
"loss": 0.4671, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.17490494296577946, |
|
"grad_norm": 1.274369798841813, |
|
"learning_rate": 1.735849056603774e-05, |
|
"loss": 0.4829, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.17680608365019013, |
|
"grad_norm": 0.8098281325891173, |
|
"learning_rate": 1.7547169811320756e-05, |
|
"loss": 0.4362, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.17870722433460076, |
|
"grad_norm": 1.0052478570017351, |
|
"learning_rate": 1.7735849056603774e-05, |
|
"loss": 0.4574, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.1806083650190114, |
|
"grad_norm": 1.0812592927912872, |
|
"learning_rate": 1.7924528301886795e-05, |
|
"loss": 0.4703, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.18250950570342206, |
|
"grad_norm": 0.896344657500704, |
|
"learning_rate": 1.8113207547169813e-05, |
|
"loss": 0.4524, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.1844106463878327, |
|
"grad_norm": 1.1592992056764342, |
|
"learning_rate": 1.830188679245283e-05, |
|
"loss": 0.4644, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.18631178707224336, |
|
"grad_norm": 0.8739226310543647, |
|
"learning_rate": 1.8490566037735852e-05, |
|
"loss": 0.4224, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.188212927756654, |
|
"grad_norm": 0.8922170029831241, |
|
"learning_rate": 1.867924528301887e-05, |
|
"loss": 0.4565, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.19011406844106463, |
|
"grad_norm": 1.1833096225117925, |
|
"learning_rate": 1.8867924528301888e-05, |
|
"loss": 0.4548, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1920152091254753, |
|
"grad_norm": 0.8774292577559527, |
|
"learning_rate": 1.905660377358491e-05, |
|
"loss": 0.4713, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.19391634980988592, |
|
"grad_norm": 1.011387332375806, |
|
"learning_rate": 1.9245283018867927e-05, |
|
"loss": 0.4431, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1958174904942966, |
|
"grad_norm": 0.821841067276584, |
|
"learning_rate": 1.9433962264150945e-05, |
|
"loss": 0.4786, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.19771863117870722, |
|
"grad_norm": 0.9293558236221588, |
|
"learning_rate": 1.9622641509433963e-05, |
|
"loss": 0.4425, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.19961977186311788, |
|
"grad_norm": 0.9272021075252388, |
|
"learning_rate": 1.9811320754716984e-05, |
|
"loss": 0.4667, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.20152091254752852, |
|
"grad_norm": 0.834595094641444, |
|
"learning_rate": 2e-05, |
|
"loss": 0.4571, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.20342205323193915, |
|
"grad_norm": 0.9335242186265841, |
|
"learning_rate": 1.9999944857420527e-05, |
|
"loss": 0.4365, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.20532319391634982, |
|
"grad_norm": 0.8700522970125515, |
|
"learning_rate": 1.9999779430290247e-05, |
|
"loss": 0.4491, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.20722433460076045, |
|
"grad_norm": 0.9238091269927581, |
|
"learning_rate": 1.9999503720433575e-05, |
|
"loss": 0.4617, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.20912547528517111, |
|
"grad_norm": 0.9084779069556055, |
|
"learning_rate": 1.999911773089118e-05, |
|
"loss": 0.4399, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.21102661596958175, |
|
"grad_norm": 0.867661445176709, |
|
"learning_rate": 1.999862146591996e-05, |
|
"loss": 0.4377, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.21292775665399238, |
|
"grad_norm": 0.9440495894897893, |
|
"learning_rate": 1.9998014930992976e-05, |
|
"loss": 0.4478, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.21482889733840305, |
|
"grad_norm": 0.9160613556830745, |
|
"learning_rate": 1.9997298132799408e-05, |
|
"loss": 0.4682, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.21673003802281368, |
|
"grad_norm": 0.9204449054828825, |
|
"learning_rate": 1.9996471079244477e-05, |
|
"loss": 0.4798, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.21863117870722434, |
|
"grad_norm": 1.0162735057047352, |
|
"learning_rate": 1.999553377944936e-05, |
|
"loss": 0.4386, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.22053231939163498, |
|
"grad_norm": 0.8387932169822102, |
|
"learning_rate": 1.9994486243751076e-05, |
|
"loss": 0.4644, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2224334600760456, |
|
"grad_norm": 0.9805193927104513, |
|
"learning_rate": 1.9993328483702393e-05, |
|
"loss": 0.4509, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.22433460076045628, |
|
"grad_norm": 0.8794007625259703, |
|
"learning_rate": 1.999206051207169e-05, |
|
"loss": 0.4663, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2262357414448669, |
|
"grad_norm": 0.8715342892907946, |
|
"learning_rate": 1.9990682342842805e-05, |
|
"loss": 0.4566, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.22813688212927757, |
|
"grad_norm": 0.9487738970190361, |
|
"learning_rate": 1.99891939912149e-05, |
|
"loss": 0.4396, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2300380228136882, |
|
"grad_norm": 1.1915130795368503, |
|
"learning_rate": 1.9987595473602292e-05, |
|
"loss": 0.4644, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.23193916349809887, |
|
"grad_norm": 0.8943531299965776, |
|
"learning_rate": 1.9985886807634246e-05, |
|
"loss": 0.4862, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2338403041825095, |
|
"grad_norm": 0.9505181701096156, |
|
"learning_rate": 1.9984068012154824e-05, |
|
"loss": 0.4382, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.23574144486692014, |
|
"grad_norm": 1.0213990009679197, |
|
"learning_rate": 1.9982139107222634e-05, |
|
"loss": 0.4561, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.2376425855513308, |
|
"grad_norm": 1.0001814161308735, |
|
"learning_rate": 1.9980100114110637e-05, |
|
"loss": 0.4505, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.23954372623574144, |
|
"grad_norm": 1.0965677177589834, |
|
"learning_rate": 1.99779510553059e-05, |
|
"loss": 0.4584, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.2414448669201521, |
|
"grad_norm": 0.8305227098716941, |
|
"learning_rate": 1.9975691954509347e-05, |
|
"loss": 0.4751, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.24334600760456274, |
|
"grad_norm": 1.0328532350738455, |
|
"learning_rate": 1.9973322836635517e-05, |
|
"loss": 0.4454, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.24524714828897337, |
|
"grad_norm": 0.9131979487268771, |
|
"learning_rate": 1.997084372781226e-05, |
|
"loss": 0.4765, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.24714828897338403, |
|
"grad_norm": 0.8997989471521403, |
|
"learning_rate": 1.9968254655380465e-05, |
|
"loss": 0.4382, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.24904942965779467, |
|
"grad_norm": 1.022913186430162, |
|
"learning_rate": 1.996555564789376e-05, |
|
"loss": 0.455, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.2509505703422053, |
|
"grad_norm": 0.8454141967612432, |
|
"learning_rate": 1.996274673511819e-05, |
|
"loss": 0.4257, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.25285171102661597, |
|
"grad_norm": 0.9496397628574214, |
|
"learning_rate": 1.99598279480319e-05, |
|
"loss": 0.4527, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.25475285171102663, |
|
"grad_norm": 0.8672892296978352, |
|
"learning_rate": 1.9956799318824776e-05, |
|
"loss": 0.4354, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.25665399239543724, |
|
"grad_norm": 0.9602041893539319, |
|
"learning_rate": 1.99536608808981e-05, |
|
"loss": 0.4619, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.2585551330798479, |
|
"grad_norm": 1.0102999738185805, |
|
"learning_rate": 1.995041266886419e-05, |
|
"loss": 0.4575, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.26045627376425856, |
|
"grad_norm": 0.8676619014978241, |
|
"learning_rate": 1.9947054718545996e-05, |
|
"loss": 0.4545, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.2623574144486692, |
|
"grad_norm": 0.9435908473741579, |
|
"learning_rate": 1.994358706697674e-05, |
|
"loss": 0.4631, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.26425855513307983, |
|
"grad_norm": 0.9011850195999636, |
|
"learning_rate": 1.9940009752399462e-05, |
|
"loss": 0.4673, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2661596958174905, |
|
"grad_norm": 1.0201988433006477, |
|
"learning_rate": 1.9936322814266634e-05, |
|
"loss": 0.4661, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.26806083650190116, |
|
"grad_norm": 0.9070023246839101, |
|
"learning_rate": 1.9932526293239713e-05, |
|
"loss": 0.4503, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.26996197718631176, |
|
"grad_norm": 0.833565267357631, |
|
"learning_rate": 1.9928620231188694e-05, |
|
"loss": 0.4419, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.2718631178707224, |
|
"grad_norm": 1.0825464254239512, |
|
"learning_rate": 1.992460467119164e-05, |
|
"loss": 0.4639, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2737642585551331, |
|
"grad_norm": 0.8848149746232583, |
|
"learning_rate": 1.992047965753422e-05, |
|
"loss": 0.4767, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.27566539923954375, |
|
"grad_norm": 0.8922043871545696, |
|
"learning_rate": 1.991624523570922e-05, |
|
"loss": 0.4585, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.27756653992395436, |
|
"grad_norm": 1.0049154506291247, |
|
"learning_rate": 1.9911901452416012e-05, |
|
"loss": 0.4424, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.279467680608365, |
|
"grad_norm": 0.8240094093567454, |
|
"learning_rate": 1.9907448355560094e-05, |
|
"loss": 0.4452, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2813688212927757, |
|
"grad_norm": 1.0600074463699127, |
|
"learning_rate": 1.9902885994252506e-05, |
|
"loss": 0.4578, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.2832699619771863, |
|
"grad_norm": 0.8846660827117652, |
|
"learning_rate": 1.989821441880933e-05, |
|
"loss": 0.4564, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.28517110266159695, |
|
"grad_norm": 0.9961956949468344, |
|
"learning_rate": 1.9893433680751105e-05, |
|
"loss": 0.4374, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2870722433460076, |
|
"grad_norm": 0.8310760138947617, |
|
"learning_rate": 1.9888543832802277e-05, |
|
"loss": 0.4263, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.2889733840304182, |
|
"grad_norm": 0.8268687504317589, |
|
"learning_rate": 1.9883544928890612e-05, |
|
"loss": 0.4508, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2908745247148289, |
|
"grad_norm": 0.8841602005584022, |
|
"learning_rate": 1.9878437024146603e-05, |
|
"loss": 0.4885, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.29277566539923955, |
|
"grad_norm": 0.9321271483481541, |
|
"learning_rate": 1.9873220174902857e-05, |
|
"loss": 0.4457, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2946768060836502, |
|
"grad_norm": 0.9873670559376374, |
|
"learning_rate": 1.986789443869348e-05, |
|
"loss": 0.4576, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.2965779467680608, |
|
"grad_norm": 0.8900834790852145, |
|
"learning_rate": 1.9862459874253438e-05, |
|
"loss": 0.4228, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2984790874524715, |
|
"grad_norm": 0.9059448973047894, |
|
"learning_rate": 1.985691654151791e-05, |
|
"loss": 0.4338, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.30038022813688214, |
|
"grad_norm": 0.7832858462149278, |
|
"learning_rate": 1.9851264501621635e-05, |
|
"loss": 0.4578, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.30228136882129275, |
|
"grad_norm": 0.936383302395317, |
|
"learning_rate": 1.984550381689822e-05, |
|
"loss": 0.4588, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.3041825095057034, |
|
"grad_norm": 0.8217133091416859, |
|
"learning_rate": 1.983963455087946e-05, |
|
"loss": 0.4566, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.3060836501901141, |
|
"grad_norm": 0.8498115958843986, |
|
"learning_rate": 1.983365676829466e-05, |
|
"loss": 0.4558, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.30798479087452474, |
|
"grad_norm": 0.8016402808974186, |
|
"learning_rate": 1.982757053506989e-05, |
|
"loss": 0.4489, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.30988593155893535, |
|
"grad_norm": 0.7751229367129853, |
|
"learning_rate": 1.9821375918327268e-05, |
|
"loss": 0.443, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.311787072243346, |
|
"grad_norm": 0.8059104159920085, |
|
"learning_rate": 1.981507298638422e-05, |
|
"loss": 0.4147, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.31368821292775667, |
|
"grad_norm": 0.8536694610287676, |
|
"learning_rate": 1.9808661808752735e-05, |
|
"loss": 0.4574, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3155893536121673, |
|
"grad_norm": 0.9206053777136621, |
|
"learning_rate": 1.980214245613858e-05, |
|
"loss": 0.4484, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.31749049429657794, |
|
"grad_norm": 0.8391503913062355, |
|
"learning_rate": 1.979551500044055e-05, |
|
"loss": 0.4528, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3193916349809886, |
|
"grad_norm": 0.8623820469757025, |
|
"learning_rate": 1.9788779514749635e-05, |
|
"loss": 0.4686, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.32129277566539927, |
|
"grad_norm": 0.886429911271713, |
|
"learning_rate": 1.978193607334826e-05, |
|
"loss": 0.4385, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3231939163498099, |
|
"grad_norm": 1.1241448343196239, |
|
"learning_rate": 1.977498475170941e-05, |
|
"loss": 0.4603, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.32509505703422054, |
|
"grad_norm": 0.8133920725428255, |
|
"learning_rate": 1.9767925626495857e-05, |
|
"loss": 0.4475, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3269961977186312, |
|
"grad_norm": 0.9167370292690682, |
|
"learning_rate": 1.9760758775559275e-05, |
|
"loss": 0.4469, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.3288973384030418, |
|
"grad_norm": 0.9596197488154138, |
|
"learning_rate": 1.975348427793939e-05, |
|
"loss": 0.4588, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.33079847908745247, |
|
"grad_norm": 0.8172335619675806, |
|
"learning_rate": 1.9746102213863113e-05, |
|
"loss": 0.4518, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.33269961977186313, |
|
"grad_norm": 1.0364959651182555, |
|
"learning_rate": 1.973861266474366e-05, |
|
"loss": 0.4525, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.33460076045627374, |
|
"grad_norm": 0.7896657449125826, |
|
"learning_rate": 1.9731015713179643e-05, |
|
"loss": 0.4544, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.3365019011406844, |
|
"grad_norm": 0.9376942547994319, |
|
"learning_rate": 1.9723311442954163e-05, |
|
"loss": 0.4588, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.33840304182509506, |
|
"grad_norm": 0.8415897873276912, |
|
"learning_rate": 1.9715499939033883e-05, |
|
"loss": 0.4329, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.3403041825095057, |
|
"grad_norm": 0.7491819624029536, |
|
"learning_rate": 1.9707581287568094e-05, |
|
"loss": 0.4336, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.34220532319391633, |
|
"grad_norm": 1.069033618481511, |
|
"learning_rate": 1.969955557588778e-05, |
|
"loss": 0.4327, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.344106463878327, |
|
"grad_norm": 0.7702422975501193, |
|
"learning_rate": 1.9691422892504626e-05, |
|
"loss": 0.4306, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.34600760456273766, |
|
"grad_norm": 1.033680894215674, |
|
"learning_rate": 1.968318332711006e-05, |
|
"loss": 0.46, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.34790874524714827, |
|
"grad_norm": 0.9378116290695766, |
|
"learning_rate": 1.9674836970574253e-05, |
|
"loss": 0.4818, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.34980988593155893, |
|
"grad_norm": 0.8387791048659992, |
|
"learning_rate": 1.966638391494514e-05, |
|
"loss": 0.4475, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.3517110266159696, |
|
"grad_norm": 1.0621849806400059, |
|
"learning_rate": 1.9657824253447378e-05, |
|
"loss": 0.4277, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.35361216730038025, |
|
"grad_norm": 0.8872428493389745, |
|
"learning_rate": 1.9649158080481327e-05, |
|
"loss": 0.4584, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.35551330798479086, |
|
"grad_norm": 0.8385107279025317, |
|
"learning_rate": 1.964038549162201e-05, |
|
"loss": 0.4253, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.3574144486692015, |
|
"grad_norm": 1.1342322954222368, |
|
"learning_rate": 1.963150658361807e-05, |
|
"loss": 0.4383, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.3593155893536122, |
|
"grad_norm": 0.7710295766471317, |
|
"learning_rate": 1.962252145439068e-05, |
|
"loss": 0.4272, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.3612167300380228, |
|
"grad_norm": 0.943614976507202, |
|
"learning_rate": 1.9613430203032486e-05, |
|
"loss": 0.4345, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.36311787072243346, |
|
"grad_norm": 1.0747882871628456, |
|
"learning_rate": 1.9604232929806493e-05, |
|
"loss": 0.4407, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.3650190114068441, |
|
"grad_norm": 0.8703814589438721, |
|
"learning_rate": 1.9594929736144978e-05, |
|
"loss": 0.4321, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3669201520912547, |
|
"grad_norm": 0.8964800239866031, |
|
"learning_rate": 1.9585520724648354e-05, |
|
"loss": 0.4337, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.3688212927756654, |
|
"grad_norm": 0.846801285526771, |
|
"learning_rate": 1.957600599908406e-05, |
|
"loss": 0.4368, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.37072243346007605, |
|
"grad_norm": 0.9190940443815219, |
|
"learning_rate": 1.95663856643854e-05, |
|
"loss": 0.4438, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.3726235741444867, |
|
"grad_norm": 0.8852933595722409, |
|
"learning_rate": 1.955665982665038e-05, |
|
"loss": 0.4278, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.3745247148288973, |
|
"grad_norm": 0.7338028012229248, |
|
"learning_rate": 1.9546828593140565e-05, |
|
"loss": 0.4204, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.376425855513308, |
|
"grad_norm": 0.8880827920619041, |
|
"learning_rate": 1.9536892072279863e-05, |
|
"loss": 0.4505, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.37832699619771865, |
|
"grad_norm": 0.8055465213716715, |
|
"learning_rate": 1.9526850373653356e-05, |
|
"loss": 0.4692, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.38022813688212925, |
|
"grad_norm": 0.7385860605150167, |
|
"learning_rate": 1.9516703608006074e-05, |
|
"loss": 0.3969, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.3821292775665399, |
|
"grad_norm": 0.7998871780527864, |
|
"learning_rate": 1.9506451887241787e-05, |
|
"loss": 0.43, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.3840304182509506, |
|
"grad_norm": 0.7729734293304378, |
|
"learning_rate": 1.949609532442176e-05, |
|
"loss": 0.4433, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.38593155893536124, |
|
"grad_norm": 0.8075690292130411, |
|
"learning_rate": 1.9485634033763507e-05, |
|
"loss": 0.4199, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.38783269961977185, |
|
"grad_norm": 0.8006024972032324, |
|
"learning_rate": 1.9475068130639543e-05, |
|
"loss": 0.4157, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.3897338403041825, |
|
"grad_norm": 0.805198471242757, |
|
"learning_rate": 1.9464397731576093e-05, |
|
"loss": 0.448, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3916349809885932, |
|
"grad_norm": 0.8198823759777583, |
|
"learning_rate": 1.945362295425183e-05, |
|
"loss": 0.4411, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.3935361216730038, |
|
"grad_norm": 0.8376863963373794, |
|
"learning_rate": 1.944274391749655e-05, |
|
"loss": 0.4281, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.39543726235741444, |
|
"grad_norm": 0.8182021117836741, |
|
"learning_rate": 1.9431760741289886e-05, |
|
"loss": 0.4209, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.3973384030418251, |
|
"grad_norm": 0.7919911464163376, |
|
"learning_rate": 1.942067354675997e-05, |
|
"loss": 0.4439, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.39923954372623577, |
|
"grad_norm": 0.7881627781393306, |
|
"learning_rate": 1.9409482456182105e-05, |
|
"loss": 0.4284, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.4011406844106464, |
|
"grad_norm": 0.7655466240593265, |
|
"learning_rate": 1.939818759297741e-05, |
|
"loss": 0.4241, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.40304182509505704, |
|
"grad_norm": 0.7874252908247497, |
|
"learning_rate": 1.9386789081711465e-05, |
|
"loss": 0.4321, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.4049429657794677, |
|
"grad_norm": 0.7823202283489393, |
|
"learning_rate": 1.9375287048092927e-05, |
|
"loss": 0.448, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.4068441064638783, |
|
"grad_norm": 0.7545510398894911, |
|
"learning_rate": 1.9363681618972166e-05, |
|
"loss": 0.4328, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.40874524714828897, |
|
"grad_norm": 0.7889241019066625, |
|
"learning_rate": 1.9351972922339835e-05, |
|
"loss": 0.4367, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.41064638783269963, |
|
"grad_norm": 0.7483946443939699, |
|
"learning_rate": 1.9340161087325483e-05, |
|
"loss": 0.4397, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.41254752851711024, |
|
"grad_norm": 0.7660466250314436, |
|
"learning_rate": 1.9328246244196117e-05, |
|
"loss": 0.4303, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.4144486692015209, |
|
"grad_norm": 0.776619474237903, |
|
"learning_rate": 1.931622852435478e-05, |
|
"loss": 0.4416, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.41634980988593157, |
|
"grad_norm": 0.7616351223608897, |
|
"learning_rate": 1.930410806033908e-05, |
|
"loss": 0.4345, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.41825095057034223, |
|
"grad_norm": 0.9384742141181847, |
|
"learning_rate": 1.929188498581975e-05, |
|
"loss": 0.432, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.42015209125475284, |
|
"grad_norm": 0.8855190946497012, |
|
"learning_rate": 1.9279559435599164e-05, |
|
"loss": 0.447, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.4220532319391635, |
|
"grad_norm": 0.9801225564460324, |
|
"learning_rate": 1.926713154560984e-05, |
|
"loss": 0.453, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.42395437262357416, |
|
"grad_norm": 0.8039052999022875, |
|
"learning_rate": 1.9254601452912972e-05, |
|
"loss": 0.4596, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.42585551330798477, |
|
"grad_norm": 1.026094464979351, |
|
"learning_rate": 1.924196929569688e-05, |
|
"loss": 0.4348, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.42775665399239543, |
|
"grad_norm": 0.7601287164420519, |
|
"learning_rate": 1.922923521327551e-05, |
|
"loss": 0.4407, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4296577946768061, |
|
"grad_norm": 0.9912810781784912, |
|
"learning_rate": 1.9216399346086893e-05, |
|
"loss": 0.4362, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.43155893536121676, |
|
"grad_norm": 0.7607922604900124, |
|
"learning_rate": 1.9203461835691596e-05, |
|
"loss": 0.44, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.43346007604562736, |
|
"grad_norm": 0.8824871863136691, |
|
"learning_rate": 1.9190422824771158e-05, |
|
"loss": 0.435, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.435361216730038, |
|
"grad_norm": 0.7919343571024645, |
|
"learning_rate": 1.9177282457126515e-05, |
|
"loss": 0.4158, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.4372623574144487, |
|
"grad_norm": 0.768369683318518, |
|
"learning_rate": 1.9164040877676425e-05, |
|
"loss": 0.4294, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4391634980988593, |
|
"grad_norm": 1.016935123578636, |
|
"learning_rate": 1.9150698232455853e-05, |
|
"loss": 0.3963, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.44106463878326996, |
|
"grad_norm": 0.7582803051677965, |
|
"learning_rate": 1.913725466861438e-05, |
|
"loss": 0.4278, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.4429657794676806, |
|
"grad_norm": 0.8946850217393366, |
|
"learning_rate": 1.9123710334414552e-05, |
|
"loss": 0.4585, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.4448669201520912, |
|
"grad_norm": 0.7805131342935635, |
|
"learning_rate": 1.911006537923029e-05, |
|
"loss": 0.4274, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4467680608365019, |
|
"grad_norm": 0.7342344209154682, |
|
"learning_rate": 1.9096319953545186e-05, |
|
"loss": 0.4356, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.44866920152091255, |
|
"grad_norm": 0.8179223681628749, |
|
"learning_rate": 1.908247420895089e-05, |
|
"loss": 0.4306, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4505703422053232, |
|
"grad_norm": 0.9193102485800642, |
|
"learning_rate": 1.9068528298145418e-05, |
|
"loss": 0.424, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4524714828897338, |
|
"grad_norm": 0.7656374155303313, |
|
"learning_rate": 1.905448237493147e-05, |
|
"loss": 0.4416, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.4543726235741445, |
|
"grad_norm": 0.9885795674447727, |
|
"learning_rate": 1.9040336594214727e-05, |
|
"loss": 0.4073, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.45627376425855515, |
|
"grad_norm": 0.7984749132802632, |
|
"learning_rate": 1.9026091112002163e-05, |
|
"loss": 0.4249, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.45817490494296575, |
|
"grad_norm": 0.9282796120941919, |
|
"learning_rate": 1.90117460854003e-05, |
|
"loss": 0.4299, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.4600760456273764, |
|
"grad_norm": 0.9516443166975507, |
|
"learning_rate": 1.8997301672613496e-05, |
|
"loss": 0.4605, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.4619771863117871, |
|
"grad_norm": 0.8851837571478424, |
|
"learning_rate": 1.8982758032942184e-05, |
|
"loss": 0.4431, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.46387832699619774, |
|
"grad_norm": 0.8934412546734641, |
|
"learning_rate": 1.896811532678113e-05, |
|
"loss": 0.4486, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.46577946768060835, |
|
"grad_norm": 0.7280065680634561, |
|
"learning_rate": 1.8953373715617646e-05, |
|
"loss": 0.4369, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.467680608365019, |
|
"grad_norm": 0.8010993716940912, |
|
"learning_rate": 1.893853336202983e-05, |
|
"loss": 0.4235, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.4695817490494297, |
|
"grad_norm": 0.8899457650418342, |
|
"learning_rate": 1.892359442968475e-05, |
|
"loss": 0.405, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.4714828897338403, |
|
"grad_norm": 0.7812008992593449, |
|
"learning_rate": 1.8908557083336668e-05, |
|
"loss": 0.4266, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.47338403041825095, |
|
"grad_norm": 0.8451147767780671, |
|
"learning_rate": 1.889342148882519e-05, |
|
"loss": 0.4221, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.4752851711026616, |
|
"grad_norm": 0.7808305640950098, |
|
"learning_rate": 1.8878187813073465e-05, |
|
"loss": 0.437, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.47718631178707227, |
|
"grad_norm": 0.7494563001346748, |
|
"learning_rate": 1.886285622408633e-05, |
|
"loss": 0.4195, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.4790874524714829, |
|
"grad_norm": 0.8477515641050383, |
|
"learning_rate": 1.8847426890948447e-05, |
|
"loss": 0.4271, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.48098859315589354, |
|
"grad_norm": 0.754403803693056, |
|
"learning_rate": 1.8831899983822475e-05, |
|
"loss": 0.4437, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.4828897338403042, |
|
"grad_norm": 0.8086680469988717, |
|
"learning_rate": 1.8816275673947148e-05, |
|
"loss": 0.4107, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.4847908745247148, |
|
"grad_norm": 0.7925479551334843, |
|
"learning_rate": 1.8800554133635417e-05, |
|
"loss": 0.4152, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.4866920152091255, |
|
"grad_norm": 0.7626048336664352, |
|
"learning_rate": 1.8784735536272543e-05, |
|
"loss": 0.433, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.48859315589353614, |
|
"grad_norm": 0.8603560876138445, |
|
"learning_rate": 1.8768820056314173e-05, |
|
"loss": 0.4142, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.49049429657794674, |
|
"grad_norm": 0.736530696765543, |
|
"learning_rate": 1.875280786928444e-05, |
|
"loss": 0.4366, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4923954372623574, |
|
"grad_norm": 0.8741756327011199, |
|
"learning_rate": 1.873669915177399e-05, |
|
"loss": 0.4361, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.49429657794676807, |
|
"grad_norm": 0.8008511094673902, |
|
"learning_rate": 1.872049408143808e-05, |
|
"loss": 0.4142, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.49619771863117873, |
|
"grad_norm": 0.7782774124949869, |
|
"learning_rate": 1.8704192836994578e-05, |
|
"loss": 0.4267, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.49809885931558934, |
|
"grad_norm": 0.835772295394541, |
|
"learning_rate": 1.8687795598222024e-05, |
|
"loss": 0.4484, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.7161910677629045, |
|
"learning_rate": 1.8671302545957628e-05, |
|
"loss": 0.433, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.5019011406844106, |
|
"grad_norm": 0.7129409060749992, |
|
"learning_rate": 1.8654713862095272e-05, |
|
"loss": 0.4268, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.5038022813688213, |
|
"grad_norm": 0.8746936224004868, |
|
"learning_rate": 1.8638029729583524e-05, |
|
"loss": 0.426, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5057034220532319, |
|
"grad_norm": 0.7199661881084182, |
|
"learning_rate": 1.8621250332423603e-05, |
|
"loss": 0.4206, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.5076045627376425, |
|
"grad_norm": 0.8776639159854762, |
|
"learning_rate": 1.860437585566736e-05, |
|
"loss": 0.4209, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.5095057034220533, |
|
"grad_norm": 0.7038553668788641, |
|
"learning_rate": 1.8587406485415226e-05, |
|
"loss": 0.4275, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.5114068441064639, |
|
"grad_norm": 0.8246952846777812, |
|
"learning_rate": 1.8570342408814173e-05, |
|
"loss": 0.4359, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.5133079847908745, |
|
"grad_norm": 0.8420390392108025, |
|
"learning_rate": 1.855318381405564e-05, |
|
"loss": 0.4478, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5152091254752852, |
|
"grad_norm": 0.8281462911560388, |
|
"learning_rate": 1.8535930890373467e-05, |
|
"loss": 0.4264, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.5171102661596958, |
|
"grad_norm": 0.7843657243577942, |
|
"learning_rate": 1.8518583828041787e-05, |
|
"loss": 0.4364, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.5190114068441065, |
|
"grad_norm": 0.7765503284681777, |
|
"learning_rate": 1.8501142818372964e-05, |
|
"loss": 0.4195, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.5209125475285171, |
|
"grad_norm": 0.7744347795836504, |
|
"learning_rate": 1.848360805371544e-05, |
|
"loss": 0.4371, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.5228136882129277, |
|
"grad_norm": 0.7931467086003418, |
|
"learning_rate": 1.8465979727451653e-05, |
|
"loss": 0.4192, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5247148288973384, |
|
"grad_norm": 0.759121399714454, |
|
"learning_rate": 1.8448258033995877e-05, |
|
"loss": 0.4266, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.526615969581749, |
|
"grad_norm": 0.7701454848778623, |
|
"learning_rate": 1.8430443168792087e-05, |
|
"loss": 0.4029, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5285171102661597, |
|
"grad_norm": 0.8947519037395579, |
|
"learning_rate": 1.8412535328311813e-05, |
|
"loss": 0.4264, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5304182509505704, |
|
"grad_norm": 0.764683578264793, |
|
"learning_rate": 1.8394534710051956e-05, |
|
"loss": 0.4293, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.532319391634981, |
|
"grad_norm": 0.9986327813906521, |
|
"learning_rate": 1.8376441512532617e-05, |
|
"loss": 0.4304, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5342205323193916, |
|
"grad_norm": 0.7530244817990461, |
|
"learning_rate": 1.835825593529492e-05, |
|
"loss": 0.4105, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5361216730038023, |
|
"grad_norm": 0.8782003043844607, |
|
"learning_rate": 1.833997817889878e-05, |
|
"loss": 0.417, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5380228136882129, |
|
"grad_norm": 0.8916898317005598, |
|
"learning_rate": 1.8321608444920738e-05, |
|
"loss": 0.4089, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5399239543726235, |
|
"grad_norm": 0.7931488574412902, |
|
"learning_rate": 1.830314693595169e-05, |
|
"loss": 0.4084, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5418250950570342, |
|
"grad_norm": 0.8743392401753514, |
|
"learning_rate": 1.828459385559468e-05, |
|
"loss": 0.4163, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5437262357414449, |
|
"grad_norm": 0.6993379251712359, |
|
"learning_rate": 1.8265949408462657e-05, |
|
"loss": 0.4215, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5456273764258555, |
|
"grad_norm": 0.8115000089946518, |
|
"learning_rate": 1.8247213800176192e-05, |
|
"loss": 0.4206, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5475285171102662, |
|
"grad_norm": 0.6832345985766087, |
|
"learning_rate": 1.8228387237361245e-05, |
|
"loss": 0.4184, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5494296577946768, |
|
"grad_norm": 0.6885893557235931, |
|
"learning_rate": 1.8209469927646863e-05, |
|
"loss": 0.4129, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5513307984790875, |
|
"grad_norm": 0.6951032366993848, |
|
"learning_rate": 1.8190462079662897e-05, |
|
"loss": 0.4276, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5532319391634981, |
|
"grad_norm": 0.7208573115005421, |
|
"learning_rate": 1.81713639030377e-05, |
|
"loss": 0.4158, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5551330798479087, |
|
"grad_norm": 0.7151189411118184, |
|
"learning_rate": 1.8152175608395814e-05, |
|
"loss": 0.4063, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5570342205323194, |
|
"grad_norm": 0.7618151350052784, |
|
"learning_rate": 1.8132897407355657e-05, |
|
"loss": 0.4228, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.55893536121673, |
|
"grad_norm": 0.7280925750642289, |
|
"learning_rate": 1.811352951252717e-05, |
|
"loss": 0.4118, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5608365019011406, |
|
"grad_norm": 0.6801705394482089, |
|
"learning_rate": 1.809407213750949e-05, |
|
"loss": 0.4211, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5627376425855514, |
|
"grad_norm": 0.7856044506860411, |
|
"learning_rate": 1.807452549688859e-05, |
|
"loss": 0.4388, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.564638783269962, |
|
"grad_norm": 0.794800402247415, |
|
"learning_rate": 1.8054889806234906e-05, |
|
"loss": 0.4566, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5665399239543726, |
|
"grad_norm": 0.8131356260561907, |
|
"learning_rate": 1.8035165282100963e-05, |
|
"loss": 0.4248, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5684410646387833, |
|
"grad_norm": 0.7464654000588856, |
|
"learning_rate": 1.8015352142018984e-05, |
|
"loss": 0.421, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5703422053231939, |
|
"grad_norm": 0.7131841313678775, |
|
"learning_rate": 1.799545060449851e-05, |
|
"loss": 0.4211, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5722433460076045, |
|
"grad_norm": 0.7387170406076679, |
|
"learning_rate": 1.797546088902396e-05, |
|
"loss": 0.4081, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5741444866920152, |
|
"grad_norm": 0.792108751509653, |
|
"learning_rate": 1.7955383216052224e-05, |
|
"loss": 0.4276, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5760456273764258, |
|
"grad_norm": 0.7300887999445866, |
|
"learning_rate": 1.7935217807010238e-05, |
|
"loss": 0.4246, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5779467680608364, |
|
"grad_norm": 0.8271445399660962, |
|
"learning_rate": 1.7914964884292543e-05, |
|
"loss": 0.4373, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5798479087452472, |
|
"grad_norm": 0.7178525468830346, |
|
"learning_rate": 1.7894624671258813e-05, |
|
"loss": 0.4353, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5817490494296578, |
|
"grad_norm": 0.7525552514804682, |
|
"learning_rate": 1.7874197392231414e-05, |
|
"loss": 0.4231, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5836501901140685, |
|
"grad_norm": 0.8362327721180475, |
|
"learning_rate": 1.7853683272492913e-05, |
|
"loss": 0.4215, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5855513307984791, |
|
"grad_norm": 0.839930364021564, |
|
"learning_rate": 1.7833082538283615e-05, |
|
"loss": 0.4125, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5874524714828897, |
|
"grad_norm": 0.776281344866687, |
|
"learning_rate": 1.7812395416799034e-05, |
|
"loss": 0.4047, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5893536121673004, |
|
"grad_norm": 0.7637957154344026, |
|
"learning_rate": 1.7791622136187422e-05, |
|
"loss": 0.4522, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.591254752851711, |
|
"grad_norm": 0.8049321143956409, |
|
"learning_rate": 1.7770762925547235e-05, |
|
"loss": 0.422, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5931558935361216, |
|
"grad_norm": 0.7253015248660504, |
|
"learning_rate": 1.7749818014924612e-05, |
|
"loss": 0.4286, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5950570342205324, |
|
"grad_norm": 0.7059888261108643, |
|
"learning_rate": 1.7728787635310828e-05, |
|
"loss": 0.4047, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.596958174904943, |
|
"grad_norm": 0.7824517701597381, |
|
"learning_rate": 1.770767201863976e-05, |
|
"loss": 0.4212, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5988593155893536, |
|
"grad_norm": 0.7268105386751071, |
|
"learning_rate": 1.7686471397785322e-05, |
|
"loss": 0.414, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.6007604562737643, |
|
"grad_norm": 0.7465031606207332, |
|
"learning_rate": 1.76651860065589e-05, |
|
"loss": 0.4212, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.6026615969581749, |
|
"grad_norm": 0.7793014868165488, |
|
"learning_rate": 1.764381607970677e-05, |
|
"loss": 0.4199, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.6045627376425855, |
|
"grad_norm": 0.776969130282458, |
|
"learning_rate": 1.7622361852907506e-05, |
|
"loss": 0.414, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.6064638783269962, |
|
"grad_norm": 0.8146335121895256, |
|
"learning_rate": 1.760082356276939e-05, |
|
"loss": 0.4403, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.6083650190114068, |
|
"grad_norm": 0.7441862022739212, |
|
"learning_rate": 1.75792014468278e-05, |
|
"loss": 0.419, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6102661596958175, |
|
"grad_norm": 0.7901745117506392, |
|
"learning_rate": 1.7557495743542586e-05, |
|
"loss": 0.4398, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.6121673003802282, |
|
"grad_norm": 0.739303646326241, |
|
"learning_rate": 1.7535706692295436e-05, |
|
"loss": 0.419, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.6140684410646388, |
|
"grad_norm": 0.7224930356712501, |
|
"learning_rate": 1.7513834533387256e-05, |
|
"loss": 0.4136, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.6159695817490495, |
|
"grad_norm": 0.7570371896775817, |
|
"learning_rate": 1.7491879508035488e-05, |
|
"loss": 0.4197, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.6178707224334601, |
|
"grad_norm": 0.6797463913573002, |
|
"learning_rate": 1.746984185837149e-05, |
|
"loss": 0.4065, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.6197718631178707, |
|
"grad_norm": 0.8680955704544816, |
|
"learning_rate": 1.744772182743782e-05, |
|
"loss": 0.4197, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.6216730038022814, |
|
"grad_norm": 0.6916049420985639, |
|
"learning_rate": 1.7425519659185596e-05, |
|
"loss": 0.4101, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.623574144486692, |
|
"grad_norm": 0.8117898782267594, |
|
"learning_rate": 1.740323559847179e-05, |
|
"loss": 0.4132, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.6254752851711026, |
|
"grad_norm": 0.6860477601327285, |
|
"learning_rate": 1.738086989105651e-05, |
|
"loss": 0.4079, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.6273764258555133, |
|
"grad_norm": 0.7354356036807334, |
|
"learning_rate": 1.735842278360032e-05, |
|
"loss": 0.435, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.629277566539924, |
|
"grad_norm": 0.7191376650164161, |
|
"learning_rate": 1.73358945236615e-05, |
|
"loss": 0.4161, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.6311787072243346, |
|
"grad_norm": 0.6842360700055289, |
|
"learning_rate": 1.7313285359693322e-05, |
|
"loss": 0.4302, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.6330798479087453, |
|
"grad_norm": 0.6472886708852534, |
|
"learning_rate": 1.7290595541041312e-05, |
|
"loss": 0.4031, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.6349809885931559, |
|
"grad_norm": 0.703931673583881, |
|
"learning_rate": 1.7267825317940494e-05, |
|
"loss": 0.4346, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.6368821292775665, |
|
"grad_norm": 0.7678042705630014, |
|
"learning_rate": 1.724497494151264e-05, |
|
"loss": 0.4104, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6387832699619772, |
|
"grad_norm": 0.7413059337967789, |
|
"learning_rate": 1.7222044663763484e-05, |
|
"loss": 0.4203, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6406844106463878, |
|
"grad_norm": 0.7367710114399577, |
|
"learning_rate": 1.7199034737579962e-05, |
|
"loss": 0.4083, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6425855513307985, |
|
"grad_norm": 0.6881715276339009, |
|
"learning_rate": 1.7175945416727405e-05, |
|
"loss": 0.4141, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6444866920152091, |
|
"grad_norm": 0.7662058535912438, |
|
"learning_rate": 1.7152776955846768e-05, |
|
"loss": 0.4131, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6463878326996197, |
|
"grad_norm": 0.7061712057750617, |
|
"learning_rate": 1.7129529610451775e-05, |
|
"loss": 0.412, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6482889733840305, |
|
"grad_norm": 0.7289089945193655, |
|
"learning_rate": 1.7106203636926154e-05, |
|
"loss": 0.4251, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.6501901140684411, |
|
"grad_norm": 0.7324639550070794, |
|
"learning_rate": 1.7082799292520767e-05, |
|
"loss": 0.4155, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.6520912547528517, |
|
"grad_norm": 0.791922279082062, |
|
"learning_rate": 1.7059316835350806e-05, |
|
"loss": 0.4273, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6539923954372624, |
|
"grad_norm": 0.7705902947783712, |
|
"learning_rate": 1.7035756524392924e-05, |
|
"loss": 0.4219, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.655893536121673, |
|
"grad_norm": 0.8763456771850822, |
|
"learning_rate": 1.7012118619482376e-05, |
|
"loss": 0.4233, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6577946768060836, |
|
"grad_norm": 0.732442433980309, |
|
"learning_rate": 1.6988403381310177e-05, |
|
"loss": 0.4351, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6596958174904943, |
|
"grad_norm": 0.9826668310848153, |
|
"learning_rate": 1.696461107142021e-05, |
|
"loss": 0.4244, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6615969581749049, |
|
"grad_norm": 0.6852179914988137, |
|
"learning_rate": 1.6940741952206342e-05, |
|
"loss": 0.4023, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.6634980988593155, |
|
"grad_norm": 0.7440795370888574, |
|
"learning_rate": 1.691679628690953e-05, |
|
"loss": 0.4253, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6653992395437263, |
|
"grad_norm": 0.7193784150797833, |
|
"learning_rate": 1.6892774339614927e-05, |
|
"loss": 0.4033, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6673003802281369, |
|
"grad_norm": 0.8156600376301576, |
|
"learning_rate": 1.686867637524896e-05, |
|
"loss": 0.4495, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6692015209125475, |
|
"grad_norm": 0.7176786429913768, |
|
"learning_rate": 1.6844502659576414e-05, |
|
"loss": 0.4208, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6711026615969582, |
|
"grad_norm": 0.7659504090904091, |
|
"learning_rate": 1.6820253459197493e-05, |
|
"loss": 0.4388, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6730038022813688, |
|
"grad_norm": 0.7978319359054401, |
|
"learning_rate": 1.679592904154489e-05, |
|
"loss": 0.4365, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6749049429657795, |
|
"grad_norm": 0.644489027594137, |
|
"learning_rate": 1.677152967488084e-05, |
|
"loss": 0.4148, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6768060836501901, |
|
"grad_norm": 0.7495183754977875, |
|
"learning_rate": 1.6747055628294134e-05, |
|
"loss": 0.4212, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6787072243346007, |
|
"grad_norm": 0.6912626966302459, |
|
"learning_rate": 1.6722507171697184e-05, |
|
"loss": 0.4184, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6806083650190115, |
|
"grad_norm": 0.6655628781553221, |
|
"learning_rate": 1.669788457582304e-05, |
|
"loss": 0.4249, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6825095057034221, |
|
"grad_norm": 0.791689238520468, |
|
"learning_rate": 1.6673188112222394e-05, |
|
"loss": 0.4084, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6844106463878327, |
|
"grad_norm": 0.7498636055362748, |
|
"learning_rate": 1.6648418053260585e-05, |
|
"loss": 0.4202, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6863117870722434, |
|
"grad_norm": 0.6809544908914978, |
|
"learning_rate": 1.6623574672114596e-05, |
|
"loss": 0.4311, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.688212927756654, |
|
"grad_norm": 0.707924479551307, |
|
"learning_rate": 1.6598658242770054e-05, |
|
"loss": 0.4226, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6901140684410646, |
|
"grad_norm": 0.7875192296853099, |
|
"learning_rate": 1.6573669040018202e-05, |
|
"loss": 0.4035, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6920152091254753, |
|
"grad_norm": 0.6989927093509163, |
|
"learning_rate": 1.6548607339452853e-05, |
|
"loss": 0.4015, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6939163498098859, |
|
"grad_norm": 0.7613004712226819, |
|
"learning_rate": 1.652347341746737e-05, |
|
"loss": 0.4196, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6958174904942965, |
|
"grad_norm": 0.7801064614311649, |
|
"learning_rate": 1.6498267551251618e-05, |
|
"loss": 0.415, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6977186311787072, |
|
"grad_norm": 0.7143915414824872, |
|
"learning_rate": 1.6472990018788884e-05, |
|
"loss": 0.4007, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6996197718631179, |
|
"grad_norm": 0.7213556382982281, |
|
"learning_rate": 1.644764109885284e-05, |
|
"loss": 0.435, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.7015209125475285, |
|
"grad_norm": 0.7937361641182753, |
|
"learning_rate": 1.642222107100446e-05, |
|
"loss": 0.415, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.7034220532319392, |
|
"grad_norm": 0.7077243928080609, |
|
"learning_rate": 1.6396730215588913e-05, |
|
"loss": 0.4128, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7053231939163498, |
|
"grad_norm": 0.7891653499369282, |
|
"learning_rate": 1.6371168813732514e-05, |
|
"loss": 0.4179, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.7072243346007605, |
|
"grad_norm": 0.764693769594645, |
|
"learning_rate": 1.6345537147339578e-05, |
|
"loss": 0.4165, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.7091254752851711, |
|
"grad_norm": 0.8271146906155411, |
|
"learning_rate": 1.6319835499089358e-05, |
|
"loss": 0.4042, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.7110266159695817, |
|
"grad_norm": 0.7509192905087371, |
|
"learning_rate": 1.6294064152432878e-05, |
|
"loss": 0.4061, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.7129277566539924, |
|
"grad_norm": 0.8345464270506578, |
|
"learning_rate": 1.626822339158985e-05, |
|
"loss": 0.4025, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.714828897338403, |
|
"grad_norm": 0.7072083557074277, |
|
"learning_rate": 1.6242313501545522e-05, |
|
"loss": 0.4173, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.7167300380228137, |
|
"grad_norm": 0.7293814090642944, |
|
"learning_rate": 1.621633476804752e-05, |
|
"loss": 0.4166, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.7186311787072244, |
|
"grad_norm": 0.846495558869141, |
|
"learning_rate": 1.6190287477602716e-05, |
|
"loss": 0.416, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.720532319391635, |
|
"grad_norm": 0.7085045302214626, |
|
"learning_rate": 1.6164171917474078e-05, |
|
"loss": 0.4009, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.7224334600760456, |
|
"grad_norm": 0.8925969184626525, |
|
"learning_rate": 1.6137988375677466e-05, |
|
"loss": 0.4268, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7243346007604563, |
|
"grad_norm": 0.7253513104874315, |
|
"learning_rate": 1.6111737140978495e-05, |
|
"loss": 0.4089, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.7262357414448669, |
|
"grad_norm": 0.8538365232177512, |
|
"learning_rate": 1.6085418502889315e-05, |
|
"loss": 0.4171, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.7281368821292775, |
|
"grad_norm": 0.884407137887338, |
|
"learning_rate": 1.6059032751665454e-05, |
|
"loss": 0.4368, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.7300380228136882, |
|
"grad_norm": 0.7597979823976849, |
|
"learning_rate": 1.6032580178302585e-05, |
|
"loss": 0.4313, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.7319391634980988, |
|
"grad_norm": 0.8665276836369039, |
|
"learning_rate": 1.600606107453333e-05, |
|
"loss": 0.411, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.7338403041825095, |
|
"grad_norm": 0.706717491048344, |
|
"learning_rate": 1.597947573282405e-05, |
|
"loss": 0.4067, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.7357414448669202, |
|
"grad_norm": 0.6879678689920503, |
|
"learning_rate": 1.5952824446371608e-05, |
|
"loss": 0.4252, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7376425855513308, |
|
"grad_norm": 0.7214214636724359, |
|
"learning_rate": 1.592610750910014e-05, |
|
"loss": 0.4136, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7395437262357415, |
|
"grad_norm": 0.721849313528591, |
|
"learning_rate": 1.589932521565781e-05, |
|
"loss": 0.4, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7414448669201521, |
|
"grad_norm": 0.6864079089694056, |
|
"learning_rate": 1.587247786141358e-05, |
|
"loss": 0.4107, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7433460076045627, |
|
"grad_norm": 0.7168944825196677, |
|
"learning_rate": 1.5845565742453906e-05, |
|
"loss": 0.4338, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7452471482889734, |
|
"grad_norm": 0.7182661630266403, |
|
"learning_rate": 1.581858915557953e-05, |
|
"loss": 0.4103, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.747148288973384, |
|
"grad_norm": 0.7213064699225278, |
|
"learning_rate": 1.5791548398302167e-05, |
|
"loss": 0.4349, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7490494296577946, |
|
"grad_norm": 0.6656926915900984, |
|
"learning_rate": 1.5764443768841234e-05, |
|
"loss": 0.3977, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7509505703422054, |
|
"grad_norm": 0.6587251971187607, |
|
"learning_rate": 1.5737275566120577e-05, |
|
"loss": 0.4188, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.752851711026616, |
|
"grad_norm": 0.6587010876487651, |
|
"learning_rate": 1.5710044089765144e-05, |
|
"loss": 0.4182, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7547528517110266, |
|
"grad_norm": 0.6971765373848173, |
|
"learning_rate": 1.5682749640097708e-05, |
|
"loss": 0.4139, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7566539923954373, |
|
"grad_norm": 0.6814783148658188, |
|
"learning_rate": 1.565539251813554e-05, |
|
"loss": 0.3984, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7585551330798479, |
|
"grad_norm": 0.6987036480754459, |
|
"learning_rate": 1.5627973025587093e-05, |
|
"loss": 0.4254, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7604562737642585, |
|
"grad_norm": 0.6913577141743426, |
|
"learning_rate": 1.560049146484868e-05, |
|
"loss": 0.4134, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7623574144486692, |
|
"grad_norm": 0.7249966354524016, |
|
"learning_rate": 1.5572948139001128e-05, |
|
"loss": 0.4174, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7642585551330798, |
|
"grad_norm": 0.6864586824036938, |
|
"learning_rate": 1.5545343351806443e-05, |
|
"loss": 0.3987, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7661596958174905, |
|
"grad_norm": 0.6909163969072585, |
|
"learning_rate": 1.551767740770446e-05, |
|
"loss": 0.4028, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7680608365019012, |
|
"grad_norm": 0.7665854158952898, |
|
"learning_rate": 1.5489950611809484e-05, |
|
"loss": 0.4036, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7699619771863118, |
|
"grad_norm": 0.6746980733298985, |
|
"learning_rate": 1.5462163269906928e-05, |
|
"loss": 0.4171, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7718631178707225, |
|
"grad_norm": 0.6888109049161354, |
|
"learning_rate": 1.5434315688449924e-05, |
|
"loss": 0.4192, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7737642585551331, |
|
"grad_norm": 0.680295967985405, |
|
"learning_rate": 1.5406408174555978e-05, |
|
"loss": 0.388, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7756653992395437, |
|
"grad_norm": 0.7573720244568496, |
|
"learning_rate": 1.5378441036003543e-05, |
|
"loss": 0.4571, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7775665399239544, |
|
"grad_norm": 0.7263352251395035, |
|
"learning_rate": 1.535041458122865e-05, |
|
"loss": 0.4103, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.779467680608365, |
|
"grad_norm": 0.6611458759741928, |
|
"learning_rate": 1.5322329119321508e-05, |
|
"loss": 0.396, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7813688212927756, |
|
"grad_norm": 0.6702451623154632, |
|
"learning_rate": 1.529418496002308e-05, |
|
"loss": 0.4085, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7832699619771863, |
|
"grad_norm": 0.6708900730058422, |
|
"learning_rate": 1.5265982413721662e-05, |
|
"loss": 0.3866, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.785171102661597, |
|
"grad_norm": 0.7202699620582907, |
|
"learning_rate": 1.5237721791449497e-05, |
|
"loss": 0.4223, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7870722433460076, |
|
"grad_norm": 0.7183698093396678, |
|
"learning_rate": 1.5209403404879305e-05, |
|
"loss": 0.4028, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7889733840304183, |
|
"grad_norm": 0.6544471597225477, |
|
"learning_rate": 1.5181027566320858e-05, |
|
"loss": 0.4041, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7908745247148289, |
|
"grad_norm": 0.7083722634196089, |
|
"learning_rate": 1.5152594588717544e-05, |
|
"loss": 0.42, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7927756653992395, |
|
"grad_norm": 0.712188607476995, |
|
"learning_rate": 1.5124104785642909e-05, |
|
"loss": 0.4121, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7946768060836502, |
|
"grad_norm": 0.706428015235297, |
|
"learning_rate": 1.5095558471297196e-05, |
|
"loss": 0.407, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7965779467680608, |
|
"grad_norm": 0.7290748424935284, |
|
"learning_rate": 1.5066955960503893e-05, |
|
"loss": 0.3899, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7984790874524715, |
|
"grad_norm": 0.6700744527803965, |
|
"learning_rate": 1.5038297568706244e-05, |
|
"loss": 0.3986, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.8003802281368821, |
|
"grad_norm": 0.7369358259318646, |
|
"learning_rate": 1.5009583611963772e-05, |
|
"loss": 0.409, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.8022813688212928, |
|
"grad_norm": 0.6219125276106352, |
|
"learning_rate": 1.4980814406948806e-05, |
|
"loss": 0.4077, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.8041825095057035, |
|
"grad_norm": 0.7010378671600722, |
|
"learning_rate": 1.4951990270942991e-05, |
|
"loss": 0.4077, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.8060836501901141, |
|
"grad_norm": 0.6726478792952649, |
|
"learning_rate": 1.492311152183376e-05, |
|
"loss": 0.3896, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.8079847908745247, |
|
"grad_norm": 0.7064600715429418, |
|
"learning_rate": 1.4894178478110856e-05, |
|
"loss": 0.4135, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.8098859315589354, |
|
"grad_norm": 0.6333196467789542, |
|
"learning_rate": 1.4865191458862816e-05, |
|
"loss": 0.3838, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.811787072243346, |
|
"grad_norm": 0.7082130527531607, |
|
"learning_rate": 1.4836150783773442e-05, |
|
"loss": 0.4014, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.8136882129277566, |
|
"grad_norm": 0.6875888043485031, |
|
"learning_rate": 1.4807056773118276e-05, |
|
"loss": 0.4244, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.8155893536121673, |
|
"grad_norm": 0.6320161894647373, |
|
"learning_rate": 1.4777909747761085e-05, |
|
"loss": 0.3772, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.8174904942965779, |
|
"grad_norm": 0.688853241624807, |
|
"learning_rate": 1.4748710029150296e-05, |
|
"loss": 0.4249, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.8193916349809885, |
|
"grad_norm": 0.6952309844328408, |
|
"learning_rate": 1.4719457939315468e-05, |
|
"loss": 0.4095, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.8212927756653993, |
|
"grad_norm": 0.6926853800220879, |
|
"learning_rate": 1.4690153800863743e-05, |
|
"loss": 0.3959, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.8231939163498099, |
|
"grad_norm": 0.6882725839401531, |
|
"learning_rate": 1.4660797936976278e-05, |
|
"loss": 0.3914, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.8250950570342205, |
|
"grad_norm": 0.6937884472100325, |
|
"learning_rate": 1.4631390671404682e-05, |
|
"loss": 0.3954, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.8269961977186312, |
|
"grad_norm": 0.6983089912012388, |
|
"learning_rate": 1.460193232846745e-05, |
|
"loss": 0.3944, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8288973384030418, |
|
"grad_norm": 0.7195173335044036, |
|
"learning_rate": 1.4572423233046386e-05, |
|
"loss": 0.4331, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.8307984790874525, |
|
"grad_norm": 0.6580077754873673, |
|
"learning_rate": 1.4542863710583022e-05, |
|
"loss": 0.3942, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.8326996197718631, |
|
"grad_norm": 0.6733246723166247, |
|
"learning_rate": 1.4513254087075015e-05, |
|
"loss": 0.4085, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.8346007604562737, |
|
"grad_norm": 0.6657800290029222, |
|
"learning_rate": 1.4483594689072571e-05, |
|
"loss": 0.4053, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.8365019011406845, |
|
"grad_norm": 0.6746881212167681, |
|
"learning_rate": 1.4453885843674837e-05, |
|
"loss": 0.3814, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8384030418250951, |
|
"grad_norm": 0.7547054593243069, |
|
"learning_rate": 1.4424127878526278e-05, |
|
"loss": 0.4032, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.8403041825095057, |
|
"grad_norm": 0.6725366331373717, |
|
"learning_rate": 1.4394321121813093e-05, |
|
"loss": 0.4104, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8422053231939164, |
|
"grad_norm": 0.7565414606466715, |
|
"learning_rate": 1.436446590225957e-05, |
|
"loss": 0.4035, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.844106463878327, |
|
"grad_norm": 0.713176979263959, |
|
"learning_rate": 1.433456254912447e-05, |
|
"loss": 0.3911, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8460076045627376, |
|
"grad_norm": 0.6509118241865428, |
|
"learning_rate": 1.4304611392197399e-05, |
|
"loss": 0.3984, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8479087452471483, |
|
"grad_norm": 0.8340268492547279, |
|
"learning_rate": 1.427461276179517e-05, |
|
"loss": 0.4056, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8498098859315589, |
|
"grad_norm": 0.6758952421736762, |
|
"learning_rate": 1.4244566988758152e-05, |
|
"loss": 0.3999, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8517110266159695, |
|
"grad_norm": 0.6827895953297911, |
|
"learning_rate": 1.4214474404446633e-05, |
|
"loss": 0.4017, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8536121673003803, |
|
"grad_norm": 0.7711863434469166, |
|
"learning_rate": 1.4184335340737158e-05, |
|
"loss": 0.4071, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8555133079847909, |
|
"grad_norm": 0.6781663142258388, |
|
"learning_rate": 1.4154150130018867e-05, |
|
"loss": 0.4044, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8574144486692015, |
|
"grad_norm": 0.7551604766767213, |
|
"learning_rate": 1.4123919105189836e-05, |
|
"loss": 0.4213, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8593155893536122, |
|
"grad_norm": 0.7250490782553661, |
|
"learning_rate": 1.4093642599653406e-05, |
|
"loss": 0.3896, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8612167300380228, |
|
"grad_norm": 0.6783847587289916, |
|
"learning_rate": 1.40633209473145e-05, |
|
"loss": 0.4228, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8631178707224335, |
|
"grad_norm": 0.6679146169714046, |
|
"learning_rate": 1.4032954482575938e-05, |
|
"loss": 0.4171, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8650190114068441, |
|
"grad_norm": 0.6940343012185746, |
|
"learning_rate": 1.4002543540334766e-05, |
|
"loss": 0.4151, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8669201520912547, |
|
"grad_norm": 0.6984027602326602, |
|
"learning_rate": 1.3972088455978537e-05, |
|
"loss": 0.3984, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8688212927756654, |
|
"grad_norm": 0.6610983027702386, |
|
"learning_rate": 1.3941589565381635e-05, |
|
"loss": 0.407, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.870722433460076, |
|
"grad_norm": 0.7624260952295645, |
|
"learning_rate": 1.391104720490156e-05, |
|
"loss": 0.4152, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8726235741444867, |
|
"grad_norm": 0.6725609036577359, |
|
"learning_rate": 1.3880461711375224e-05, |
|
"loss": 0.3933, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8745247148288974, |
|
"grad_norm": 0.718319776967452, |
|
"learning_rate": 1.3849833422115221e-05, |
|
"loss": 0.4071, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.876425855513308, |
|
"grad_norm": 0.6981976290895487, |
|
"learning_rate": 1.3819162674906134e-05, |
|
"loss": 0.4017, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8783269961977186, |
|
"grad_norm": 0.6494752933923883, |
|
"learning_rate": 1.378844980800078e-05, |
|
"loss": 0.392, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8802281368821293, |
|
"grad_norm": 0.6854761697231652, |
|
"learning_rate": 1.3757695160116502e-05, |
|
"loss": 0.4106, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8821292775665399, |
|
"grad_norm": 0.6635941524739034, |
|
"learning_rate": 1.3726899070431423e-05, |
|
"loss": 0.3912, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8840304182509505, |
|
"grad_norm": 0.6926983482111053, |
|
"learning_rate": 1.3696061878580707e-05, |
|
"loss": 0.4148, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8859315589353612, |
|
"grad_norm": 0.773661723974928, |
|
"learning_rate": 1.3665183924652817e-05, |
|
"loss": 0.4141, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8878326996197718, |
|
"grad_norm": 0.7205839110020779, |
|
"learning_rate": 1.3634265549185755e-05, |
|
"loss": 0.3836, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8897338403041825, |
|
"grad_norm": 0.6732299481307472, |
|
"learning_rate": 1.3603307093163319e-05, |
|
"loss": 0.42, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8916349809885932, |
|
"grad_norm": 0.7612253415936112, |
|
"learning_rate": 1.3572308898011328e-05, |
|
"loss": 0.4097, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8935361216730038, |
|
"grad_norm": 0.6546109810554149, |
|
"learning_rate": 1.3541271305593878e-05, |
|
"loss": 0.3992, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8954372623574145, |
|
"grad_norm": 0.6585874637257052, |
|
"learning_rate": 1.3510194658209547e-05, |
|
"loss": 0.4027, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8973384030418251, |
|
"grad_norm": 0.7652282223913617, |
|
"learning_rate": 1.3479079298587634e-05, |
|
"loss": 0.4005, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8992395437262357, |
|
"grad_norm": 0.6933073840200412, |
|
"learning_rate": 1.3447925569884374e-05, |
|
"loss": 0.4087, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.9011406844106464, |
|
"grad_norm": 0.6550862975055665, |
|
"learning_rate": 1.3416733815679166e-05, |
|
"loss": 0.3978, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.903041825095057, |
|
"grad_norm": 0.7256952053007989, |
|
"learning_rate": 1.3385504379970764e-05, |
|
"loss": 0.4046, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.9049429657794676, |
|
"grad_norm": 0.7148585643351413, |
|
"learning_rate": 1.3354237607173494e-05, |
|
"loss": 0.3978, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.9068441064638784, |
|
"grad_norm": 0.7142015421348241, |
|
"learning_rate": 1.3322933842113457e-05, |
|
"loss": 0.4209, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.908745247148289, |
|
"grad_norm": 0.687730106015921, |
|
"learning_rate": 1.3291593430024727e-05, |
|
"loss": 0.4043, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.9106463878326996, |
|
"grad_norm": 0.6822137552970915, |
|
"learning_rate": 1.3260216716545534e-05, |
|
"loss": 0.4171, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.9125475285171103, |
|
"grad_norm": 0.6377657140279679, |
|
"learning_rate": 1.3228804047714462e-05, |
|
"loss": 0.3887, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9144486692015209, |
|
"grad_norm": 0.6966064429186608, |
|
"learning_rate": 1.319735576996663e-05, |
|
"loss": 0.422, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.9163498098859315, |
|
"grad_norm": 0.6902117617603477, |
|
"learning_rate": 1.3165872230129869e-05, |
|
"loss": 0.404, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.9182509505703422, |
|
"grad_norm": 0.6325022995547926, |
|
"learning_rate": 1.3134353775420895e-05, |
|
"loss": 0.3899, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.9201520912547528, |
|
"grad_norm": 0.6311490054113847, |
|
"learning_rate": 1.3102800753441488e-05, |
|
"loss": 0.404, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.9220532319391636, |
|
"grad_norm": 0.7119891602684172, |
|
"learning_rate": 1.3071213512174655e-05, |
|
"loss": 0.3847, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.9239543726235742, |
|
"grad_norm": 0.7140520982978685, |
|
"learning_rate": 1.3039592399980785e-05, |
|
"loss": 0.389, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.9258555133079848, |
|
"grad_norm": 0.6648479626315674, |
|
"learning_rate": 1.3007937765593818e-05, |
|
"loss": 0.4076, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.9277566539923955, |
|
"grad_norm": 0.7207022349815417, |
|
"learning_rate": 1.2976249958117395e-05, |
|
"loss": 0.4197, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.9296577946768061, |
|
"grad_norm": 0.6538093572467635, |
|
"learning_rate": 1.2944529327021002e-05, |
|
"loss": 0.4003, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.9315589353612167, |
|
"grad_norm": 0.6145457797778117, |
|
"learning_rate": 1.291277622213612e-05, |
|
"loss": 0.3861, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9334600760456274, |
|
"grad_norm": 0.6717081075163996, |
|
"learning_rate": 1.2880990993652379e-05, |
|
"loss": 0.404, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.935361216730038, |
|
"grad_norm": 0.6381375708758967, |
|
"learning_rate": 1.2849173992113669e-05, |
|
"loss": 0.3907, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.9372623574144486, |
|
"grad_norm": 0.6405334578995517, |
|
"learning_rate": 1.2817325568414299e-05, |
|
"loss": 0.3966, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.9391634980988594, |
|
"grad_norm": 0.6731779594600235, |
|
"learning_rate": 1.2785446073795118e-05, |
|
"loss": 0.4034, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.94106463878327, |
|
"grad_norm": 0.6969180914718714, |
|
"learning_rate": 1.2753535859839638e-05, |
|
"loss": 0.4121, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9429657794676806, |
|
"grad_norm": 0.618180483581813, |
|
"learning_rate": 1.272159527847016e-05, |
|
"loss": 0.3816, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.9448669201520913, |
|
"grad_norm": 0.6952759330447784, |
|
"learning_rate": 1.2689624681943897e-05, |
|
"loss": 0.413, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.9467680608365019, |
|
"grad_norm": 0.6898427307299725, |
|
"learning_rate": 1.2657624422849077e-05, |
|
"loss": 0.4179, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9486692015209125, |
|
"grad_norm": 0.699291230631293, |
|
"learning_rate": 1.2625594854101066e-05, |
|
"loss": 0.4157, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9505703422053232, |
|
"grad_norm": 0.6694458644588648, |
|
"learning_rate": 1.2593536328938471e-05, |
|
"loss": 0.406, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9524714828897338, |
|
"grad_norm": 0.6470287009480223, |
|
"learning_rate": 1.2561449200919253e-05, |
|
"loss": 0.3892, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9543726235741445, |
|
"grad_norm": 0.6077026389317716, |
|
"learning_rate": 1.2529333823916807e-05, |
|
"loss": 0.385, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9562737642585551, |
|
"grad_norm": 0.7033970824785918, |
|
"learning_rate": 1.2497190552116082e-05, |
|
"loss": 0.4175, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9581749049429658, |
|
"grad_norm": 0.6803712575864149, |
|
"learning_rate": 1.2465019740009662e-05, |
|
"loss": 0.3934, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.9600760456273765, |
|
"grad_norm": 0.6965934029353575, |
|
"learning_rate": 1.2432821742393854e-05, |
|
"loss": 0.412, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9619771863117871, |
|
"grad_norm": 0.6163645226860306, |
|
"learning_rate": 1.2400596914364792e-05, |
|
"loss": 0.3975, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9638783269961977, |
|
"grad_norm": 0.7311609087122628, |
|
"learning_rate": 1.2368345611314508e-05, |
|
"loss": 0.4082, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9657794676806084, |
|
"grad_norm": 0.6183545409406597, |
|
"learning_rate": 1.2336068188927002e-05, |
|
"loss": 0.3789, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.967680608365019, |
|
"grad_norm": 0.6792795167354, |
|
"learning_rate": 1.2303765003174342e-05, |
|
"loss": 0.4054, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9695817490494296, |
|
"grad_norm": 0.6667740298389744, |
|
"learning_rate": 1.2271436410312727e-05, |
|
"loss": 0.4252, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9714828897338403, |
|
"grad_norm": 0.7651718493141103, |
|
"learning_rate": 1.2239082766878557e-05, |
|
"loss": 0.4111, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.973384030418251, |
|
"grad_norm": 0.7086287990509619, |
|
"learning_rate": 1.2206704429684504e-05, |
|
"loss": 0.405, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9752851711026616, |
|
"grad_norm": 0.6852672674776774, |
|
"learning_rate": 1.2174301755815572e-05, |
|
"loss": 0.414, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9771863117870723, |
|
"grad_norm": 0.7193466666893342, |
|
"learning_rate": 1.2141875102625166e-05, |
|
"loss": 0.3903, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9790874524714829, |
|
"grad_norm": 0.6858267717011416, |
|
"learning_rate": 1.2109424827731144e-05, |
|
"loss": 0.4138, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9809885931558935, |
|
"grad_norm": 0.7580356802405223, |
|
"learning_rate": 1.2076951289011884e-05, |
|
"loss": 0.4244, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9828897338403042, |
|
"grad_norm": 0.6622004562904025, |
|
"learning_rate": 1.204445484460232e-05, |
|
"loss": 0.3984, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9847908745247148, |
|
"grad_norm": 0.6636607447233084, |
|
"learning_rate": 1.2011935852890004e-05, |
|
"loss": 0.421, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9866920152091255, |
|
"grad_norm": 0.6495045702865927, |
|
"learning_rate": 1.1979394672511156e-05, |
|
"loss": 0.3729, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9885931558935361, |
|
"grad_norm": 0.6981543503300218, |
|
"learning_rate": 1.19468316623467e-05, |
|
"loss": 0.4029, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9904942965779467, |
|
"grad_norm": 0.6662456471440278, |
|
"learning_rate": 1.1914247181518312e-05, |
|
"loss": 0.4002, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9923954372623575, |
|
"grad_norm": 0.6604787309214006, |
|
"learning_rate": 1.1881641589384456e-05, |
|
"loss": 0.4031, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9942965779467681, |
|
"grad_norm": 0.650902110560888, |
|
"learning_rate": 1.1849015245536424e-05, |
|
"loss": 0.4031, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9961977186311787, |
|
"grad_norm": 0.6798864680139955, |
|
"learning_rate": 1.1816368509794365e-05, |
|
"loss": 0.4123, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9980988593155894, |
|
"grad_norm": 0.628617009647784, |
|
"learning_rate": 1.1783701742203326e-05, |
|
"loss": 0.3965, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.8265708108728387, |
|
"learning_rate": 1.1751015303029272e-05, |
|
"loss": 0.385, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.0019011406844107, |
|
"grad_norm": 0.7680727922669428, |
|
"learning_rate": 1.1718309552755118e-05, |
|
"loss": 0.3102, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.0038022813688212, |
|
"grad_norm": 1.0059228784897538, |
|
"learning_rate": 1.1685584852076746e-05, |
|
"loss": 0.3157, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.005703422053232, |
|
"grad_norm": 0.6815057663621917, |
|
"learning_rate": 1.1652841561899042e-05, |
|
"loss": 0.307, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.0076045627376427, |
|
"grad_norm": 0.6843106380799396, |
|
"learning_rate": 1.1620080043331901e-05, |
|
"loss": 0.3048, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.0095057034220531, |
|
"grad_norm": 0.8839345706214902, |
|
"learning_rate": 1.1587300657686254e-05, |
|
"loss": 0.3108, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.0114068441064639, |
|
"grad_norm": 0.8303137006693243, |
|
"learning_rate": 1.1554503766470069e-05, |
|
"loss": 0.3047, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.0133079847908746, |
|
"grad_norm": 0.8892874126831852, |
|
"learning_rate": 1.1521689731384391e-05, |
|
"loss": 0.3063, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.015209125475285, |
|
"grad_norm": 0.8054007880181805, |
|
"learning_rate": 1.1488858914319321e-05, |
|
"loss": 0.3027, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.0171102661596958, |
|
"grad_norm": 0.7287944687516933, |
|
"learning_rate": 1.1456011677350052e-05, |
|
"loss": 0.2995, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.0190114068441065, |
|
"grad_norm": 0.7348972384405209, |
|
"learning_rate": 1.1423148382732854e-05, |
|
"loss": 0.3088, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.020912547528517, |
|
"grad_norm": 0.8489682812305904, |
|
"learning_rate": 1.1390269392901096e-05, |
|
"loss": 0.307, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.0228136882129277, |
|
"grad_norm": 0.7904156627221454, |
|
"learning_rate": 1.1357375070461241e-05, |
|
"loss": 0.2947, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.0247148288973384, |
|
"grad_norm": 0.6982214896146977, |
|
"learning_rate": 1.1324465778188846e-05, |
|
"loss": 0.2902, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.026615969581749, |
|
"grad_norm": 0.7784279498569209, |
|
"learning_rate": 1.1291541879024568e-05, |
|
"loss": 0.3027, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.0285171102661597, |
|
"grad_norm": 0.7453010549356432, |
|
"learning_rate": 1.1258603736070145e-05, |
|
"loss": 0.2725, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.0304182509505704, |
|
"grad_norm": 0.759587479503962, |
|
"learning_rate": 1.1225651712584413e-05, |
|
"loss": 0.3005, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.032319391634981, |
|
"grad_norm": 0.7533765897520451, |
|
"learning_rate": 1.1192686171979288e-05, |
|
"loss": 0.2931, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.0342205323193916, |
|
"grad_norm": 0.9132088527957141, |
|
"learning_rate": 1.1159707477815756e-05, |
|
"loss": 0.2883, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.0361216730038023, |
|
"grad_norm": 0.7770234792600628, |
|
"learning_rate": 1.1126715993799875e-05, |
|
"loss": 0.295, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.038022813688213, |
|
"grad_norm": 0.7090004784228509, |
|
"learning_rate": 1.1093712083778748e-05, |
|
"loss": 0.2812, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.0399239543726235, |
|
"grad_norm": 0.9581263392106095, |
|
"learning_rate": 1.1060696111736515e-05, |
|
"loss": 0.3059, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.0418250950570342, |
|
"grad_norm": 0.8609157729184757, |
|
"learning_rate": 1.1027668441790358e-05, |
|
"loss": 0.2977, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.043726235741445, |
|
"grad_norm": 0.8034824100554511, |
|
"learning_rate": 1.099462943818646e-05, |
|
"loss": 0.3047, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.0456273764258555, |
|
"grad_norm": 0.8362078393611865, |
|
"learning_rate": 1.0961579465295987e-05, |
|
"loss": 0.2854, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.0475285171102662, |
|
"grad_norm": 0.8052304462734117, |
|
"learning_rate": 1.0928518887611099e-05, |
|
"loss": 0.3048, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.049429657794677, |
|
"grad_norm": 0.6757313280701089, |
|
"learning_rate": 1.0895448069740902e-05, |
|
"loss": 0.2926, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.0513307984790874, |
|
"grad_norm": 0.757814413923201, |
|
"learning_rate": 1.0862367376407433e-05, |
|
"loss": 0.2967, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.053231939163498, |
|
"grad_norm": 0.8824935621625047, |
|
"learning_rate": 1.0829277172441648e-05, |
|
"loss": 0.3089, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.0551330798479088, |
|
"grad_norm": 0.6561496691480129, |
|
"learning_rate": 1.0796177822779384e-05, |
|
"loss": 0.2763, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.0570342205323193, |
|
"grad_norm": 0.7447663394849013, |
|
"learning_rate": 1.0763069692457346e-05, |
|
"loss": 0.3005, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.05893536121673, |
|
"grad_norm": 0.7583657632247124, |
|
"learning_rate": 1.0729953146609076e-05, |
|
"loss": 0.2945, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.0608365019011408, |
|
"grad_norm": 0.7254406369200254, |
|
"learning_rate": 1.0696828550460928e-05, |
|
"loss": 0.2984, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.0627376425855513, |
|
"grad_norm": 0.7149689970744459, |
|
"learning_rate": 1.0663696269328034e-05, |
|
"loss": 0.3029, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.064638783269962, |
|
"grad_norm": 0.7554356406107401, |
|
"learning_rate": 1.0630556668610286e-05, |
|
"loss": 0.2799, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.0665399239543727, |
|
"grad_norm": 0.7162776578629291, |
|
"learning_rate": 1.059741011378829e-05, |
|
"loss": 0.2908, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.0684410646387832, |
|
"grad_norm": 0.6767578441666678, |
|
"learning_rate": 1.0564256970419367e-05, |
|
"loss": 0.2894, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.070342205323194, |
|
"grad_norm": 0.7246406918900237, |
|
"learning_rate": 1.0531097604133473e-05, |
|
"loss": 0.2963, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.0722433460076046, |
|
"grad_norm": 0.7819364408744047, |
|
"learning_rate": 1.0497932380629207e-05, |
|
"loss": 0.2869, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0741444866920151, |
|
"grad_norm": 0.707692605716939, |
|
"learning_rate": 1.0464761665669771e-05, |
|
"loss": 0.306, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.0760456273764258, |
|
"grad_norm": 0.6881486143947666, |
|
"learning_rate": 1.0431585825078916e-05, |
|
"loss": 0.2921, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.0779467680608366, |
|
"grad_norm": 0.6996755754699221, |
|
"learning_rate": 1.0398405224736927e-05, |
|
"loss": 0.301, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.079847908745247, |
|
"grad_norm": 0.6813043003304055, |
|
"learning_rate": 1.0365220230576592e-05, |
|
"loss": 0.2834, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.0817490494296578, |
|
"grad_norm": 0.728597382505159, |
|
"learning_rate": 1.0332031208579133e-05, |
|
"loss": 0.2955, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.0836501901140685, |
|
"grad_norm": 0.7013527462079502, |
|
"learning_rate": 1.0298838524770212e-05, |
|
"loss": 0.2953, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.085551330798479, |
|
"grad_norm": 0.6876135582500057, |
|
"learning_rate": 1.0265642545215872e-05, |
|
"loss": 0.2958, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.0874524714828897, |
|
"grad_norm": 0.6923169491384853, |
|
"learning_rate": 1.0232443636018502e-05, |
|
"loss": 0.2975, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.0893536121673004, |
|
"grad_norm": 0.6632548937249962, |
|
"learning_rate": 1.0199242163312794e-05, |
|
"loss": 0.2855, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.091254752851711, |
|
"grad_norm": 0.6494008105848307, |
|
"learning_rate": 1.0166038493261723e-05, |
|
"loss": 0.2957, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.0931558935361216, |
|
"grad_norm": 0.735892211231415, |
|
"learning_rate": 1.013283299205249e-05, |
|
"loss": 0.2976, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.0950570342205324, |
|
"grad_norm": 0.6959908109311861, |
|
"learning_rate": 1.0099626025892491e-05, |
|
"loss": 0.2874, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.0969581749049429, |
|
"grad_norm": 0.7177111387542888, |
|
"learning_rate": 1.0066417961005283e-05, |
|
"loss": 0.2979, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.0988593155893536, |
|
"grad_norm": 0.6953888719227289, |
|
"learning_rate": 1.0033209163626539e-05, |
|
"loss": 0.292, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.1007604562737643, |
|
"grad_norm": 0.7083766299450723, |
|
"learning_rate": 1e-05, |
|
"loss": 0.2829, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.102661596958175, |
|
"grad_norm": 0.7080381925816993, |
|
"learning_rate": 9.966790836373465e-06, |
|
"loss": 0.2865, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.1045627376425855, |
|
"grad_norm": 0.7286544209683984, |
|
"learning_rate": 9.933582038994719e-06, |
|
"loss": 0.2997, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.1064638783269962, |
|
"grad_norm": 0.6761518102078841, |
|
"learning_rate": 9.90037397410751e-06, |
|
"loss": 0.2906, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.108365019011407, |
|
"grad_norm": 0.7230980131346857, |
|
"learning_rate": 9.867167007947511e-06, |
|
"loss": 0.2963, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.1102661596958174, |
|
"grad_norm": 0.7364339625878032, |
|
"learning_rate": 9.833961506738282e-06, |
|
"loss": 0.2972, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.1121673003802282, |
|
"grad_norm": 0.73189078228728, |
|
"learning_rate": 9.80075783668721e-06, |
|
"loss": 0.2954, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.1140684410646389, |
|
"grad_norm": 0.7421538721938589, |
|
"learning_rate": 9.767556363981503e-06, |
|
"loss": 0.2892, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.1159695817490494, |
|
"grad_norm": 0.709171276811597, |
|
"learning_rate": 9.734357454784131e-06, |
|
"loss": 0.2805, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.11787072243346, |
|
"grad_norm": 0.6904742526194513, |
|
"learning_rate": 9.701161475229791e-06, |
|
"loss": 0.2913, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.1197718631178708, |
|
"grad_norm": 0.7150803253416469, |
|
"learning_rate": 9.66796879142087e-06, |
|
"loss": 0.2978, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.1216730038022813, |
|
"grad_norm": 0.6739329092291103, |
|
"learning_rate": 9.634779769423412e-06, |
|
"loss": 0.2815, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.123574144486692, |
|
"grad_norm": 0.7250322303380577, |
|
"learning_rate": 9.601594775263073e-06, |
|
"loss": 0.286, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.1254752851711027, |
|
"grad_norm": 0.8094477868153702, |
|
"learning_rate": 9.568414174921085e-06, |
|
"loss": 0.3124, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.1273764258555132, |
|
"grad_norm": 0.7502812677192022, |
|
"learning_rate": 9.535238334330234e-06, |
|
"loss": 0.2988, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.129277566539924, |
|
"grad_norm": 0.7119909198968319, |
|
"learning_rate": 9.502067619370794e-06, |
|
"loss": 0.2992, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.1311787072243347, |
|
"grad_norm": 0.7105064977926391, |
|
"learning_rate": 9.468902395866532e-06, |
|
"loss": 0.2989, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.1330798479087452, |
|
"grad_norm": 0.7321763340548684, |
|
"learning_rate": 9.435743029580638e-06, |
|
"loss": 0.2899, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.1349809885931559, |
|
"grad_norm": 0.6961265075411925, |
|
"learning_rate": 9.402589886211711e-06, |
|
"loss": 0.2888, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.1368821292775666, |
|
"grad_norm": 0.7258014284775477, |
|
"learning_rate": 9.369443331389718e-06, |
|
"loss": 0.3089, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.138783269961977, |
|
"grad_norm": 0.7634271257053743, |
|
"learning_rate": 9.336303730671968e-06, |
|
"loss": 0.2938, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.1406844106463878, |
|
"grad_norm": 0.73614464279472, |
|
"learning_rate": 9.303171449539074e-06, |
|
"loss": 0.3038, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.1425855513307985, |
|
"grad_norm": 0.6797341272857729, |
|
"learning_rate": 9.270046853390924e-06, |
|
"loss": 0.2935, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.144486692015209, |
|
"grad_norm": 0.7643628334850742, |
|
"learning_rate": 9.236930307542654e-06, |
|
"loss": 0.2926, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.1463878326996197, |
|
"grad_norm": 0.6955099869929693, |
|
"learning_rate": 9.203822177220621e-06, |
|
"loss": 0.2827, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.1482889733840305, |
|
"grad_norm": 0.7538104752903337, |
|
"learning_rate": 9.170722827558357e-06, |
|
"loss": 0.295, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.1501901140684412, |
|
"grad_norm": 0.7443764594532538, |
|
"learning_rate": 9.13763262359257e-06, |
|
"loss": 0.3024, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.1520912547528517, |
|
"grad_norm": 0.7400515219435118, |
|
"learning_rate": 9.104551930259101e-06, |
|
"loss": 0.2975, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.1539923954372624, |
|
"grad_norm": 0.7377989638091142, |
|
"learning_rate": 9.071481112388905e-06, |
|
"loss": 0.3021, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.1558935361216731, |
|
"grad_norm": 0.6351873542370496, |
|
"learning_rate": 9.038420534704015e-06, |
|
"loss": 0.287, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.1577946768060836, |
|
"grad_norm": 0.7257461591215666, |
|
"learning_rate": 9.005370561813545e-06, |
|
"loss": 0.3081, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.1596958174904943, |
|
"grad_norm": 0.671370248912275, |
|
"learning_rate": 8.972331558209644e-06, |
|
"loss": 0.3062, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.161596958174905, |
|
"grad_norm": 0.6493761967149301, |
|
"learning_rate": 8.939303888263485e-06, |
|
"loss": 0.2962, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.1634980988593155, |
|
"grad_norm": 0.686426789947725, |
|
"learning_rate": 8.906287916221259e-06, |
|
"loss": 0.2906, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.1653992395437263, |
|
"grad_norm": 0.690026487488721, |
|
"learning_rate": 8.873284006200129e-06, |
|
"loss": 0.2896, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.167300380228137, |
|
"grad_norm": 0.6872226384582601, |
|
"learning_rate": 8.840292522184247e-06, |
|
"loss": 0.2817, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.1692015209125475, |
|
"grad_norm": 0.7004489925928739, |
|
"learning_rate": 8.807313828020715e-06, |
|
"loss": 0.2936, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.1711026615969582, |
|
"grad_norm": 0.8125254022073283, |
|
"learning_rate": 8.774348287415589e-06, |
|
"loss": 0.3087, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.173003802281369, |
|
"grad_norm": 0.7569157165063869, |
|
"learning_rate": 8.74139626392986e-06, |
|
"loss": 0.305, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.1749049429657794, |
|
"grad_norm": 0.8607321690785082, |
|
"learning_rate": 8.708458120975436e-06, |
|
"loss": 0.3007, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.1768060836501901, |
|
"grad_norm": 0.7337026909841357, |
|
"learning_rate": 8.675534221811156e-06, |
|
"loss": 0.2888, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.1787072243346008, |
|
"grad_norm": 0.7420738957496412, |
|
"learning_rate": 8.64262492953876e-06, |
|
"loss": 0.292, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.1806083650190113, |
|
"grad_norm": 0.7658329152469303, |
|
"learning_rate": 8.60973060709891e-06, |
|
"loss": 0.3158, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.182509505703422, |
|
"grad_norm": 0.6977889118897987, |
|
"learning_rate": 8.576851617267151e-06, |
|
"loss": 0.2907, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.1844106463878328, |
|
"grad_norm": 0.7337642630477222, |
|
"learning_rate": 8.543988322649954e-06, |
|
"loss": 0.3118, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.1863117870722433, |
|
"grad_norm": 0.6925312820115364, |
|
"learning_rate": 8.511141085680684e-06, |
|
"loss": 0.2954, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.188212927756654, |
|
"grad_norm": 0.6946774043184807, |
|
"learning_rate": 8.478310268615612e-06, |
|
"loss": 0.302, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.1901140684410647, |
|
"grad_norm": 0.6768648494708821, |
|
"learning_rate": 8.445496233529934e-06, |
|
"loss": 0.2923, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.1920152091254752, |
|
"grad_norm": 0.707499524009905, |
|
"learning_rate": 8.41269934231375e-06, |
|
"loss": 0.307, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.193916349809886, |
|
"grad_norm": 0.6693483006844595, |
|
"learning_rate": 8.3799199566681e-06, |
|
"loss": 0.2835, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.1958174904942966, |
|
"grad_norm": 0.6756297972031414, |
|
"learning_rate": 8.34715843810096e-06, |
|
"loss": 0.2804, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.1977186311787071, |
|
"grad_norm": 0.7185793007102922, |
|
"learning_rate": 8.314415147923254e-06, |
|
"loss": 0.3013, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.1996197718631179, |
|
"grad_norm": 0.6917942502617963, |
|
"learning_rate": 8.281690447244887e-06, |
|
"loss": 0.2901, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.2015209125475286, |
|
"grad_norm": 0.6922299225747153, |
|
"learning_rate": 8.248984696970732e-06, |
|
"loss": 0.2912, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.203422053231939, |
|
"grad_norm": 0.7215625334990756, |
|
"learning_rate": 8.216298257796677e-06, |
|
"loss": 0.3058, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.2053231939163498, |
|
"grad_norm": 0.7039222410582643, |
|
"learning_rate": 8.183631490205636e-06, |
|
"loss": 0.292, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.2072243346007605, |
|
"grad_norm": 0.7190859502492053, |
|
"learning_rate": 8.150984754463578e-06, |
|
"loss": 0.2948, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.209125475285171, |
|
"grad_norm": 0.7334102633794286, |
|
"learning_rate": 8.118358410615545e-06, |
|
"loss": 0.2811, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.2110266159695817, |
|
"grad_norm": 0.7643201091864527, |
|
"learning_rate": 8.08575281848169e-06, |
|
"loss": 0.2824, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.2129277566539924, |
|
"grad_norm": 0.774557710183706, |
|
"learning_rate": 8.0531683376533e-06, |
|
"loss": 0.2779, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.214828897338403, |
|
"grad_norm": 0.6902011960902502, |
|
"learning_rate": 8.020605327488846e-06, |
|
"loss": 0.2956, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.2167300380228137, |
|
"grad_norm": 0.7179399279179142, |
|
"learning_rate": 7.988064147110001e-06, |
|
"loss": 0.3033, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.2186311787072244, |
|
"grad_norm": 0.7016361082390848, |
|
"learning_rate": 7.955545155397684e-06, |
|
"loss": 0.2949, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.2205323193916349, |
|
"grad_norm": 0.703951517850484, |
|
"learning_rate": 7.923048710988119e-06, |
|
"loss": 0.2971, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.2224334600760456, |
|
"grad_norm": 0.7484726688095609, |
|
"learning_rate": 7.890575172268858e-06, |
|
"loss": 0.3055, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.2243346007604563, |
|
"grad_norm": 0.6904067527237151, |
|
"learning_rate": 7.858124897374837e-06, |
|
"loss": 0.2832, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.2262357414448668, |
|
"grad_norm": 0.7925625613106448, |
|
"learning_rate": 7.825698244184432e-06, |
|
"loss": 0.2889, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.2281368821292775, |
|
"grad_norm": 0.7082163840176895, |
|
"learning_rate": 7.7932955703155e-06, |
|
"loss": 0.2843, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.2300380228136882, |
|
"grad_norm": 0.7486971273363527, |
|
"learning_rate": 7.760917233121443e-06, |
|
"loss": 0.3093, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.231939163498099, |
|
"grad_norm": 0.6885731853188624, |
|
"learning_rate": 7.728563589687275e-06, |
|
"loss": 0.2835, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.2338403041825095, |
|
"grad_norm": 0.6929242766183379, |
|
"learning_rate": 7.696234996825663e-06, |
|
"loss": 0.2962, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.2357414448669202, |
|
"grad_norm": 0.6789650292657143, |
|
"learning_rate": 7.663931811073003e-06, |
|
"loss": 0.2919, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.2376425855513309, |
|
"grad_norm": 0.7371899002650942, |
|
"learning_rate": 7.631654388685496e-06, |
|
"loss": 0.2894, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.2395437262357414, |
|
"grad_norm": 0.7411950998073552, |
|
"learning_rate": 7.599403085635208e-06, |
|
"loss": 0.2963, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.241444866920152, |
|
"grad_norm": 0.6843358181587386, |
|
"learning_rate": 7.567178257606147e-06, |
|
"loss": 0.2942, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.2433460076045628, |
|
"grad_norm": 0.6836482839157375, |
|
"learning_rate": 7.534980259990341e-06, |
|
"loss": 0.2838, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.2452471482889733, |
|
"grad_norm": 0.7044286897986738, |
|
"learning_rate": 7.50280944788392e-06, |
|
"loss": 0.2863, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.247148288973384, |
|
"grad_norm": 0.7200508714497933, |
|
"learning_rate": 7.470666176083193e-06, |
|
"loss": 0.2999, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.2490494296577948, |
|
"grad_norm": 0.7326826806293245, |
|
"learning_rate": 7.438550799080746e-06, |
|
"loss": 0.2864, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.2509505703422052, |
|
"grad_norm": 0.6929056989440776, |
|
"learning_rate": 7.40646367106153e-06, |
|
"loss": 0.2922, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.252851711026616, |
|
"grad_norm": 0.6754973509672555, |
|
"learning_rate": 7.3744051458989395e-06, |
|
"loss": 0.2868, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.2547528517110267, |
|
"grad_norm": 0.6897474799851169, |
|
"learning_rate": 7.342375577150928e-06, |
|
"loss": 0.2909, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.2566539923954372, |
|
"grad_norm": 0.7147415653676243, |
|
"learning_rate": 7.310375318056107e-06, |
|
"loss": 0.3053, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.258555133079848, |
|
"grad_norm": 0.7080936922840922, |
|
"learning_rate": 7.278404721529843e-06, |
|
"loss": 0.2968, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.2604562737642586, |
|
"grad_norm": 0.6755973914703102, |
|
"learning_rate": 7.246464140160365e-06, |
|
"loss": 0.2871, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.2623574144486693, |
|
"grad_norm": 0.75565239191557, |
|
"learning_rate": 7.214553926204884e-06, |
|
"loss": 0.2836, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.2642585551330798, |
|
"grad_norm": 0.7053307489953805, |
|
"learning_rate": 7.182674431585703e-06, |
|
"loss": 0.2928, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.2661596958174905, |
|
"grad_norm": 0.7672803521959862, |
|
"learning_rate": 7.150826007886334e-06, |
|
"loss": 0.2954, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.2680608365019013, |
|
"grad_norm": 0.7767735171999255, |
|
"learning_rate": 7.119009006347625e-06, |
|
"loss": 0.2814, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.2699619771863118, |
|
"grad_norm": 0.742984890915227, |
|
"learning_rate": 7.087223777863883e-06, |
|
"loss": 0.2922, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.2718631178707225, |
|
"grad_norm": 0.711514011396354, |
|
"learning_rate": 7.055470672979003e-06, |
|
"loss": 0.3058, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.2737642585551332, |
|
"grad_norm": 0.7325340265134281, |
|
"learning_rate": 7.023750041882609e-06, |
|
"loss": 0.2978, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.2756653992395437, |
|
"grad_norm": 0.7465630777746666, |
|
"learning_rate": 6.992062234406185e-06, |
|
"loss": 0.2933, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.2775665399239544, |
|
"grad_norm": 0.689387074966669, |
|
"learning_rate": 6.960407600019217e-06, |
|
"loss": 0.2834, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.2794676806083651, |
|
"grad_norm": 0.7366198182791822, |
|
"learning_rate": 6.9287864878253475e-06, |
|
"loss": 0.3013, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.2813688212927756, |
|
"grad_norm": 0.7136083886736968, |
|
"learning_rate": 6.897199246558515e-06, |
|
"loss": 0.2873, |
|
"step": 674 |
|
}, |
    {
      "epoch": 1.2832699619771863,
      "grad_norm": 0.7592020918059184,
      "learning_rate": 6.865646224579108e-06,
      "loss": 0.2865,
      "step": 675
    },
    {
      "epoch": 1.285171102661597,
      "grad_norm": 0.7112598768252201,
      "learning_rate": 6.834127769870134e-06,
      "loss": 0.2846,
      "step": 676
    },
    {
      "epoch": 1.2870722433460076,
      "grad_norm": 0.7584893612586059,
      "learning_rate": 6.802644230033373e-06,
      "loss": 0.3095,
      "step": 677
    },
    {
      "epoch": 1.2889733840304183,
      "grad_norm": 0.7977539723711234,
      "learning_rate": 6.771195952285541e-06,
      "loss": 0.2873,
      "step": 678
    },
    {
      "epoch": 1.290874524714829,
      "grad_norm": 0.7676232332340589,
      "learning_rate": 6.739783283454469e-06,
      "loss": 0.2966,
      "step": 679
    },
    {
      "epoch": 1.2927756653992395,
      "grad_norm": 0.6893092059340077,
      "learning_rate": 6.708406569975274e-06,
      "loss": 0.2738,
      "step": 680
    },
    {
      "epoch": 1.2946768060836502,
      "grad_norm": 0.7246494882694059,
      "learning_rate": 6.6770661578865444e-06,
      "loss": 0.2896,
      "step": 681
    },
    {
      "epoch": 1.296577946768061,
      "grad_norm": 0.7073493095396471,
      "learning_rate": 6.645762392826509e-06,
      "loss": 0.3019,
      "step": 682
    },
    {
      "epoch": 1.2984790874524714,
      "grad_norm": 0.7007386925363137,
      "learning_rate": 6.614495620029238e-06,
      "loss": 0.2922,
      "step": 683
    },
    {
      "epoch": 1.3003802281368821,
      "grad_norm": 0.6917901925134944,
      "learning_rate": 6.583266184320836e-06,
      "loss": 0.2827,
      "step": 684
    },
    {
      "epoch": 1.3022813688212929,
      "grad_norm": 0.7369661907640651,
      "learning_rate": 6.552074430115624e-06,
      "loss": 0.2981,
      "step": 685
    },
    {
      "epoch": 1.3041825095057034,
      "grad_norm": 0.6922292999782869,
      "learning_rate": 6.520920701412371e-06,
      "loss": 0.2755,
      "step": 686
    },
    {
      "epoch": 1.306083650190114,
      "grad_norm": 0.7056760086531454,
      "learning_rate": 6.489805341790456e-06,
      "loss": 0.2773,
      "step": 687
    },
    {
      "epoch": 1.3079847908745248,
      "grad_norm": 0.7434266181863763,
      "learning_rate": 6.458728694406124e-06,
      "loss": 0.2986,
      "step": 688
    },
    {
      "epoch": 1.3098859315589353,
      "grad_norm": 0.687168409457717,
      "learning_rate": 6.427691101988673e-06,
      "loss": 0.2927,
      "step": 689
    },
    {
      "epoch": 1.311787072243346,
      "grad_norm": 0.7282233391544028,
      "learning_rate": 6.396692906836686e-06,
      "loss": 0.2913,
      "step": 690
    },
    {
      "epoch": 1.3136882129277567,
      "grad_norm": 0.6961182692812412,
      "learning_rate": 6.3657344508142495e-06,
      "loss": 0.2816,
      "step": 691
    },
    {
      "epoch": 1.3155893536121672,
      "grad_norm": 0.6760834185922154,
      "learning_rate": 6.334816075347185e-06,
      "loss": 0.2849,
      "step": 692
    },
    {
      "epoch": 1.317490494296578,
      "grad_norm": 0.7275066890851415,
      "learning_rate": 6.303938121419295e-06,
      "loss": 0.2808,
      "step": 693
    },
    {
      "epoch": 1.3193916349809887,
      "grad_norm": 0.7354558968528108,
      "learning_rate": 6.273100929568579e-06,
      "loss": 0.289,
      "step": 694
    },
    {
      "epoch": 1.3212927756653992,
      "grad_norm": 0.6948206887971208,
      "learning_rate": 6.242304839883502e-06,
      "loss": 0.294,
      "step": 695
    },
    {
      "epoch": 1.3231939163498099,
      "grad_norm": 0.7047087277063151,
      "learning_rate": 6.211550191999223e-06,
      "loss": 0.2932,
      "step": 696
    },
    {
      "epoch": 1.3250950570342206,
      "grad_norm": 0.7558936018582135,
      "learning_rate": 6.18083732509387e-06,
      "loss": 0.3002,
      "step": 697
    },
    {
      "epoch": 1.326996197718631,
      "grad_norm": 0.7273211841524094,
      "learning_rate": 6.150166577884781e-06,
      "loss": 0.286,
      "step": 698
    },
    {
      "epoch": 1.3288973384030418,
      "grad_norm": 0.7205144056055376,
      "learning_rate": 6.119538288624778e-06,
      "loss": 0.2849,
      "step": 699
    },
    {
      "epoch": 1.3307984790874525,
      "grad_norm": 0.7245637955873974,
      "learning_rate": 6.088952795098442e-06,
      "loss": 0.2871,
      "step": 700
    },
    {
      "epoch": 1.332699619771863,
      "grad_norm": 0.7456434412563985,
      "learning_rate": 6.058410434618367e-06,
      "loss": 0.2865,
      "step": 701
    },
    {
      "epoch": 1.3346007604562737,
      "grad_norm": 0.6755876877519845,
      "learning_rate": 6.027911544021465e-06,
      "loss": 0.2771,
      "step": 702
    },
    {
      "epoch": 1.3365019011406845,
      "grad_norm": 0.683988241876236,
      "learning_rate": 5.997456459665237e-06,
      "loss": 0.2905,
      "step": 703
    },
    {
      "epoch": 1.338403041825095,
      "grad_norm": 0.7099361523830153,
      "learning_rate": 5.967045517424062e-06,
      "loss": 0.2874,
      "step": 704
    },
    {
      "epoch": 1.3403041825095057,
      "grad_norm": 0.7815235818349345,
      "learning_rate": 5.936679052685505e-06,
      "loss": 0.2866,
      "step": 705
    },
    {
      "epoch": 1.3422053231939164,
      "grad_norm": 0.7752856826992063,
      "learning_rate": 5.906357400346596e-06,
      "loss": 0.297,
      "step": 706
    },
    {
      "epoch": 1.3441064638783269,
      "grad_norm": 0.7234830739038847,
      "learning_rate": 5.876080894810167e-06,
      "loss": 0.2933,
      "step": 707
    },
    {
      "epoch": 1.3460076045627376,
      "grad_norm": 0.9255515792091867,
      "learning_rate": 5.845849869981137e-06,
      "loss": 0.2933,
      "step": 708
    },
    {
      "epoch": 1.3479087452471483,
      "grad_norm": 0.666046326625046,
      "learning_rate": 5.815664659262845e-06,
      "loss": 0.2904,
      "step": 709
    },
    {
      "epoch": 1.3498098859315588,
      "grad_norm": 0.7364163343078383,
      "learning_rate": 5.78552559555337e-06,
      "loss": 0.2867,
      "step": 710
    },
    {
      "epoch": 1.3517110266159695,
      "grad_norm": 0.6726645429514998,
      "learning_rate": 5.755433011241851e-06,
      "loss": 0.2762,
      "step": 711
    },
    {
      "epoch": 1.3536121673003803,
      "grad_norm": 0.7059335896205419,
      "learning_rate": 5.725387238204831e-06,
      "loss": 0.2968,
      "step": 712
    },
    {
      "epoch": 1.3555133079847907,
      "grad_norm": 0.705725744890944,
      "learning_rate": 5.695388607802603e-06,
      "loss": 0.3011,
      "step": 713
    },
    {
      "epoch": 1.3574144486692015,
      "grad_norm": 0.726735540484456,
      "learning_rate": 5.665437450875534e-06,
      "loss": 0.287,
      "step": 714
    },
    {
      "epoch": 1.3593155893536122,
      "grad_norm": 0.6840462972087888,
      "learning_rate": 5.635534097740435e-06,
      "loss": 0.2904,
      "step": 715
    },
    {
      "epoch": 1.3612167300380227,
      "grad_norm": 0.7166382524728736,
      "learning_rate": 5.605678878186911e-06,
      "loss": 0.289,
      "step": 716
    },
    {
      "epoch": 1.3631178707224334,
      "grad_norm": 0.6751991675844237,
      "learning_rate": 5.575872121473722e-06,
      "loss": 0.2777,
      "step": 717
    },
    {
      "epoch": 1.3650190114068441,
      "grad_norm": 0.7585194497054609,
      "learning_rate": 5.546114156325166e-06,
      "loss": 0.2858,
      "step": 718
    },
    {
      "epoch": 1.3669201520912546,
      "grad_norm": 0.6801020689899001,
      "learning_rate": 5.516405310927431e-06,
      "loss": 0.2811,
      "step": 719
    },
    {
      "epoch": 1.3688212927756653,
      "grad_norm": 0.680530139917202,
      "learning_rate": 5.4867459129249846e-06,
      "loss": 0.284,
      "step": 720
    },
    {
      "epoch": 1.370722433460076,
      "grad_norm": 0.7120069344474718,
      "learning_rate": 5.4571362894169795e-06,
      "loss": 0.2939,
      "step": 721
    },
    {
      "epoch": 1.3726235741444868,
      "grad_norm": 0.7423374877845463,
      "learning_rate": 5.427576766953615e-06,
      "loss": 0.288,
      "step": 722
    },
    {
      "epoch": 1.3745247148288973,
      "grad_norm": 0.7218822981199374,
      "learning_rate": 5.398067671532554e-06,
      "loss": 0.2901,
      "step": 723
    },
    {
      "epoch": 1.376425855513308,
      "grad_norm": 0.6735131115089157,
      "learning_rate": 5.368609328595323e-06,
      "loss": 0.2828,
      "step": 724
    },
    {
      "epoch": 1.3783269961977187,
      "grad_norm": 0.6879069283917294,
      "learning_rate": 5.339202063023727e-06,
      "loss": 0.2786,
      "step": 725
    },
    {
      "epoch": 1.3802281368821292,
      "grad_norm": 0.6924030127198797,
      "learning_rate": 5.309846199136258e-06,
      "loss": 0.28,
      "step": 726
    },
    {
      "epoch": 1.38212927756654,
      "grad_norm": 0.7095484592211323,
      "learning_rate": 5.280542060684535e-06,
      "loss": 0.2873,
      "step": 727
    },
    {
      "epoch": 1.3840304182509506,
      "grad_norm": 0.7279589483349861,
      "learning_rate": 5.2512899708497086e-06,
      "loss": 0.2922,
      "step": 728
    },
    {
      "epoch": 1.3859315589353614,
      "grad_norm": 0.6812029663780734,
      "learning_rate": 5.222090252238916e-06,
      "loss": 0.2757,
      "step": 729
    },
    {
      "epoch": 1.3878326996197718,
      "grad_norm": 0.6910962379153492,
      "learning_rate": 5.192943226881724e-06,
      "loss": 0.2745,
      "step": 730
    },
    {
      "epoch": 1.3897338403041826,
      "grad_norm": 0.7532695913142263,
      "learning_rate": 5.163849216226562e-06,
      "loss": 0.2953,
      "step": 731
    },
    {
      "epoch": 1.3916349809885933,
      "grad_norm": 0.7092334688343299,
      "learning_rate": 5.134808541137183e-06,
      "loss": 0.2863,
      "step": 732
    },
    {
      "epoch": 1.3935361216730038,
      "grad_norm": 0.7265062119412851,
      "learning_rate": 5.105821521889147e-06,
      "loss": 0.2991,
      "step": 733
    },
    {
      "epoch": 1.3954372623574145,
      "grad_norm": 0.7138008594382279,
      "learning_rate": 5.076888478166247e-06,
      "loss": 0.2915,
      "step": 734
    },
    {
      "epoch": 1.3973384030418252,
      "grad_norm": 0.7288896607533202,
      "learning_rate": 5.048009729057012e-06,
      "loss": 0.2996,
      "step": 735
    },
    {
      "epoch": 1.3992395437262357,
      "grad_norm": 0.7475825325755282,
      "learning_rate": 5.0191855930511946e-06,
      "loss": 0.2983,
      "step": 736
    },
    {
      "epoch": 1.4011406844106464,
      "grad_norm": 0.7141287075640784,
      "learning_rate": 4.990416388036233e-06,
      "loss": 0.2834,
      "step": 737
    },
    {
      "epoch": 1.4030418250950571,
      "grad_norm": 0.7364723630267682,
      "learning_rate": 4.961702431293759e-06,
      "loss": 0.2985,
      "step": 738
    },
    {
      "epoch": 1.4049429657794676,
      "grad_norm": 0.7264122514305142,
      "learning_rate": 4.933044039496107e-06,
      "loss": 0.2813,
      "step": 739
    },
    {
      "epoch": 1.4068441064638784,
      "grad_norm": 0.673044585290331,
      "learning_rate": 4.904441528702806e-06,
      "loss": 0.2793,
      "step": 740
    },
    {
      "epoch": 1.408745247148289,
      "grad_norm": 0.7388415667669372,
      "learning_rate": 4.875895214357093e-06,
      "loss": 0.2887,
      "step": 741
    },
    {
      "epoch": 1.4106463878326996,
      "grad_norm": 0.7683401629002008,
      "learning_rate": 4.847405411282462e-06,
      "loss": 0.3025,
      "step": 742
    },
    {
      "epoch": 1.4125475285171103,
      "grad_norm": 0.7170938336542718,
      "learning_rate": 4.818972433679145e-06,
      "loss": 0.285,
      "step": 743
    },
    {
      "epoch": 1.414448669201521,
      "grad_norm": 0.7158403517137417,
      "learning_rate": 4.790596595120699e-06,
      "loss": 0.2801,
      "step": 744
    },
    {
      "epoch": 1.4163498098859315,
      "grad_norm": 0.649447466407859,
      "learning_rate": 4.762278208550505e-06,
      "loss": 0.2689,
      "step": 745
    },
    {
      "epoch": 1.4182509505703422,
      "grad_norm": 0.7082614566074693,
      "learning_rate": 4.734017586278337e-06,
      "loss": 0.2844,
      "step": 746
    },
    {
      "epoch": 1.420152091254753,
      "grad_norm": 0.7085031748073999,
      "learning_rate": 4.7058150399769245e-06,
      "loss": 0.2906,
      "step": 747
    },
    {
      "epoch": 1.4220532319391634,
      "grad_norm": 0.6732158714717617,
      "learning_rate": 4.677670880678493e-06,
      "loss": 0.2855,
      "step": 748
    },
    {
      "epoch": 1.4239543726235742,
      "grad_norm": 0.6984936344041431,
      "learning_rate": 4.649585418771348e-06,
      "loss": 0.2796,
      "step": 749
    },
    {
      "epoch": 1.4258555133079849,
      "grad_norm": 0.7338928129777204,
      "learning_rate": 4.621558963996458e-06,
      "loss": 0.2902,
      "step": 750
    },
    {
      "epoch": 1.4277566539923954,
      "grad_norm": 0.6773644133879478,
      "learning_rate": 4.593591825444028e-06,
      "loss": 0.2784,
      "step": 751
    },
    {
      "epoch": 1.429657794676806,
      "grad_norm": 0.7142651502999355,
      "learning_rate": 4.565684311550077e-06,
      "loss": 0.2789,
      "step": 752
    },
    {
      "epoch": 1.4315589353612168,
      "grad_norm": 0.7078564802768612,
      "learning_rate": 4.537836730093077e-06,
      "loss": 0.2931,
      "step": 753
    },
    {
      "epoch": 1.4334600760456273,
      "grad_norm": 0.6625652539973749,
      "learning_rate": 4.510049388190518e-06,
      "loss": 0.2896,
      "step": 754
    },
    {
      "epoch": 1.435361216730038,
      "grad_norm": 0.6765165356102307,
      "learning_rate": 4.482322592295541e-06,
      "loss": 0.2835,
      "step": 755
    },
    {
      "epoch": 1.4372623574144487,
      "grad_norm": 0.7166068338877377,
      "learning_rate": 4.454656648193559e-06,
      "loss": 0.2729,
      "step": 756
    },
    {
      "epoch": 1.4391634980988592,
      "grad_norm": 0.6723192572008214,
      "learning_rate": 4.427051860998877e-06,
      "loss": 0.2803,
      "step": 757
    },
    {
      "epoch": 1.44106463878327,
      "grad_norm": 0.6384592830220657,
      "learning_rate": 4.399508535151321e-06,
      "loss": 0.2701,
      "step": 758
    },
    {
      "epoch": 1.4429657794676807,
      "grad_norm": 0.7055705261507379,
      "learning_rate": 4.372026974412907e-06,
      "loss": 0.284,
      "step": 759
    },
    {
      "epoch": 1.4448669201520912,
      "grad_norm": 0.7358982346816677,
      "learning_rate": 4.344607481864466e-06,
      "loss": 0.2977,
      "step": 760
    },
    {
      "epoch": 1.446768060836502,
      "grad_norm": 0.7124418132493656,
      "learning_rate": 4.317250359902295e-06,
      "loss": 0.2891,
      "step": 761
    },
    {
      "epoch": 1.4486692015209126,
      "grad_norm": 0.7158845956833674,
      "learning_rate": 4.2899559102348585e-06,
      "loss": 0.2914,
      "step": 762
    },
    {
      "epoch": 1.450570342205323,
      "grad_norm": 0.7182051849255634,
      "learning_rate": 4.262724433879427e-06,
      "loss": 0.2826,
      "step": 763
    },
    {
      "epoch": 1.4524714828897338,
      "grad_norm": 0.7784690277833876,
      "learning_rate": 4.235556231158765e-06,
      "loss": 0.2919,
      "step": 764
    },
    {
      "epoch": 1.4543726235741445,
      "grad_norm": 0.7023453496978478,
      "learning_rate": 4.208451601697836e-06,
      "loss": 0.2805,
      "step": 765
    },
    {
      "epoch": 1.456273764258555,
      "grad_norm": 0.6788665299316395,
      "learning_rate": 4.181410844420473e-06,
      "loss": 0.29,
      "step": 766
    },
    {
      "epoch": 1.4581749049429658,
      "grad_norm": 0.7817956027920046,
      "learning_rate": 4.154434257546095e-06,
      "loss": 0.2798,
      "step": 767
    },
    {
      "epoch": 1.4600760456273765,
      "grad_norm": 0.7501150341927527,
      "learning_rate": 4.127522138586424e-06,
      "loss": 0.2769,
      "step": 768
    },
    {
      "epoch": 1.461977186311787,
      "grad_norm": 0.6871700574697513,
      "learning_rate": 4.10067478434219e-06,
      "loss": 0.2792,
      "step": 769
    },
    {
      "epoch": 1.4638783269961977,
      "grad_norm": 0.6919539570178334,
      "learning_rate": 4.073892490899865e-06,
      "loss": 0.2793,
      "step": 770
    },
    {
      "epoch": 1.4657794676806084,
      "grad_norm": 0.6988102328053417,
      "learning_rate": 4.047175553628397e-06,
      "loss": 0.2804,
      "step": 771
    },
    {
      "epoch": 1.467680608365019,
      "grad_norm": 0.713755478290317,
      "learning_rate": 4.020524267175954e-06,
      "loss": 0.2886,
      "step": 772
    },
    {
      "epoch": 1.4695817490494296,
      "grad_norm": 0.6885537137147855,
      "learning_rate": 3.993938925466674e-06,
      "loss": 0.2752,
      "step": 773
    },
    {
      "epoch": 1.4714828897338403,
      "grad_norm": 0.68133424767042,
      "learning_rate": 3.96741982169742e-06,
      "loss": 0.2868,
      "step": 774
    },
    {
      "epoch": 1.4733840304182508,
      "grad_norm": 0.7283991398915899,
      "learning_rate": 3.9409672483345465e-06,
      "loss": 0.279,
      "step": 775
    },
    {
      "epoch": 1.4752851711026616,
      "grad_norm": 0.7407407654354737,
      "learning_rate": 3.914581497110684e-06,
      "loss": 0.2806,
      "step": 776
    },
    {
      "epoch": 1.4771863117870723,
      "grad_norm": 0.7227652333140067,
      "learning_rate": 3.888262859021508e-06,
      "loss": 0.2756,
      "step": 777
    },
    {
      "epoch": 1.4790874524714828,
      "grad_norm": 0.7012898936512341,
      "learning_rate": 3.862011624322534e-06,
      "loss": 0.2871,
      "step": 778
    },
    {
      "epoch": 1.4809885931558935,
      "grad_norm": 0.7217796654034508,
      "learning_rate": 3.835828082525925e-06,
      "loss": 0.2929,
      "step": 779
    },
    {
      "epoch": 1.4828897338403042,
      "grad_norm": 0.7111580824215556,
      "learning_rate": 3.8097125223972864e-06,
      "loss": 0.2839,
      "step": 780
    },
    {
      "epoch": 1.4847908745247147,
      "grad_norm": 0.7100534623335487,
      "learning_rate": 3.7836652319524835e-06,
      "loss": 0.2927,
      "step": 781
    },
    {
      "epoch": 1.4866920152091254,
      "grad_norm": 0.7324444142897687,
      "learning_rate": 3.7576864984544814e-06,
      "loss": 0.294,
      "step": 782
    },
    {
      "epoch": 1.4885931558935361,
      "grad_norm": 0.7158797850703152,
      "learning_rate": 3.73177660841015e-06,
      "loss": 0.3031,
      "step": 783
    },
    {
      "epoch": 1.4904942965779466,
      "grad_norm": 0.702168210462647,
      "learning_rate": 3.7059358475671225e-06,
      "loss": 0.2787,
      "step": 784
    },
    {
      "epoch": 1.4923954372623573,
      "grad_norm": 0.7088407589813892,
      "learning_rate": 3.680164500910646e-06,
      "loss": 0.2786,
      "step": 785
    },
    {
      "epoch": 1.494296577946768,
      "grad_norm": 0.8644030284119277,
      "learning_rate": 3.654462852660423e-06,
      "loss": 0.2823,
      "step": 786
    },
    {
      "epoch": 1.4961977186311788,
      "grad_norm": 0.6952639368659915,
      "learning_rate": 3.6288311862674885e-06,
      "loss": 0.2823,
      "step": 787
    },
    {
      "epoch": 1.4980988593155893,
      "grad_norm": 0.7096763794742813,
      "learning_rate": 3.6032697844110896e-06,
      "loss": 0.2934,
      "step": 788
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.6695426045719942,
      "learning_rate": 3.5777789289955454e-06,
      "loss": 0.2784,
      "step": 789
    },
    {
      "epoch": 1.5019011406844105,
      "grad_norm": 0.6524566363145925,
      "learning_rate": 3.5523589011471592e-06,
      "loss": 0.2699,
      "step": 790
    },
    {
      "epoch": 1.5038022813688214,
      "grad_norm": 0.7532424129615138,
      "learning_rate": 3.527009981211119e-06,
      "loss": 0.2929,
      "step": 791
    },
    {
      "epoch": 1.505703422053232,
      "grad_norm": 0.681946779446385,
      "learning_rate": 3.5017324487483873e-06,
      "loss": 0.2805,
      "step": 792
    },
    {
      "epoch": 1.5076045627376424,
      "grad_norm": 0.6903860675980712,
      "learning_rate": 3.47652658253263e-06,
      "loss": 0.2846,
      "step": 793
    },
    {
      "epoch": 1.5095057034220534,
      "grad_norm": 0.7017392608830051,
      "learning_rate": 3.4513926605471504e-06,
      "loss": 0.2832,
      "step": 794
    },
    {
      "epoch": 1.5114068441064639,
      "grad_norm": 0.6995603939424366,
      "learning_rate": 3.4263309599818017e-06,
      "loss": 0.2776,
      "step": 795
    },
    {
      "epoch": 1.5133079847908744,
      "grad_norm": 0.7174754922676291,
      "learning_rate": 3.4013417572299446e-06,
      "loss": 0.2759,
      "step": 796
    },
    {
      "epoch": 1.5152091254752853,
      "grad_norm": 0.7064539732128455,
      "learning_rate": 3.37642532788541e-06,
      "loss": 0.2713,
      "step": 797
    },
    {
      "epoch": 1.5171102661596958,
      "grad_norm": 0.7418356973993784,
      "learning_rate": 3.3515819467394184e-06,
      "loss": 0.2945,
      "step": 798
    },
    {
      "epoch": 1.5190114068441065,
      "grad_norm": 0.6988116146786837,
      "learning_rate": 3.326811887777607e-06,
      "loss": 0.295,
      "step": 799
    },
    {
      "epoch": 1.5209125475285172,
      "grad_norm": 0.7017162668187764,
      "learning_rate": 3.3021154241769606e-06,
      "loss": 0.2894,
      "step": 800
    },
    {
      "epoch": 1.5228136882129277,
      "grad_norm": 0.7237575766380469,
      "learning_rate": 3.2774928283028153e-06,
      "loss": 0.2843,
      "step": 801
    },
    {
      "epoch": 1.5247148288973384,
      "grad_norm": 0.7235878172992064,
      "learning_rate": 3.2529443717058693e-06,
      "loss": 0.2897,
      "step": 802
    },
    {
      "epoch": 1.5266159695817492,
      "grad_norm": 0.692253906482968,
      "learning_rate": 3.228470325119164e-06,
      "loss": 0.2806,
      "step": 803
    },
    {
      "epoch": 1.5285171102661597,
      "grad_norm": 0.6956965527154768,
      "learning_rate": 3.20407095845511e-06,
      "loss": 0.2753,
      "step": 804
    },
    {
      "epoch": 1.5304182509505704,
      "grad_norm": 0.7506609358953361,
      "learning_rate": 3.179746540802506e-06,
      "loss": 0.2887,
      "step": 805
    },
    {
      "epoch": 1.532319391634981,
      "grad_norm": 0.6857963089357372,
      "learning_rate": 3.155497340423588e-06,
      "loss": 0.2762,
      "step": 806
    },
    {
      "epoch": 1.5342205323193916,
      "grad_norm": 0.7186855429598759,
      "learning_rate": 3.1313236247510414e-06,
      "loss": 0.2919,
      "step": 807
    },
    {
      "epoch": 1.5361216730038023,
      "grad_norm": 0.7857081103211361,
      "learning_rate": 3.107225660385077e-06,
      "loss": 0.3134,
      "step": 808
    },
    {
      "epoch": 1.538022813688213,
      "grad_norm": 0.7220170069663582,
      "learning_rate": 3.0832037130904748e-06,
      "loss": 0.2913,
      "step": 809
    },
    {
      "epoch": 1.5399239543726235,
      "grad_norm": 0.7613384595428253,
      "learning_rate": 3.0592580477936606e-06,
      "loss": 0.2808,
      "step": 810
    },
    {
      "epoch": 1.5418250950570342,
      "grad_norm": 0.7333226548617051,
      "learning_rate": 3.035388928579792e-06,
      "loss": 0.2873,
      "step": 811
    },
    {
      "epoch": 1.543726235741445,
      "grad_norm": 0.692625551876295,
      "learning_rate": 3.011596618689825e-06,
      "loss": 0.2841,
      "step": 812
    },
    {
      "epoch": 1.5456273764258555,
      "grad_norm": 0.696623210581126,
      "learning_rate": 2.9878813805176252e-06,
      "loss": 0.2696,
      "step": 813
    },
    {
      "epoch": 1.5475285171102662,
      "grad_norm": 0.7282669107396988,
      "learning_rate": 2.9642434756070793e-06,
      "loss": 0.2771,
      "step": 814
    },
    {
      "epoch": 1.549429657794677,
      "grad_norm": 0.6877414577334098,
      "learning_rate": 2.940683164649194e-06,
      "loss": 0.2836,
      "step": 815
    },
    {
      "epoch": 1.5513307984790874,
      "grad_norm": 0.6952013837873315,
      "learning_rate": 2.9172007074792342e-06,
      "loss": 0.2754,
      "step": 816
    },
    {
      "epoch": 1.553231939163498,
      "grad_norm": 0.6787420113133055,
      "learning_rate": 2.8937963630738517e-06,
      "loss": 0.2625,
      "step": 817
    },
    {
      "epoch": 1.5551330798479088,
      "grad_norm": 0.6922945134021774,
      "learning_rate": 2.87047038954823e-06,
      "loss": 0.2911,
      "step": 818
    },
    {
      "epoch": 1.5570342205323193,
      "grad_norm": 0.8402896970167123,
      "learning_rate": 2.8472230441532365e-06,
      "loss": 0.2703,
      "step": 819
    },
    {
      "epoch": 1.55893536121673,
      "grad_norm": 0.6678746930340207,
      "learning_rate": 2.8240545832725963e-06,
      "loss": 0.2695,
      "step": 820
    },
    {
      "epoch": 1.5608365019011408,
      "grad_norm": 0.705078676312885,
      "learning_rate": 2.8009652624200436e-06,
      "loss": 0.2825,
      "step": 821
    },
    {
      "epoch": 1.5627376425855513,
      "grad_norm": 0.6677130225262102,
      "learning_rate": 2.7779553362365184e-06,
      "loss": 0.2613,
      "step": 822
    },
    {
      "epoch": 1.564638783269962,
      "grad_norm": 0.7412097663609457,
      "learning_rate": 2.755025058487364e-06,
      "loss": 0.2884,
      "step": 823
    },
    {
      "epoch": 1.5665399239543727,
      "grad_norm": 0.7406514147916663,
      "learning_rate": 2.7321746820595084e-06,
      "loss": 0.2843,
      "step": 824
    },
    {
      "epoch": 1.5684410646387832,
      "grad_norm": 0.7676929694787699,
      "learning_rate": 2.709404458958693e-06,
      "loss": 0.3027,
      "step": 825
    },
    {
      "epoch": 1.570342205323194,
      "grad_norm": 0.7081642929814832,
      "learning_rate": 2.6867146403066833e-06,
      "loss": 0.2777,
      "step": 826
    },
    {
      "epoch": 1.5722433460076046,
      "grad_norm": 0.6897624695748474,
      "learning_rate": 2.6641054763385044e-06,
      "loss": 0.2766,
      "step": 827
    },
    {
      "epoch": 1.5741444866920151,
      "grad_norm": 0.7230487976603425,
      "learning_rate": 2.6415772163996845e-06,
      "loss": 0.2856,
      "step": 828
    },
    {
      "epoch": 1.5760456273764258,
      "grad_norm": 0.8022793519023697,
      "learning_rate": 2.619130108943494e-06,
      "loss": 0.3006,
      "step": 829
    },
    {
      "epoch": 1.5779467680608366,
      "grad_norm": 0.6924872584474147,
      "learning_rate": 2.5967644015282146e-06,
      "loss": 0.2647,
      "step": 830
    },
    {
      "epoch": 1.579847908745247,
      "grad_norm": 0.7422641131977431,
      "learning_rate": 2.5744803408144026e-06,
      "loss": 0.2843,
      "step": 831
    },
    {
      "epoch": 1.5817490494296578,
      "grad_norm": 0.7068176008466083,
      "learning_rate": 2.5522781725621814e-06,
      "loss": 0.2945,
      "step": 832
    },
    {
      "epoch": 1.5836501901140685,
      "grad_norm": 0.685027307753865,
      "learning_rate": 2.530158141628515e-06,
      "loss": 0.2746,
      "step": 833
    },
    {
      "epoch": 1.585551330798479,
      "grad_norm": 0.6945132808889379,
      "learning_rate": 2.508120491964512e-06,
      "loss": 0.285,
      "step": 834
    },
    {
      "epoch": 1.5874524714828897,
      "grad_norm": 0.6941931524929925,
      "learning_rate": 2.486165466612751e-06,
      "loss": 0.2882,
      "step": 835
    },
    {
      "epoch": 1.5893536121673004,
      "grad_norm": 0.6868828496922105,
      "learning_rate": 2.464293307704566e-06,
      "loss": 0.2757,
      "step": 836
    },
    {
      "epoch": 1.591254752851711,
      "grad_norm": 0.6880910265615507,
      "learning_rate": 2.4425042564574186e-06,
      "loss": 0.2762,
      "step": 837
    },
    {
      "epoch": 1.5931558935361216,
      "grad_norm": 0.6676074344861813,
      "learning_rate": 2.4207985531722034e-06,
      "loss": 0.276,
      "step": 838
    },
    {
      "epoch": 1.5950570342205324,
      "grad_norm": 0.7059312393161081,
      "learning_rate": 2.3991764372306113e-06,
      "loss": 0.2816,
      "step": 839
    },
    {
      "epoch": 1.5969581749049429,
      "grad_norm": 0.630962071331813,
      "learning_rate": 2.377638147092497e-06,
      "loss": 0.2641,
      "step": 840
    },
    {
      "epoch": 1.5988593155893536,
      "grad_norm": 0.7035563239120735,
      "learning_rate": 2.3561839202932344e-06,
      "loss": 0.277,
      "step": 841
    },
    {
      "epoch": 1.6007604562737643,
      "grad_norm": 0.7083571423625011,
      "learning_rate": 2.3348139934411008e-06,
      "loss": 0.2835,
      "step": 842
    },
    {
      "epoch": 1.6026615969581748,
      "grad_norm": 0.6631571631543133,
      "learning_rate": 2.3135286022146785e-06,
      "loss": 0.2815,
      "step": 843
    },
    {
      "epoch": 1.6045627376425855,
      "grad_norm": 0.6388015083937685,
      "learning_rate": 2.292327981360245e-06,
      "loss": 0.2573,
      "step": 844
    },
    {
      "epoch": 1.6064638783269962,
      "grad_norm": 0.715955776983333,
      "learning_rate": 2.271212364689176e-06,
      "loss": 0.2774,
      "step": 845
    },
    {
      "epoch": 1.6083650190114067,
      "grad_norm": 0.711586708269853,
      "learning_rate": 2.2501819850753925e-06,
      "loss": 0.2885,
      "step": 846
    },
    {
      "epoch": 1.6102661596958177,
      "grad_norm": 0.7393571861519403,
      "learning_rate": 2.229237074452768e-06,
      "loss": 0.2938,
      "step": 847
    },
    {
      "epoch": 1.6121673003802282,
      "grad_norm": 0.6554387256904496,
      "learning_rate": 2.2083778638125796e-06,
      "loss": 0.274,
      "step": 848
    },
    {
      "epoch": 1.6140684410646386,
      "grad_norm": 0.7014285964200045,
      "learning_rate": 2.1876045832009694e-06,
      "loss": 0.27,
      "step": 849
    },
    {
      "epoch": 1.6159695817490496,
      "grad_norm": 0.6924990428725116,
      "learning_rate": 2.16691746171639e-06,
      "loss": 0.2904,
      "step": 850
    },
    {
      "epoch": 1.61787072243346,
      "grad_norm": 0.6916299465747496,
      "learning_rate": 2.1463167275070863e-06,
      "loss": 0.2791,
      "step": 851
    },
    {
      "epoch": 1.6197718631178706,
      "grad_norm": 0.6887309552468869,
      "learning_rate": 2.125802607768588e-06,
      "loss": 0.2705,
      "step": 852
    },
    {
      "epoch": 1.6216730038022815,
      "grad_norm": 0.706954336046035,
      "learning_rate": 2.1053753287411895e-06,
      "loss": 0.2775,
      "step": 853
    },
    {
      "epoch": 1.623574144486692,
      "grad_norm": 0.698665188650369,
      "learning_rate": 2.08503511570746e-06,
      "loss": 0.2847,
      "step": 854
    },
    {
      "epoch": 1.6254752851711025,
      "grad_norm": 0.6584186947310746,
      "learning_rate": 2.064782192989765e-06,
      "loss": 0.2686,
      "step": 855
    },
    {
      "epoch": 1.6273764258555135,
      "grad_norm": 0.6969233044708365,
      "learning_rate": 2.0446167839477815e-06,
      "loss": 0.2719,
      "step": 856
    },
    {
      "epoch": 1.629277566539924,
      "grad_norm": 0.7144170802033729,
      "learning_rate": 2.0245391109760437e-06,
      "loss": 0.285,
      "step": 857
    },
    {
      "epoch": 1.6311787072243344,
      "grad_norm": 0.6953686650834182,
      "learning_rate": 2.0045493955014915e-06,
      "loss": 0.2679,
      "step": 858
    },
    {
      "epoch": 1.6330798479087454,
      "grad_norm": 0.6990107420847762,
      "learning_rate": 1.984647857981017e-06,
      "loss": 0.2852,
      "step": 859
    },
    {
      "epoch": 1.6349809885931559,
      "grad_norm": 0.7331158331376239,
      "learning_rate": 1.96483471789904e-06,
      "loss": 0.2902,
      "step": 860
    },
    {
      "epoch": 1.6368821292775664,
      "grad_norm": 0.7131005917254934,
      "learning_rate": 1.9451101937650963e-06,
      "loss": 0.2816,
      "step": 861
    },
    {
      "epoch": 1.6387832699619773,
      "grad_norm": 0.7251317144048253,
      "learning_rate": 1.925474503111412e-06,
      "loss": 0.2874,
      "step": 862
    },
    {
      "epoch": 1.6406844106463878,
      "grad_norm": 0.7088245879322422,
      "learning_rate": 1.905927862490512e-06,
      "loss": 0.2954,
      "step": 863
    },
    {
      "epoch": 1.6425855513307985,
      "grad_norm": 0.701752862243773,
      "learning_rate": 1.8864704874728346e-06,
      "loss": 0.2734,
      "step": 864
    },
    {
      "epoch": 1.6444866920152093,
      "grad_norm": 0.7362344870791694,
      "learning_rate": 1.8671025926443464e-06,
      "loss": 0.2863,
      "step": 865
    },
    {
      "epoch": 1.6463878326996197,
      "grad_norm": 0.7021293700111677,
      "learning_rate": 1.8478243916041882e-06,
      "loss": 0.2716,
      "step": 866
    },
    {
      "epoch": 1.6482889733840305,
      "grad_norm": 0.7356801901576552,
      "learning_rate": 1.828636096962304e-06,
      "loss": 0.2746,
      "step": 867
    },
    {
      "epoch": 1.6501901140684412,
      "grad_norm": 0.6870830602302619,
      "learning_rate": 1.8095379203371044e-06,
      "loss": 0.2822,
      "step": 868
    },
    {
      "epoch": 1.6520912547528517,
      "grad_norm": 0.6979492816520024,
      "learning_rate": 1.7905300723531393e-06,
      "loss": 0.2754,
      "step": 869
    },
    {
      "epoch": 1.6539923954372624,
      "grad_norm": 0.7065837476677297,
      "learning_rate": 1.771612762638758e-06,
      "loss": 0.2816,
      "step": 870
    },
    {
      "epoch": 1.6558935361216731,
      "grad_norm": 0.7444380019012486,
      "learning_rate": 1.7527861998238094e-06,
      "loss": 0.2795,
      "step": 871
    },
    {
      "epoch": 1.6577946768060836,
      "grad_norm": 0.686869282932523,
      "learning_rate": 1.7340505915373495e-06,
      "loss": 0.2693,
      "step": 872
    },
    {
      "epoch": 1.6596958174904943,
      "grad_norm": 0.7112165686359406,
      "learning_rate": 1.7154061444053239e-06,
      "loss": 0.2863,
      "step": 873
    },
    {
      "epoch": 1.661596958174905,
      "grad_norm": 0.7040250455743816,
      "learning_rate": 1.6968530640483126e-06,
      "loss": 0.2837,
      "step": 874
    },
    {
      "epoch": 1.6634980988593155,
      "grad_norm": 0.7086799345623529,
      "learning_rate": 1.6783915550792652e-06,
      "loss": 0.276,
      "step": 875
    },
    {
      "epoch": 1.6653992395437263,
      "grad_norm": 0.7269400961962234,
      "learning_rate": 1.660021821101222e-06,
      "loss": 0.2903,
      "step": 876
    },
    {
      "epoch": 1.667300380228137,
      "grad_norm": 0.6533572900538692,
      "learning_rate": 1.6417440647050853e-06,
      "loss": 0.2695,
      "step": 877
    },
    {
      "epoch": 1.6692015209125475,
      "grad_norm": 0.7040682654486197,
      "learning_rate": 1.6235584874673848e-06,
      "loss": 0.2697,
      "step": 878
    },
    {
      "epoch": 1.6711026615969582,
      "grad_norm": 0.6967149920205667,
      "learning_rate": 1.6054652899480472e-06,
      "loss": 0.2625,
      "step": 879
    },
    {
      "epoch": 1.673003802281369,
      "grad_norm": 0.6683823969010846,
      "learning_rate": 1.587464671688187e-06,
      "loss": 0.2588,
      "step": 880
    },
    {
      "epoch": 1.6749049429657794,
      "grad_norm": 0.6988335123776637,
      "learning_rate": 1.5695568312079156e-06,
      "loss": 0.2752,
      "step": 881
    },
    {
      "epoch": 1.6768060836501901,
      "grad_norm": 0.7159400201149252,
      "learning_rate": 1.5517419660041277e-06,
      "loss": 0.2655,
      "step": 882
    },
    {
      "epoch": 1.6787072243346008,
      "grad_norm": 0.7191911719471372,
      "learning_rate": 1.534020272548349e-06,
      "loss": 0.2876,
      "step": 883
    },
    {
      "epoch": 1.6806083650190113,
      "grad_norm": 0.7199125152529207,
      "learning_rate": 1.5163919462845622e-06,
      "loss": 0.2826,
      "step": 884
    },
    {
      "epoch": 1.682509505703422,
      "grad_norm": 0.7246025443648172,
      "learning_rate": 1.4988571816270402e-06,
      "loss": 0.2934,
      "step": 885
    },
    {
      "epoch": 1.6844106463878328,
      "grad_norm": 0.7370028183061983,
      "learning_rate": 1.4814161719582132e-06,
      "loss": 0.2798,
      "step": 886
    },
    {
      "epoch": 1.6863117870722433,
      "grad_norm": 0.7266892496508501,
      "learning_rate": 1.4640691096265358e-06,
      "loss": 0.277,
      "step": 887
    },
    {
      "epoch": 1.688212927756654,
      "grad_norm": 0.6873680993908002,
      "learning_rate": 1.4468161859443609e-06,
      "loss": 0.2739,
      "step": 888
    },
    {
      "epoch": 1.6901140684410647,
      "grad_norm": 0.6703885359620109,
      "learning_rate": 1.4296575911858268e-06,
      "loss": 0.2592,
      "step": 889
    },
    {
      "epoch": 1.6920152091254752,
      "grad_norm": 0.6922176530568109,
      "learning_rate": 1.412593514584777e-06,
      "loss": 0.2772,
      "step": 890
    },
    {
      "epoch": 1.693916349809886,
      "grad_norm": 0.7288533894027804,
      "learning_rate": 1.3956241443326423e-06,
      "loss": 0.2781,
      "step": 891
    },
    {
      "epoch": 1.6958174904942966,
      "grad_norm": 0.7254147952557574,
      "learning_rate": 1.378749667576399e-06,
      "loss": 0.2868,
      "step": 892
    },
    {
      "epoch": 1.6977186311787071,
      "grad_norm": 0.678166963002887,
      "learning_rate": 1.3619702704164783e-06,
      "loss": 0.2708,
      "step": 893
    },
    {
      "epoch": 1.6996197718631179,
      "grad_norm": 0.7245499335550314,
      "learning_rate": 1.3452861379047289e-06,
      "loss": 0.2801,
      "step": 894
    },
    {
      "epoch": 1.7015209125475286,
      "grad_norm": 0.7022238999485703,
      "learning_rate": 1.3286974540423747e-06,
      "loss": 0.2758,
      "step": 895
    },
    {
      "epoch": 1.703422053231939,
      "grad_norm": 0.7200323997934382,
      "learning_rate": 1.3122044017779768e-06,
      "loss": 0.2873,
      "step": 896
    },
    {
      "epoch": 1.7053231939163498,
      "grad_norm": 0.6857166277134318,
      "learning_rate": 1.2958071630054214e-06,
      "loss": 0.2732,
      "step": 897
    },
    {
      "epoch": 1.7072243346007605,
      "grad_norm": 0.7163910985950618,
      "learning_rate": 1.279505918561923e-06,
      "loss": 0.2916,
      "step": 898
    },
    {
      "epoch": 1.709125475285171,
      "grad_norm": 0.7542787111966438,
      "learning_rate": 1.2633008482260146e-06,
      "loss": 0.2842,
      "step": 899
    },
    {
      "epoch": 1.7110266159695817,
      "grad_norm": 0.726413975854438,
      "learning_rate": 1.2471921307155655e-06,
      "loss": 0.2761,
      "step": 900
    },
    {
      "epoch": 1.7129277566539924,
      "grad_norm": 0.6987579133055972,
      "learning_rate": 1.2311799436858275e-06,
      "loss": 0.2799,
      "step": 901
    },
    {
      "epoch": 1.714828897338403,
      "grad_norm": 0.6885924970107223,
      "learning_rate": 1.2152644637274603e-06,
      "loss": 0.2684,
      "step": 902
    },
    {
      "epoch": 1.7167300380228137,
      "grad_norm": 0.6988132337614955,
      "learning_rate": 1.1994458663645836e-06,
      "loss": 0.2759,
      "step": 903
    },
    {
      "epoch": 1.7186311787072244,
      "grad_norm": 0.7309714586927603,
      "learning_rate": 1.1837243260528542e-06,
      "loss": 0.2751,
      "step": 904
    },
    {
      "epoch": 1.7205323193916349,
      "grad_norm": 0.76461243677071,
      "learning_rate": 1.168100016177528e-06,
      "loss": 0.2694,
      "step": 905
    },
    {
      "epoch": 1.7224334600760456,
      "grad_norm": 0.7742088444378231,
      "learning_rate": 1.1525731090515536e-06,
      "loss": 0.2799,
      "step": 906
    },
    {
      "epoch": 1.7243346007604563,
      "grad_norm": 0.7322199982287021,
      "learning_rate": 1.137143775913675e-06,
      "loss": 0.2767,
      "step": 907
    },
    {
      "epoch": 1.7262357414448668,
      "grad_norm": 0.6860769792038816,
      "learning_rate": 1.1218121869265365e-06,
      "loss": 0.2705,
      "step": 908
    },
    {
      "epoch": 1.7281368821292775,
      "grad_norm": 0.6448650288419475,
      "learning_rate": 1.1065785111748117e-06,
      "loss": 0.2606,
      "step": 909
    },
    {
      "epoch": 1.7300380228136882,
      "grad_norm": 0.6707110996002345,
      "learning_rate": 1.0914429166633355e-06,
      "loss": 0.2754,
      "step": 910
    },
    {
      "epoch": 1.7319391634980987,
      "grad_norm": 0.722794817855145,
      "learning_rate": 1.076405570315252e-06,
      "loss": 0.2795,
      "step": 911
    },
    {
      "epoch": 1.7338403041825095,
      "grad_norm": 0.709790203721322,
      "learning_rate": 1.0614666379701732e-06,
      "loss": 0.2777,
      "step": 912
    },
    {
      "epoch": 1.7357414448669202,
      "grad_norm": 0.7314261458203519,
      "learning_rate": 1.046626284382356e-06,
      "loss": 0.2755,
      "step": 913
    },
    {
      "epoch": 1.7376425855513307,
      "grad_norm": 0.7144173568357892,
      "learning_rate": 1.0318846732188737e-06,
      "loss": 0.2741,
      "step": 914
    },
    {
      "epoch": 1.7395437262357416,
      "grad_norm": 0.7091709277961338,
      "learning_rate": 1.017241967057816e-06,
      "loss": 0.2761,
      "step": 915
    },
    {
      "epoch": 1.741444866920152,
      "grad_norm": 0.6816043757736597,
      "learning_rate": 1.0026983273865055e-06,
      "loss": 0.274,
      "step": 916
    },
    {
      "epoch": 1.7433460076045626,
      "grad_norm": 0.6870586809823216,
      "learning_rate": 9.882539145997027e-07,
      "loss": 0.2749,
      "step": 917
    },
    {
      "epoch": 1.7452471482889735,
      "grad_norm": 0.7063464281048318,
      "learning_rate": 9.739088879978409e-07,
      "loss": 0.2775,
      "step": 918
    },
    {
      "epoch": 1.747148288973384,
      "grad_norm": 0.6836699610582385,
      "learning_rate": 9.59663405785277e-07,
      "loss": 0.2602,
      "step": 919
    },
    {
      "epoch": 1.7490494296577945,
      "grad_norm": 0.7176367294846602,
      "learning_rate": 9.455176250685338e-07,
      "loss": 0.2804,
      "step": 920
    },
    {
      "epoch": 1.7509505703422055,
      "grad_norm": 0.6884051954183766,
      "learning_rate": 9.314717018545838e-07,
      "loss": 0.2698,
      "step": 921
    },
    {
      "epoch": 1.752851711026616,
      "grad_norm": 0.6867023937088869,
      "learning_rate": 9.17525791049112e-07,
      "loss": 0.2575,
      "step": 922
    },
    {
      "epoch": 1.7547528517110265,
      "grad_norm": 0.6955579788978207,
      "learning_rate": 9.036800464548157e-07,
      "loss": 0.2624,
      "step": 923
    },
    {
      "epoch": 1.7566539923954374,
      "grad_norm": 0.7355570512216763,
      "learning_rate": 8.899346207697135e-07,
      "loss": 0.2762,
      "step": 924
    },
    {
      "epoch": 1.758555133079848,
      "grad_norm": 0.7214698905495001,
      "learning_rate": 8.762896655854481e-07,
      "loss": 0.2868,
      "step": 925
    },
    {
      "epoch": 1.7604562737642584,
      "grad_norm": 0.711700910954847,
      "learning_rate": 8.627453313856249e-07,
      "loss": 0.2709,
      "step": 926
    },
    {
      "epoch": 1.7623574144486693,
      "grad_norm": 0.6929007543937056,
      "learning_rate": 8.493017675441495e-07,
      "loss": 0.2775,
      "step": 927
    },
    {
      "epoch": 1.7642585551330798,
      "grad_norm": 0.723721391366007,
      "learning_rate": 8.359591223235785e-07,
      "loss": 0.2837,
      "step": 928
    },
    {
      "epoch": 1.7661596958174905,
      "grad_norm": 0.703758893010897,
      "learning_rate": 8.227175428734868e-07,
      "loss": 0.2791,
      "step": 929
    },
    {
      "epoch": 1.7680608365019013,
      "grad_norm": 0.7001962710879965,
      "learning_rate": 8.095771752288451e-07,
      "loss": 0.2671,
      "step": 930
    },
    {
      "epoch": 1.7699619771863118,
      "grad_norm": 0.781253006375267,
      "learning_rate": 7.965381643084069e-07,
      "loss": 0.2694,
      "step": 931
    },
    {
      "epoch": 1.7718631178707225,
      "grad_norm": 0.6852863424227412,
      "learning_rate": 7.83600653913108e-07,
      "loss": 0.2851,
      "step": 932
    },
    {
      "epoch": 1.7737642585551332,
      "grad_norm": 0.6758302354581138,
      "learning_rate": 7.707647867244927e-07,
      "loss": 0.2753,
      "step": 933
    },
    {
      "epoch": 1.7756653992395437,
      "grad_norm": 0.7442328586008942,
      "learning_rate": 7.580307043031232e-07,
      "loss": 0.2834,
      "step": 934
    },
    {
      "epoch": 1.7775665399239544,
      "grad_norm": 0.7032226140198627,
      "learning_rate": 7.453985470870284e-07,
      "loss": 0.2764,
      "step": 935
    },
    {
      "epoch": 1.7794676806083651,
      "grad_norm": 0.6860196036777416,
      "learning_rate": 7.328684543901598e-07,
      "loss": 0.2805,
      "step": 936
    },
    {
      "epoch": 1.7813688212927756,
      "grad_norm": 0.6888415995303877,
      "learning_rate": 7.204405644008416e-07,
      "loss": 0.2605,
      "step": 937
    },
    {
      "epoch": 1.7832699619771863,
      "grad_norm": 0.7035418458893344,
      "learning_rate": 7.081150141802518e-07,
      "loss": 0.2847,
      "step": 938
    },
    {
      "epoch": 1.785171102661597,
      "grad_norm": 0.6511714601457113,
      "learning_rate": 6.958919396609231e-07,
      "loss": 0.2719,
      "step": 939
    },
    {
      "epoch": 1.7870722433460076,
      "grad_norm": 0.7105385241153644,
      "learning_rate": 6.837714756452241e-07,
      "loss": 0.275,
      "step": 940
    },
    {
      "epoch": 1.7889733840304183,
      "grad_norm": 0.6944218324612568,
      "learning_rate": 6.717537558038845e-07,
      "loss": 0.2769,
      "step": 941
    },
    {
      "epoch": 1.790874524714829,
      "grad_norm": 0.715529180640508,
      "learning_rate": 6.598389126745209e-07,
      "loss": 0.2914,
      "step": 942
    },
    {
      "epoch": 1.7927756653992395,
      "grad_norm": 0.6868017579514627,
      "learning_rate": 6.480270776601682e-07,
      "loss": 0.2776,
      "step": 943
    },
    {
      "epoch": 1.7946768060836502,
      "grad_norm": 0.6805099573128464,
      "learning_rate": 6.36318381027835e-07,
      "loss": 0.2767,
      "step": 944
    },
    {
      "epoch": 1.796577946768061,
      "grad_norm": 0.6639962817866456,
      "learning_rate": 6.247129519070728e-07,
      "loss": 0.2652,
      "step": 945
    },
    {
      "epoch": 1.7984790874524714,
      "grad_norm": 0.6769414476687063,
      "learning_rate": 6.132109182885382e-07,
      "loss": 0.2833,
      "step": 946
    },
    {
      "epoch": 1.8003802281368821,
      "grad_norm": 0.6563560713528813,
      "learning_rate": 6.018124070225928e-07,
      "loss": 0.2802,
      "step": 947
    },
    {
      "epoch": 1.8022813688212929,
      "grad_norm": 0.6425101989048673,
      "learning_rate": 5.905175438178979e-07,
      "loss": 0.2744,
      "step": 948
    },
    {
      "epoch": 1.8041825095057034,
      "grad_norm": 0.7010609348778838,
      "learning_rate": 5.793264532400311e-07,
      "loss": 0.2775,
      "step": 949
    },
    {
      "epoch": 1.806083650190114,
      "grad_norm": 0.6949554370002154,
      "learning_rate": 5.68239258710116e-07,
      "loss": 0.271,
      "step": 950
    },
    {
      "epoch": 1.8079847908745248,
      "grad_norm": 0.6771239362569724,
      "learning_rate": 5.572560825034523e-07,
      "loss": 0.2763,
      "step": 951
    },
    {
      "epoch": 1.8098859315589353,
      "grad_norm": 0.7246258021509412,
      "learning_rate": 5.463770457481732e-07,
      "loss": 0.2862,
      "step": 952
    },
    {
      "epoch": 1.811787072243346,
      "grad_norm": 0.6931117078118749,
      "learning_rate": 5.35602268423906e-07,
      "loss": 0.2751,
      "step": 953
    },
    {
      "epoch": 1.8136882129277567,
      "grad_norm": 1.5629788121151869,
      "learning_rate": 5.249318693604577e-07,
      "loss": 0.294,
      "step": 954
    },
    {
      "epoch": 1.8155893536121672,
      "grad_norm": 0.7245605831996964,
      "learning_rate": 5.143659662364931e-07,
      "loss": 0.2952,
      "step": 955
    },
    {
      "epoch": 1.817490494296578,
      "grad_norm": 0.8273796207350173,
      "learning_rate": 5.039046755782417e-07,
      "loss": 0.2782,
      "step": 956
    },
    {
      "epoch": 1.8193916349809887,
      "grad_norm": 0.6605671600196621,
      "learning_rate": 4.935481127582131e-07,
      "loss": 0.2558,
      "step": 957
    },
    {
      "epoch": 1.8212927756653992,
      "grad_norm": 0.7049666552091358,
      "learning_rate": 4.83296391993926e-07,
      "loss": 0.2887,
      "step": 958
    },
    {
      "epoch": 1.8231939163498099,
      "grad_norm": 0.6683582082239524,
      "learning_rate": 4.7314962634664616e-07,
      "loss": 0.2678,
      "step": 959
    },
    {
      "epoch": 1.8250950570342206,
      "grad_norm": 0.666511372401851,
      "learning_rate": 4.631079277201389e-07,
      "loss": 0.2684,
      "step": 960
    },
    {
      "epoch": 1.826996197718631,
      "grad_norm": 0.713076805831552,
      "learning_rate": 4.5317140685943726e-07,
      "loss": 0.2822,
      "step": 961
    },
    {
      "epoch": 1.8288973384030418,
      "grad_norm": 0.6840380305959163,
      "learning_rate": 4.433401733496201e-07,
      "loss": 0.2746,
      "step": 962
    },
    {
      "epoch": 1.8307984790874525,
      "grad_norm": 0.6861255531559266,
      "learning_rate": 4.3361433561460274e-07,
      "loss": 0.2835,
      "step": 963
    },
    {
      "epoch": 1.832699619771863,
      "grad_norm": 0.7108197439095701,
      "learning_rate": 4.2399400091594154e-07,
      "loss": 0.2749,
      "step": 964
    },
    {
      "epoch": 1.8346007604562737,
      "grad_norm": 0.6443379601457143,
      "learning_rate": 4.14479275351648e-07,
      "loss": 0.2629,
      "step": 965
    },
    {
      "epoch": 1.8365019011406845,
      "grad_norm": 0.656392467160194,
      "learning_rate": 4.0507026385502747e-07,
      "loss": 0.2754,
      "step": 966
    },
    {
      "epoch": 1.838403041825095,
      "grad_norm": 0.6815368148193814,
      "learning_rate": 3.9576707019350903e-07,
      "loss": 0.2783,
      "step": 967
    },
    {
      "epoch": 1.8403041825095057,
      "grad_norm": 0.7310484437764095,
      "learning_rate": 3.865697969675164e-07,
      "loss": 0.2785,
      "step": 968
    },
    {
      "epoch": 1.8422053231939164,
      "grad_norm": 0.7053270490582717,
      "learning_rate": 3.7747854560931996e-07,
      "loss": 0.2791,
      "step": 969
    },
    {
      "epoch": 1.8441064638783269,
      "grad_norm": 0.6834121858358295,
      "learning_rate": 3.684934163819309e-07,
      "loss": 0.2819,
      "step": 970
    },
    {
      "epoch": 1.8460076045627376,
      "grad_norm": 0.6587444216756789,
      "learning_rate": 3.596145083779912e-07,
      "loss": 0.2697,
      "step": 971
    },
    {
      "epoch": 1.8479087452471483,
      "grad_norm": 0.668894674972671,
      "learning_rate": 3.508419195186774e-07,
      "loss": 0.2604,
      "step": 972
    },
    {
      "epoch": 1.8498098859315588,
      "grad_norm": 0.7078173054603617,
      "learning_rate": 3.421757465526243e-07,
      "loss": 0.2787,
      "step": 973
    },
    {
      "epoch": 1.8517110266159695,
      "grad_norm": 0.7042714991904963,
      "learning_rate": 3.33616085054862e-07,
      "loss": 0.2745,
      "step": 974
    },
    {
      "epoch": 1.8536121673003803,
      "grad_norm": 0.7054790800557664,
      "learning_rate": 3.2516302942574794e-07,
      "loss": 0.2837,
      "step": 975
    },
    {
      "epoch": 1.8555133079847907,
      "grad_norm": 0.6946526683774901,
      "learning_rate": 3.1681667288994353e-07,
      "loss": 0.286,
      "step": 976
    },
    {
      "epoch": 1.8574144486692015,
      "grad_norm": 0.672029880394973,
      "learning_rate": 3.0857710749537585e-07,
      "loss": 0.2745,
      "step": 977
    },
    {
      "epoch": 1.8593155893536122,
      "grad_norm": 0.6453329118204936,
      "learning_rate": 3.0044442411222066e-07,
      "loss": 0.2669,
      "step": 978
    },
    {
      "epoch": 1.8612167300380227,
      "grad_norm": 0.6817068127909786,
      "learning_rate": 2.9241871243190555e-07,
      "loss": 0.2743,
      "step": 979
    },
    {
      "epoch": 1.8631178707224336,
      "grad_norm": 0.6781979485553478,
      "learning_rate": 2.845000609661208e-07,
      "loss": 0.2784,
      "step": 980
    },
    {
      "epoch": 1.8650190114068441,
      "grad_norm": 0.6777472732815648,
      "learning_rate": 2.7668855704583997e-07,
      "loss": 0.2697,
      "step": 981
    },
    {
      "epoch": 1.8669201520912546,
      "grad_norm": 0.6600893491559898,
      "learning_rate": 2.689842868203563e-07,
      "loss": 0.2632,
      "step": 982
    },
    {
      "epoch": 1.8688212927756656,
      "grad_norm": 0.6848691236860588,
      "learning_rate": 2.6138733525633896e-07,
      "loss": 0.271,
      "step": 983
    },
    {
      "epoch": 1.870722433460076,
      "grad_norm": 0.713711477238897,
      "learning_rate": 2.5389778613688744e-07,
      "loss": 0.2853,
      "step": 984
    },
    {
      "epoch": 1.8726235741444865,
      "grad_norm": 0.6712765682266242,
      "learning_rate": 2.46515722060614e-07,
      "loss": 0.2729,
      "step": 985
    },
    {
      "epoch": 1.8745247148288975,
      "grad_norm": 0.6492014723950595,
      "learning_rate": 2.392412244407294e-07,
      "loss": 0.2547,
      "step": 986
    },
    {
      "epoch": 1.876425855513308,
      "grad_norm": 0.6821474827315953,
      "learning_rate": 2.3207437350414418e-07,
      "loss": 0.2722,
      "step": 987
    },
    {
      "epoch": 1.8783269961977185,
      "grad_norm": 0.6935198347935035,
      "learning_rate": 2.2501524829059208e-07,
      "loss": 0.2811,
      "step": 988
    },
    {
      "epoch": 1.8802281368821294,
      "grad_norm": 0.7166980434502301,
      "learning_rate": 2.180639266517448e-07,
      "loss": 0.2757,
      "step": 989
    },
    {
      "epoch": 1.88212927756654,
      "grad_norm": 0.6994391420116437,
      "learning_rate": 2.1122048525036409e-07,
      "loss": 0.2789,
      "step": 990
    },
    {
      "epoch": 1.8840304182509504,
      "grad_norm": 0.7312907428692164,
      "learning_rate": 2.0448499955945223e-07,
      "loss": 0.2791,
      "step": 991
    },
    {
      "epoch": 1.8859315589353614,
      "grad_norm": 0.710070025719305,
      "learning_rate": 1.9785754386142164e-07,
      "loss": 0.2813,
      "step": 992
    },
    {
      "epoch": 1.8878326996197718,
      "grad_norm": 0.668768603271782,
      "learning_rate": 1.9133819124727003e-07,
      "loss": 0.2757,
      "step": 993
    },
    {
      "epoch": 1.8897338403041823,
      "grad_norm": 0.7583087524168939,
      "learning_rate": 1.8492701361578326e-07,
      "loss": 0.2844,
      "step": 994
    },
    {
      "epoch": 1.8916349809885933,
      "grad_norm": 0.6935207627941306,
      "learning_rate": 1.7862408167273472e-07,
      "loss": 0.2924,
      "step": 995
    },
    {
      "epoch": 1.8935361216730038,
      "grad_norm": 0.7123814823636084,
      "learning_rate": 1.724294649301095e-07,
      "loss": 0.2807,
      "step": 996
    },
    {
      "epoch": 1.8954372623574145,
      "grad_norm": 0.6968075678141445,
      "learning_rate": 1.6634323170533928e-07,
      "loss": 0.2787,
      "step": 997
    },
    {
      "epoch": 1.8973384030418252,
      "grad_norm": 0.6720523523906995,
      "learning_rate": 1.6036544912054087e-07,
      "loss": 0.271,
      "step": 998
    },
    {
      "epoch": 1.8992395437262357,
      "grad_norm": 0.7150382906764732,
      "learning_rate": 1.544961831017855e-07,
      "loss": 0.2881,
      "step": 999
    },
    {
      "epoch": 1.9011406844106464,
      "grad_norm": 0.7261200240290979,
      "learning_rate": 1.487354983783673e-07,
      "loss": 0.2798,
      "step": 1000
    },
    {
      "epoch": 1.9030418250950571,
      "grad_norm": 0.6801206319161398,
      "learning_rate": 1.430834584820895e-07,
      "loss": 0.2761,
      "step": 1001
    },
    {
      "epoch": 1.9049429657794676,
      "grad_norm": 0.6928480609200753,
      "learning_rate": 1.375401257465625e-07,
      "loss": 0.2798,
      "step": 1002
    },
    {
      "epoch": 1.9068441064638784,
      "grad_norm": 0.7066673420549063,
      "learning_rate": 1.3210556130652031e-07,
      "loss": 0.2753,
      "step": 1003
    },
    {
      "epoch": 1.908745247148289,
      "grad_norm": 0.6938024176715012,
      "learning_rate": 1.2677982509714415e-07,
      "loss": 0.2696,
      "step": 1004
    },
    {
      "epoch": 1.9106463878326996,
      "grad_norm": 0.6832162725081329,
      "learning_rate": 1.2156297585339872e-07,
      "loss": 0.2702,
      "step": 1005
    },
    {
      "epoch": 1.9125475285171103,
      "grad_norm": 0.6868544179685439,
      "learning_rate": 1.1645507110938925e-07,
      "loss": 0.2759,
      "step": 1006
    },
    {
      "epoch": 1.914448669201521,
      "grad_norm": 0.7009678907368356,
      "learning_rate": 1.1145616719772545e-07,
      "loss": 0.2818,
      "step": 1007
    },
    {
      "epoch": 1.9163498098859315,
      "grad_norm": 0.6906500283728016,
      "learning_rate": 1.0656631924889749e-07,
      "loss": 0.2781,
      "step": 1008
    },
    {
      "epoch": 1.9182509505703422,
      "grad_norm": 0.7178300123778346,
      "learning_rate": 1.0178558119067316e-07,
      "loss": 0.2766,
      "step": 1009
    },
    {
      "epoch": 1.920152091254753,
      "grad_norm": 0.6780159664421509,
      "learning_rate": 9.711400574749507e-08,
      "loss": 0.265,
      "step": 1010
    },
    {
      "epoch": 1.9220532319391634,
      "grad_norm": 0.6496202474796194,
      "learning_rate": 9.255164443990994e-08,
      "loss": 0.2748,
      "step": 1011
    },
    {
      "epoch": 1.9239543726235742,
      "grad_norm": 0.7352653366632927,
      "learning_rate": 8.809854758399017e-08,
      "loss": 0.2752,
      "step": 1012
    },
    {
      "epoch": 1.9258555133079849,
      "grad_norm": 0.6820168331591996,
      "learning_rate": 8.375476429078543e-08,
      "loss": 0.2903,
      "step": 1013
    },
    {
      "epoch": 1.9277566539923954,
      "grad_norm": 0.6685324652949994,
      "learning_rate": 7.952034246577977e-08,
      "loss": 0.2707,
      "step": 1014
    },
    {
      "epoch": 1.929657794676806,
      "grad_norm": 0.6938832969532969,
      "learning_rate": 7.539532880836087e-08,
      "loss": 0.2821,
      "step": 1015
    },
    {
      "epoch": 1.9315589353612168,
      "grad_norm": 0.6434289753672234,
      "learning_rate": 7.137976881130826e-08,
      "loss": 0.2655,
      "step": 1016
    },
    {
      "epoch": 1.9334600760456273,
      "grad_norm": 0.7086701935734794,
      "learning_rate": 6.747370676028819e-08,
      "loss": 0.2704,
      "step": 1017
    },
    {
      "epoch": 1.935361216730038,
      "grad_norm": 0.7360043690034902,
      "learning_rate": 6.367718573336845e-08,
      "loss": 0.2794,
      "step": 1018
    },
    {
      "epoch": 1.9372623574144487,
      "grad_norm": 0.7306312833478155,
      "learning_rate": 5.999024760054095e-08,
      "loss": 0.2775,
      "step": 1019
    },
    {
      "epoch": 1.9391634980988592,
      "grad_norm": 0.6660644821121462,
      "learning_rate": 5.641293302326323e-08,
      "loss": 0.2709,
      "step": 1020
    },
    {
      "epoch": 1.94106463878327,
      "grad_norm": 0.6685425836634464,
      "learning_rate": 5.2945281454003236e-08,
      "loss": 0.2791,
      "step": 1021
    },
    {
      "epoch": 1.9429657794676807,
      "grad_norm": 0.6848757248963188,
      "learning_rate": 4.958733113581415e-08,
      "loss": 0.2758,
      "step": 1022
    },
    {
      "epoch": 1.9448669201520912,
      "grad_norm": 0.7092741245479027,
      "learning_rate": 4.6339119101902475e-08,
      "loss": 0.269,
      "step": 1023
    },
    {
      "epoch": 1.946768060836502,
      "grad_norm": 0.6752088458139953,
      "learning_rate": 4.320068117522835e-08,
      "loss": 0.2813,
      "step": 1024
    },
    {
      "epoch": 1.9486692015209126,
      "grad_norm": 0.6822127675124053,
      "learning_rate": 4.0172051968101474e-08,
      "loss": 0.277,
      "step": 1025
    },
    {
      "epoch": 1.950570342205323,
      "grad_norm": 0.6551400841235119,
      "learning_rate": 3.7253264881809137e-08,
      "loss": 0.2634,
      "step": 1026
    },
    {
      "epoch": 1.9524714828897338,
      "grad_norm": 0.6589303753374033,
      "learning_rate": 3.4444352106242086e-08,
      "loss": 0.2632,
      "step": 1027
    },
    {
      "epoch": 1.9543726235741445,
      "grad_norm": 0.6674874277029278,
      "learning_rate": 3.174534461953593e-08,
      "loss": 0.2662,
      "step": 1028
    },
    {
      "epoch": 1.956273764258555,
      "grad_norm": 0.7391127060114976,
      "learning_rate": 2.915627218774142e-08,
      "loss": 0.267,
      "step": 1029
    },
    {
      "epoch": 1.9581749049429658,
      "grad_norm": 0.713977297670967,
      "learning_rate": 2.667716336448356e-08,
      "loss": 0.2827,
      "step": 1030
    },
    {
      "epoch": 1.9600760456273765,
      "grad_norm": 0.668278900879979,
      "learning_rate": 2.430804549065302e-08,
      "loss": 0.2664,
      "step": 1031
    },
    {
      "epoch": 1.961977186311787,
      "grad_norm": 0.6680015728275445,
      "learning_rate": 2.2048944694104123e-08,
      "loss": 0.2657,
      "step": 1032
    },
    {
      "epoch": 1.9638783269961977,
      "grad_norm": 0.7243982885229068,
      "learning_rate": 1.989988588936509e-08,
      "loss": 0.2933,
      "step": 1033
    },
    {
      "epoch": 1.9657794676806084,
      "grad_norm": 0.6947306471412228,
      "learning_rate": 1.7860892777367133e-08,
      "loss": 0.2675,
      "step": 1034
    },
    {
      "epoch": 1.967680608365019,
      "grad_norm": 0.6625040156740044,
      "learning_rate": 1.5931987845176912e-08,
      "loss": 0.2588,
      "step": 1035
    },
    {
      "epoch": 1.9695817490494296,
      "grad_norm": 0.6748019757125813,
      "learning_rate": 1.411319236575337e-08,
      "loss": 0.2701,
      "step": 1036
    },
    {
      "epoch": 1.9714828897338403,
      "grad_norm": 0.7437194669718353,
      "learning_rate": 1.2404526397711281e-08,
      "loss": 0.2904,
      "step": 1037
    },
    {
      "epoch": 1.9733840304182508,
      "grad_norm": 0.6601772381612543,
      "learning_rate": 1.0806008785100297e-08,
      "loss": 0.2644,
      "step": 1038
    },
    {
      "epoch": 1.9752851711026616,
      "grad_norm": 0.7614110492574241,
      "learning_rate": 9.317657157197347e-09,
      "loss": 0.2899,
      "step": 1039
    },
    {
      "epoch": 1.9771863117870723,
      "grad_norm": 0.7584653405527474,
      "learning_rate": 7.93948792831234e-09,
      "loss": 0.2845,
      "step": 1040
    },
    {
      "epoch": 1.9790874524714828,
      "grad_norm": 0.707319769410083,
      "learning_rate": 6.671516297606095e-09,
      "loss": 0.2767,
      "step": 1041
    },
    {
      "epoch": 1.9809885931558935,
      "grad_norm": 0.7191158662108926,
      "learning_rate": 5.513756248924917e-09,
      "loss": 0.2665,
      "step": 1042
    },
    {
      "epoch": 1.9828897338403042,
      "grad_norm": 0.6804447955325379,
      "learning_rate": 4.466220550641831e-09,
      "loss": 0.267,
      "step": 1043
    },
    {
      "epoch": 1.9847908745247147,
      "grad_norm": 0.707744828388341,
      "learning_rate": 3.528920755523357e-09,
      "loss": 0.281,
      "step": 1044
    },
    {
      "epoch": 1.9866920152091256,
      "grad_norm": 0.7064534939100834,
      "learning_rate": 2.701867200592956e-09,
      "loss": 0.2781,
      "step": 1045
    },
    {
      "epoch": 1.9885931558935361,
      "grad_norm": 0.6585852925605958,
      "learning_rate": 1.9850690070266633e-09,
      "loss": 0.2742,
      "step": 1046
    },
    {
      "epoch": 1.9904942965779466,
      "grad_norm": 0.692491061678846,
      "learning_rate": 1.378534080042071e-09,
      "loss": 0.2748,
      "step": 1047
    },
    {
      "epoch": 1.9923954372623576,
      "grad_norm": 0.7017954683716567,
      "learning_rate": 8.822691088195001e-10,
      "loss": 0.2717,
      "step": 1048
    },
    {
      "epoch": 1.994296577946768,
      "grad_norm": 0.6978590940080142,
      "learning_rate": 4.962795664265052e-10,
      "loss": 0.268,
      "step": 1049
    },
    {
      "epoch": 1.9961977186311786,
      "grad_norm": 0.6984542211561577,
      "learning_rate": 2.2056970975459223e-10,
      "loss": 0.266,
      "step": 1050
    },
    {
      "epoch": 1.9980988593155895,
      "grad_norm": 0.7222912986809148,
      "learning_rate": 5.514257947369928e-11,
      "loss": 0.2866,
      "step": 1051
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.6501804000315491,
      "learning_rate": 0.0,
      "loss": 0.2698,
      "step": 1052
    },
    {
      "epoch": 2.0,
      "step": 1052,
      "total_flos": 100181286666240.0,
      "train_loss": 0.36181722887341966,
      "train_runtime": 3413.9469,
      "train_samples_per_second": 39.443,
      "train_steps_per_second": 0.308
    }
  ],
  "logging_steps": 1,
  "max_steps": 1052,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 100181286666240.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}