{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1052,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019011406844106464,
      "grad_norm": 0.03668688377117645,
      "learning_rate": 1.886792452830189e-07,
      "loss": 0.7196,
      "step": 1
    },
    {
      "epoch": 0.0038022813688212928,
      "grad_norm": 0.035952567213973156,
      "learning_rate": 3.773584905660378e-07,
      "loss": 0.6543,
      "step": 2
    },
    {
      "epoch": 0.005703422053231939,
      "grad_norm": 0.034396901240204815,
      "learning_rate": 5.660377358490567e-07,
      "loss": 0.6342,
      "step": 3
    },
    {
      "epoch": 0.0076045627376425855,
      "grad_norm": 0.03101352011559978,
      "learning_rate": 7.547169811320755e-07,
      "loss": 0.5223,
      "step": 4
    },
    {
      "epoch": 0.009505703422053232,
      "grad_norm": 0.036575726773312375,
      "learning_rate": 9.433962264150944e-07,
      "loss": 0.6899,
      "step": 5
    },
    {
      "epoch": 0.011406844106463879,
      "grad_norm": 0.033897651666239606,
      "learning_rate": 1.1320754716981133e-06,
      "loss": 0.6142,
      "step": 6
    },
    {
      "epoch": 0.013307984790874524,
      "grad_norm": 0.033700806599582314,
      "learning_rate": 1.3207547169811322e-06,
      "loss": 0.5715,
      "step": 7
    },
    {
      "epoch": 0.015209125475285171,
      "grad_norm": 0.030982637040042928,
      "learning_rate": 1.509433962264151e-06,
      "loss": 0.5732,
      "step": 8
    },
    {
      "epoch": 0.017110266159695818,
      "grad_norm": 0.03614218285927396,
      "learning_rate": 1.6981132075471698e-06,
      "loss": 0.6487,
      "step": 9
    },
    {
      "epoch": 0.019011406844106463,
      "grad_norm": 0.03291005003403244,
      "learning_rate": 1.8867924528301889e-06,
      "loss": 0.599,
      "step": 10
    },
    {
      "epoch": 0.02091254752851711,
      "grad_norm": 0.03524509129630007,
      "learning_rate": 2.075471698113208e-06,
      "loss": 0.6225,
      "step": 11
    },
    {
      "epoch": 0.022813688212927757,
      "grad_norm": 0.030669610415498205,
      "learning_rate": 2.2641509433962266e-06,
      "loss": 0.5753,
      "step": 12
    },
    {
      "epoch": 0.024714828897338403,
      "grad_norm": 0.03687063960415838,
      "learning_rate": 2.4528301886792453e-06,
      "loss": 0.6895,
      "step": 13
    },
    {
      "epoch": 0.026615969581749048,
      "grad_norm": 0.03611941017678787,
      "learning_rate": 2.6415094339622644e-06,
      "loss": 0.6264,
      "step": 14
    },
    {
      "epoch": 0.028517110266159697,
      "grad_norm": 0.03401456315750716,
      "learning_rate": 2.830188679245283e-06,
      "loss": 0.5686,
      "step": 15
    },
    {
      "epoch": 0.030418250950570342,
      "grad_norm": 0.03419637601617932,
      "learning_rate": 3.018867924528302e-06,
      "loss": 0.608,
      "step": 16
    },
    {
      "epoch": 0.03231939163498099,
      "grad_norm": 0.03584287912078038,
      "learning_rate": 3.207547169811321e-06,
      "loss": 0.5813,
      "step": 17
    },
    {
      "epoch": 0.034220532319391636,
      "grad_norm": 0.03615992890750685,
      "learning_rate": 3.3962264150943395e-06,
      "loss": 0.5755,
      "step": 18
    },
    {
      "epoch": 0.03612167300380228,
      "grad_norm": 0.03110134121928749,
      "learning_rate": 3.5849056603773586e-06,
      "loss": 0.5446,
      "step": 19
    },
    {
      "epoch": 0.03802281368821293,
      "grad_norm": 0.03232315708906836,
      "learning_rate": 3.7735849056603777e-06,
      "loss": 0.5658,
      "step": 20
    },
    {
      "epoch": 0.039923954372623575,
      "grad_norm": 0.03924214823765209,
      "learning_rate": 3.962264150943396e-06,
      "loss": 0.708,
      "step": 21
    },
    {
      "epoch": 0.04182509505703422,
      "grad_norm": 0.03682731607691239,
      "learning_rate": 4.150943396226416e-06,
      "loss": 0.5808,
      "step": 22
    },
    {
      "epoch": 0.043726235741444866,
      "grad_norm": 0.030617107823517682,
      "learning_rate": 4.339622641509435e-06,
      "loss": 0.4551,
      "step": 23
    },
    {
      "epoch": 0.045627376425855515,
      "grad_norm": 0.033812282631185166,
      "learning_rate": 4.528301886792453e-06,
      "loss": 0.5435,
      "step": 24
    },
    {
      "epoch": 0.04752851711026616,
      "grad_norm": 0.034386356534444155,
      "learning_rate": 4.716981132075472e-06,
      "loss": 0.5295,
      "step": 25
    },
    {
      "epoch": 0.049429657794676805,
      "grad_norm": 0.0356182441226142,
      "learning_rate": 4.905660377358491e-06,
      "loss": 0.558,
      "step": 26
    },
    {
      "epoch": 0.051330798479087454,
      "grad_norm": 0.036577927998350704,
      "learning_rate": 5.09433962264151e-06,
      "loss": 0.5265,
      "step": 27
    },
    {
      "epoch": 0.053231939163498096,
      "grad_norm": 0.03304001445051265,
      "learning_rate": 5.283018867924529e-06,
      "loss": 0.4835,
      "step": 28
    },
    {
      "epoch": 0.055133079847908745,
      "grad_norm": 0.04201451567231352,
      "learning_rate": 5.4716981132075475e-06,
      "loss": 0.6623,
      "step": 29
    },
    {
      "epoch": 0.057034220532319393,
      "grad_norm": 0.04247369627455591,
      "learning_rate": 5.660377358490566e-06,
      "loss": 0.6387,
      "step": 30
    },
    {
      "epoch": 0.058935361216730035,
      "grad_norm": 0.038223344796174466,
      "learning_rate": 5.849056603773585e-06,
      "loss": 0.575,
      "step": 31
    },
    {
      "epoch": 0.060836501901140684,
      "grad_norm": 0.038744694247938374,
      "learning_rate": 6.037735849056604e-06,
      "loss": 0.543,
      "step": 32
    },
    {
      "epoch": 0.06273764258555133,
      "grad_norm": 0.04824408233555485,
      "learning_rate": 6.226415094339623e-06,
      "loss": 0.7119,
      "step": 33
    },
    {
      "epoch": 0.06463878326996197,
      "grad_norm": 0.04187197162423142,
      "learning_rate": 6.415094339622642e-06,
      "loss": 0.6076,
      "step": 34
    },
    {
      "epoch": 0.06653992395437262,
      "grad_norm": 0.0389443389521866,
      "learning_rate": 6.60377358490566e-06,
      "loss": 0.5593,
      "step": 35
    },
    {
      "epoch": 0.06844106463878327,
      "grad_norm": 0.04670007654518919,
      "learning_rate": 6.792452830188679e-06,
      "loss": 0.6212,
      "step": 36
    },
    {
      "epoch": 0.07034220532319392,
      "grad_norm": 0.03685730922340025,
      "learning_rate": 6.981132075471699e-06,
      "loss": 0.5979,
      "step": 37
    },
    {
      "epoch": 0.07224334600760456,
      "grad_norm": 0.04520087956267333,
      "learning_rate": 7.169811320754717e-06,
      "loss": 0.6699,
      "step": 38
    },
    {
      "epoch": 0.0741444866920152,
      "grad_norm": 0.042901948849604715,
      "learning_rate": 7.358490566037736e-06,
      "loss": 0.6285,
      "step": 39
    },
    {
      "epoch": 0.07604562737642585,
      "grad_norm": 0.04422418983311354,
      "learning_rate": 7.5471698113207555e-06,
      "loss": 0.6251,
      "step": 40
    },
    {
      "epoch": 0.0779467680608365,
      "grad_norm": 0.035707194488757074,
      "learning_rate": 7.735849056603775e-06,
      "loss": 0.5684,
      "step": 41
    },
    {
      "epoch": 0.07984790874524715,
      "grad_norm": 0.03523812130796158,
      "learning_rate": 7.924528301886793e-06,
      "loss": 0.5295,
      "step": 42
    },
    {
      "epoch": 0.0817490494296578,
      "grad_norm": 0.04005409994339303,
      "learning_rate": 8.113207547169812e-06,
      "loss": 0.6207,
      "step": 43
    },
    {
      "epoch": 0.08365019011406843,
      "grad_norm": 0.03875008979754778,
      "learning_rate": 8.301886792452832e-06,
      "loss": 0.6121,
      "step": 44
    },
    {
      "epoch": 0.08555133079847908,
      "grad_norm": 0.03192147884997576,
      "learning_rate": 8.49056603773585e-06,
      "loss": 0.4839,
      "step": 45
    },
    {
      "epoch": 0.08745247148288973,
      "grad_norm": 0.035931672099034556,
      "learning_rate": 8.67924528301887e-06,
      "loss": 0.5836,
      "step": 46
    },
    {
      "epoch": 0.08935361216730038,
      "grad_norm": 0.036619233293086306,
      "learning_rate": 8.867924528301887e-06,
      "loss": 0.5626,
      "step": 47
    },
    {
      "epoch": 0.09125475285171103,
      "grad_norm": 0.035039592869265425,
      "learning_rate": 9.056603773584907e-06,
      "loss": 0.5583,
      "step": 48
    },
    {
      "epoch": 0.09315589353612168,
      "grad_norm": 0.034224086735969404,
      "learning_rate": 9.245283018867926e-06,
      "loss": 0.566,
      "step": 49
    },
    {
      "epoch": 0.09505703422053231,
      "grad_norm": 0.03240395449368507,
      "learning_rate": 9.433962264150944e-06,
      "loss": 0.5183,
      "step": 50
    },
    {
      "epoch": 0.09695817490494296,
      "grad_norm": 0.033078153570674076,
      "learning_rate": 9.622641509433963e-06,
      "loss": 0.4964,
      "step": 51
    },
    {
      "epoch": 0.09885931558935361,
      "grad_norm": 0.03042799523677292,
      "learning_rate": 9.811320754716981e-06,
      "loss": 0.5534,
      "step": 52
    },
    {
      "epoch": 0.10076045627376426,
      "grad_norm": 0.03836324861482314,
      "learning_rate": 1e-05,
      "loss": 0.6341,
      "step": 53
    },
    {
      "epoch": 0.10266159695817491,
      "grad_norm": 0.033942069333683274,
      "learning_rate": 1.018867924528302e-05,
      "loss": 0.6775,
      "step": 54
    },
    {
      "epoch": 0.10456273764258556,
      "grad_norm": 0.025887503952264527,
      "learning_rate": 1.0377358490566038e-05,
      "loss": 0.4627,
      "step": 55
    },
    {
      "epoch": 0.10646387832699619,
      "grad_norm": 0.03394457148243923,
      "learning_rate": 1.0566037735849058e-05,
      "loss": 0.6204,
      "step": 56
    },
    {
      "epoch": 0.10836501901140684,
      "grad_norm": 0.028338427146352457,
      "learning_rate": 1.0754716981132076e-05,
      "loss": 0.4382,
      "step": 57
    },
    {
      "epoch": 0.11026615969581749,
      "grad_norm": 0.027333559713593924,
      "learning_rate": 1.0943396226415095e-05,
      "loss": 0.4175,
      "step": 58
    },
    {
      "epoch": 0.11216730038022814,
      "grad_norm": 0.03359787126455066,
      "learning_rate": 1.1132075471698115e-05,
      "loss": 0.6217,
      "step": 59
    },
    {
      "epoch": 0.11406844106463879,
      "grad_norm": 0.03025080325985568,
      "learning_rate": 1.1320754716981132e-05,
      "loss": 0.4926,
      "step": 60
    },
    {
      "epoch": 0.11596958174904944,
      "grad_norm": 0.0309218336145624,
      "learning_rate": 1.1509433962264152e-05,
      "loss": 0.4542,
      "step": 61
    },
    {
      "epoch": 0.11787072243346007,
      "grad_norm": 0.02984510262256353,
      "learning_rate": 1.169811320754717e-05,
      "loss": 0.4389,
      "step": 62
    },
    {
      "epoch": 0.11977186311787072,
      "grad_norm": 0.033556526158935,
      "learning_rate": 1.188679245283019e-05,
      "loss": 0.5732,
      "step": 63
    },
    {
      "epoch": 0.12167300380228137,
      "grad_norm": 0.0284281501030829,
      "learning_rate": 1.2075471698113209e-05,
      "loss": 0.4243,
      "step": 64
    },
    {
      "epoch": 0.12357414448669202,
      "grad_norm": 0.03174232497744746,
      "learning_rate": 1.2264150943396227e-05,
      "loss": 0.5259,
      "step": 65
    },
    {
      "epoch": 0.12547528517110265,
      "grad_norm": 0.02833552096705856,
      "learning_rate": 1.2452830188679246e-05,
      "loss": 0.5491,
      "step": 66
    },
    {
      "epoch": 0.12737642585551331,
      "grad_norm": 0.028611836078715383,
      "learning_rate": 1.2641509433962264e-05,
      "loss": 0.6006,
      "step": 67
    },
    {
      "epoch": 0.12927756653992395,
      "grad_norm": 0.03132563373904203,
      "learning_rate": 1.2830188679245283e-05,
      "loss": 0.4095,
      "step": 68
    },
    {
      "epoch": 0.1311787072243346,
      "grad_norm": 0.02524499759557046,
      "learning_rate": 1.3018867924528303e-05,
      "loss": 0.483,
      "step": 69
    },
    {
      "epoch": 0.13307984790874525,
      "grad_norm": 0.027887458074904888,
      "learning_rate": 1.320754716981132e-05,
      "loss": 0.493,
      "step": 70
    },
    {
      "epoch": 0.13498098859315588,
      "grad_norm": 0.031232250695461282,
      "learning_rate": 1.339622641509434e-05,
      "loss": 0.4961,
      "step": 71
    },
    {
      "epoch": 0.13688212927756654,
      "grad_norm": 0.03387446832617775,
      "learning_rate": 1.3584905660377358e-05,
      "loss": 0.5181,
      "step": 72
    },
    {
      "epoch": 0.13878326996197718,
      "grad_norm": 0.03648093707574201,
      "learning_rate": 1.3773584905660378e-05,
      "loss": 0.6117,
      "step": 73
    },
    {
      "epoch": 0.14068441064638784,
      "grad_norm": 0.02991484915971992,
      "learning_rate": 1.3962264150943397e-05,
      "loss": 0.5813,
      "step": 74
    },
    {
      "epoch": 0.14258555133079848,
      "grad_norm": 0.0242367290202634,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 0.4708,
      "step": 75
    },
    {
      "epoch": 0.1444866920152091,
      "grad_norm": 0.022688931847156046,
      "learning_rate": 1.4339622641509435e-05,
      "loss": 0.4818,
      "step": 76
    },
    {
      "epoch": 0.14638783269961977,
      "grad_norm": 0.02246018206602372,
      "learning_rate": 1.4528301886792452e-05,
      "loss": 0.4843,
      "step": 77
    },
    {
      "epoch": 0.1482889733840304,
      "grad_norm": 0.020144711191450268,
      "learning_rate": 1.4716981132075472e-05,
      "loss": 0.4567,
      "step": 78
    },
    {
      "epoch": 0.15019011406844107,
      "grad_norm": 0.024418448428126814,
      "learning_rate": 1.4905660377358491e-05,
      "loss": 0.5455,
      "step": 79
    },
    {
      "epoch": 0.1520912547528517,
      "grad_norm": 0.018259798366678376,
      "learning_rate": 1.5094339622641511e-05,
      "loss": 0.4245,
      "step": 80
    },
    {
      "epoch": 0.15399239543726237,
      "grad_norm": 0.020571149791388653,
      "learning_rate": 1.5283018867924532e-05,
      "loss": 0.5728,
      "step": 81
    },
    {
      "epoch": 0.155893536121673,
      "grad_norm": 0.019419569306863834,
      "learning_rate": 1.547169811320755e-05,
      "loss": 0.4749,
      "step": 82
    },
    {
      "epoch": 0.15779467680608364,
      "grad_norm": 0.01873004169719714,
      "learning_rate": 1.5660377358490568e-05,
      "loss": 0.4674,
      "step": 83
    },
    {
      "epoch": 0.1596958174904943,
      "grad_norm": 0.01821765696372897,
      "learning_rate": 1.5849056603773586e-05,
      "loss": 0.4949,
      "step": 84
    },
    {
      "epoch": 0.16159695817490494,
      "grad_norm": 0.022516771000966596,
      "learning_rate": 1.6037735849056607e-05,
      "loss": 0.5164,
      "step": 85
    },
    {
      "epoch": 0.1634980988593156,
      "grad_norm": 0.018221938257895465,
      "learning_rate": 1.6226415094339625e-05,
      "loss": 0.4871,
      "step": 86
    },
    {
      "epoch": 0.16539923954372623,
      "grad_norm": 0.01658953731682099,
      "learning_rate": 1.6415094339622643e-05,
      "loss": 0.5215,
      "step": 87
    },
    {
      "epoch": 0.16730038022813687,
      "grad_norm": 0.01674622907707825,
      "learning_rate": 1.6603773584905664e-05,
      "loss": 0.4574,
      "step": 88
    },
    {
      "epoch": 0.16920152091254753,
      "grad_norm": 0.017762783610176393,
      "learning_rate": 1.679245283018868e-05,
      "loss": 0.4577,
      "step": 89
    },
    {
      "epoch": 0.17110266159695817,
      "grad_norm": 0.01762770573990937,
      "learning_rate": 1.69811320754717e-05,
      "loss": 0.4692,
      "step": 90
    },
    {
      "epoch": 0.17300380228136883,
      "grad_norm": 0.017322040277817134,
      "learning_rate": 1.716981132075472e-05,
      "loss": 0.5164,
      "step": 91
    },
    {
      "epoch": 0.17490494296577946,
      "grad_norm": 0.018718653390551426,
      "learning_rate": 1.735849056603774e-05,
      "loss": 0.5602,
      "step": 92
    },
    {
      "epoch": 0.17680608365019013,
      "grad_norm": 0.020062480889097352,
      "learning_rate": 1.7547169811320756e-05,
      "loss": 0.5318,
      "step": 93
    },
    {
      "epoch": 0.17870722433460076,
      "grad_norm": 0.016213045403896325,
      "learning_rate": 1.7735849056603774e-05,
      "loss": 0.4815,
      "step": 94
    },
    {
      "epoch": 0.1806083650190114,
      "grad_norm": 0.015622055125179094,
      "learning_rate": 1.7924528301886795e-05,
      "loss": 0.5143,
      "step": 95
    },
    {
      "epoch": 0.18250950570342206,
      "grad_norm": 0.016442850437595302,
      "learning_rate": 1.8113207547169813e-05,
      "loss": 0.4147,
      "step": 96
    },
    {
      "epoch": 0.1844106463878327,
      "grad_norm": 0.01655292657070884,
      "learning_rate": 1.830188679245283e-05,
      "loss": 0.4282,
      "step": 97
    },
    {
      "epoch": 0.18631178707224336,
      "grad_norm": 0.019520087480489844,
      "learning_rate": 1.8490566037735852e-05,
      "loss": 0.458,
      "step": 98
    },
    {
      "epoch": 0.188212927756654,
      "grad_norm": 0.016399201324681423,
      "learning_rate": 1.867924528301887e-05,
      "loss": 0.4447,
      "step": 99
    },
    {
      "epoch": 0.19011406844106463,
      "grad_norm": 0.01716766677575734,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 0.4929,
      "step": 100
    },
    {
      "epoch": 0.1920152091254753,
      "grad_norm": 0.01754420516743432,
      "learning_rate": 1.905660377358491e-05,
      "loss": 0.491,
      "step": 101
    },
    {
      "epoch": 0.19391634980988592,
      "grad_norm": 0.017159235430977852,
      "learning_rate": 1.9245283018867927e-05,
      "loss": 0.5279,
      "step": 102
    },
    {
      "epoch": 0.1958174904942966,
      "grad_norm": 0.016855111063929475,
      "learning_rate": 1.9433962264150945e-05,
      "loss": 0.5295,
      "step": 103
    },
    {
      "epoch": 0.19771863117870722,
      "grad_norm": 0.018232977596542764,
      "learning_rate": 1.9622641509433963e-05,
      "loss": 0.5494,
      "step": 104
    },
    {
      "epoch": 0.19961977186311788,
      "grad_norm": 0.02013827150488014,
      "learning_rate": 1.9811320754716984e-05,
      "loss": 0.4967,
      "step": 105
    },
    {
      "epoch": 0.20152091254752852,
      "grad_norm": 0.01601850564273604,
      "learning_rate": 2e-05,
      "loss": 0.5408,
      "step": 106
    },
    {
      "epoch": 0.20342205323193915,
      "grad_norm": 0.015080995013080477,
      "learning_rate": 1.9999944857420527e-05,
      "loss": 0.4361,
      "step": 107
    },
    {
      "epoch": 0.20532319391634982,
      "grad_norm": 0.016582553669836204,
      "learning_rate": 1.9999779430290247e-05,
      "loss": 0.4896,
      "step": 108
    },
    {
      "epoch": 0.20722433460076045,
      "grad_norm": 0.014960817408208395,
      "learning_rate": 1.9999503720433575e-05,
      "loss": 0.481,
      "step": 109
    },
    {
      "epoch": 0.20912547528517111,
      "grad_norm": 0.01520222559031829,
      "learning_rate": 1.999911773089118e-05,
      "loss": 0.4902,
      "step": 110
    },
    {
      "epoch": 0.21102661596958175,
      "grad_norm": 0.014460789211133408,
      "learning_rate": 1.999862146591996e-05,
      "loss": 0.4465,
      "step": 111
    },
    {
      "epoch": 0.21292775665399238,
      "grad_norm": 0.015975633584946487,
      "learning_rate": 1.9998014930992976e-05,
      "loss": 0.4625,
      "step": 112
    },
    {
      "epoch": 0.21482889733840305,
      "grad_norm": 0.017919922669170172,
      "learning_rate": 1.9997298132799408e-05,
      "loss": 0.4213,
      "step": 113
    },
    {
      "epoch": 0.21673003802281368,
      "grad_norm": 0.01889452329085544,
      "learning_rate": 1.9996471079244477e-05,
      "loss": 0.4523,
      "step": 114
    },
    {
      "epoch": 0.21863117870722434,
      "grad_norm": 0.016761850759718987,
      "learning_rate": 1.999553377944936e-05,
      "loss": 0.434,
      "step": 115
    },
    {
      "epoch": 0.22053231939163498,
      "grad_norm": 0.016133783491997265,
      "learning_rate": 1.9994486243751076e-05,
      "loss": 0.4778,
      "step": 116
    },
    {
      "epoch": 0.2224334600760456,
      "grad_norm": 0.018531447789392804,
      "learning_rate": 1.9993328483702393e-05,
      "loss": 0.5375,
      "step": 117
    },
    {
      "epoch": 0.22433460076045628,
      "grad_norm": 0.01551741242441263,
      "learning_rate": 1.999206051207169e-05,
      "loss": 0.5587,
      "step": 118
    },
    {
      "epoch": 0.2262357414448669,
      "grad_norm": 0.015842500025624345,
      "learning_rate": 1.9990682342842805e-05,
      "loss": 0.5979,
      "step": 119
    },
    {
      "epoch": 0.22813688212927757,
      "grad_norm": 0.020876630929871697,
      "learning_rate": 1.99891939912149e-05,
      "loss": 0.6885,
      "step": 120
    },
    {
      "epoch": 0.2300380228136882,
      "grad_norm": 0.01644422720447315,
      "learning_rate": 1.9987595473602292e-05,
      "loss": 0.5064,
      "step": 121
    },
    {
      "epoch": 0.23193916349809887,
      "grad_norm": 0.017044981544664082,
      "learning_rate": 1.9985886807634246e-05,
      "loss": 0.4923,
      "step": 122
    },
    {
      "epoch": 0.2338403041825095,
      "grad_norm": 0.016870862107049755,
      "learning_rate": 1.9984068012154824e-05,
      "loss": 0.423,
      "step": 123
    },
    {
      "epoch": 0.23574144486692014,
      "grad_norm": 0.015777583476224797,
      "learning_rate": 1.9982139107222634e-05,
      "loss": 0.4574,
      "step": 124
    },
    {
      "epoch": 0.2376425855513308,
      "grad_norm": 0.015781286625695973,
      "learning_rate": 1.9980100114110637e-05,
      "loss": 0.4665,
      "step": 125
    },
    {
      "epoch": 0.23954372623574144,
      "grad_norm": 0.015679150227417577,
      "learning_rate": 1.99779510553059e-05,
      "loss": 0.4425,
      "step": 126
    },
    {
      "epoch": 0.2414448669201521,
      "grad_norm": 0.017376402458769775,
      "learning_rate": 1.9975691954509347e-05,
      "loss": 0.4736,
      "step": 127
    },
    {
      "epoch": 0.24334600760456274,
      "grad_norm": 0.01663302538553884,
      "learning_rate": 1.9973322836635517e-05,
      "loss": 0.427,
      "step": 128
    },
    {
      "epoch": 0.24524714828897337,
      "grad_norm": 0.01619990675448057,
      "learning_rate": 1.997084372781226e-05,
      "loss": 0.4789,
      "step": 129
    },
    {
      "epoch": 0.24714828897338403,
      "grad_norm": 0.014751100997639594,
      "learning_rate": 1.9968254655380465e-05,
      "loss": 0.4641,
      "step": 130
    },
    {
      "epoch": 0.24904942965779467,
      "grad_norm": 0.01788111921176585,
      "learning_rate": 1.996555564789376e-05,
      "loss": 0.6526,
      "step": 131
    },
    {
      "epoch": 0.2509505703422053,
      "grad_norm": 0.017304585767881957,
      "learning_rate": 1.996274673511819e-05,
      "loss": 0.5883,
      "step": 132
    },
    {
      "epoch": 0.25285171102661597,
      "grad_norm": 0.017558281140256256,
      "learning_rate": 1.99598279480319e-05,
      "loss": 0.5356,
      "step": 133
    },
    {
      "epoch": 0.25475285171102663,
      "grad_norm": 0.01653596608203917,
      "learning_rate": 1.9956799318824776e-05,
      "loss": 0.4124,
      "step": 134
    },
    {
      "epoch": 0.25665399239543724,
      "grad_norm": 0.018759070146296956,
      "learning_rate": 1.99536608808981e-05,
      "loss": 0.4521,
      "step": 135
    },
    {
      "epoch": 0.2585551330798479,
      "grad_norm": 0.016451732444213572,
      "learning_rate": 1.995041266886419e-05,
      "loss": 0.4564,
      "step": 136
    },
    {
      "epoch": 0.26045627376425856,
      "grad_norm": 0.03972131177385222,
      "learning_rate": 1.9947054718545996e-05,
      "loss": 0.5367,
      "step": 137
    },
    {
      "epoch": 0.2623574144486692,
      "grad_norm": 0.017213070936728483,
      "learning_rate": 1.994358706697674e-05,
      "loss": 0.4412,
      "step": 138
    },
    {
      "epoch": 0.26425855513307983,
      "grad_norm": 0.01820884630697662,
      "learning_rate": 1.9940009752399462e-05,
      "loss": 0.4635,
      "step": 139
    },
    {
      "epoch": 0.2661596958174905,
      "grad_norm": 0.019567114976084547,
      "learning_rate": 1.9936322814266634e-05,
      "loss": 0.56,
      "step": 140
    },
    {
      "epoch": 0.26806083650190116,
      "grad_norm": 0.017133833604248784,
      "learning_rate": 1.9932526293239713e-05,
      "loss": 0.4149,
      "step": 141
    },
    {
      "epoch": 0.26996197718631176,
      "grad_norm": 0.018600324556852586,
      "learning_rate": 1.9928620231188694e-05,
      "loss": 0.3961,
      "step": 142
    },
    {
      "epoch": 0.2718631178707224,
      "grad_norm": 0.018970017200871575,
      "learning_rate": 1.992460467119164e-05,
      "loss": 0.4488,
      "step": 143
    },
    {
      "epoch": 0.2737642585551331,
      "grad_norm": 0.018135694037860434,
      "learning_rate": 1.992047965753422e-05,
      "loss": 0.5313,
      "step": 144
    },
    {
      "epoch": 0.27566539923954375,
      "grad_norm": 0.04619547629543339,
      "learning_rate": 1.991624523570922e-05,
      "loss": 0.4452,
      "step": 145
    },
    {
      "epoch": 0.27756653992395436,
      "grad_norm": 0.019254266322524038,
      "learning_rate": 1.9911901452416012e-05,
      "loss": 0.4949,
      "step": 146
    },
    {
      "epoch": 0.279467680608365,
      "grad_norm": 0.020224200919704446,
      "learning_rate": 1.9907448355560094e-05,
      "loss": 0.5203,
      "step": 147
    },
    {
      "epoch": 0.2813688212927757,
      "grad_norm": 0.019583737042576384,
      "learning_rate": 1.9902885994252506e-05,
      "loss": 0.4314,
      "step": 148
    },
    {
      "epoch": 0.2832699619771863,
      "grad_norm": 0.020322692978619984,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.5791,
      "step": 149
    },
    {
      "epoch": 0.28517110266159695,
      "grad_norm": 0.02330003106029221,
      "learning_rate": 1.9893433680751105e-05,
      "loss": 0.4411,
      "step": 150
    },
    {
      "epoch": 0.2870722433460076,
      "grad_norm": 0.02159054999321492,
      "learning_rate": 1.9888543832802277e-05,
      "loss": 0.499,
      "step": 151
    },
    {
      "epoch": 0.2889733840304182,
      "grad_norm": 0.01996792223389153,
      "learning_rate": 1.9883544928890612e-05,
      "loss": 0.4229,
      "step": 152
    },
    {
      "epoch": 0.2908745247148289,
      "grad_norm": 0.021058283772857712,
      "learning_rate": 1.9878437024146603e-05,
      "loss": 0.5255,
      "step": 153
    },
    {
      "epoch": 0.29277566539923955,
      "grad_norm": 0.024196266685836586,
      "learning_rate": 1.9873220174902857e-05,
      "loss": 0.4656,
      "step": 154
    },
    {
      "epoch": 0.2946768060836502,
      "grad_norm": 0.02096696363566598,
      "learning_rate": 1.986789443869348e-05,
      "loss": 0.4385,
      "step": 155
    },
    {
      "epoch": 0.2965779467680608,
      "grad_norm": 0.020706886476065403,
      "learning_rate": 1.9862459874253438e-05,
      "loss": 0.4272,
      "step": 156
    },
    {
      "epoch": 0.2984790874524715,
      "grad_norm": 0.025633809682526005,
      "learning_rate": 1.985691654151791e-05,
      "loss": 0.4376,
      "step": 157
    },
    {
      "epoch": 0.30038022813688214,
      "grad_norm": 0.022367447749969346,
      "learning_rate": 1.9851264501621635e-05,
      "loss": 0.476,
      "step": 158
    },
    {
      "epoch": 0.30228136882129275,
      "grad_norm": 0.019818236100033224,
      "learning_rate": 1.984550381689822e-05,
      "loss": 0.4381,
      "step": 159
    },
    {
      "epoch": 0.3041825095057034,
      "grad_norm": 0.018035025834197898,
      "learning_rate": 1.983963455087946e-05,
      "loss": 0.4139,
      "step": 160
    },
    {
      "epoch": 0.3060836501901141,
      "grad_norm": 0.02102741417831402,
      "learning_rate": 1.983365676829466e-05,
      "loss": 0.4498,
      "step": 161
    },
    {
      "epoch": 0.30798479087452474,
      "grad_norm": 0.019483110144615897,
      "learning_rate": 1.982757053506989e-05,
      "loss": 0.4141,
      "step": 162
    },
    {
      "epoch": 0.30988593155893535,
      "grad_norm": 0.01815612650289187,
      "learning_rate": 1.9821375918327268e-05,
      "loss": 0.4323,
      "step": 163
    },
    {
      "epoch": 0.311787072243346,
      "grad_norm": 0.017730579316108816,
      "learning_rate": 1.981507298638422e-05,
      "loss": 0.5347,
      "step": 164
    },
    {
      "epoch": 0.31368821292775667,
      "grad_norm": 0.019188868789103374,
      "learning_rate": 1.9808661808752735e-05,
      "loss": 0.4088,
      "step": 165
    },
    {
      "epoch": 0.3155893536121673,
      "grad_norm": 0.021357864822283502,
      "learning_rate": 1.980214245613858e-05,
      "loss": 0.4764,
      "step": 166
    },
    {
      "epoch": 0.31749049429657794,
      "grad_norm": 0.017524761211716523,
      "learning_rate": 1.979551500044055e-05,
      "loss": 0.5122,
      "step": 167
    },
    {
      "epoch": 0.3193916349809886,
      "grad_norm": 0.018425099678369045,
      "learning_rate": 1.9788779514749635e-05,
      "loss": 0.4602,
      "step": 168
    },
    {
      "epoch": 0.32129277566539927,
      "grad_norm": 0.018975566503684312,
      "learning_rate": 1.978193607334826e-05,
      "loss": 0.4846,
      "step": 169
    },
    {
      "epoch": 0.3231939163498099,
      "grad_norm": 0.018506385123025323,
      "learning_rate": 1.977498475170941e-05,
      "loss": 0.5318,
      "step": 170
    },
    {
      "epoch": 0.32509505703422054,
      "grad_norm": 0.017679822429953924,
      "learning_rate": 1.9767925626495857e-05,
      "loss": 0.438,
      "step": 171
    },
    {
      "epoch": 0.3269961977186312,
      "grad_norm": 0.01875235194047055,
      "learning_rate": 1.9760758775559275e-05,
      "loss": 0.4472,
      "step": 172
    },
    {
      "epoch": 0.3288973384030418,
      "grad_norm": 0.01767922446458576,
      "learning_rate": 1.975348427793939e-05,
      "loss": 0.5602,
      "step": 173
    },
    {
      "epoch": 0.33079847908745247,
      "grad_norm": 0.018382173225872267,
      "learning_rate": 1.9746102213863113e-05,
      "loss": 0.4756,
      "step": 174
    },
    {
      "epoch": 0.33269961977186313,
      "grad_norm": 0.01662767369884979,
      "learning_rate": 1.973861266474366e-05,
      "loss": 0.4727,
      "step": 175
    },
    {
      "epoch": 0.33460076045627374,
      "grad_norm": 0.020123291885424312,
      "learning_rate": 1.9731015713179643e-05,
      "loss": 0.4743,
      "step": 176
    },
    {
      "epoch": 0.3365019011406844,
      "grad_norm": 0.016913284449301725,
      "learning_rate": 1.9723311442954163e-05,
      "loss": 0.4673,
      "step": 177
    },
    {
      "epoch": 0.33840304182509506,
      "grad_norm": 0.02195956626625843,
      "learning_rate": 1.9715499939033883e-05,
      "loss": 0.3841,
      "step": 178
    },
    {
      "epoch": 0.3403041825095057,
      "grad_norm": 0.018554265629519444,
      "learning_rate": 1.9707581287568094e-05,
      "loss": 0.5465,
      "step": 179
    },
    {
      "epoch": 0.34220532319391633,
      "grad_norm": 0.016914420157391603,
      "learning_rate": 1.969955557588778e-05,
      "loss": 0.4325,
      "step": 180
    },
    {
      "epoch": 0.344106463878327,
      "grad_norm": 0.02019706373919531,
      "learning_rate": 1.9691422892504626e-05,
      "loss": 0.4832,
      "step": 181
    },
    {
      "epoch": 0.34600760456273766,
      "grad_norm": 0.02068472935834528,
      "learning_rate": 1.968318332711006e-05,
      "loss": 0.4557,
      "step": 182
    },
    {
      "epoch": 0.34790874524714827,
      "grad_norm": 0.01885222047217075,
      "learning_rate": 1.9674836970574253e-05,
      "loss": 0.4579,
      "step": 183
    },
    {
      "epoch": 0.34980988593155893,
      "grad_norm": 0.01929841860727856,
      "learning_rate": 1.966638391494514e-05,
      "loss": 0.4636,
      "step": 184
    },
    {
      "epoch": 0.3517110266159696,
      "grad_norm": 0.01681745337606996,
      "learning_rate": 1.9657824253447378e-05,
      "loss": 0.4486,
      "step": 185
    },
    {
      "epoch": 0.35361216730038025,
      "grad_norm": 0.017711355674748283,
      "learning_rate": 1.9649158080481327e-05,
      "loss": 0.4196,
      "step": 186
    },
    {
      "epoch": 0.35551330798479086,
      "grad_norm": 0.021237427364918404,
      "learning_rate": 1.964038549162201e-05,
      "loss": 0.4849,
      "step": 187
    },
    {
      "epoch": 0.3574144486692015,
      "grad_norm": 0.020159495183868427,
      "learning_rate": 1.963150658361807e-05,
      "loss": 0.4576,
      "step": 188
    },
    {
      "epoch": 0.3593155893536122,
      "grad_norm": 0.018156933355716387,
      "learning_rate": 1.962252145439068e-05,
      "loss": 0.4448,
      "step": 189
    },
    {
      "epoch": 0.3612167300380228,
      "grad_norm": 0.016649738646787975,
      "learning_rate": 1.9613430203032486e-05,
      "loss": 0.3491,
      "step": 190
    },
    {
      "epoch": 0.36311787072243346,
      "grad_norm": 0.019962082237500515,
      "learning_rate": 1.9604232929806493e-05,
      "loss": 0.5425,
      "step": 191
    },
    {
      "epoch": 0.3650190114068441,
      "grad_norm": 0.018374631285756067,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4724,
      "step": 192
    },
    {
      "epoch": 0.3669201520912547,
      "grad_norm": 0.02317193691864784,
      "learning_rate": 1.9585520724648354e-05,
      "loss": 0.4765,
      "step": 193
    },
    {
      "epoch": 0.3688212927756654,
      "grad_norm": 0.018838805142255447,
      "learning_rate": 1.957600599908406e-05,
      "loss": 0.5229,
      "step": 194
    },
    {
      "epoch": 0.37072243346007605,
      "grad_norm": 0.01761088352260932,
      "learning_rate": 1.95663856643854e-05,
      "loss": 0.464,
      "step": 195
    },
    {
      "epoch": 0.3726235741444867,
      "grad_norm": 0.016815312376514308,
      "learning_rate": 1.955665982665038e-05,
      "loss": 0.4853,
      "step": 196
    },
    {
      "epoch": 0.3745247148288973,
      "grad_norm": 0.018392067679920427,
      "learning_rate": 1.9546828593140565e-05,
      "loss": 0.4762,
      "step": 197
    },
    {
      "epoch": 0.376425855513308,
      "grad_norm": 0.02122811232370391,
      "learning_rate": 1.9536892072279863e-05,
      "loss": 0.3899,
      "step": 198
    },
    {
      "epoch": 0.37832699619771865,
      "grad_norm": 0.019854868900050456,
      "learning_rate": 1.9526850373653356e-05,
      "loss": 0.3948,
      "step": 199
    },
    {
      "epoch": 0.38022813688212925,
      "grad_norm": 0.023763843903790046,
      "learning_rate": 1.9516703608006074e-05,
      "loss": 0.4453,
      "step": 200
    },
    {
      "epoch": 0.3821292775665399,
      "grad_norm": 0.01927284079047068,
      "learning_rate": 1.9506451887241787e-05,
      "loss": 0.5135,
      "step": 201
    },
    {
      "epoch": 0.3840304182509506,
      "grad_norm": 0.020767632866775965,
      "learning_rate": 1.949609532442176e-05,
      "loss": 0.5374,
      "step": 202
    },
    {
      "epoch": 0.38593155893536124,
      "grad_norm": 0.019139363444625492,
      "learning_rate": 1.9485634033763507e-05,
      "loss": 0.4858,
      "step": 203
    },
    {
      "epoch": 0.38783269961977185,
      "grad_norm": 0.017436727750232788,
      "learning_rate": 1.9475068130639543e-05,
      "loss": 0.5505,
      "step": 204
    },
    {
      "epoch": 0.3897338403041825,
      "grad_norm": 0.01696558264370334,
      "learning_rate": 1.9464397731576093e-05,
      "loss": 0.4323,
      "step": 205
    },
    {
      "epoch": 0.3916349809885932,
      "grad_norm": 0.019107284844297346,
      "learning_rate": 1.945362295425183e-05,
      "loss": 0.4586,
      "step": 206
    },
    {
      "epoch": 0.3935361216730038,
      "grad_norm": 0.018034308085610313,
      "learning_rate": 1.944274391749655e-05,
      "loss": 0.4668,
      "step": 207
    },
    {
      "epoch": 0.39543726235741444,
      "grad_norm": 0.019543142514623062,
      "learning_rate": 1.9431760741289886e-05,
      "loss": 0.5371,
      "step": 208
    },
    {
      "epoch": 0.3973384030418251,
      "grad_norm": 0.019931000999436656,
      "learning_rate": 1.942067354675997e-05,
      "loss": 0.4441,
      "step": 209
    },
    {
      "epoch": 0.39923954372623577,
      "grad_norm": 0.018331743495642414,
      "learning_rate": 1.9409482456182105e-05,
      "loss": 0.4379,
      "step": 210
    },
    {
      "epoch": 0.4011406844106464,
      "grad_norm": 0.02230823654311762,
      "learning_rate": 1.939818759297741e-05,
      "loss": 0.4806,
      "step": 211
    },
    {
      "epoch": 0.40304182509505704,
      "grad_norm": 0.019717968785659953,
      "learning_rate": 1.9386789081711465e-05,
      "loss": 0.547,
      "step": 212
    },
    {
      "epoch": 0.4049429657794677,
      "grad_norm": 0.01996049638659254,
      "learning_rate": 1.9375287048092927e-05,
      "loss": 0.4786,
      "step": 213
    },
    {
      "epoch": 0.4068441064638783,
      "grad_norm": 0.022379601814349274,
      "learning_rate": 1.9363681618972166e-05,
      "loss": 0.4812,
      "step": 214
    },
    {
      "epoch": 0.40874524714828897,
      "grad_norm": 0.019091180196167398,
      "learning_rate": 1.9351972922339835e-05,
      "loss": 0.492,
      "step": 215
    },
    {
      "epoch": 0.41064638783269963,
      "grad_norm": 0.017533111807113918,
      "learning_rate": 1.9340161087325483e-05,
      "loss": 0.4814,
      "step": 216
    },
    {
      "epoch": 0.41254752851711024,
      "grad_norm": 0.021445851431766402,
      "learning_rate": 1.9328246244196117e-05,
      "loss": 0.5106,
      "step": 217
    },
    {
      "epoch": 0.4144486692015209,
      "grad_norm": 0.018645780324267735,
      "learning_rate": 1.931622852435478e-05,
      "loss": 0.4236,
      "step": 218
    },
    {
      "epoch": 0.41634980988593157,
      "grad_norm": 0.02124080488001633,
      "learning_rate": 1.930410806033908e-05,
      "loss": 0.4447,
      "step": 219
    },
    {
      "epoch": 0.41825095057034223,
      "grad_norm": 0.020448657398421086,
      "learning_rate": 1.929188498581975e-05,
      "loss": 0.4553,
      "step": 220
    },
    {
      "epoch": 0.42015209125475284,
      "grad_norm": 0.022998272051918525,
      "learning_rate": 1.9279559435599164e-05,
      "loss": 0.5504,
      "step": 221
    },
    {
      "epoch": 0.4220532319391635,
      "grad_norm": 0.01751636845626442,
      "learning_rate": 1.926713154560984e-05,
      "loss": 0.4087,
      "step": 222
    },
    {
      "epoch": 0.42395437262357416,
      "grad_norm": 0.0183705673199705,
      "learning_rate": 1.9254601452912972e-05,
      "loss": 0.477,
      "step": 223
    },
    {
      "epoch": 0.42585551330798477,
      "grad_norm": 0.01700996933235925,
      "learning_rate": 1.924196929569688e-05,
      "loss": 0.4239,
      "step": 224
    },
    {
      "epoch": 0.42775665399239543,
      "grad_norm": 0.028348207596178773,
      "learning_rate": 1.922923521327551e-05,
      "loss": 0.5081,
      "step": 225
    },
    {
      "epoch": 0.4296577946768061,
      "grad_norm": 0.020351003380647062,
      "learning_rate": 1.9216399346086893e-05,
      "loss": 0.4489,
      "step": 226
    },
    {
      "epoch": 0.43155893536121676,
      "grad_norm": 0.025094961377472726,
      "learning_rate": 1.9203461835691596e-05,
      "loss": 0.4519,
      "step": 227
    },
    {
      "epoch": 0.43346007604562736,
      "grad_norm": 0.01931118839865135,
      "learning_rate": 1.9190422824771158e-05,
      "loss": 0.4261,
      "step": 228
    },
    {
      "epoch": 0.435361216730038,
      "grad_norm": 0.023998044821670804,
      "learning_rate": 1.9177282457126515e-05,
      "loss": 0.501,
      "step": 229
    },
    {
      "epoch": 0.4372623574144487,
      "grad_norm": 0.02017772118245582,
      "learning_rate": 1.9164040877676425e-05,
      "loss": 0.463,
      "step": 230
    },
    {
      "epoch": 0.4391634980988593,
      "grad_norm": 0.02035855929839421,
      "learning_rate": 1.9150698232455853e-05,
      "loss": 0.4087,
      "step": 231
    },
    {
      "epoch": 0.44106463878326996,
      "grad_norm": 0.019391301479384226,
      "learning_rate": 1.913725466861438e-05,
      "loss": 0.536,
      "step": 232
    },
    {
      "epoch": 0.4429657794676806,
      "grad_norm": 0.02051219750595343,
      "learning_rate": 1.9123710334414552e-05,
      "loss": 0.4986,
      "step": 233
    },
    {
      "epoch": 0.4448669201520912,
      "grad_norm": 0.021123597813669577,
      "learning_rate": 1.911006537923029e-05,
      "loss": 0.489,
      "step": 234
    },
    {
      "epoch": 0.4467680608365019,
      "grad_norm": 0.01934532070020403,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.5335,
      "step": 235
    },
    {
      "epoch": 0.44866920152091255,
      "grad_norm": 0.022232569152747954,
      "learning_rate": 1.908247420895089e-05,
      "loss": 0.5413,
      "step": 236
    },
    {
      "epoch": 0.4505703422053232,
      "grad_norm": 0.01918609878743169,
      "learning_rate": 1.9068528298145418e-05,
      "loss": 0.3835,
      "step": 237
    },
    {
      "epoch": 0.4524714828897338,
      "grad_norm": 0.022646076049038216,
      "learning_rate": 1.905448237493147e-05,
      "loss": 0.6124,
      "step": 238
    },
    {
      "epoch": 0.4543726235741445,
      "grad_norm": 0.0202683182037997,
      "learning_rate": 1.9040336594214727e-05,
      "loss": 0.5275,
      "step": 239
    },
    {
      "epoch": 0.45627376425855515,
      "grad_norm": 0.022029101866042167,
      "learning_rate": 1.9026091112002163e-05,
      "loss": 0.4243,
      "step": 240
    },
    {
      "epoch": 0.45817490494296575,
      "grad_norm": 0.020459631583791056,
      "learning_rate": 1.90117460854003e-05,
      "loss": 0.4901,
      "step": 241
    },
    {
      "epoch": 0.4600760456273764,
      "grad_norm": 0.025114855573878776,
      "learning_rate": 1.8997301672613496e-05,
      "loss": 0.4977,
      "step": 242
    },
    {
      "epoch": 0.4619771863117871,
      "grad_norm": 0.021614635116058833,
      "learning_rate": 1.8982758032942184e-05,
      "loss": 0.4515,
      "step": 243
    },
    {
      "epoch": 0.46387832699619774,
      "grad_norm": 0.021672300801502467,
      "learning_rate": 1.896811532678113e-05,
      "loss": 0.4489,
      "step": 244
    },
    {
      "epoch": 0.46577946768060835,
      "grad_norm": 0.018022961395727993,
      "learning_rate": 1.8953373715617646e-05,
      "loss": 0.4495,
      "step": 245
    },
    {
      "epoch": 0.467680608365019,
      "grad_norm": 0.018779771734770696,
      "learning_rate": 1.893853336202983e-05,
      "loss": 0.4212,
      "step": 246
    },
    {
      "epoch": 0.4695817490494297,
      "grad_norm": 0.020783339016957118,
      "learning_rate": 1.892359442968475e-05,
      "loss": 0.5068,
      "step": 247
    },
    {
      "epoch": 0.4714828897338403,
      "grad_norm": 0.01987320833611613,
      "learning_rate": 1.8908557083336668e-05,
      "loss": 0.5315,
      "step": 248
    },
    {
      "epoch": 0.47338403041825095,
      "grad_norm": 0.018697682064789396,
      "learning_rate": 1.889342148882519e-05,
      "loss": 0.4605,
      "step": 249
    },
    {
      "epoch": 0.4752851711026616,
      "grad_norm": 0.021499753416302066,
      "learning_rate": 1.8878187813073465e-05,
      "loss": 0.4466,
      "step": 250
    },
    {
      "epoch": 0.47718631178707227,
      "grad_norm": 0.018547287933737062,
      "learning_rate": 1.886285622408633e-05,
      "loss": 0.4693,
      "step": 251
    },
    {
      "epoch": 0.4790874524714829,
      "grad_norm": 0.023084485584254806,
      "learning_rate": 1.8847426890948447e-05,
      "loss": 0.4594,
      "step": 252
    },
    {
      "epoch": 0.48098859315589354,
      "grad_norm": 0.02095991299497145,
      "learning_rate": 1.8831899983822475e-05,
      "loss": 0.5958,
      "step": 253
    },
    {
      "epoch": 0.4828897338403042,
      "grad_norm": 0.022676472901591558,
      "learning_rate": 1.8816275673947148e-05,
      "loss": 0.4794,
      "step": 254
    },
    {
      "epoch": 0.4847908745247148,
      "grad_norm": 0.022256603233549825,
      "learning_rate": 1.8800554133635417e-05,
      "loss": 0.4385,
      "step": 255
    },
    {
      "epoch": 0.4866920152091255,
      "grad_norm": 0.018181221043854088,
      "learning_rate": 1.8784735536272543e-05,
      "loss": 0.3858,
      "step": 256
    },
    {
      "epoch": 0.48859315589353614,
      "grad_norm": 0.022066225412088022,
      "learning_rate": 1.8768820056314173e-05,
      "loss": 0.4495,
      "step": 257
    },
    {
      "epoch": 0.49049429657794674,
      "grad_norm": 0.02200490107058745,
      "learning_rate": 1.875280786928444e-05,
      "loss": 0.5011,
      "step": 258
    },
    {
      "epoch": 0.4923954372623574,
      "grad_norm": 0.021271769486431728,
      "learning_rate": 1.873669915177399e-05,
      "loss": 0.4346,
      "step": 259
    },
    {
      "epoch": 0.49429657794676807,
      "grad_norm": 0.02012803137107531,
      "learning_rate": 1.872049408143808e-05,
      "loss": 0.4196,
      "step": 260
    },
    {
      "epoch": 0.49619771863117873,
      "grad_norm": 0.021961152700393033,
      "learning_rate": 1.8704192836994578e-05,
      "loss": 0.3991,
      "step": 261
    },
    {
      "epoch": 0.49809885931558934,
      "grad_norm": 0.024936197092006625,
      "learning_rate": 1.8687795598222024e-05,
      "loss": 0.5895,
      "step": 262
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.021262424107645336,
      "learning_rate": 1.8671302545957628e-05,
      "loss": 0.4397,
      "step": 263
    },
    {
      "epoch": 0.5019011406844106,
      "grad_norm": 0.02111275635557399,
      "learning_rate": 1.8654713862095272e-05,
      "loss": 0.4706,
      "step": 264
    },
    {
      "epoch": 0.5038022813688213,
      "grad_norm": 0.021959234105395655,
      "learning_rate": 1.8638029729583524e-05,
      "loss": 0.3666,
      "step": 265
    },
    {
      "epoch": 0.5057034220532319,
      "grad_norm": 0.02200752182868664,
      "learning_rate": 1.8621250332423603e-05,
      "loss": 0.4428,
      "step": 266
    },
    {
      "epoch": 0.5076045627376425,
      "grad_norm": 0.025080248739382673,
      "learning_rate": 1.860437585566736e-05,
      "loss": 0.6006,
      "step": 267
    },
    {
      "epoch": 0.5095057034220533,
      "grad_norm": 0.022659597179735286,
      "learning_rate": 1.8587406485415226e-05,
      "loss": 0.4237,
      "step": 268
    },
    {
      "epoch": 0.5114068441064639,
      "grad_norm": 0.023798174928936905,
      "learning_rate": 1.8570342408814173e-05,
      "loss": 0.4126,
      "step": 269
    },
    {
      "epoch": 0.5133079847908745,
      "grad_norm": 0.022256558231383495,
      "learning_rate": 1.855318381405564e-05,
      "loss": 0.4742,
      "step": 270
    },
    {
      "epoch": 0.5152091254752852,
      "grad_norm": 0.02111936212093974,
      "learning_rate": 1.8535930890373467e-05,
      "loss": 0.3881,
      "step": 271
    },
    {
      "epoch": 0.5171102661596958,
      "grad_norm": 0.02224791336065645,
      "learning_rate": 1.8518583828041787e-05,
      "loss": 0.3921,
      "step": 272
    },
    {
      "epoch": 0.5190114068441065,
      "grad_norm": 0.022880761831731673,
      "learning_rate": 1.8501142818372964e-05,
      "loss": 0.6346,
      "step": 273
    },
    {
      "epoch": 0.5209125475285171,
      "grad_norm": 0.024343970646870612,
      "learning_rate": 1.848360805371544e-05,
      "loss": 0.4756,
      "step": 274
    },
    {
      "epoch": 0.5228136882129277,
      "grad_norm": 0.024351775075405556,
      "learning_rate": 1.8465979727451653e-05,
      "loss": 0.4734,
      "step": 275
    },
    {
      "epoch": 0.5247148288973384,
      "grad_norm": 0.018962907375531707,
      "learning_rate": 1.8448258033995877e-05,
      "loss": 0.3971,
      "step": 276
    },
    {
      "epoch": 0.526615969581749,
      "grad_norm": 0.021617768076139313,
      "learning_rate": 1.8430443168792087e-05,
      "loss": 0.5119,
      "step": 277
    },
    {
      "epoch": 0.5285171102661597,
      "grad_norm": 0.021857799342184975,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.5116,
      "step": 278
    },
    {
      "epoch": 0.5304182509505704,
      "grad_norm": 0.02809434456106648,
      "learning_rate": 1.8394534710051956e-05,
      "loss": 0.4666,
      "step": 279
    },
    {
      "epoch": 0.532319391634981,
      "grad_norm": 0.02234090014207455,
      "learning_rate": 1.8376441512532617e-05,
      "loss": 0.4725,
      "step": 280
    },
    {
      "epoch": 0.5342205323193916,
      "grad_norm": 0.02451211288371571,
      "learning_rate": 1.835825593529492e-05,
      "loss": 0.4708,
      "step": 281
    },
    {
      "epoch": 0.5361216730038023,
      "grad_norm": 0.022641545839139392,
      "learning_rate": 1.833997817889878e-05,
      "loss": 0.4214,
      "step": 282
    },
    {
      "epoch": 0.5380228136882129,
      "grad_norm": 0.025872109051734524,
      "learning_rate": 1.8321608444920738e-05,
      "loss": 0.5036,
      "step": 283
    },
    {
      "epoch": 0.5399239543726235,
      "grad_norm": 0.023529092183304456,
      "learning_rate": 1.830314693595169e-05,
      "loss": 0.5425,
      "step": 284
    },
    {
      "epoch": 0.5418250950570342,
      "grad_norm": 0.024348495219394833,
      "learning_rate": 1.828459385559468e-05,
      "loss": 0.4342,
      "step": 285
    },
    {
      "epoch": 0.5437262357414449,
      "grad_norm": 0.02239963896857723,
      "learning_rate": 1.8265949408462657e-05,
      "loss": 0.4776,
      "step": 286
    },
    {
      "epoch": 0.5456273764258555,
      "grad_norm": 0.02080669784822377,
      "learning_rate": 1.8247213800176192e-05,
      "loss": 0.3624,
      "step": 287
    },
    {
      "epoch": 0.5475285171102662,
      "grad_norm": 0.023617380475480572,
      "learning_rate": 1.8228387237361245e-05,
      "loss": 0.3952,
      "step": 288
    },
    {
      "epoch": 0.5494296577946768,
      "grad_norm": 0.024814665778031497,
      "learning_rate": 1.8209469927646863e-05,
      "loss": 0.4507,
      "step": 289
    },
    {
      "epoch": 0.5513307984790875,
      "grad_norm": 0.021845348519988903,
      "learning_rate": 1.8190462079662897e-05,
      "loss": 0.4285,
      "step": 290
    },
    {
      "epoch": 0.5532319391634981,
      "grad_norm": 0.02184467934463939,
      "learning_rate": 1.81713639030377e-05,
      "loss": 0.4422,
      "step": 291
    },
    {
      "epoch": 0.5551330798479087,
      "grad_norm": 0.029176904840741643,
      "learning_rate": 1.8152175608395814e-05,
      "loss": 0.4321,
      "step": 292
    },
    {
      "epoch": 0.5570342205323194,
      "grad_norm": 0.026819867766165566,
      "learning_rate": 1.8132897407355657e-05,
      "loss": 0.4717,
      "step": 293
    },
    {
      "epoch": 0.55893536121673,
      "grad_norm": 0.021172044415504698,
      "learning_rate": 1.811352951252717e-05,
      "loss": 0.4148,
      "step": 294
    },
    {
      "epoch": 0.5608365019011406,
      "grad_norm": 0.020343820915447078,
      "learning_rate": 1.809407213750949e-05,
      "loss": 0.5348,
      "step": 295
    },
    {
      "epoch": 0.5627376425855514,
      "grad_norm": 0.02421850881920752,
      "learning_rate": 1.807452549688859e-05,
      "loss": 0.4458,
      "step": 296
    },
    {
      "epoch": 0.564638783269962,
      "grad_norm": 0.023147892279235603,
      "learning_rate": 1.8054889806234906e-05,
      "loss": 0.4486,
      "step": 297
    },
    {
      "epoch": 0.5665399239543726,
      "grad_norm": 0.02395397717021146,
      "learning_rate": 1.8035165282100963e-05,
      "loss": 0.5499,
      "step": 298
    },
    {
      "epoch": 0.5684410646387833,
      "grad_norm": 0.02031353995079303,
      "learning_rate": 1.8015352142018984e-05,
      "loss": 0.5031,
      "step": 299
    },
    {
      "epoch": 0.5703422053231939,
      "grad_norm": 0.020808578024095383,
      "learning_rate": 1.799545060449851e-05,
      "loss": 0.467,
      "step": 300
    },
    {
      "epoch": 0.5722433460076045,
      "grad_norm": 0.019246896542792716,
      "learning_rate": 1.797546088902396e-05,
      "loss": 0.487,
      "step": 301
    },
    {
      "epoch": 0.5741444866920152,
      "grad_norm": 0.022104685939284753,
      "learning_rate": 1.7955383216052224e-05,
      "loss": 0.4903,
      "step": 302
    },
    {
      "epoch": 0.5760456273764258,
      "grad_norm": 0.02365048405686273,
      "learning_rate": 1.7935217807010238e-05,
      "loss": 0.4795,
      "step": 303
    },
    {
      "epoch": 0.5779467680608364,
      "grad_norm": 0.023930921277957873,
      "learning_rate": 1.7914964884292543e-05,
      "loss": 0.5356,
      "step": 304
    },
    {
      "epoch": 0.5798479087452472,
      "grad_norm": 0.024823876120292392,
      "learning_rate": 1.7894624671258813e-05,
      "loss": 0.4837,
      "step": 305
    },
    {
      "epoch": 0.5817490494296578,
      "grad_norm": 0.019457627346199154,
      "learning_rate": 1.7874197392231414e-05,
      "loss": 0.3855,
      "step": 306
    },
    {
      "epoch": 0.5836501901140685,
      "grad_norm": 0.01924926829147994,
      "learning_rate": 1.7853683272492913e-05,
      "loss": 0.3732,
      "step": 307
    },
    {
      "epoch": 0.5855513307984791,
      "grad_norm": 0.024460829179782474,
      "learning_rate": 1.7833082538283615e-05,
      "loss": 0.3846,
      "step": 308
    },
    {
      "epoch": 0.5874524714828897,
      "grad_norm": 0.020841592567127323,
      "learning_rate": 1.7812395416799034e-05,
      "loss": 0.6149,
      "step": 309
    },
    {
      "epoch": 0.5893536121673004,
      "grad_norm": 0.023282119114238924,
      "learning_rate": 1.7791622136187422e-05,
      "loss": 0.5066,
      "step": 310
    },
    {
      "epoch": 0.591254752851711,
      "grad_norm": 0.021526927277188218,
      "learning_rate": 1.7770762925547235e-05,
      "loss": 0.5292,
      "step": 311
    },
    {
      "epoch": 0.5931558935361216,
      "grad_norm": 0.021188916531237858,
      "learning_rate": 1.7749818014924612e-05,
      "loss": 0.4636,
      "step": 312
    },
    {
      "epoch": 0.5950570342205324,
      "grad_norm": 0.0207773105187197,
      "learning_rate": 1.7728787635310828e-05,
      "loss": 0.4813,
      "step": 313
    },
    {
      "epoch": 0.596958174904943,
      "grad_norm": 0.01936236572719632,
      "learning_rate": 1.770767201863976e-05,
      "loss": 0.4322,
      "step": 314
    },
    {
      "epoch": 0.5988593155893536,
      "grad_norm": 0.02286004336607461,
      "learning_rate": 1.7686471397785322e-05,
      "loss": 0.4369,
      "step": 315
    },
    {
      "epoch": 0.6007604562737643,
      "grad_norm": 0.022439241037259583,
      "learning_rate": 1.76651860065589e-05,
      "loss": 0.4423,
      "step": 316
    },
    {
      "epoch": 0.6026615969581749,
      "grad_norm": 0.024478715442342234,
      "learning_rate": 1.764381607970677e-05,
      "loss": 0.5333,
      "step": 317
    },
    {
      "epoch": 0.6045627376425855,
      "grad_norm": 0.025558126559534863,
      "learning_rate": 1.7622361852907506e-05,
      "loss": 0.524,
      "step": 318
    },
    {
      "epoch": 0.6064638783269962,
      "grad_norm": 0.0201537954427199,
      "learning_rate": 1.760082356276939e-05,
      "loss": 0.3705,
      "step": 319
    },
    {
      "epoch": 0.6083650190114068,
      "grad_norm": 0.023944335476352968,
      "learning_rate": 1.75792014468278e-05,
      "loss": 0.5327,
      "step": 320
    },
    {
      "epoch": 0.6102661596958175,
      "grad_norm": 0.021784871903256658,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4492,
      "step": 321
    },
    {
      "epoch": 0.6121673003802282,
      "grad_norm": 0.021130751816902157,
      "learning_rate": 1.7535706692295436e-05,
      "loss": 0.4425,
      "step": 322
    },
    {
      "epoch": 0.6140684410646388,
      "grad_norm": 0.01834511623783937,
      "learning_rate": 1.7513834533387256e-05,
      "loss": 0.3811,
      "step": 323
    },
    {
      "epoch": 0.6159695817490495,
      "grad_norm": 0.02088833695832576,
      "learning_rate": 1.7491879508035488e-05,
      "loss": 0.5583,
      "step": 324
    },
    {
      "epoch": 0.6178707224334601,
      "grad_norm": 0.0245919479523908,
      "learning_rate": 1.746984185837149e-05,
      "loss": 0.3603,
      "step": 325
    },
    {
      "epoch": 0.6197718631178707,
      "grad_norm": 0.020568875402404277,
      "learning_rate": 1.744772182743782e-05,
      "loss": 0.4601,
      "step": 326
    },
    {
      "epoch": 0.6216730038022814,
      "grad_norm": 0.025449491000498543,
      "learning_rate": 1.7425519659185596e-05,
      "loss": 0.5307,
      "step": 327
    },
    {
      "epoch": 0.623574144486692,
      "grad_norm": 0.021055420950145058,
      "learning_rate": 1.740323559847179e-05,
      "loss": 0.494,
      "step": 328
    },
    {
      "epoch": 0.6254752851711026,
      "grad_norm": 0.02478574829718437,
      "learning_rate": 1.738086989105651e-05,
      "loss": 0.3949,
      "step": 329
    },
    {
      "epoch": 0.6273764258555133,
      "grad_norm": 0.02437888482474706,
      "learning_rate": 1.735842278360032e-05,
      "loss": 0.451,
      "step": 330
    },
    {
      "epoch": 0.629277566539924,
      "grad_norm": 0.02216815353892549,
      "learning_rate": 1.73358945236615e-05,
      "loss": 0.51,
      "step": 331
    },
    {
      "epoch": 0.6311787072243346,
      "grad_norm": 0.022424223813542376,
      "learning_rate": 1.7313285359693322e-05,
      "loss": 0.4929,
      "step": 332
    },
    {
      "epoch": 0.6330798479087453,
      "grad_norm": 0.022029524884207886,
      "learning_rate": 1.7290595541041312e-05,
      "loss": 0.4765,
      "step": 333
    },
    {
      "epoch": 0.6349809885931559,
      "grad_norm": 0.021618750137745387,
      "learning_rate": 1.7267825317940494e-05,
      "loss": 0.536,
      "step": 334
    },
    {
      "epoch": 0.6368821292775665,
      "grad_norm": 0.022165231076960286,
      "learning_rate": 1.724497494151264e-05,
      "loss": 0.4764,
      "step": 335
    },
    {
      "epoch": 0.6387832699619772,
      "grad_norm": 0.024275845667969007,
      "learning_rate": 1.7222044663763484e-05,
      "loss": 0.4296,
      "step": 336
    },
    {
      "epoch": 0.6406844106463878,
      "grad_norm": 0.022498217810086048,
      "learning_rate": 1.7199034737579962e-05,
      "loss": 0.4807,
      "step": 337
    },
    {
      "epoch": 0.6425855513307985,
      "grad_norm": 0.022762477103989702,
      "learning_rate": 1.7175945416727405e-05,
      "loss": 0.6568,
      "step": 338
    },
    {
      "epoch": 0.6444866920152091,
      "grad_norm": 0.01854881996841053,
      "learning_rate": 1.7152776955846768e-05,
      "loss": 0.3959,
      "step": 339
    },
    {
      "epoch": 0.6463878326996197,
      "grad_norm": 0.023310529482788885,
      "learning_rate": 1.7129529610451775e-05,
      "loss": 0.4058,
      "step": 340
    },
    {
      "epoch": 0.6482889733840305,
      "grad_norm": 0.020208656516758884,
      "learning_rate": 1.7106203636926154e-05,
      "loss": 0.3985,
      "step": 341
    },
    {
      "epoch": 0.6501901140684411,
      "grad_norm": 0.022675781235370007,
      "learning_rate": 1.7082799292520767e-05,
      "loss": 0.4728,
      "step": 342
    },
    {
      "epoch": 0.6520912547528517,
      "grad_norm": 0.02260039611252313,
      "learning_rate": 1.7059316835350806e-05,
      "loss": 0.3194,
      "step": 343
    },
    {
      "epoch": 0.6539923954372624,
      "grad_norm": 0.021816735403941915,
      "learning_rate": 1.7035756524392924e-05,
      "loss": 0.3993,
      "step": 344
    },
    {
      "epoch": 0.655893536121673,
      "grad_norm": 0.02352607809906742,
      "learning_rate": 1.7012118619482376e-05,
      "loss": 0.4171,
      "step": 345
    },
    {
      "epoch": 0.6577946768060836,
      "grad_norm": 0.018309898170808483,
      "learning_rate": 1.6988403381310177e-05,
      "loss": 0.4375,
      "step": 346
    },
    {
      "epoch": 0.6596958174904943,
      "grad_norm": 0.023011889903058493,
      "learning_rate": 1.696461107142021e-05,
      "loss": 0.5271,
      "step": 347
    },
    {
      "epoch": 0.6615969581749049,
      "grad_norm": 0.022185873665259752,
      "learning_rate": 1.6940741952206342e-05,
      "loss": 0.4455,
      "step": 348
    },
    {
      "epoch": 0.6634980988593155,
      "grad_norm": 0.021145487112860135,
      "learning_rate": 1.691679628690953e-05,
      "loss": 0.4888,
      "step": 349
    },
    {
      "epoch": 0.6653992395437263,
      "grad_norm": 0.0197337711656993,
      "learning_rate": 1.6892774339614927e-05,
      "loss": 0.3221,
      "step": 350
    },
{ |
|
"epoch": 0.6673003802281369, |
|
"grad_norm": 0.021128667885235902, |
|
"learning_rate": 1.686867637524896e-05, |
|
"loss": 0.4796, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6692015209125475, |
|
"grad_norm": 0.021485021868132094, |
|
"learning_rate": 1.6844502659576414e-05, |
|
"loss": 0.3866, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6711026615969582, |
|
"grad_norm": 0.021674363310584633, |
|
"learning_rate": 1.6820253459197493e-05, |
|
"loss": 0.4131, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6730038022813688, |
|
"grad_norm": 0.022317690282530892, |
|
"learning_rate": 1.679592904154489e-05, |
|
"loss": 0.4718, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6749049429657795, |
|
"grad_norm": 0.018420112637605318, |
|
"learning_rate": 1.677152967488084e-05, |
|
"loss": 0.3682, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6768060836501901, |
|
"grad_norm": 0.021728437667337796, |
|
"learning_rate": 1.6747055628294134e-05, |
|
"loss": 0.4865, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6787072243346007, |
|
"grad_norm": 0.021879642157367866, |
|
"learning_rate": 1.6722507171697184e-05, |
|
"loss": 0.418, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6806083650190115, |
|
"grad_norm": 0.02041980559071294, |
|
"learning_rate": 1.669788457582304e-05, |
|
"loss": 0.44, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6825095057034221, |
|
"grad_norm": 0.02525296949818493, |
|
"learning_rate": 1.6673188112222394e-05, |
|
"loss": 0.4058, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6844106463878327, |
|
"grad_norm": 0.027087654040755537, |
|
"learning_rate": 1.6648418053260585e-05, |
|
"loss": 0.4206, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6863117870722434, |
|
"grad_norm": 0.02064549274424258, |
|
"learning_rate": 1.6623574672114596e-05, |
|
"loss": 0.5434, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.688212927756654, |
|
"grad_norm": 0.025193009760092363, |
|
"learning_rate": 1.6598658242770054e-05, |
|
"loss": 0.5539, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6901140684410646, |
|
"grad_norm": 0.023006148706161383, |
|
"learning_rate": 1.6573669040018202e-05, |
|
"loss": 0.4982, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6920152091254753, |
|
"grad_norm": 0.02394923199036728, |
|
"learning_rate": 1.6548607339452853e-05, |
|
"loss": 0.4504, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6939163498098859, |
|
"grad_norm": 0.024973727525077836, |
|
"learning_rate": 1.652347341746737e-05, |
|
"loss": 0.4296, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6958174904942965, |
|
"grad_norm": 0.02229406442674028, |
|
"learning_rate": 1.6498267551251618e-05, |
|
"loss": 0.6645, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6977186311787072, |
|
"grad_norm": 0.02196890781900034, |
|
"learning_rate": 1.6472990018788884e-05, |
|
"loss": 0.4139, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6996197718631179, |
|
"grad_norm": 0.01864786240108333, |
|
"learning_rate": 1.644764109885284e-05, |
|
"loss": 0.3669, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.7015209125475285, |
|
"grad_norm": 0.023341226120807423, |
|
"learning_rate": 1.642222107100446e-05, |
|
"loss": 0.4633, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.7034220532319392, |
|
"grad_norm": 0.01791373151822567, |
|
"learning_rate": 1.6396730215588913e-05, |
|
"loss": 0.432, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7053231939163498, |
|
"grad_norm": 0.023304889020227138, |
|
"learning_rate": 1.6371168813732514e-05, |
|
"loss": 0.4637, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.7072243346007605, |
|
"grad_norm": 0.02409824499740041, |
|
"learning_rate": 1.6345537147339578e-05, |
|
"loss": 0.451, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.7091254752851711, |
|
"grad_norm": 0.023503094615128773, |
|
"learning_rate": 1.6319835499089358e-05, |
|
"loss": 0.5037, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.7110266159695817, |
|
"grad_norm": 0.02100086883115099, |
|
"learning_rate": 1.6294064152432878e-05, |
|
"loss": 0.3789, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.7129277566539924, |
|
"grad_norm": 0.02271881779830291, |
|
"learning_rate": 1.626822339158985e-05, |
|
"loss": 0.4531, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.714828897338403, |
|
"grad_norm": 0.02500592478382567, |
|
"learning_rate": 1.6242313501545522e-05, |
|
"loss": 0.423, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.7167300380228137, |
|
"grad_norm": 0.02235032483913223, |
|
"learning_rate": 1.621633476804752e-05, |
|
"loss": 0.4618, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.7186311787072244, |
|
"grad_norm": 0.02151840511807487, |
|
"learning_rate": 1.6190287477602716e-05, |
|
"loss": 0.487, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.720532319391635, |
|
"grad_norm": 0.025756572129519126, |
|
"learning_rate": 1.6164171917474078e-05, |
|
"loss": 0.4099, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.7224334600760456, |
|
"grad_norm": 0.02018147798619793, |
|
"learning_rate": 1.6137988375677466e-05, |
|
"loss": 0.3906, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7243346007604563, |
|
"grad_norm": 0.020746327994583716, |
|
"learning_rate": 1.6111737140978495e-05, |
|
"loss": 0.5042, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.7262357414448669, |
|
"grad_norm": 0.02087825161781339, |
|
"learning_rate": 1.6085418502889315e-05, |
|
"loss": 0.4793, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.7281368821292775, |
|
"grad_norm": 0.02597453558637949, |
|
"learning_rate": 1.6059032751665454e-05, |
|
"loss": 0.5275, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.7300380228136882, |
|
"grad_norm": 0.023924733808265878, |
|
"learning_rate": 1.6032580178302585e-05, |
|
"loss": 0.4981, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.7319391634980988, |
|
"grad_norm": 0.05994054536254508, |
|
"learning_rate": 1.600606107453333e-05, |
|
"loss": 0.5581, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.7338403041825095, |
|
"grad_norm": 0.022744320177798528, |
|
"learning_rate": 1.597947573282405e-05, |
|
"loss": 0.5067, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.7357414448669202, |
|
"grad_norm": 0.02367987566710115, |
|
"learning_rate": 1.5952824446371608e-05, |
|
"loss": 0.4065, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7376425855513308, |
|
"grad_norm": 0.022780020712552165, |
|
"learning_rate": 1.592610750910014e-05, |
|
"loss": 0.3968, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7395437262357415, |
|
"grad_norm": 0.02119299594456647, |
|
"learning_rate": 1.589932521565781e-05, |
|
"loss": 0.5469, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7414448669201521, |
|
"grad_norm": 0.019341262557617835, |
|
"learning_rate": 1.587247786141358e-05, |
|
"loss": 0.4319, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7433460076045627, |
|
"grad_norm": 0.02055570641111801, |
|
"learning_rate": 1.5845565742453906e-05, |
|
"loss": 0.4241, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7452471482889734, |
|
"grad_norm": 0.024031176474825826, |
|
"learning_rate": 1.581858915557953e-05, |
|
"loss": 0.4805, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.747148288973384, |
|
"grad_norm": 0.020151689729205665, |
|
"learning_rate": 1.5791548398302167e-05, |
|
"loss": 0.5204, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7490494296577946, |
|
"grad_norm": 0.022487690028723358, |
|
"learning_rate": 1.5764443768841234e-05, |
|
"loss": 0.4249, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7509505703422054, |
|
"grad_norm": 0.01982347263114696, |
|
"learning_rate": 1.5737275566120577e-05, |
|
"loss": 0.406, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.752851711026616, |
|
"grad_norm": 0.02086227032375837, |
|
"learning_rate": 1.5710044089765144e-05, |
|
"loss": 0.4371, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7547528517110266, |
|
"grad_norm": 0.02108925366040013, |
|
"learning_rate": 1.5682749640097708e-05, |
|
"loss": 0.442, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7566539923954373, |
|
"grad_norm": 0.023564638230995968, |
|
"learning_rate": 1.565539251813554e-05, |
|
"loss": 0.5355, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7585551330798479, |
|
"grad_norm": 0.020789849453501902, |
|
"learning_rate": 1.5627973025587093e-05, |
|
"loss": 0.4253, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7604562737642585, |
|
"grad_norm": 0.023234777071662778, |
|
"learning_rate": 1.560049146484868e-05, |
|
"loss": 0.4928, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7623574144486692, |
|
"grad_norm": 0.022799909007585953, |
|
"learning_rate": 1.5572948139001128e-05, |
|
"loss": 0.4247, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7642585551330798, |
|
"grad_norm": 0.021289848782095718, |
|
"learning_rate": 1.5545343351806443e-05, |
|
"loss": 0.508, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7661596958174905, |
|
"grad_norm": 0.021905746023760754, |
|
"learning_rate": 1.551767740770446e-05, |
|
"loss": 0.5947, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7680608365019012, |
|
"grad_norm": 0.02386151968622635, |
|
"learning_rate": 1.5489950611809484e-05, |
|
"loss": 0.4853, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7699619771863118, |
|
"grad_norm": 0.02351691541197354, |
|
"learning_rate": 1.5462163269906928e-05, |
|
"loss": 0.457, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7718631178707225, |
|
"grad_norm": 0.02703946419471593, |
|
"learning_rate": 1.5434315688449924e-05, |
|
"loss": 0.429, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7737642585551331, |
|
"grad_norm": 0.021253357245547846, |
|
"learning_rate": 1.5406408174555978e-05, |
|
"loss": 0.4011, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7756653992395437, |
|
"grad_norm": 0.021592073586737193, |
|
"learning_rate": 1.5378441036003543e-05, |
|
"loss": 0.3939, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7775665399239544, |
|
"grad_norm": 0.023635965388808643, |
|
"learning_rate": 1.535041458122865e-05, |
|
"loss": 0.4447, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.779467680608365, |
|
"grad_norm": 0.021614797502256346, |
|
"learning_rate": 1.5322329119321508e-05, |
|
"loss": 0.3896, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7813688212927756, |
|
"grad_norm": 0.029020909400450004, |
|
"learning_rate": 1.529418496002308e-05, |
|
"loss": 0.4969, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7832699619771863, |
|
"grad_norm": 0.025667462527941966, |
|
"learning_rate": 1.5265982413721662e-05, |
|
"loss": 0.4258, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.785171102661597, |
|
"grad_norm": 0.0219880878115105, |
|
"learning_rate": 1.5237721791449497e-05, |
|
"loss": 0.4472, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7870722433460076, |
|
"grad_norm": 0.020681701545778697, |
|
"learning_rate": 1.5209403404879305e-05, |
|
"loss": 0.5025, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7889733840304183, |
|
"grad_norm": 0.02569506503249528, |
|
"learning_rate": 1.5181027566320858e-05, |
|
"loss": 0.4951, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7908745247148289, |
|
"grad_norm": 0.02403074940855541, |
|
"learning_rate": 1.5152594588717544e-05, |
|
"loss": 0.4516, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7927756653992395, |
|
"grad_norm": 0.03162539286762685, |
|
"learning_rate": 1.5124104785642909e-05, |
|
"loss": 0.4576, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7946768060836502, |
|
"grad_norm": 0.02780725707898336, |
|
"learning_rate": 1.5095558471297196e-05, |
|
"loss": 0.4441, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7965779467680608, |
|
"grad_norm": 0.021938480615319306, |
|
"learning_rate": 1.5066955960503893e-05, |
|
"loss": 0.4821, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7984790874524715, |
|
"grad_norm": 0.022669187205589948, |
|
"learning_rate": 1.5038297568706244e-05, |
|
"loss": 0.3651, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.8003802281368821, |
|
"grad_norm": 0.029651223858315957, |
|
"learning_rate": 1.5009583611963772e-05, |
|
"loss": 0.3709, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.8022813688212928, |
|
"grad_norm": 0.020262767284707564, |
|
"learning_rate": 1.4980814406948806e-05, |
|
"loss": 0.3868, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.8041825095057035, |
|
"grad_norm": 0.0313851795438847, |
|
"learning_rate": 1.4951990270942991e-05, |
|
"loss": 0.4345, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.8060836501901141, |
|
"grad_norm": 0.026505025778599832, |
|
"learning_rate": 1.492311152183376e-05, |
|
"loss": 0.4845, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.8079847908745247, |
|
"grad_norm": 0.024933250165890146, |
|
"learning_rate": 1.4894178478110856e-05, |
|
"loss": 0.5436, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.8098859315589354, |
|
"grad_norm": 0.02668529314177586, |
|
"learning_rate": 1.4865191458862816e-05, |
|
"loss": 0.4722, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.811787072243346, |
|
"grad_norm": 0.023968647430953227, |
|
"learning_rate": 1.4836150783773442e-05, |
|
"loss": 0.4006, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.8136882129277566, |
|
"grad_norm": 0.028240700565109782, |
|
"learning_rate": 1.4807056773118276e-05, |
|
"loss": 0.4548, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.8155893536121673, |
|
"grad_norm": 0.024113176209241565, |
|
"learning_rate": 1.4777909747761085e-05, |
|
"loss": 0.4394, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.8174904942965779, |
|
"grad_norm": 0.023240692257416155, |
|
"learning_rate": 1.4748710029150296e-05, |
|
"loss": 0.5083, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.8193916349809885, |
|
"grad_norm": 0.022014547992322716, |
|
"learning_rate": 1.4719457939315468e-05, |
|
"loss": 0.3793, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.8212927756653993, |
|
"grad_norm": 0.025056756565885647, |
|
"learning_rate": 1.4690153800863743e-05, |
|
"loss": 0.4505, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.8231939163498099, |
|
"grad_norm": 0.024078196797270524, |
|
"learning_rate": 1.4660797936976278e-05, |
|
"loss": 0.5511, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.8250950570342205, |
|
"grad_norm": 0.023506365080185394, |
|
"learning_rate": 1.4631390671404682e-05, |
|
"loss": 0.486, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.8269961977186312, |
|
"grad_norm": 0.02656548640736279, |
|
"learning_rate": 1.460193232846745e-05, |
|
"loss": 0.4877, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8288973384030418, |
|
"grad_norm": 0.02521957317818189, |
|
"learning_rate": 1.4572423233046386e-05, |
|
"loss": 0.4654, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.8307984790874525, |
|
"grad_norm": 0.02696732985436355, |
|
"learning_rate": 1.4542863710583022e-05, |
|
"loss": 0.5085, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.8326996197718631, |
|
"grad_norm": 0.024288469099333042, |
|
"learning_rate": 1.4513254087075015e-05, |
|
"loss": 0.5373, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.8346007604562737, |
|
"grad_norm": 0.023081137704624097, |
|
"learning_rate": 1.4483594689072571e-05, |
|
"loss": 0.4279, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.8365019011406845, |
|
"grad_norm": 0.02353653197134096, |
|
"learning_rate": 1.4453885843674837e-05, |
|
"loss": 0.445, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8384030418250951, |
|
"grad_norm": 0.021931438665748156, |
|
"learning_rate": 1.4424127878526278e-05, |
|
"loss": 0.372, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.8403041825095057, |
|
"grad_norm": 0.022816959298283885, |
|
"learning_rate": 1.4394321121813093e-05, |
|
"loss": 0.4521, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8422053231939164, |
|
"grad_norm": 0.022654432569100512, |
|
"learning_rate": 1.436446590225957e-05, |
|
"loss": 0.4813, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.844106463878327, |
|
"grad_norm": 0.022714918777658636, |
|
"learning_rate": 1.433456254912447e-05, |
|
"loss": 0.4993, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8460076045627376, |
|
"grad_norm": 0.02442999457743802, |
|
"learning_rate": 1.4304611392197399e-05, |
|
"loss": 0.4246, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8479087452471483, |
|
"grad_norm": 0.021928052281800055, |
|
"learning_rate": 1.427461276179517e-05, |
|
"loss": 0.4304, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8498098859315589, |
|
"grad_norm": 0.02141867354122271, |
|
"learning_rate": 1.4244566988758152e-05, |
|
"loss": 0.3977, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8517110266159695, |
|
"grad_norm": 0.02458743104943235, |
|
"learning_rate": 1.4214474404446633e-05, |
|
"loss": 0.4378, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8536121673003803, |
|
"grad_norm": 0.019632792154314082, |
|
"learning_rate": 1.4184335340737158e-05, |
|
"loss": 0.3955, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8555133079847909, |
|
"grad_norm": 0.02484904211495459, |
|
"learning_rate": 1.4154150130018867e-05, |
|
"loss": 0.419, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8574144486692015, |
|
"grad_norm": 0.02272220324215502, |
|
"learning_rate": 1.4123919105189836e-05, |
|
"loss": 0.4037, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8593155893536122, |
|
"grad_norm": 0.026986410749335596, |
|
"learning_rate": 1.4093642599653406e-05, |
|
"loss": 0.5189, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8612167300380228, |
|
"grad_norm": 0.02094686405878515, |
|
"learning_rate": 1.40633209473145e-05, |
|
"loss": 0.4121, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8631178707224335, |
|
"grad_norm": 0.02805756954495035, |
|
"learning_rate": 1.4032954482575938e-05, |
|
"loss": 0.4802, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8650190114068441, |
|
"grad_norm": 0.024880650473757853, |
|
"learning_rate": 1.4002543540334766e-05, |
|
"loss": 0.4679, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8669201520912547, |
|
"grad_norm": 0.022025534936640348, |
|
"learning_rate": 1.3972088455978537e-05, |
|
"loss": 0.4339, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8688212927756654, |
|
"grad_norm": 0.02271172419542242, |
|
"learning_rate": 1.3941589565381635e-05, |
|
"loss": 0.4472, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.870722433460076, |
|
"grad_norm": 0.02456897431856478, |
|
"learning_rate": 1.391104720490156e-05, |
|
"loss": 0.5024, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8726235741444867, |
|
"grad_norm": 0.02458479535995488, |
|
"learning_rate": 1.3880461711375224e-05, |
|
"loss": 0.5078, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8745247148288974, |
|
"grad_norm": 0.02440410211111402, |
|
"learning_rate": 1.3849833422115221e-05, |
|
"loss": 0.4871, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.876425855513308, |
|
"grad_norm": 0.021736397843613076, |
|
"learning_rate": 1.3819162674906134e-05, |
|
"loss": 0.4741, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8783269961977186, |
|
"grad_norm": 0.020438052604190595, |
|
"learning_rate": 1.378844980800078e-05, |
|
"loss": 0.3974, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8802281368821293, |
|
"grad_norm": 0.022715925959570137, |
|
"learning_rate": 1.3757695160116502e-05, |
|
"loss": 0.434, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8821292775665399, |
|
"grad_norm": 0.024729238859494334, |
|
"learning_rate": 1.3726899070431423e-05, |
|
"loss": 0.4243, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8840304182509505, |
|
"grad_norm": 0.021886379292725234, |
|
"learning_rate": 1.3696061878580707e-05, |
|
"loss": 0.4074, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8859315589353612, |
|
"grad_norm": 0.022623550123516575, |
|
"learning_rate": 1.3665183924652817e-05, |
|
"loss": 0.4924, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8878326996197718, |
|
"grad_norm": 0.021674597567570526, |
|
"learning_rate": 1.3634265549185755e-05, |
|
"loss": 0.4528, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8897338403041825, |
|
"grad_norm": 0.021414986192180713, |
|
"learning_rate": 1.3603307093163319e-05, |
|
"loss": 0.3658, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8916349809885932, |
|
"grad_norm": 0.021523850295941727, |
|
"learning_rate": 1.3572308898011328e-05, |
|
"loss": 0.4219, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8935361216730038, |
|
"grad_norm": 0.022491621170717423, |
|
"learning_rate": 1.3541271305593878e-05, |
|
"loss": 0.5062, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8954372623574145, |
|
"grad_norm": 0.023578729751256165, |
|
"learning_rate": 1.3510194658209547e-05, |
|
"loss": 0.4714, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8973384030418251, |
|
"grad_norm": 0.02479993313265921, |
|
"learning_rate": 1.3479079298587634e-05, |
|
"loss": 0.4418, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8992395437262357, |
|
"grad_norm": 0.024770693363266767, |
|
"learning_rate": 1.3447925569884374e-05, |
|
"loss": 0.4312, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.9011406844106464, |
|
"grad_norm": 0.02503485087771604, |
|
"learning_rate": 1.3416733815679166e-05, |
|
"loss": 0.4861, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.903041825095057, |
|
"grad_norm": 0.02307394045917917, |
|
"learning_rate": 1.3385504379970764e-05, |
|
"loss": 0.4166, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.9049429657794676, |
|
"grad_norm": 0.025894998951279422, |
|
"learning_rate": 1.3354237607173494e-05, |
|
"loss": 0.5021, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.9068441064638784, |
|
"grad_norm": 0.02128176686440997, |
|
"learning_rate": 1.3322933842113457e-05, |
|
"loss": 0.4473, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.908745247148289, |
|
"grad_norm": 0.023893280860003332, |
|
"learning_rate": 1.3291593430024727e-05, |
|
"loss": 0.4755, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.9106463878326996, |
|
"grad_norm": 0.02449762016338503, |
|
"learning_rate": 1.3260216716545534e-05, |
|
"loss": 0.3953, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.9125475285171103, |
|
"grad_norm": 0.021110069746746216, |
|
"learning_rate": 1.3228804047714462e-05, |
|
"loss": 0.3212, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9144486692015209, |
|
"grad_norm": 0.02299821393531202, |
|
"learning_rate": 1.319735576996663e-05, |
|
"loss": 0.4059, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.9163498098859315, |
|
"grad_norm": 0.022805233387983515, |
|
"learning_rate": 1.3165872230129869e-05, |
|
"loss": 0.4378, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.9182509505703422, |
|
"grad_norm": 0.022483195363811, |
|
"learning_rate": 1.3134353775420895e-05, |
|
"loss": 0.4952, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.9201520912547528, |
|
"grad_norm": 0.027980344075661134, |
|
"learning_rate": 1.3102800753441488e-05, |
|
"loss": 0.453, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.9220532319391636, |
|
"grad_norm": 0.022320233170224718, |
|
"learning_rate": 1.3071213512174655e-05, |
|
"loss": 0.4042, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.9239543726235742, |
|
"grad_norm": 0.027021346941736732, |
|
"learning_rate": 1.3039592399980785e-05, |
|
"loss": 0.4668, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.9258555133079848, |
|
"grad_norm": 0.02482265015460327, |
|
"learning_rate": 1.3007937765593818e-05, |
|
"loss": 0.5227, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.9277566539923955, |
|
"grad_norm": 0.02357348090540923, |
|
"learning_rate": 1.2976249958117395e-05, |
|
"loss": 0.4767, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.9296577946768061, |
|
"grad_norm": 0.023443354906239822, |
|
"learning_rate": 1.2944529327021002e-05, |
|
"loss": 0.405, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.9315589353612167, |
|
"grad_norm": 0.02179663602896819, |
|
"learning_rate": 1.291277622213612e-05, |
|
"loss": 0.4287, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9334600760456274, |
|
"grad_norm": 0.023454518004330533, |
|
"learning_rate": 1.2880990993652379e-05, |
|
"loss": 0.3772, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.935361216730038, |
|
"grad_norm": 0.02290314421630447, |
|
"learning_rate": 1.2849173992113669e-05, |
|
"loss": 0.4915, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.9372623574144486, |
|
"grad_norm": 0.023222346475490627, |
|
"learning_rate": 1.2817325568414299e-05, |
|
"loss": 0.4789, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.9391634980988594, |
|
"grad_norm": 0.02409598567792761, |
|
"learning_rate": 1.2785446073795118e-05, |
|
"loss": 0.4782, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.94106463878327, |
|
"grad_norm": 0.022615260816886574, |
|
"learning_rate": 1.2753535859839638e-05, |
|
"loss": 0.433, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9429657794676806, |
|
"grad_norm": 0.025611694327824334, |
|
"learning_rate": 1.272159527847016e-05, |
|
"loss": 0.4573, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.9448669201520913, |
|
"grad_norm": 0.025768369769655665, |
|
"learning_rate": 1.2689624681943897e-05, |
|
"loss": 0.462, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.9467680608365019, |
|
"grad_norm": 0.02301862416038207, |
|
"learning_rate": 1.2657624422849077e-05, |
|
"loss": 0.4171, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9486692015209125, |
|
"grad_norm": 0.022425187287103614, |
|
"learning_rate": 1.2625594854101066e-05, |
|
"loss": 0.4152, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9505703422053232, |
|
"grad_norm": 0.02317505253670741, |
|
"learning_rate": 1.2593536328938471e-05, |
|
"loss": 0.4775, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9524714828897338, |
|
"grad_norm": 0.022725516030892436, |
|
"learning_rate": 1.2561449200919253e-05, |
|
"loss": 0.5123, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9543726235741445, |
|
"grad_norm": 0.02563269287207282, |
|
"learning_rate": 1.2529333823916807e-05, |
|
"loss": 0.4626, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9562737642585551, |
|
"grad_norm": 0.02440201438687382, |
|
"learning_rate": 1.2497190552116082e-05, |
|
"loss": 0.5584, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9581749049429658, |
|
"grad_norm": 0.025258409411193533, |
|
"learning_rate": 1.2465019740009662e-05, |
|
"loss": 0.4721, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.9600760456273765, |
|
"grad_norm": 0.02364236622754602, |
|
"learning_rate": 1.2432821742393854e-05, |
|
"loss": 0.4637, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9619771863117871, |
|
"grad_norm": 0.022744877336730626, |
|
"learning_rate": 1.2400596914364792e-05, |
|
"loss": 0.4163, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9638783269961977, |
|
"grad_norm": 0.02512981829788003, |
|
"learning_rate": 1.2368345611314508e-05, |
|
"loss": 0.4209, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9657794676806084, |
|
"grad_norm": 0.02396214976378719, |
|
"learning_rate": 1.2336068188927002e-05, |
|
"loss": 0.4876, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.967680608365019, |
|
"grad_norm": 0.024846858848635564, |
|
"learning_rate": 1.2303765003174342e-05, |
|
"loss": 0.4766, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9695817490494296, |
|
"grad_norm": 0.024647771640303358, |
|
"learning_rate": 1.2271436410312727e-05, |
|
"loss": 0.4745, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9714828897338403, |
|
"grad_norm": 0.028460909629351355, |
|
"learning_rate": 1.2239082766878557e-05, |
|
"loss": 0.443, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.973384030418251, |
|
"grad_norm": 0.026842716136457133, |
|
"learning_rate": 1.2206704429684504e-05, |
|
"loss": 0.416, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9752851711026616, |
|
"grad_norm": 0.023577587200559337, |
|
"learning_rate": 1.2174301755815572e-05, |
|
"loss": 0.4547, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9771863117870723, |
|
"grad_norm": 0.02434178157072622, |
|
"learning_rate": 1.2141875102625166e-05, |
|
"loss": 0.4154, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9790874524714829, |
|
"grad_norm": 0.022686551363613607, |
|
"learning_rate": 1.2109424827731144e-05, |
|
"loss": 0.4248, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9809885931558935, |
|
"grad_norm": 0.027765889183457126, |
|
"learning_rate": 1.2076951289011884e-05, |
|
"loss": 0.4786, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9828897338403042, |
|
"grad_norm": 0.026014404370096878, |
|
"learning_rate": 1.204445484460232e-05, |
|
"loss": 0.3835, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9847908745247148, |
|
"grad_norm": 0.02536068801523813, |
|
"learning_rate": 1.2011935852890004e-05, |
|
"loss": 0.4766, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9866920152091255, |
|
"grad_norm": 0.025270660975692583, |
|
"learning_rate": 1.1979394672511156e-05, |
|
"loss": 0.4271, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9885931558935361, |
|
"grad_norm": 0.023163661163739562, |
|
"learning_rate": 1.19468316623467e-05, |
|
"loss": 0.4191, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9904942965779467, |
|
"grad_norm": 0.025133903922973964, |
|
"learning_rate": 1.1914247181518312e-05, |
|
"loss": 0.453, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9923954372623575, |
|
"grad_norm": 0.022840078992228126, |
|
"learning_rate": 1.1881641589384456e-05, |
|
"loss": 0.3876, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9942965779467681, |
|
"grad_norm": 0.02952192013190572, |
|
"learning_rate": 1.1849015245536424e-05, |
|
"loss": 0.4796, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9961977186311787, |
|
"grad_norm": 0.0255544735356746, |
|
"learning_rate": 1.1816368509794365e-05, |
|
"loss": 0.4575, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9980988593155894, |
|
"grad_norm": 0.025426553898411263, |
|
"learning_rate": 1.1783701742203326e-05, |
|
"loss": 0.4142, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.02487998894271571, |
|
"learning_rate": 1.1751015303029272e-05, |
|
"loss": 0.4154, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.49419376254081726, |
|
"eval_runtime": 2.0606, |
|
"eval_samples_per_second": 2.912, |
|
"eval_steps_per_second": 0.485, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.0019011406844107, |
|
"grad_norm": 0.026839867865076257, |
|
"learning_rate": 1.1718309552755118e-05, |
|
"loss": 0.5472, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.0038022813688212, |
|
"grad_norm": 0.02475665144540004, |
|
"learning_rate": 1.1685584852076746e-05, |
|
"loss": 0.4676, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.005703422053232, |
|
"grad_norm": 0.02330183147946208, |
|
"learning_rate": 1.1652841561899042e-05, |
|
"loss": 0.3749, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.0076045627376427, |
|
"grad_norm": 0.025076421971174816, |
|
"learning_rate": 1.1620080043331901e-05, |
|
"loss": 0.5404, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.0095057034220531, |
|
"grad_norm": 0.02418624605710623, |
|
"learning_rate": 1.1587300657686254e-05, |
|
"loss": 0.5074, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.0114068441064639, |
|
"grad_norm": 0.02479812200820215, |
|
"learning_rate": 1.1554503766470069e-05, |
|
"loss": 0.4338, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.0133079847908746, |
|
"grad_norm": 0.024884250210395493, |
|
"learning_rate": 1.1521689731384391e-05, |
|
"loss": 0.4654, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.015209125475285, |
|
"grad_norm": 0.026548403950977323, |
|
"learning_rate": 1.1488858914319321e-05, |
|
"loss": 0.4087, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.0171102661596958, |
|
"grad_norm": 0.025613189908928795, |
|
"learning_rate": 1.1456011677350052e-05, |
|
"loss": 0.5427, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.0190114068441065, |
|
"grad_norm": 0.023186535836589698, |
|
"learning_rate": 1.1423148382732854e-05, |
|
"loss": 0.4261, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.020912547528517, |
|
"grad_norm": 0.025681515076642358, |
|
"learning_rate": 1.1390269392901096e-05, |
|
"loss": 0.4077, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.0228136882129277, |
|
"grad_norm": 0.021744852880452904, |
|
"learning_rate": 1.1357375070461241e-05, |
|
"loss": 0.4751, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.0247148288973384, |
|
"grad_norm": 0.028443593252172297, |
|
"learning_rate": 1.1324465778188846e-05, |
|
"loss": 0.4456, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.026615969581749, |
|
"grad_norm": 0.03000660451469647, |
|
"learning_rate": 1.1291541879024568e-05, |
|
"loss": 0.5253, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.0285171102661597, |
|
"grad_norm": 0.02368491275040552, |
|
"learning_rate": 1.1258603736070145e-05, |
|
"loss": 0.4023, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.0304182509505704, |
|
"grad_norm": 0.02836407575641823, |
|
"learning_rate": 1.1225651712584413e-05, |
|
"loss": 0.5033, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.032319391634981, |
|
"grad_norm": 0.022941031264003417, |
|
"learning_rate": 1.1192686171979288e-05, |
|
"loss": 0.3946, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.0342205323193916, |
|
"grad_norm": 0.023139941818018577, |
|
"learning_rate": 1.1159707477815756e-05, |
|
"loss": 0.4706, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.0361216730038023, |
|
"grad_norm": 0.023983638303565175, |
|
"learning_rate": 1.1126715993799875e-05, |
|
"loss": 0.5789, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.038022813688213, |
|
"grad_norm": 0.026461190964423308, |
|
"learning_rate": 1.1093712083778748e-05, |
|
"loss": 0.3951, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.0399239543726235, |
|
"grad_norm": 0.02236185807063053, |
|
"learning_rate": 1.1060696111736515e-05, |
|
"loss": 0.3543, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.0418250950570342, |
|
"grad_norm": 0.02192591241120657, |
|
"learning_rate": 1.1027668441790358e-05, |
|
"loss": 0.4706, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.043726235741445, |
|
"grad_norm": 0.02426010800732934, |
|
"learning_rate": 1.099462943818646e-05, |
|
"loss": 0.4082, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.0456273764258555, |
|
"grad_norm": 0.02508083322632236, |
|
"learning_rate": 1.0961579465295987e-05, |
|
"loss": 0.5272, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.0475285171102662, |
|
"grad_norm": 0.03202071310568031, |
|
"learning_rate": 1.0928518887611099e-05, |
|
"loss": 0.4791, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.049429657794677, |
|
"grad_norm": 0.02210572731788688, |
|
"learning_rate": 1.0895448069740902e-05, |
|
"loss": 0.4891, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.0513307984790874, |
|
"grad_norm": 0.023820693600161792, |
|
"learning_rate": 1.0862367376407433e-05, |
|
"loss": 0.4378, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.053231939163498, |
|
"grad_norm": 0.021388417872325642, |
|
"learning_rate": 1.0829277172441648e-05, |
|
"loss": 0.4474, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.0551330798479088, |
|
"grad_norm": 0.026332353861382985, |
|
"learning_rate": 1.0796177822779384e-05, |
|
"loss": 0.4167, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.0570342205323193, |
|
"grad_norm": 0.028186191098638017, |
|
"learning_rate": 1.0763069692457346e-05, |
|
"loss": 0.3917, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.05893536121673, |
|
"grad_norm": 0.026368775979214235, |
|
"learning_rate": 1.0729953146609076e-05, |
|
"loss": 0.4001, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.0608365019011408, |
|
"grad_norm": 0.0265502482480294, |
|
"learning_rate": 1.0696828550460928e-05, |
|
"loss": 0.4659, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.0627376425855513, |
|
"grad_norm": 0.02622664842688001, |
|
"learning_rate": 1.0663696269328034e-05, |
|
"loss": 0.4399, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.064638783269962, |
|
"grad_norm": 0.02349997729830601, |
|
"learning_rate": 1.0630556668610286e-05, |
|
"loss": 0.4721, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.0665399239543727, |
|
"grad_norm": 0.02834767943397841, |
|
"learning_rate": 1.059741011378829e-05, |
|
"loss": 0.5075, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.0684410646387832, |
|
"grad_norm": 0.02288613867623036, |
|
"learning_rate": 1.0564256970419367e-05, |
|
"loss": 0.4565, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.070342205323194, |
|
"grad_norm": 0.023688637706669773, |
|
"learning_rate": 1.0531097604133473e-05, |
|
"loss": 0.4472, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.0722433460076046, |
|
"grad_norm": 0.024490569664163846, |
|
"learning_rate": 1.0497932380629207e-05, |
|
"loss": 0.5221, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0741444866920151, |
|
"grad_norm": 0.02782768534592333, |
|
"learning_rate": 1.0464761665669771e-05, |
|
"loss": 0.4004, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.0760456273764258, |
|
"grad_norm": 0.02669839048402323, |
|
"learning_rate": 1.0431585825078916e-05, |
|
"loss": 0.3402, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.0779467680608366, |
|
"grad_norm": 0.02651283388836949, |
|
"learning_rate": 1.0398405224736927e-05, |
|
"loss": 0.4832, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.079847908745247, |
|
"grad_norm": 0.026449286845883088, |
|
"learning_rate": 1.0365220230576592e-05, |
|
"loss": 0.4047, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.0817490494296578, |
|
"grad_norm": 0.025418648342092183, |
|
"learning_rate": 1.0332031208579133e-05, |
|
"loss": 0.4801, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.0836501901140685, |
|
"grad_norm": 0.025412976006421107, |
|
"learning_rate": 1.0298838524770212e-05, |
|
"loss": 0.4752, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.085551330798479, |
|
"grad_norm": 0.025562260205010442, |
|
"learning_rate": 1.0265642545215872e-05, |
|
"loss": 0.457, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.0874524714828897, |
|
"grad_norm": 0.02643263881346091, |
|
"learning_rate": 1.0232443636018502e-05, |
|
"loss": 0.4357, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.0893536121673004, |
|
"grad_norm": 0.024528515527150255, |
|
"learning_rate": 1.0199242163312794e-05, |
|
"loss": 0.4794, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.091254752851711, |
|
"grad_norm": 0.026385215484675005, |
|
"learning_rate": 1.0166038493261723e-05, |
|
"loss": 0.4806, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.0931558935361216, |
|
"grad_norm": 0.022033026900315677, |
|
"learning_rate": 1.013283299205249e-05, |
|
"loss": 0.3609, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.0950570342205324, |
|
"grad_norm": 0.023678677060218277, |
|
"learning_rate": 1.0099626025892491e-05, |
|
"loss": 0.4579, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.0969581749049429, |
|
"grad_norm": 0.02487159791018364, |
|
"learning_rate": 1.0066417961005283e-05, |
|
"loss": 0.4429, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.0988593155893536, |
|
"grad_norm": 0.028657863587199493, |
|
"learning_rate": 1.0033209163626539e-05, |
|
"loss": 0.4093, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.1007604562737643, |
|
"grad_norm": 0.024174264086675668, |
|
"learning_rate": 1e-05, |
|
"loss": 0.3613, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.102661596958175, |
|
"grad_norm": 0.02551921343391446, |
|
"learning_rate": 9.966790836373465e-06, |
|
"loss": 0.5205, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.1045627376425855, |
|
"grad_norm": 0.028485515076974856, |
|
"learning_rate": 9.933582038994719e-06, |
|
"loss": 0.4851, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.1064638783269962, |
|
"grad_norm": 0.02504285375969555, |
|
"learning_rate": 9.90037397410751e-06, |
|
"loss": 0.4423, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.108365019011407, |
|
"grad_norm": 0.03088688093720148, |
|
"learning_rate": 9.867167007947511e-06, |
|
"loss": 0.5082, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.1102661596958174, |
|
"grad_norm": 0.026178357076301492, |
|
"learning_rate": 9.833961506738282e-06, |
|
"loss": 0.4638, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.1121673003802282, |
|
"grad_norm": 0.027152075516306527, |
|
"learning_rate": 9.80075783668721e-06, |
|
"loss": 0.5317, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.1140684410646389, |
|
"grad_norm": 0.026406197929923356, |
|
"learning_rate": 9.767556363981503e-06, |
|
"loss": 0.4423, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.1159695817490494, |
|
"grad_norm": 0.029606704129030288, |
|
"learning_rate": 9.734357454784131e-06, |
|
"loss": 0.4607, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.11787072243346, |
|
"grad_norm": 0.024228905576484227, |
|
"learning_rate": 9.701161475229791e-06, |
|
"loss": 0.4676, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.1197718631178708, |
|
"grad_norm": 0.028240824433468956, |
|
"learning_rate": 9.66796879142087e-06, |
|
"loss": 0.4688, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.1216730038022813, |
|
"grad_norm": 0.02619951444252266, |
|
"learning_rate": 9.634779769423412e-06, |
|
"loss": 0.4492, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.123574144486692, |
|
"grad_norm": 0.02778733641177876, |
|
"learning_rate": 9.601594775263073e-06, |
|
"loss": 0.4274, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.1254752851711027, |
|
"grad_norm": 0.026571662777596382, |
|
"learning_rate": 9.568414174921085e-06, |
|
"loss": 0.4036, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.1273764258555132, |
|
"grad_norm": 0.027953815742300783, |
|
"learning_rate": 9.535238334330234e-06, |
|
"loss": 0.48, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.129277566539924, |
|
"grad_norm": 0.028979318933333602, |
|
"learning_rate": 9.502067619370794e-06, |
|
"loss": 0.4805, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.1311787072243347, |
|
"grad_norm": 0.025679003425845764, |
|
"learning_rate": 9.468902395866532e-06, |
|
"loss": 0.4512, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.1330798479087452, |
|
"grad_norm": 0.02987754854820351, |
|
"learning_rate": 9.435743029580638e-06, |
|
"loss": 0.3995, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.1349809885931559, |
|
"grad_norm": 0.026169185432569683, |
|
"learning_rate": 9.402589886211711e-06, |
|
"loss": 0.4173, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.1368821292775666, |
|
"grad_norm": 0.025436753367301015, |
|
"learning_rate": 9.369443331389718e-06, |
|
"loss": 0.473, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.138783269961977, |
|
"grad_norm": 0.03206356955139165, |
|
"learning_rate": 9.336303730671968e-06, |
|
"loss": 0.3991, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.1406844106463878, |
|
"grad_norm": 0.02931513421234435, |
|
"learning_rate": 9.303171449539074e-06, |
|
"loss": 0.4343, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.1425855513307985, |
|
"grad_norm": 0.02902528636856751, |
|
"learning_rate": 9.270046853390924e-06, |
|
"loss": 0.4996, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.144486692015209, |
|
"grad_norm": 0.025005773856039703, |
|
"learning_rate": 9.236930307542654e-06, |
|
"loss": 0.3278, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.1463878326996197, |
|
"grad_norm": 0.02426738245019188, |
|
"learning_rate": 9.203822177220621e-06, |
|
"loss": 0.3638, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.1482889733840305, |
|
"grad_norm": 0.027177816967694415, |
|
"learning_rate": 9.170722827558357e-06, |
|
"loss": 0.4095, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.1501901140684412, |
|
"grad_norm": 0.0243722592720861, |
|
"learning_rate": 9.13763262359257e-06, |
|
"loss": 0.5409, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.1520912547528517, |
|
"grad_norm": 0.025710645397755146, |
|
"learning_rate": 9.104551930259101e-06, |
|
"loss": 0.39, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.1539923954372624, |
|
"grad_norm": 0.028241783918859797, |
|
"learning_rate": 9.071481112388905e-06, |
|
"loss": 0.4221, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.1558935361216731, |
|
"grad_norm": 0.02361782757674789, |
|
"learning_rate": 9.038420534704015e-06, |
|
"loss": 0.5406, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.1577946768060836, |
|
"grad_norm": 0.028580064019494145, |
|
"learning_rate": 9.005370561813545e-06, |
|
"loss": 0.4474, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.1596958174904943, |
|
"grad_norm": 0.024832970874222413, |
|
"learning_rate": 8.972331558209644e-06, |
|
"loss": 0.4104, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.161596958174905, |
|
"grad_norm": 0.02650404583878491, |
|
"learning_rate": 8.939303888263485e-06, |
|
"loss": 0.4492, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.1634980988593155, |
|
"grad_norm": 0.028814172792613935, |
|
"learning_rate": 8.906287916221259e-06, |
|
"loss": 0.475, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.1653992395437263, |
|
"grad_norm": 0.02414307982273957, |
|
"learning_rate": 8.873284006200129e-06, |
|
"loss": 0.3797, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.167300380228137, |
|
"grad_norm": 0.026513183843780715, |
|
"learning_rate": 8.840292522184247e-06, |
|
"loss": 0.4611, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.1692015209125475, |
|
"grad_norm": 0.027917195350160733, |
|
"learning_rate": 8.807313828020715e-06, |
|
"loss": 0.453, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.1711026615969582, |
|
"grad_norm": 0.027001830723944378, |
|
"learning_rate": 8.774348287415589e-06, |
|
"loss": 0.4705, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.173003802281369, |
|
"grad_norm": 0.027024796400000384, |
|
"learning_rate": 8.74139626392986e-06, |
|
"loss": 0.5125, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.1749049429657794, |
|
"grad_norm": 0.025733010017667625, |
|
"learning_rate": 8.708458120975436e-06, |
|
"loss": 0.4606, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.1768060836501901, |
|
"grad_norm": 0.02537306800520621, |
|
"learning_rate": 8.675534221811156e-06, |
|
"loss": 0.5052, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.1787072243346008, |
|
"grad_norm": 0.025502247996832712, |
|
"learning_rate": 8.64262492953876e-06, |
|
"loss": 0.3923, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.1806083650190113, |
|
"grad_norm": 0.0283037881188794, |
|
"learning_rate": 8.60973060709891e-06, |
|
"loss": 0.3562, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.182509505703422, |
|
"grad_norm": 0.02765397296367, |
|
"learning_rate": 8.576851617267151e-06, |
|
"loss": 0.4207, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.1844106463878328, |
|
"grad_norm": 0.030939294178597098, |
|
"learning_rate": 8.543988322649954e-06, |
|
"loss": 0.4353, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.1863117870722433, |
|
"grad_norm": 0.025900960501617727, |
|
"learning_rate": 8.511141085680684e-06, |
|
"loss": 0.4533, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.188212927756654, |
|
"grad_norm": 0.02336257345992659, |
|
"learning_rate": 8.478310268615612e-06, |
|
"loss": 0.4378, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.1901140684410647, |
|
"grad_norm": 0.03104086432751278, |
|
"learning_rate": 8.445496233529934e-06, |
|
"loss": 0.4014, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.1920152091254752, |
|
"grad_norm": 0.026279681702473717, |
|
"learning_rate": 8.41269934231375e-06, |
|
"loss": 0.5791, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.193916349809886, |
|
"grad_norm": 0.026431098394421208, |
|
"learning_rate": 8.3799199566681e-06, |
|
"loss": 0.4104, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.1958174904942966, |
|
"grad_norm": 0.025842256802881006, |
|
"learning_rate": 8.34715843810096e-06, |
|
"loss": 0.3822, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.1977186311787071, |
|
"grad_norm": 0.025700224411035477, |
|
"learning_rate": 8.314415147923254e-06, |
|
"loss": 0.4173, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.1996197718631179, |
|
"grad_norm": 0.028760898752997432, |
|
"learning_rate": 8.281690447244887e-06, |
|
"loss": 0.5265, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.2015209125475286, |
|
"grad_norm": 0.027983533875392992, |
|
"learning_rate": 8.248984696970732e-06, |
|
"loss": 0.4234, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.203422053231939, |
|
"grad_norm": 0.02995083103885102, |
|
"learning_rate": 8.216298257796677e-06, |
|
"loss": 0.4052, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.2053231939163498, |
|
"grad_norm": 0.02622577127605968, |
|
"learning_rate": 8.183631490205636e-06, |
|
"loss": 0.3944, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.2072243346007605, |
|
"grad_norm": 0.027365500007942224, |
|
"learning_rate": 8.150984754463578e-06, |
|
"loss": 0.4624, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.209125475285171, |
|
"grad_norm": 0.027106768146702834, |
|
"learning_rate": 8.118358410615545e-06, |
|
"loss": 0.5294, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.2110266159695817, |
|
"grad_norm": 0.028174780302516515, |
|
"learning_rate": 8.08575281848169e-06, |
|
"loss": 0.535, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.2129277566539924, |
|
"grad_norm": 0.02494806903289972, |
|
"learning_rate": 8.0531683376533e-06, |
|
"loss": 0.4464, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.214828897338403, |
|
"grad_norm": 0.026813886692445315, |
|
"learning_rate": 8.020605327488846e-06, |
|
"loss": 0.4196, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.2167300380228137, |
|
"grad_norm": 0.030178054600389817, |
|
"learning_rate": 7.988064147110001e-06, |
|
"loss": 0.426, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.2186311787072244, |
|
"grad_norm": 0.027322836613505857, |
|
"learning_rate": 7.955545155397684e-06, |
|
"loss": 0.4922, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.2205323193916349, |
|
"grad_norm": 0.02851456463701536, |
|
"learning_rate": 7.923048710988119e-06, |
|
"loss": 0.3909, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.2224334600760456, |
|
"grad_norm": 0.029679647372534257, |
|
"learning_rate": 7.890575172268858e-06, |
|
"loss": 0.4655, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.2243346007604563, |
|
"grad_norm": 0.024011488075073195, |
|
"learning_rate": 7.858124897374837e-06, |
|
"loss": 0.3861, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.2262357414448668, |
|
"grad_norm": 0.025094837748294702, |
|
"learning_rate": 7.825698244184432e-06, |
|
"loss": 0.4222, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.2281368821292775, |
|
"grad_norm": 0.025610173350570063, |
|
"learning_rate": 7.7932955703155e-06, |
|
"loss": 0.473, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.2300380228136882, |
|
"grad_norm": 0.02352347182601007, |
|
"learning_rate": 7.760917233121443e-06, |
|
"loss": 0.3626, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.231939163498099, |
|
"grad_norm": 0.029283299091673688, |
|
"learning_rate": 7.728563589687275e-06, |
|
"loss": 0.4963, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.2338403041825095, |
|
"grad_norm": 0.030862682323859268, |
|
"learning_rate": 7.696234996825663e-06, |
|
"loss": 0.3789, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.2357414448669202, |
|
"grad_norm": 0.03168967860296208, |
|
"learning_rate": 7.663931811073003e-06, |
|
"loss": 0.4766, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.2376425855513309, |
|
"grad_norm": 0.02423696133628203, |
|
"learning_rate": 7.631654388685496e-06, |
|
"loss": 0.3808, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.2395437262357414, |
|
"grad_norm": 0.03104175426538144, |
|
"learning_rate": 7.599403085635208e-06, |
|
"loss": 0.5122, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.241444866920152, |
|
"grad_norm": 0.029467045004493608, |
|
"learning_rate": 7.567178257606147e-06, |
|
"loss": 0.4188, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.2433460076045628, |
|
"grad_norm": 0.02627856743434748, |
|
"learning_rate": 7.534980259990341e-06, |
|
"loss": 0.4134, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.2452471482889733, |
|
"grad_norm": 0.026443976993739387, |
|
"learning_rate": 7.50280944788392e-06, |
|
"loss": 0.4495, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.247148288973384, |
|
"grad_norm": 0.02910290850363252, |
|
"learning_rate": 7.470666176083193e-06, |
|
"loss": 0.4956, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.2490494296577948, |
|
"grad_norm": 0.030787111442524107, |
|
"learning_rate": 7.438550799080746e-06, |
|
"loss": 0.4578, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.2509505703422052, |
|
"grad_norm": 0.02755054067169159, |
|
"learning_rate": 7.40646367106153e-06, |
|
"loss": 0.4393, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.252851711026616, |
|
"grad_norm": 0.028551333737617562, |
|
"learning_rate": 7.3744051458989395e-06, |
|
"loss": 0.4782, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.2547528517110267, |
|
"grad_norm": 0.029957333643541806, |
|
"learning_rate": 7.342375577150928e-06, |
|
"loss": 0.4624, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.2566539923954372, |
|
"grad_norm": 0.02808482409369459, |
|
"learning_rate": 7.310375318056107e-06, |
|
"loss": 0.4679, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.258555133079848, |
|
"grad_norm": 0.02588158997964679, |
|
"learning_rate": 7.278404721529843e-06, |
|
"loss": 0.5797, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.2604562737642586, |
|
"grad_norm": 0.023897308281060525, |
|
"learning_rate": 7.246464140160365e-06, |
|
"loss": 0.443, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.2623574144486693, |
|
"grad_norm": 0.031053266329604933, |
|
"learning_rate": 7.214553926204884e-06, |
|
"loss": 0.4107, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.2642585551330798, |
|
"grad_norm": 0.026719606285468933, |
|
"learning_rate": 7.182674431585703e-06, |
|
"loss": 0.5636, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.2661596958174905, |
|
"grad_norm": 0.02306740449177855, |
|
"learning_rate": 7.150826007886334e-06, |
|
"loss": 0.4318, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.2680608365019013, |
|
"grad_norm": 0.026119267635482755, |
|
"learning_rate": 7.119009006347625e-06, |
|
"loss": 0.3306, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.2699619771863118, |
|
"grad_norm": 0.03419415738994974, |
|
"learning_rate": 7.087223777863883e-06, |
|
"loss": 0.5362, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.2718631178707225, |
|
"grad_norm": 0.03072698649650123, |
|
"learning_rate": 7.055470672979003e-06, |
|
"loss": 0.4542, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.2737642585551332, |
|
"grad_norm": 0.026887865333932157, |
|
"learning_rate": 7.023750041882609e-06, |
|
"loss": 0.5468, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.2756653992395437, |
|
"grad_norm": 0.02859574453492066, |
|
"learning_rate": 6.992062234406185e-06, |
|
"loss": 0.4279, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.2775665399239544, |
|
"grad_norm": 0.02866149676116349, |
|
"learning_rate": 6.960407600019217e-06, |
|
"loss": 0.4671, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.2794676806083651, |
|
"grad_norm": 0.024968391509913344, |
|
"learning_rate": 6.9287864878253475e-06, |
|
"loss": 0.3833, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.2813688212927756, |
|
"grad_norm": 0.03457718232305868, |
|
"learning_rate": 6.897199246558515e-06, |
|
"loss": 0.4175, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.2832699619771863, |
|
"grad_norm": 0.02896136132570871, |
|
"learning_rate": 6.865646224579108e-06, |
|
"loss": 0.4343, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.285171102661597, |
|
"grad_norm": 0.029212521848368124, |
|
"learning_rate": 6.834127769870134e-06, |
|
"loss": 0.4066, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.2870722433460076, |
|
"grad_norm": 0.02799355491203608, |
|
"learning_rate": 6.802644230033373e-06, |
|
"loss": 0.4299, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.2889733840304183, |
|
"grad_norm": 0.029789337038738203, |
|
"learning_rate": 6.771195952285541e-06, |
|
"loss": 0.4238, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.290874524714829, |
|
"grad_norm": 0.03367895441324516, |
|
"learning_rate": 6.739783283454469e-06, |
|
"loss": 0.4823, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.2927756653992395, |
|
"grad_norm": 0.028940087009468582, |
|
"learning_rate": 6.708406569975274e-06, |
|
"loss": 0.4209, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.2946768060836502, |
|
"grad_norm": 0.027738242534913166, |
|
"learning_rate": 6.6770661578865444e-06, |
|
"loss": 0.47, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.296577946768061, |
|
"grad_norm": 0.026705324113941907, |
|
"learning_rate": 6.645762392826509e-06, |
|
"loss": 0.3689, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.2984790874524714, |
|
"grad_norm": 0.028661736682716873, |
|
"learning_rate": 6.614495620029238e-06, |
|
"loss": 0.4969, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.3003802281368821, |
|
"grad_norm": 0.026180052367738408, |
|
"learning_rate": 6.583266184320836e-06, |
|
"loss": 0.4422, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.3022813688212929, |
|
"grad_norm": 0.029666386897892704, |
|
"learning_rate": 6.552074430115624e-06, |
|
"loss": 0.5087, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.3041825095057034, |
|
"grad_norm": 0.026792793282078037, |
|
"learning_rate": 6.520920701412371e-06, |
|
"loss": 0.4812, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.306083650190114, |
|
"grad_norm": 0.02551359544734154, |
|
"learning_rate": 6.489805341790456e-06, |
|
"loss": 0.4641, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.3079847908745248, |
|
"grad_norm": 0.026500067110995663, |
|
"learning_rate": 6.458728694406124e-06, |
|
"loss": 0.4662, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.3098859315589353, |
|
"grad_norm": 0.03395026623518602, |
|
"learning_rate": 6.427691101988673e-06, |
|
"loss": 0.3852, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.311787072243346, |
|
"grad_norm": 0.02634387079965298, |
|
"learning_rate": 6.396692906836686e-06, |
|
"loss": 0.3745, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.3136882129277567, |
|
"grad_norm": 0.024158130217925743, |
|
"learning_rate": 6.3657344508142495e-06, |
|
"loss": 0.3748, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.3155893536121672, |
|
"grad_norm": 0.028848449256611498, |
|
"learning_rate": 6.334816075347185e-06, |
|
"loss": 0.442, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.317490494296578, |
|
"grad_norm": 0.03023585725462282, |
|
"learning_rate": 6.303938121419295e-06, |
|
"loss": 0.416, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.3193916349809887, |
|
"grad_norm": 0.026370730125347006, |
|
"learning_rate": 6.273100929568579e-06, |
|
"loss": 0.3592, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.3212927756653992, |
|
"grad_norm": 0.02727386863891104, |
|
"learning_rate": 6.242304839883502e-06, |
|
"loss": 0.4762, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.3231939163498099, |
|
"grad_norm": 0.030668183566592767, |
|
"learning_rate": 6.211550191999223e-06, |
|
"loss": 0.5194, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.3250950570342206, |
|
"grad_norm": 0.02972347024127305, |
|
"learning_rate": 6.18083732509387e-06, |
|
"loss": 0.4843, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.326996197718631, |
|
"grad_norm": 0.026145838925778292, |
|
"learning_rate": 6.150166577884781e-06, |
|
"loss": 0.386, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.3288973384030418, |
|
"grad_norm": 0.032041397582118325, |
|
"learning_rate": 6.119538288624778e-06, |
|
"loss": 0.4001, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.3307984790874525, |
|
"grad_norm": 0.029828153624763563, |
|
"learning_rate": 6.088952795098442e-06, |
|
"loss": 0.4283, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.332699619771863, |
|
"grad_norm": 0.031995216287676834, |
|
"learning_rate": 6.058410434618367e-06, |
|
"loss": 0.4421, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.3346007604562737, |
|
"grad_norm": 0.031637583686470815, |
|
"learning_rate": 6.027911544021465e-06, |
|
"loss": 0.4724, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.3365019011406845, |
|
"grad_norm": 0.02779799173287801, |
|
"learning_rate": 5.997456459665237e-06, |
|
"loss": 0.5191, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.338403041825095, |
|
"grad_norm": 0.026787215560895514, |
|
"learning_rate": 5.967045517424062e-06, |
|
"loss": 0.3672, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.3403041825095057, |
|
"grad_norm": 0.028848537313750235, |
|
"learning_rate": 5.936679052685505e-06, |
|
"loss": 0.4277, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.3422053231939164, |
|
"grad_norm": 0.028783359031467698, |
|
"learning_rate": 5.906357400346596e-06, |
|
"loss": 0.4509, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.3441064638783269, |
|
"grad_norm": 0.02773116659224034, |
|
"learning_rate": 5.876080894810167e-06, |
|
"loss": 0.5577, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.3460076045627376, |
|
"grad_norm": 0.028553697016596574, |
|
"learning_rate": 5.845849869981137e-06, |
|
"loss": 0.3898, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.3479087452471483, |
|
"grad_norm": 0.028156392246010412, |
|
"learning_rate": 5.815664659262845e-06, |
|
"loss": 0.3766, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.3498098859315588, |
|
"grad_norm": 0.03100657087735765, |
|
"learning_rate": 5.78552559555337e-06, |
|
"loss": 0.5583, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.3517110266159695, |
|
"grad_norm": 0.02797485074428679, |
|
"learning_rate": 5.755433011241851e-06, |
|
"loss": 0.4785, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.3536121673003803, |
|
"grad_norm": 0.027358375412801707, |
|
"learning_rate": 5.725387238204831e-06, |
|
"loss": 0.4732, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.3555133079847907, |
|
"grad_norm": 0.030912775982331196, |
|
"learning_rate": 5.695388607802603e-06, |
|
"loss": 0.5375, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.3574144486692015, |
|
"grad_norm": 0.032068076675599765, |
|
"learning_rate": 5.665437450875534e-06, |
|
"loss": 0.4606, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.3593155893536122, |
|
"grad_norm": 0.03223317604676853, |
|
"learning_rate": 5.635534097740435e-06, |
|
"loss": 0.4024, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.3612167300380227, |
|
"grad_norm": 0.02682575918004717, |
|
"learning_rate": 5.605678878186911e-06, |
|
"loss": 0.3428, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.3631178707224334, |
|
"grad_norm": 0.03148540229771045, |
|
"learning_rate": 5.575872121473722e-06, |
|
"loss": 0.3892, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.3650190114068441, |
|
"grad_norm": 0.029382666135706115, |
|
"learning_rate": 5.546114156325166e-06, |
|
"loss": 0.5357, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.3669201520912546, |
|
"grad_norm": 0.02919874406990327, |
|
"learning_rate": 5.516405310927431e-06, |
|
"loss": 0.464, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.3688212927756653, |
|
"grad_norm": 0.02896813714737485, |
|
"learning_rate": 5.4867459129249846e-06, |
|
"loss": 0.5409, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.370722433460076, |
|
"grad_norm": 0.034416303220981784, |
|
"learning_rate": 5.4571362894169795e-06, |
|
"loss": 0.4672, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.3726235741444868, |
|
"grad_norm": 0.024902435863464658, |
|
"learning_rate": 5.427576766953615e-06, |
|
"loss": 0.4113, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.3745247148288973, |
|
"grad_norm": 0.027717131676122143, |
|
"learning_rate": 5.398067671532554e-06, |
|
"loss": 0.4107, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.376425855513308, |
|
"grad_norm": 0.02819798108310211, |
|
"learning_rate": 5.368609328595323e-06, |
|
"loss": 0.54, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.3783269961977187, |
|
"grad_norm": 0.0318198691073443, |
|
"learning_rate": 5.339202063023727e-06, |
|
"loss": 0.4745, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.3802281368821292, |
|
"grad_norm": 0.03160178400979231, |
|
"learning_rate": 5.309846199136258e-06, |
|
"loss": 0.5534, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.38212927756654, |
|
"grad_norm": 0.031203987967182555, |
|
"learning_rate": 5.280542060684535e-06, |
|
"loss": 0.4872, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.3840304182509506, |
|
"grad_norm": 0.03212939770548475, |
|
"learning_rate": 5.2512899708497086e-06, |
|
"loss": 0.4987, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.3859315589353614, |
|
"grad_norm": 0.02667203925098415, |
|
"learning_rate": 5.222090252238916e-06, |
|
"loss": 0.4661, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.3878326996197718, |
|
"grad_norm": 0.03050695800339695, |
|
"learning_rate": 5.192943226881724e-06, |
|
"loss": 0.5221, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.3897338403041826, |
|
"grad_norm": 0.027805559765800788, |
|
"learning_rate": 5.163849216226562e-06, |
|
"loss": 0.408, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.3916349809885933, |
|
"grad_norm": 0.02909423144948114, |
|
"learning_rate": 5.134808541137183e-06, |
|
"loss": 0.4871, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.3935361216730038, |
|
"grad_norm": 0.03294086065938786, |
|
"learning_rate": 5.105821521889147e-06, |
|
"loss": 0.5285, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.3954372623574145, |
|
"grad_norm": 0.027754896702892988, |
|
"learning_rate": 5.076888478166247e-06, |
|
"loss": 0.4322, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.3973384030418252, |
|
"grad_norm": 0.03290516325990418, |
|
"learning_rate": 5.048009729057012e-06, |
|
"loss": 0.4647, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.3992395437262357, |
|
"grad_norm": 0.030937260456841767, |
|
"learning_rate": 5.0191855930511946e-06, |
|
"loss": 0.4194, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.4011406844106464, |
|
"grad_norm": 0.0277872161185395, |
|
"learning_rate": 4.990416388036233e-06, |
|
"loss": 0.4097, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.4030418250950571, |
|
"grad_norm": 0.030180714624574015, |
|
"learning_rate": 4.961702431293759e-06, |
|
"loss": 0.3394, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.4049429657794676, |
|
"grad_norm": 0.02913012334006182, |
|
"learning_rate": 4.933044039496107e-06, |
|
"loss": 0.46, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.4068441064638784, |
|
"grad_norm": 0.03039527344098277, |
|
"learning_rate": 4.904441528702806e-06, |
|
"loss": 0.4149, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.408745247148289, |
|
"grad_norm": 0.02949216153384184, |
|
"learning_rate": 4.875895214357093e-06, |
|
"loss": 0.4067, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.4106463878326996, |
|
"grad_norm": 0.029355805600736497, |
|
"learning_rate": 4.847405411282462e-06, |
|
"loss": 0.4412, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.4125475285171103, |
|
"grad_norm": 0.029833345308483448, |
|
"learning_rate": 4.818972433679145e-06, |
|
"loss": 0.4834, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.414448669201521, |
|
"grad_norm": 0.031040892623573117, |
|
"learning_rate": 4.790596595120699e-06, |
|
"loss": 0.4235, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.4163498098859315, |
|
"grad_norm": 0.029418025601028568, |
|
"learning_rate": 4.762278208550505e-06, |
|
"loss": 0.5524, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.4182509505703422, |
|
"grad_norm": 0.03234656547481261, |
|
"learning_rate": 4.734017586278337e-06, |
|
"loss": 0.455, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.420152091254753, |
|
"grad_norm": 0.030112020587859428, |
|
"learning_rate": 4.7058150399769245e-06, |
|
"loss": 0.5177, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.4220532319391634, |
|
"grad_norm": 0.028737634075618794, |
|
"learning_rate": 4.677670880678493e-06, |
|
"loss": 0.5981, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.4239543726235742, |
|
"grad_norm": 0.02811256918101417, |
|
"learning_rate": 4.649585418771348e-06, |
|
"loss": 0.4547, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.4258555133079849, |
|
"grad_norm": 0.028878580084001036, |
|
"learning_rate": 4.621558963996458e-06, |
|
"loss": 0.428, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.4277566539923954, |
|
"grad_norm": 0.03111349099552106, |
|
"learning_rate": 4.593591825444028e-06, |
|
"loss": 0.5579, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.429657794676806, |
|
"grad_norm": 0.028704224469254278, |
|
"learning_rate": 4.565684311550077e-06, |
|
"loss": 0.4346, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.4315589353612168, |
|
"grad_norm": 0.029538862975289022, |
|
"learning_rate": 4.537836730093077e-06, |
|
"loss": 0.3984, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.4334600760456273, |
|
"grad_norm": 0.02920814557830504, |
|
"learning_rate": 4.510049388190518e-06, |
|
"loss": 0.4732, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.435361216730038, |
|
"grad_norm": 0.026042563331556207, |
|
"learning_rate": 4.482322592295541e-06, |
|
"loss": 0.4252, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.4372623574144487, |
|
"grad_norm": 0.03446984414969546, |
|
"learning_rate": 4.454656648193559e-06, |
|
"loss": 0.5051, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.4391634980988592, |
|
"grad_norm": 0.031148293151505677, |
|
"learning_rate": 4.427051860998877e-06, |
|
"loss": 0.4379, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.44106463878327, |
|
"grad_norm": 0.03187960963872275, |
|
"learning_rate": 4.399508535151321e-06, |
|
"loss": 0.4388, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.4429657794676807, |
|
"grad_norm": 0.029156787441216785, |
|
"learning_rate": 4.372026974412907e-06, |
|
"loss": 0.4099, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.4448669201520912, |
|
"grad_norm": 0.027570371450881585, |
|
"learning_rate": 4.344607481864466e-06, |
|
"loss": 0.3871, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.446768060836502, |
|
"grad_norm": 0.03001707256533982, |
|
"learning_rate": 4.317250359902295e-06, |
|
"loss": 0.4918, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.4486692015209126, |
|
"grad_norm": 0.02815136784810877, |
|
"learning_rate": 4.2899559102348585e-06, |
|
"loss": 0.489, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.450570342205323, |
|
"grad_norm": 0.030887177367103905, |
|
"learning_rate": 4.262724433879427e-06, |
|
"loss": 0.4149, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.4524714828897338, |
|
"grad_norm": 0.03101054435233135, |
|
"learning_rate": 4.235556231158765e-06, |
|
"loss": 0.382, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.4543726235741445, |
|
"grad_norm": 0.03003088571504904, |
|
"learning_rate": 4.208451601697836e-06, |
|
"loss": 0.4431, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.456273764258555, |
|
"grad_norm": 0.041707849733573374, |
|
"learning_rate": 4.181410844420473e-06, |
|
"loss": 0.5137, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.4581749049429658, |
|
"grad_norm": 0.028879053883781557, |
|
"learning_rate": 4.154434257546095e-06, |
|
"loss": 0.404, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.4600760456273765, |
|
"grad_norm": 0.031166888939069805, |
|
"learning_rate": 4.127522138586424e-06, |
|
"loss": 0.4769, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.461977186311787, |
|
"grad_norm": 0.030755634558385713, |
|
"learning_rate": 4.10067478434219e-06, |
|
"loss": 0.4582, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.4638783269961977, |
|
"grad_norm": 0.02834069018800523, |
|
"learning_rate": 4.073892490899865e-06, |
|
"loss": 0.428, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.4657794676806084, |
|
"grad_norm": 0.029920699324498402, |
|
"learning_rate": 4.047175553628397e-06, |
|
"loss": 0.3844, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.467680608365019, |
|
"grad_norm": 0.029994581045541858, |
|
"learning_rate": 4.020524267175954e-06, |
|
"loss": 0.4341, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.4695817490494296, |
|
"grad_norm": 0.030296431261687077, |
|
"learning_rate": 3.993938925466674e-06, |
|
"loss": 0.4781, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.4714828897338403, |
|
"grad_norm": 0.031388201776416984, |
|
"learning_rate": 3.96741982169742e-06, |
|
"loss": 0.403, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.4733840304182508, |
|
"grad_norm": 0.034323278285153046, |
|
"learning_rate": 3.9409672483345465e-06, |
|
"loss": 0.4479, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.4752851711026616, |
|
"grad_norm": 0.039442138478036716, |
|
"learning_rate": 3.914581497110684e-06, |
|
"loss": 0.4226, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.4771863117870723, |
|
"grad_norm": 0.027820650801430504, |
|
"learning_rate": 3.888262859021508e-06, |
|
"loss": 0.4751, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.4790874524714828, |
|
"grad_norm": 0.028212566710192757, |
|
"learning_rate": 3.862011624322534e-06, |
|
"loss": 0.3768, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.4809885931558935, |
|
"grad_norm": 0.027663868858047506, |
|
"learning_rate": 3.835828082525925e-06, |
|
"loss": 0.3845, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.4828897338403042, |
|
"grad_norm": 0.0286387603298508, |
|
"learning_rate": 3.8097125223972864e-06, |
|
"loss": 0.4723, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.4847908745247147, |
|
"grad_norm": 0.028742371931207277, |
|
"learning_rate": 3.7836652319524835e-06, |
|
"loss": 0.4091, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.4866920152091254, |
|
"grad_norm": 0.028776691349877988, |
|
"learning_rate": 3.7576864984544814e-06, |
|
"loss": 0.4418, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.4885931558935361, |
|
"grad_norm": 0.02717344017433737, |
|
"learning_rate": 3.73177660841015e-06, |
|
"loss": 0.375, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.4904942965779466, |
|
"grad_norm": 0.04105268953972606, |
|
"learning_rate": 3.7059358475671225e-06, |
|
"loss": 0.434, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.4923954372623573, |
|
"grad_norm": 0.03127383121023246, |
|
"learning_rate": 3.680164500910646e-06, |
|
"loss": 0.3745, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.494296577946768, |
|
"grad_norm": 0.029885424011204434, |
|
"learning_rate": 3.654462852660423e-06, |
|
"loss": 0.5187, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.4961977186311788, |
|
"grad_norm": 0.030127846598248572, |
|
"learning_rate": 3.6288311862674885e-06, |
|
"loss": 0.423, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.4980988593155893, |
|
"grad_norm": 0.033965255384920984, |
|
"learning_rate": 3.6032697844110896e-06, |
|
"loss": 0.4487, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.031051910837305945, |
|
"learning_rate": 3.5777789289955454e-06, |
|
"loss": 0.5404, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.5019011406844105, |
|
"grad_norm": 0.027096542766531128, |
|
"learning_rate": 3.5523589011471592e-06, |
|
"loss": 0.4042, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.5038022813688214, |
|
"grad_norm": 0.03435456762542711, |
|
"learning_rate": 3.527009981211119e-06, |
|
"loss": 0.4893, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.505703422053232, |
|
"grad_norm": 0.027520683723539555, |
|
"learning_rate": 3.5017324487483873e-06, |
|
"loss": 0.4118, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.5076045627376424, |
|
"grad_norm": 0.029455592603562925, |
|
"learning_rate": 3.47652658253263e-06, |
|
"loss": 0.4488, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.5095057034220534, |
|
"grad_norm": 0.03054107669871993, |
|
"learning_rate": 3.4513926605471504e-06, |
|
"loss": 0.5746, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.5114068441064639, |
|
"grad_norm": 0.03169123866581408, |
|
"learning_rate": 3.4263309599818017e-06, |
|
"loss": 0.3971, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.5133079847908744, |
|
"grad_norm": 0.028699094422273308, |
|
"learning_rate": 3.4013417572299446e-06, |
|
"loss": 0.5152, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.5152091254752853, |
|
"grad_norm": 0.027529958113480905, |
|
"learning_rate": 3.37642532788541e-06, |
|
"loss": 0.4457, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.5171102661596958, |
|
"grad_norm": 0.03048754581317044, |
|
"learning_rate": 3.3515819467394184e-06, |
|
"loss": 0.4127, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.5190114068441065, |
|
"grad_norm": 0.02821106287426813, |
|
"learning_rate": 3.326811887777607e-06, |
|
"loss": 0.3581, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.5209125475285172, |
|
"grad_norm": 0.025512659096068077, |
|
"learning_rate": 3.3021154241769606e-06, |
|
"loss": 0.3813, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.5228136882129277, |
|
"grad_norm": 0.027111936862849854, |
|
"learning_rate": 3.2774928283028153e-06, |
|
"loss": 0.4563, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.5247148288973384, |
|
"grad_norm": 0.029468115910336434, |
|
"learning_rate": 3.2529443717058693e-06, |
|
"loss": 0.4347, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.5266159695817492, |
|
"grad_norm": 0.03144089627867526, |
|
"learning_rate": 3.228470325119164e-06, |
|
"loss": 0.5228, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.5285171102661597, |
|
"grad_norm": 0.026828502717138106, |
|
"learning_rate": 3.20407095845511e-06, |
|
"loss": 0.3327, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.5304182509505704, |
|
"grad_norm": 0.03191218461014265, |
|
"learning_rate": 3.179746540802506e-06, |
|
"loss": 0.4629, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.532319391634981, |
|
"grad_norm": 0.029064440550334705, |
|
"learning_rate": 3.155497340423588e-06, |
|
"loss": 0.3646, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.5342205323193916, |
|
"grad_norm": 0.03303936902708768, |
|
"learning_rate": 3.1313236247510414e-06, |
|
"loss": 0.4376, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.5361216730038023, |
|
"grad_norm": 0.029358567199211188, |
|
"learning_rate": 3.107225660385077e-06, |
|
"loss": 0.4566, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.538022813688213, |
|
"grad_norm": 0.029522564844707316, |
|
"learning_rate": 3.0832037130904748e-06, |
|
"loss": 0.465, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.5399239543726235, |
|
"grad_norm": 0.030610433655364134, |
|
"learning_rate": 3.0592580477936606e-06, |
|
"loss": 0.4463, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.5418250950570342, |
|
"grad_norm": 0.030867638795095506, |
|
"learning_rate": 3.035388928579792e-06, |
|
"loss": 0.4166, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.543726235741445, |
|
"grad_norm": 0.031003703162365476, |
|
"learning_rate": 3.011596618689825e-06, |
|
"loss": 0.4822, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.5456273764258555, |
|
"grad_norm": 0.025768323397602613, |
|
"learning_rate": 2.9878813805176252e-06, |
|
"loss": 0.3659, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.5475285171102662, |
|
"grad_norm": 0.02887988122441581, |
|
"learning_rate": 2.9642434756070793e-06, |
|
"loss": 0.3947, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.549429657794677, |
|
"grad_norm": 0.04214948236847536, |
|
"learning_rate": 2.940683164649194e-06, |
|
"loss": 0.4057, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.5513307984790874, |
|
"grad_norm": 0.028795233112838173, |
|
"learning_rate": 2.9172007074792342e-06, |
|
"loss": 0.367, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.553231939163498, |
|
"grad_norm": 0.04269869786294826, |
|
"learning_rate": 2.8937963630738517e-06, |
|
"loss": 0.4783, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.5551330798479088, |
|
"grad_norm": 0.03976255951688538, |
|
"learning_rate": 2.87047038954823e-06, |
|
"loss": 0.4598, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.5570342205323193, |
|
"grad_norm": 0.02756738259971169, |
|
"learning_rate": 2.8472230441532365e-06, |
|
"loss": 0.3534, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.55893536121673, |
|
"grad_norm": 0.032031044063694496, |
|
"learning_rate": 2.8240545832725963e-06, |
|
"loss": 0.437, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.5608365019011408, |
|
"grad_norm": 0.03232689680617355, |
|
"learning_rate": 2.8009652624200436e-06, |
|
"loss": 0.3954, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.5627376425855513, |
|
"grad_norm": 0.028779225361362428, |
|
"learning_rate": 2.7779553362365184e-06, |
|
"loss": 0.3788, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.564638783269962, |
|
"grad_norm": 0.033053817697612495, |
|
"learning_rate": 2.755025058487364e-06, |
|
"loss": 0.5015, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.5665399239543727, |
|
"grad_norm": 0.026957049890508244, |
|
"learning_rate": 2.7321746820595084e-06, |
|
"loss": 0.3915, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.5684410646387832, |
|
"grad_norm": 0.029741526560517362, |
|
"learning_rate": 2.709404458958693e-06, |
|
"loss": 0.4319, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.570342205323194, |
|
"grad_norm": 0.034766455283583235, |
|
"learning_rate": 2.6867146403066833e-06, |
|
"loss": 0.4974, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.5722433460076046, |
|
"grad_norm": 0.02835777392351599, |
|
"learning_rate": 2.6641054763385044e-06, |
|
"loss": 0.3743, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.5741444866920151, |
|
"grad_norm": 0.031431886268975404, |
|
"learning_rate": 2.6415772163996845e-06, |
|
"loss": 0.562, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.5760456273764258, |
|
"grad_norm": 0.02852970629061991, |
|
"learning_rate": 2.619130108943494e-06, |
|
"loss": 0.507, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.5779467680608366, |
|
"grad_norm": 0.03200964545830204, |
|
"learning_rate": 2.5967644015282146e-06, |
|
"loss": 0.4173, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.579847908745247, |
|
"grad_norm": 0.030109870902287034, |
|
"learning_rate": 2.5744803408144026e-06, |
|
"loss": 0.4756, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.5817490494296578, |
|
"grad_norm": 0.02879971785543682, |
|
"learning_rate": 2.5522781725621814e-06, |
|
"loss": 0.4296, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.5836501901140685, |
|
"grad_norm": 0.030572562350088872, |
|
"learning_rate": 2.530158141628515e-06, |
|
"loss": 0.3608, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.585551330798479, |
|
"grad_norm": 0.029997871888884576, |
|
"learning_rate": 2.508120491964512e-06, |
|
"loss": 0.4984, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.5874524714828897, |
|
"grad_norm": 0.0375523224485888, |
|
"learning_rate": 2.486165466612751e-06, |
|
"loss": 0.5286, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.5893536121673004, |
|
"grad_norm": 0.03166794783148593, |
|
"learning_rate": 2.464293307704566e-06, |
|
"loss": 0.3404, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.591254752851711, |
|
"grad_norm": 0.029065306165181683, |
|
"learning_rate": 2.4425042564574186e-06, |
|
"loss": 0.3846, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.5931558935361216, |
|
"grad_norm": 0.03659011859307511, |
|
"learning_rate": 2.4207985531722034e-06, |
|
"loss": 0.4462, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.5950570342205324, |
|
"grad_norm": 0.03237371162013363, |
|
"learning_rate": 2.3991764372306113e-06, |
|
"loss": 0.4106, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.5969581749049429, |
|
"grad_norm": 0.03163202922364184, |
|
"learning_rate": 2.377638147092497e-06, |
|
"loss": 0.4557, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.5988593155893536, |
|
"grad_norm": 0.029894103192715567, |
|
"learning_rate": 2.3561839202932344e-06, |
|
"loss": 0.5256, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.6007604562737643, |
|
"grad_norm": 0.031980223022173494, |
|
"learning_rate": 2.3348139934411008e-06, |
|
"loss": 0.4452, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.6026615969581748, |
|
"grad_norm": 0.03143298206428353, |
|
"learning_rate": 2.3135286022146785e-06, |
|
"loss": 0.4315, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.6045627376425855, |
|
"grad_norm": 0.030631399015622986, |
|
"learning_rate": 2.292327981360245e-06, |
|
"loss": 0.4798, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.6064638783269962, |
|
"grad_norm": 0.030773177319533385, |
|
"learning_rate": 2.271212364689176e-06, |
|
"loss": 0.4573, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.6083650190114067, |
|
"grad_norm": 0.02942170999285307, |
|
"learning_rate": 2.2501819850753925e-06, |
|
"loss": 0.4104, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.6102661596958177, |
|
"grad_norm": 0.032044391662416384, |
|
"learning_rate": 2.229237074452768e-06, |
|
"loss": 0.3595, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.6121673003802282, |
|
"grad_norm": 0.02648000190922172, |
|
"learning_rate": 2.2083778638125796e-06, |
|
"loss": 0.4179, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.6140684410646386, |
|
"grad_norm": 0.0323786043598909, |
|
"learning_rate": 2.1876045832009694e-06, |
|
"loss": 0.4312, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.6159695817490496, |
|
"grad_norm": 0.031962574305962506, |
|
"learning_rate": 2.16691746171639e-06, |
|
"loss": 0.4414, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.61787072243346, |
|
"grad_norm": 0.028988170537215887, |
|
"learning_rate": 2.1463167275070863e-06, |
|
"loss": 0.3482, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.6197718631178706, |
|
"grad_norm": 0.03211596814985751, |
|
"learning_rate": 2.125802607768588e-06, |
|
"loss": 0.4426, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.6216730038022815, |
|
"grad_norm": 0.028882375149180312, |
|
"learning_rate": 2.1053753287411895e-06, |
|
"loss": 0.4839, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.623574144486692, |
|
"grad_norm": 0.0325186577163931, |
|
"learning_rate": 2.08503511570746e-06, |
|
"loss": 0.3888, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.6254752851711025, |
|
"grad_norm": 0.0278026193043055, |
|
"learning_rate": 2.064782192989765e-06, |
|
"loss": 0.4297, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.6273764258555135, |
|
"grad_norm": 0.02686969817654916, |
|
"learning_rate": 2.0446167839477815e-06, |
|
"loss": 0.3861, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.629277566539924, |
|
"grad_norm": 0.030445402524440293, |
|
"learning_rate": 2.0245391109760437e-06, |
|
"loss": 0.4997, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.6311787072243344, |
|
"grad_norm": 0.029801769119460805, |
|
"learning_rate": 2.0045493955014915e-06, |
|
"loss": 0.347, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.6330798479087454, |
|
"grad_norm": 0.02683243241644894, |
|
"learning_rate": 1.984647857981017e-06, |
|
"loss": 0.4397, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.6349809885931559, |
|
"grad_norm": 0.03053157289256201, |
|
"learning_rate": 1.96483471789904e-06, |
|
"loss": 0.5046, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.6368821292775664, |
|
"grad_norm": 0.03218333973180804, |
|
"learning_rate": 1.9451101937650963e-06, |
|
"loss": 0.4193, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.6387832699619773, |
|
"grad_norm": 0.03187704409089051, |
|
"learning_rate": 1.925474503111412e-06, |
|
"loss": 0.4847, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.6406844106463878, |
|
"grad_norm": 0.029870834823852343, |
|
"learning_rate": 1.905927862490512e-06, |
|
"loss": 0.412, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.6425855513307985, |
|
"grad_norm": 0.03351141391858019, |
|
"learning_rate": 1.8864704874728346e-06, |
|
"loss": 0.4288, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.6444866920152093, |
|
"grad_norm": 0.031248306742379217, |
|
"learning_rate": 1.8671025926443464e-06, |
|
"loss": 0.3744, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.6463878326996197, |
|
"grad_norm": 0.027804428306329856, |
|
"learning_rate": 1.8478243916041882e-06, |
|
"loss": 0.49, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.6482889733840305, |
|
"grad_norm": 0.03248108305698628, |
|
"learning_rate": 1.828636096962304e-06, |
|
"loss": 0.4218, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.6501901140684412, |
|
"grad_norm": 0.030985566129981643, |
|
"learning_rate": 1.8095379203371044e-06, |
|
"loss": 0.3549, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.6520912547528517, |
|
"grad_norm": 0.029278682974998815, |
|
"learning_rate": 1.7905300723531393e-06, |
|
"loss": 0.4989, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.6539923954372624, |
|
"grad_norm": 0.03294348867295606, |
|
"learning_rate": 1.771612762638758e-06, |
|
"loss": 0.473, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.6558935361216731, |
|
"grad_norm": 0.032118173778418836, |
|
"learning_rate": 1.7527861998238094e-06, |
|
"loss": 0.5291, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.6577946768060836, |
|
"grad_norm": 0.02952093686854592, |
|
"learning_rate": 1.7340505915373495e-06, |
|
"loss": 0.4796, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.6596958174904943, |
|
"grad_norm": 0.029492671747804372, |
|
"learning_rate": 1.7154061444053239e-06, |
|
"loss": 0.3782, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.661596958174905, |
|
"grad_norm": 0.030817515608522434, |
|
"learning_rate": 1.6968530640483126e-06, |
|
"loss": 0.4224, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.6634980988593155, |
|
"grad_norm": 0.0274807609917878, |
|
"learning_rate": 1.6783915550792652e-06, |
|
"loss": 0.3294, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.6653992395437263, |
|
"grad_norm": 0.029413278920099938, |
|
"learning_rate": 1.660021821101222e-06, |
|
"loss": 0.5466, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.667300380228137, |
|
"grad_norm": 0.02918447464387044, |
|
"learning_rate": 1.6417440647050853e-06, |
|
"loss": 0.5098, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.6692015209125475, |
|
"grad_norm": 0.03082479764728793, |
|
"learning_rate": 1.6235584874673848e-06, |
|
"loss": 0.4297, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.6711026615969582, |
|
"grad_norm": 0.032422475501209885, |
|
"learning_rate": 1.6054652899480472e-06, |
|
"loss": 0.4871, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.673003802281369, |
|
"grad_norm": 0.03141292425621774, |
|
"learning_rate": 1.587464671688187e-06, |
|
"loss": 0.4569, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.6749049429657794, |
|
"grad_norm": 0.0330903525633864, |
|
"learning_rate": 1.5695568312079156e-06, |
|
"loss": 0.4469, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.6768060836501901, |
|
"grad_norm": 0.031179229475447295, |
|
"learning_rate": 1.5517419660041277e-06, |
|
"loss": 0.5497, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.6787072243346008, |
|
"grad_norm": 0.03228263078110552, |
|
"learning_rate": 1.534020272548349e-06, |
|
"loss": 0.4859, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.6806083650190113, |
|
"grad_norm": 0.025923393407134707, |
|
"learning_rate": 1.5163919462845622e-06, |
|
"loss": 0.4196, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.682509505703422, |
|
"grad_norm": 0.028661954301765564, |
|
"learning_rate": 1.4988571816270402e-06, |
|
"loss": 0.4335, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.6844106463878328, |
|
"grad_norm": 0.027963802864611236, |
|
"learning_rate": 1.4814161719582132e-06, |
|
"loss": 0.3977, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.6863117870722433, |
|
"grad_norm": 0.032875478037560815, |
|
"learning_rate": 1.4640691096265358e-06, |
|
"loss": 0.629, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.688212927756654, |
|
"grad_norm": 0.0374925249439201, |
|
"learning_rate": 1.4468161859443609e-06, |
|
"loss": 0.3625, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.6901140684410647, |
|
"grad_norm": 0.03185769773842431, |
|
"learning_rate": 1.4296575911858268e-06, |
|
"loss": 0.4556, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.6920152091254752, |
|
"grad_norm": 0.02897215891587595, |
|
"learning_rate": 1.412593514584777e-06, |
|
"loss": 0.5004, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.693916349809886, |
|
"grad_norm": 0.029630191567608207, |
|
"learning_rate": 1.3956241443326423e-06, |
|
"loss": 0.4363, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.6958174904942966, |
|
"grad_norm": 0.03190130281641041, |
|
"learning_rate": 1.378749667576399e-06, |
|
"loss": 0.4588, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.6977186311787071, |
|
"grad_norm": 0.031127873011931113, |
|
"learning_rate": 1.3619702704164783e-06, |
|
"loss": 0.4142, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.6996197718631179, |
|
"grad_norm": 0.032640279775106196, |
|
"learning_rate": 1.3452861379047289e-06, |
|
"loss": 0.4733, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.7015209125475286, |
|
"grad_norm": 0.035620076327424, |
|
"learning_rate": 1.3286974540423747e-06, |
|
"loss": 0.4201, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.703422053231939, |
|
"grad_norm": 0.028735453072537963, |
|
"learning_rate": 1.3122044017779768e-06, |
|
"loss": 0.457, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.7053231939163498, |
|
"grad_norm": 0.034944169046894284, |
|
"learning_rate": 1.2958071630054214e-06, |
|
"loss": 0.5117, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.7072243346007605, |
|
"grad_norm": 0.033183863875221604, |
|
"learning_rate": 1.279505918561923e-06, |
|
"loss": 0.4804, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.709125475285171, |
|
"grad_norm": 0.030811121531080534, |
|
"learning_rate": 1.2633008482260146e-06, |
|
"loss": 0.4716, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.7110266159695817, |
|
"grad_norm": 0.03345376038096608, |
|
"learning_rate": 1.2471921307155655e-06, |
|
"loss": 0.4737, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.7129277566539924, |
|
"grad_norm": 0.037879982302966894, |
|
"learning_rate": 1.2311799436858275e-06, |
|
"loss": 0.5145, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.714828897338403, |
|
"grad_norm": 0.03125840637334411, |
|
"learning_rate": 1.2152644637274603e-06, |
|
"loss": 0.5485, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.7167300380228137, |
|
"grad_norm": 0.02870599348239609, |
|
"learning_rate": 1.1994458663645836e-06, |
|
"loss": 0.444, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.7186311787072244, |
|
"grad_norm": 0.028065572422978687, |
|
"learning_rate": 1.1837243260528542e-06, |
|
"loss": 0.5199, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.7205323193916349, |
|
"grad_norm": 0.029566805517605176, |
|
"learning_rate": 1.168100016177528e-06, |
|
"loss": 0.4565, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.7224334600760456, |
|
"grad_norm": 0.03360875256380791, |
|
"learning_rate": 1.1525731090515536e-06, |
|
"loss": 0.4899, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.7243346007604563, |
|
"grad_norm": 0.027981726615800055, |
|
"learning_rate": 1.137143775913675e-06, |
|
"loss": 0.4513, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.7262357414448668, |
|
"grad_norm": 0.030425084620758658, |
|
"learning_rate": 1.1218121869265365e-06, |
|
"loss": 0.3923, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.7281368821292775, |
|
"grad_norm": 0.03058876706165132, |
|
"learning_rate": 1.1065785111748117e-06, |
|
"loss": 0.4462, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.7300380228136882, |
|
"grad_norm": 0.02646253874188642, |
|
"learning_rate": 1.0914429166633355e-06, |
|
"loss": 0.4134, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.7319391634980987, |
|
"grad_norm": 0.0324500004362468, |
|
"learning_rate": 1.076405570315252e-06, |
|
"loss": 0.4459, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.7338403041825095, |
|
"grad_norm": 0.031065005091304766, |
|
"learning_rate": 1.0614666379701732e-06, |
|
"loss": 0.4725, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.7357414448669202, |
|
"grad_norm": 0.03012688741381645, |
|
"learning_rate": 1.046626284382356e-06, |
|
"loss": 0.5102, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.7376425855513307, |
|
"grad_norm": 0.03454189690967679, |
|
"learning_rate": 1.0318846732188737e-06, |
|
"loss": 0.4659, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.7395437262357416, |
|
"grad_norm": 0.029516089989690384, |
|
"learning_rate": 1.017241967057816e-06, |
|
"loss": 0.4616, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.741444866920152, |
|
"grad_norm": 0.0332903672973333, |
|
"learning_rate": 1.0026983273865055e-06, |
|
"loss": 0.401, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.7433460076045626, |
|
"grad_norm": 0.027987782732721567, |
|
"learning_rate": 9.882539145997027e-07, |
|
"loss": 0.4547, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.7452471482889735, |
|
"grad_norm": 0.030911921172495468, |
|
"learning_rate": 9.739088879978409e-07, |
|
"loss": 0.5301, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.747148288973384, |
|
"grad_norm": 0.026292516823905276, |
|
"learning_rate": 9.59663405785277e-07, |
|
"loss": 0.3759, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.7490494296577945, |
|
"grad_norm": 0.03205077235432636, |
|
"learning_rate": 9.455176250685338e-07, |
|
"loss": 0.4899, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.7509505703422055, |
|
"grad_norm": 0.028832804448268377, |
|
"learning_rate": 9.314717018545838e-07, |
|
"loss": 0.4195, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.752851711026616, |
|
"grad_norm": 0.028850241158891822, |
|
"learning_rate": 9.17525791049112e-07, |
|
"loss": 0.5263, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.7547528517110265, |
|
"grad_norm": 0.02997489825275393, |
|
"learning_rate": 9.036800464548157e-07, |
|
"loss": 0.5086, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.7566539923954374, |
|
"grad_norm": 0.03263348036982503, |
|
"learning_rate": 8.899346207697135e-07, |
|
"loss": 0.4947, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.758555133079848, |
|
"grad_norm": 0.030306018578941444, |
|
"learning_rate": 8.762896655854481e-07, |
|
"loss": 0.4048, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.7604562737642584, |
|
"grad_norm": 0.030759365752075392, |
|
"learning_rate": 8.627453313856249e-07, |
|
"loss": 0.5442, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.7623574144486693, |
|
"grad_norm": 0.031728909675695, |
|
"learning_rate": 8.493017675441495e-07, |
|
"loss": 0.4838, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.7642585551330798, |
|
"grad_norm": 0.030547999204859254, |
|
"learning_rate": 8.359591223235785e-07, |
|
"loss": 0.4355, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.7661596958174905, |
|
"grad_norm": 0.026057203038169253, |
|
"learning_rate": 8.227175428734868e-07, |
|
"loss": 0.4242, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.7680608365019013, |
|
"grad_norm": 0.029784743824152658, |
|
"learning_rate": 8.095771752288451e-07, |
|
"loss": 0.4369, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.7699619771863118, |
|
"grad_norm": 0.03225492850134359, |
|
"learning_rate": 7.965381643084069e-07, |
|
"loss": 0.3777, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.7718631178707225, |
|
"grad_norm": 0.03269172748299184, |
|
"learning_rate": 7.83600653913108e-07, |
|
"loss": 0.469, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.7737642585551332, |
|
"grad_norm": 0.030918866700995487, |
|
"learning_rate": 7.707647867244927e-07, |
|
"loss": 0.4663, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.7756653992395437, |
|
"grad_norm": 0.028030328242848276, |
|
"learning_rate": 7.580307043031232e-07, |
|
"loss": 0.5157, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.7775665399239544, |
|
"grad_norm": 0.033417474341330244, |
|
"learning_rate": 7.453985470870284e-07, |
|
"loss": 0.5319, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.7794676806083651, |
|
"grad_norm": 0.03507760060732998, |
|
"learning_rate": 7.328684543901598e-07, |
|
"loss": 0.4532, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.7813688212927756, |
|
"grad_norm": 0.028273930230182195, |
|
"learning_rate": 7.204405644008416e-07, |
|
"loss": 0.4521, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.7832699619771863, |
|
"grad_norm": 0.03158701741075583, |
|
"learning_rate": 7.081150141802518e-07, |
|
"loss": 0.448, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.785171102661597, |
|
"grad_norm": 0.031314206591594035, |
|
"learning_rate": 6.958919396609231e-07, |
|
"loss": 0.4567, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.7870722433460076, |
|
"grad_norm": 0.03157721386615864, |
|
"learning_rate": 6.837714756452241e-07, |
|
"loss": 0.4545, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.7889733840304183, |
|
"grad_norm": 0.02892824183275648, |
|
"learning_rate": 6.717537558038845e-07, |
|
"loss": 0.4609, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.790874524714829, |
|
"grad_norm": 0.03734105942938574, |
|
"learning_rate": 6.598389126745209e-07, |
|
"loss": 0.4129, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.7927756653992395, |
|
"grad_norm": 0.028796100332848178, |
|
"learning_rate": 6.480270776601682e-07, |
|
"loss": 0.4933, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.7946768060836502, |
|
"grad_norm": 0.02724205290254438, |
|
"learning_rate": 6.36318381027835e-07, |
|
"loss": 0.3796, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.796577946768061, |
|
"grad_norm": 0.029765219535193906, |
|
"learning_rate": 6.247129519070728e-07, |
|
"loss": 0.4206, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.7984790874524714, |
|
"grad_norm": 0.031351558482127034, |
|
"learning_rate": 6.132109182885382e-07, |
|
"loss": 0.3921, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.8003802281368821, |
|
"grad_norm": 0.03313115821415064, |
|
"learning_rate": 6.018124070225928e-07, |
|
"loss": 0.357, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.8022813688212929, |
|
"grad_norm": 0.02923966035182855, |
|
"learning_rate": 5.905175438178979e-07, |
|
"loss": 0.5295, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.8041825095057034, |
|
"grad_norm": 0.027446344432688532, |
|
"learning_rate": 5.793264532400311e-07, |
|
"loss": 0.4596, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.806083650190114, |
|
"grad_norm": 0.03034572172867263, |
|
"learning_rate": 5.68239258710116e-07, |
|
"loss": 0.4495, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.8079847908745248, |
|
"grad_norm": 0.03137500197835899, |
|
"learning_rate": 5.572560825034523e-07, |
|
"loss": 0.4195, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.8098859315589353, |
|
"grad_norm": 0.02687436007598942, |
|
"learning_rate": 5.463770457481732e-07, |
|
"loss": 0.4481, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.811787072243346, |
|
"grad_norm": 0.033298984336612214, |
|
"learning_rate": 5.35602268423906e-07, |
|
"loss": 0.4481, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.8136882129277567, |
|
"grad_norm": 0.03074715608006006, |
|
"learning_rate": 5.249318693604577e-07, |
|
"loss": 0.4081, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.8155893536121672, |
|
"grad_norm": 0.032907124189825374, |
|
"learning_rate": 5.143659662364931e-07, |
|
"loss": 0.45, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.817490494296578, |
|
"grad_norm": 0.02894943546277151, |
|
"learning_rate": 5.039046755782417e-07, |
|
"loss": 0.5292, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.8193916349809887, |
|
"grad_norm": 0.03144122062146257, |
|
"learning_rate": 4.935481127582131e-07, |
|
"loss": 0.4253, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.8212927756653992, |
|
"grad_norm": 0.029991897988623707, |
|
"learning_rate": 4.83296391993926e-07, |
|
"loss": 0.4225, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.8231939163498099, |
|
"grad_norm": 0.03296037001549121, |
|
"learning_rate": 4.7314962634664616e-07, |
|
"loss": 0.428, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.8250950570342206, |
|
"grad_norm": 0.03194095308465852, |
|
"learning_rate": 4.631079277201389e-07, |
|
"loss": 0.5275, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.826996197718631, |
|
"grad_norm": 0.030900947330494876, |
|
"learning_rate": 4.5317140685943726e-07, |
|
"loss": 0.4579, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.8288973384030418, |
|
"grad_norm": 0.034840033361790874, |
|
"learning_rate": 4.433401733496201e-07, |
|
"loss": 0.4375, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.8307984790874525, |
|
"grad_norm": 0.029344448848243988, |
|
"learning_rate": 4.3361433561460274e-07, |
|
"loss": 0.4094, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.832699619771863, |
|
"grad_norm": 0.03246508942021766, |
|
"learning_rate": 4.2399400091594154e-07, |
|
"loss": 0.4402, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.8346007604562737, |
|
"grad_norm": 0.02628871622217315, |
|
"learning_rate": 4.14479275351648e-07, |
|
"loss": 0.4588, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.8365019011406845, |
|
"grad_norm": 0.03170635068076932, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 0.4594, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.838403041825095, |
|
"grad_norm": 0.03101671357271921, |
|
"learning_rate": 3.9576707019350903e-07, |
|
"loss": 0.526, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.8403041825095057, |
|
"grad_norm": 0.030729567695037974, |
|
"learning_rate": 3.865697969675164e-07, |
|
"loss": 0.6355, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.8422053231939164, |
|
"grad_norm": 0.034474475882619045, |
|
"learning_rate": 3.7747854560931996e-07, |
|
"loss": 0.4457, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.8441064638783269, |
|
"grad_norm": 0.03421378885683852, |
|
"learning_rate": 3.684934163819309e-07, |
|
"loss": 0.462, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.8460076045627376, |
|
"grad_norm": 0.03363185402579016, |
|
"learning_rate": 3.596145083779912e-07, |
|
"loss": 0.5014, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.8479087452471483, |
|
"grad_norm": 0.030306139921095637, |
|
"learning_rate": 3.508419195186774e-07, |
|
"loss": 0.4979, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.8498098859315588, |
|
"grad_norm": 0.03157930603234884, |
|
"learning_rate": 3.421757465526243e-07, |
|
"loss": 0.479, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.8517110266159695, |
|
"grad_norm": 0.03173802078060801, |
|
"learning_rate": 3.33616085054862e-07, |
|
"loss": 0.3983, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.8536121673003803, |
|
"grad_norm": 0.031450770993991316, |
|
"learning_rate": 3.2516302942574794e-07, |
|
"loss": 0.545, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.8555133079847907, |
|
"grad_norm": 0.032790034715735716, |
|
"learning_rate": 3.1681667288994353e-07, |
|
"loss": 0.4353, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.8574144486692015, |
|
"grad_norm": 0.03199939935220213, |
|
"learning_rate": 3.0857710749537585e-07, |
|
"loss": 0.364, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.8593155893536122, |
|
"grad_norm": 0.03373185092711587, |
|
"learning_rate": 3.0044442411222066e-07, |
|
"loss": 0.4758, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.8612167300380227, |
|
"grad_norm": 0.030669861295071797, |
|
"learning_rate": 2.9241871243190555e-07, |
|
"loss": 0.3871, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.8631178707224336, |
|
"grad_norm": 0.02976642847865338, |
|
"learning_rate": 2.845000609661208e-07, |
|
"loss": 0.4292, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.8650190114068441, |
|
"grad_norm": 0.029865230773212417, |
|
"learning_rate": 2.7668855704583997e-07, |
|
"loss": 0.4625, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.8669201520912546, |
|
"grad_norm": 0.029861179822064734, |
|
"learning_rate": 2.689842868203563e-07, |
|
"loss": 0.5372, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.8688212927756656, |
|
"grad_norm": 0.03191778594102872, |
|
"learning_rate": 2.6138733525633896e-07, |
|
"loss": 0.5021, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.870722433460076, |
|
"grad_norm": 0.034314296915894583, |
|
"learning_rate": 2.5389778613688744e-07, |
|
"loss": 0.4439, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.8726235741444865, |
|
"grad_norm": 0.03774853955023176, |
|
"learning_rate": 2.46515722060614e-07, |
|
"loss": 0.5214, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.8745247148288975, |
|
"grad_norm": 0.03368332286177959, |
|
"learning_rate": 2.392412244407294e-07, |
|
"loss": 0.4704, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.876425855513308, |
|
"grad_norm": 0.03186220294437927, |
|
"learning_rate": 2.3207437350414418e-07, |
|
"loss": 0.4223, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.8783269961977185, |
|
"grad_norm": 0.03615903282922533, |
|
"learning_rate": 2.2501524829059208e-07, |
|
"loss": 0.3987, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.8802281368821294, |
|
"grad_norm": 0.030441277038341368, |
|
"learning_rate": 2.180639266517448e-07, |
|
"loss": 0.4291, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.88212927756654, |
|
"grad_norm": 0.031277987851696665, |
|
"learning_rate": 2.1122048525036409e-07, |
|
"loss": 0.4889, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.8840304182509504, |
|
"grad_norm": 0.029387610313056912, |
|
"learning_rate": 2.0448499955945223e-07, |
|
"loss": 0.4611, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.8859315589353614, |
|
"grad_norm": 0.030591474090613247, |
|
"learning_rate": 1.9785754386142164e-07, |
|
"loss": 0.5012, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.8878326996197718, |
|
"grad_norm": 0.028714589339226205, |
|
"learning_rate": 1.9133819124727003e-07, |
|
"loss": 0.4686, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.8897338403041823, |
|
"grad_norm": 0.02959569862183423, |
|
"learning_rate": 1.8492701361578326e-07, |
|
"loss": 0.4696, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.8916349809885933, |
|
"grad_norm": 0.0328146527760817, |
|
"learning_rate": 1.7862408167273472e-07, |
|
"loss": 0.3678, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.8935361216730038, |
|
"grad_norm": 0.029123499221954324, |
|
"learning_rate": 1.724294649301095e-07, |
|
"loss": 0.436, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.8954372623574145, |
|
"grad_norm": 0.0293269284819944, |
|
"learning_rate": 1.6634323170533928e-07, |
|
"loss": 0.4569, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.8973384030418252, |
|
"grad_norm": 0.027814429410490966, |
|
"learning_rate": 1.6036544912054087e-07, |
|
"loss": 0.3659, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.8992395437262357, |
|
"grad_norm": 0.0327477941814678, |
|
"learning_rate": 1.544961831017855e-07, |
|
"loss": 0.476, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.9011406844106464, |
|
"grad_norm": 0.03443055528922134, |
|
"learning_rate": 1.487354983783673e-07, |
|
"loss": 0.4419, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.9030418250950571, |
|
"grad_norm": 0.02594462226434814, |
|
"learning_rate": 1.430834584820895e-07, |
|
"loss": 0.4449, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.9049429657794676, |
|
"grad_norm": 0.03471230118951238, |
|
"learning_rate": 1.375401257465625e-07, |
|
"loss": 0.4715, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.9068441064638784, |
|
"grad_norm": 0.030751789179219086, |
|
"learning_rate": 1.3210556130652031e-07, |
|
"loss": 0.3707, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.908745247148289, |
|
"grad_norm": 0.03136393500105467, |
|
"learning_rate": 1.2677982509714415e-07, |
|
"loss": 0.429, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.9106463878326996, |
|
"grad_norm": 0.0324151007308297, |
|
"learning_rate": 1.2156297585339872e-07, |
|
"loss": 0.4923, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.9125475285171103, |
|
"grad_norm": 0.030257241474964322, |
|
"learning_rate": 1.1645507110938925e-07, |
|
"loss": 0.3798, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.914448669201521, |
|
"grad_norm": 0.03529946764667141, |
|
"learning_rate": 1.1145616719772545e-07, |
|
"loss": 0.4688, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.9163498098859315, |
|
"grad_norm": 0.0309723338831087, |
|
"learning_rate": 1.0656631924889749e-07, |
|
"loss": 0.5193, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.9182509505703422, |
|
"grad_norm": 0.031474237973403506, |
|
"learning_rate": 1.0178558119067316e-07, |
|
"loss": 0.3985, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.920152091254753, |
|
"grad_norm": 0.03147376204925686, |
|
"learning_rate": 9.711400574749507e-08, |
|
"loss": 0.4797, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.9220532319391634, |
|
"grad_norm": 0.028614904232453716, |
|
"learning_rate": 9.255164443990994e-08, |
|
"loss": 0.4645, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.9239543726235742, |
|
"grad_norm": 0.030365026569649265, |
|
"learning_rate": 8.809854758399017e-08, |
|
"loss": 0.495, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.9258555133079849, |
|
"grad_norm": 0.02958828619871872, |
|
"learning_rate": 8.375476429078543e-08, |
|
"loss": 0.4736, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.9277566539923954, |
|
"grad_norm": 0.03578924085646632, |
|
"learning_rate": 7.952034246577977e-08, |
|
"loss": 0.4667, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.929657794676806, |
|
"grad_norm": 0.03039288957416989, |
|
"learning_rate": 7.539532880836087e-08, |
|
"loss": 0.3965, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.9315589353612168, |
|
"grad_norm": 0.030213945855540075, |
|
"learning_rate": 7.137976881130826e-08, |
|
"loss": 0.3838, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.9334600760456273, |
|
"grad_norm": 0.02884747685368914, |
|
"learning_rate": 6.747370676028819e-08, |
|
"loss": 0.4683, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.935361216730038, |
|
"grad_norm": 0.03421752604319262, |
|
"learning_rate": 6.367718573336845e-08, |
|
"loss": 0.4621, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.9372623574144487, |
|
"grad_norm": 0.03343470147649224, |
|
"learning_rate": 5.999024760054095e-08, |
|
"loss": 0.466, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.9391634980988592, |
|
"grad_norm": 0.027789701521310304, |
|
"learning_rate": 5.641293302326323e-08, |
|
"loss": 0.4209, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.94106463878327, |
|
"grad_norm": 0.03664919177396004, |
|
"learning_rate": 5.2945281454003236e-08, |
|
"loss": 0.45, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.9429657794676807, |
|
"grad_norm": 0.03512768344119699, |
|
"learning_rate": 4.958733113581415e-08, |
|
"loss": 0.5002, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.9448669201520912, |
|
"grad_norm": 0.032285534675066084, |
|
"learning_rate": 4.6339119101902475e-08, |
|
"loss": 0.4585, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.946768060836502, |
|
"grad_norm": 0.028015055200238535, |
|
"learning_rate": 4.320068117522835e-08, |
|
"loss": 0.462, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.9486692015209126, |
|
"grad_norm": 0.026824398589096972, |
|
"learning_rate": 4.0172051968101474e-08, |
|
"loss": 0.3529, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.950570342205323, |
|
"grad_norm": 0.029707737491614556, |
|
"learning_rate": 3.7253264881809137e-08, |
|
"loss": 0.465, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.9524714828897338, |
|
"grad_norm": 0.03173165055291673, |
|
"learning_rate": 3.4444352106242086e-08, |
|
"loss": 0.4333, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.9543726235741445, |
|
"grad_norm": 0.027659676233207857, |
|
"learning_rate": 3.174534461953593e-08, |
|
"loss": 0.4004, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.956273764258555, |
|
"grad_norm": 0.027404644233989556, |
|
"learning_rate": 2.915627218774142e-08, |
|
"loss": 0.3796, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.9581749049429658, |
|
"grad_norm": 0.026599247935916007, |
|
"learning_rate": 2.667716336448356e-08, |
|
"loss": 0.3555, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.9600760456273765, |
|
"grad_norm": 0.03292570768996639, |
|
"learning_rate": 2.430804549065302e-08, |
|
"loss": 0.4199, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.961977186311787, |
|
"grad_norm": 0.029654656511763144, |
|
"learning_rate": 2.2048944694104123e-08, |
|
"loss": 0.4099, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.9638783269961977, |
|
"grad_norm": 0.029568119617424664, |
|
"learning_rate": 1.989988588936509e-08, |
|
"loss": 0.5667, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.9657794676806084, |
|
"grad_norm": 0.03279552786438916, |
|
"learning_rate": 1.7860892777367133e-08, |
|
"loss": 0.4053, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.967680608365019, |
|
"grad_norm": 0.027159340862676504, |
|
"learning_rate": 1.5931987845176912e-08, |
|
"loss": 0.4461, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.9695817490494296, |
|
"grad_norm": 0.03157329108514617, |
|
"learning_rate": 1.411319236575337e-08, |
|
"loss": 0.388, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.9714828897338403, |
|
"grad_norm": 0.031103650357148457, |
|
"learning_rate": 1.2404526397711281e-08, |
|
"loss": 0.4071, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.9733840304182508, |
|
"grad_norm": 0.030963116179377713, |
|
"learning_rate": 1.0806008785100297e-08, |
|
"loss": 0.3987, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.9752851711026616, |
|
"grad_norm": 0.031098576501293572, |
|
"learning_rate": 9.317657157197347e-09, |
|
"loss": 0.4902, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.9771863117870723, |
|
"grad_norm": 0.03523165445859305, |
|
"learning_rate": 7.93948792831234e-09, |
|
"loss": 0.5261, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.9790874524714828, |
|
"grad_norm": 0.03432032449507075, |
|
"learning_rate": 6.671516297606095e-09, |
|
"loss": 0.5043, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.9809885931558935, |
|
"grad_norm": 0.028102832320697466, |
|
"learning_rate": 5.513756248924917e-09, |
|
"loss": 0.4402, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.9828897338403042, |
|
"grad_norm": 0.029094175427136368, |
|
"learning_rate": 4.466220550641831e-09, |
|
"loss": 0.4499, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.9847908745247147, |
|
"grad_norm": 0.030662117911304455, |
|
"learning_rate": 3.528920755523357e-09, |
|
"loss": 0.492, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.9866920152091256, |
|
"grad_norm": 0.036622107235485084, |
|
"learning_rate": 2.701867200592956e-09, |
|
"loss": 0.417, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.9885931558935361, |
|
"grad_norm": 0.03176507464018935, |
|
"learning_rate": 1.9850690070266633e-09, |
|
"loss": 0.5901, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.9904942965779466, |
|
"grad_norm": 0.028746410795910952, |
|
"learning_rate": 1.378534080042071e-09, |
|
"loss": 0.4125, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.9923954372623576, |
|
"grad_norm": 0.03168022954272395, |
|
"learning_rate": 8.822691088195001e-10, |
|
"loss": 0.4419, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.994296577946768, |
|
"grad_norm": 0.03596484061103507, |
|
"learning_rate": 4.962795664265052e-10, |
|
"loss": 0.4668, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.9961977186311786, |
|
"grad_norm": 0.03278898993455145, |
|
"learning_rate": 2.2056970975459223e-10, |
|
"loss": 0.4626, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.9980988593155895, |
|
"grad_norm": 0.02636590369941191, |
|
"learning_rate": 5.514257947369928e-11, |
|
"loss": 0.3786, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.03134952245123963, |
|
"learning_rate": 0.0, |
|
"loss": 0.4029, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 0.4882815182209015, |
|
"eval_runtime": 2.0788, |
|
"eval_samples_per_second": 2.886, |
|
"eval_steps_per_second": 0.481, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1052, |
|
"total_flos": 1.6469503016697856e+16, |
|
"train_loss": 0.46453300517888124, |
|
"train_runtime": 22645.592, |
|
"train_samples_per_second": 1.486, |
|
"train_steps_per_second": 0.046 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1052, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.6469503016697856e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|