{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9992229992229992,
  "eval_steps": 500,
  "global_step": 643,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001554001554001554,
      "grad_norm": 1.0753202631674548,
      "learning_rate": 1e-05,
      "loss": 0.5508,
      "step": 1
    },
    {
      "epoch": 0.003108003108003108,
      "grad_norm": 1.394325863785031,
      "learning_rate": 9.999940321631158e-06,
      "loss": 0.6286,
      "step": 2
    },
    {
      "epoch": 0.004662004662004662,
      "grad_norm": 0.8138734995659078,
      "learning_rate": 9.999761287949237e-06,
      "loss": 0.4343,
      "step": 3
    },
    {
      "epoch": 0.006216006216006216,
      "grad_norm": 1.4834629168746152,
      "learning_rate": 9.99946290322801e-06,
      "loss": 0.5391,
      "step": 4
    },
    {
      "epoch": 0.00777000777000777,
      "grad_norm": 0.6636994391139434,
      "learning_rate": 9.999045174590324e-06,
      "loss": 0.5696,
      "step": 5
    },
    {
      "epoch": 0.009324009324009324,
      "grad_norm": 0.8144572025546154,
      "learning_rate": 9.998508112007925e-06,
      "loss": 0.8279,
      "step": 6
    },
    {
      "epoch": 0.010878010878010878,
      "grad_norm": 0.6969071538976613,
      "learning_rate": 9.997851728301219e-06,
      "loss": 0.537,
      "step": 7
    },
    {
      "epoch": 0.012432012432012432,
      "grad_norm": 0.8735249365273472,
      "learning_rate": 9.99707603913897e-06,
      "loss": 0.6761,
      "step": 8
    },
    {
      "epoch": 0.013986013986013986,
      "grad_norm": 0.7027334655844945,
      "learning_rate": 9.996181063037924e-06,
      "loss": 0.5301,
      "step": 9
    },
    {
      "epoch": 0.01554001554001554,
      "grad_norm": 0.8373746380293126,
      "learning_rate": 9.995166821362368e-06,
      "loss": 0.6674,
      "step": 10
    },
    {
      "epoch": 0.017094017094017096,
      "grad_norm": 0.984420779214608,
      "learning_rate": 9.994033338323612e-06,
      "loss": 0.5113,
      "step": 11
    },
    {
      "epoch": 0.018648018648018648,
      "grad_norm": 0.7622743125468773,
      "learning_rate": 9.99278064097943e-06,
      "loss": 0.6431,
      "step": 12
    },
    {
      "epoch": 0.020202020202020204,
      "grad_norm": 0.7377189725798596,
      "learning_rate": 9.991408759233394e-06,
      "loss": 0.6131,
      "step": 13
    },
    {
      "epoch": 0.021756021756021756,
      "grad_norm": 0.9016901262329209,
      "learning_rate": 9.989917725834166e-06,
      "loss": 0.5208,
      "step": 14
    },
    {
      "epoch": 0.023310023310023312,
      "grad_norm": 0.7060988613807518,
      "learning_rate": 9.988307576374727e-06,
      "loss": 0.5591,
      "step": 15
    },
    {
      "epoch": 0.024864024864024864,
      "grad_norm": 1.965431280490754,
      "learning_rate": 9.986578349291514e-06,
      "loss": 0.6333,
      "step": 16
    },
    {
      "epoch": 0.02641802641802642,
      "grad_norm": 0.6204824016696763,
      "learning_rate": 9.984730085863504e-06,
      "loss": 0.4729,
      "step": 17
    },
    {
      "epoch": 0.027972027972027972,
      "grad_norm": 0.6629661100589302,
      "learning_rate": 9.982762830211239e-06,
      "loss": 0.41,
      "step": 18
    },
    {
      "epoch": 0.029526029526029528,
      "grad_norm": 0.679676793160211,
      "learning_rate": 9.980676629295763e-06,
      "loss": 0.5619,
      "step": 19
    },
    {
      "epoch": 0.03108003108003108,
      "grad_norm": 0.7228425066613519,
      "learning_rate": 9.9784715329175e-06,
      "loss": 0.5637,
      "step": 20
    },
    {
      "epoch": 0.03263403263403263,
      "grad_norm": 1.4494500979247202,
      "learning_rate": 9.976147593715074e-06,
      "loss": 0.4784,
      "step": 21
    },
    {
      "epoch": 0.03418803418803419,
      "grad_norm": 0.9490720634807803,
      "learning_rate": 9.973704867164044e-06,
      "loss": 0.775,
      "step": 22
    },
    {
      "epoch": 0.035742035742035744,
      "grad_norm": 0.7173074724185691,
      "learning_rate": 9.971143411575585e-06,
      "loss": 0.5456,
      "step": 23
    },
    {
      "epoch": 0.037296037296037296,
      "grad_norm": 1.813950013627374,
      "learning_rate": 9.968463288095096e-06,
      "loss": 0.5494,
      "step": 24
    },
    {
      "epoch": 0.03885003885003885,
      "grad_norm": 0.764359484896435,
      "learning_rate": 9.965664560700734e-06,
      "loss": 0.6135,
      "step": 25
    },
    {
      "epoch": 0.04040404040404041,
      "grad_norm": 0.8961015996035883,
      "learning_rate": 9.962747296201891e-06,
      "loss": 0.4535,
      "step": 26
    },
    {
      "epoch": 0.04195804195804196,
      "grad_norm": 0.7061951973387899,
      "learning_rate": 9.959711564237603e-06,
      "loss": 0.5766,
      "step": 27
    },
    {
      "epoch": 0.04351204351204351,
      "grad_norm": 0.9186274326988478,
      "learning_rate": 9.956557437274887e-06,
      "loss": 0.4126,
      "step": 28
    },
    {
      "epoch": 0.045066045066045064,
      "grad_norm": 1.0005571619677003,
      "learning_rate": 9.953284990607e-06,
      "loss": 0.5344,
      "step": 29
    },
    {
      "epoch": 0.046620046620046623,
      "grad_norm": 1.0384488624813175,
      "learning_rate": 9.949894302351653e-06,
      "loss": 0.686,
      "step": 30
    },
    {
      "epoch": 0.048174048174048176,
      "grad_norm": 1.1091587053056344,
      "learning_rate": 9.946385453449145e-06,
      "loss": 0.7023,
      "step": 31
    },
    {
      "epoch": 0.04972804972804973,
      "grad_norm": 0.8221842041481426,
      "learning_rate": 9.942758527660429e-06,
      "loss": 0.4769,
      "step": 32
    },
    {
      "epoch": 0.05128205128205128,
      "grad_norm": 0.8497884172706046,
      "learning_rate": 9.93901361156511e-06,
      "loss": 0.5504,
      "step": 33
    },
    {
      "epoch": 0.05283605283605284,
      "grad_norm": 0.6872331365190385,
      "learning_rate": 9.935150794559379e-06,
      "loss": 0.5369,
      "step": 34
    },
    {
      "epoch": 0.05439005439005439,
      "grad_norm": 0.8777872780147628,
      "learning_rate": 9.931170168853886e-06,
      "loss": 0.657,
      "step": 35
    },
    {
      "epoch": 0.055944055944055944,
      "grad_norm": 0.6487410029703777,
      "learning_rate": 9.927071829471531e-06,
      "loss": 0.4006,
      "step": 36
    },
    {
      "epoch": 0.057498057498057496,
      "grad_norm": 0.6468395282548087,
      "learning_rate": 9.922855874245197e-06,
      "loss": 0.4699,
      "step": 37
    },
    {
      "epoch": 0.059052059052059055,
      "grad_norm": 0.821140351961526,
      "learning_rate": 9.918522403815414e-06,
      "loss": 0.5927,
      "step": 38
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 0.7910492113722175,
      "learning_rate": 9.914071521627964e-06,
      "loss": 0.4792,
      "step": 39
    },
    {
      "epoch": 0.06216006216006216,
      "grad_norm": 1.036910491968146,
      "learning_rate": 9.909503333931402e-06,
      "loss": 0.699,
      "step": 40
    },
    {
      "epoch": 0.06371406371406371,
      "grad_norm": 0.8196334764356895,
      "learning_rate": 9.904817949774524e-06,
      "loss": 0.7333,
      "step": 41
    },
    {
      "epoch": 0.06526806526806526,
      "grad_norm": 0.6798779713989535,
      "learning_rate": 9.900015481003762e-06,
      "loss": 0.6433,
      "step": 42
    },
    {
      "epoch": 0.06682206682206682,
      "grad_norm": 0.9642768659293228,
      "learning_rate": 9.895096042260517e-06,
      "loss": 0.5257,
      "step": 43
    },
    {
      "epoch": 0.06837606837606838,
      "grad_norm": 1.7689631037060651,
      "learning_rate": 9.890059750978425e-06,
      "loss": 0.6034,
      "step": 44
    },
    {
      "epoch": 0.06993006993006994,
      "grad_norm": 0.7443934495191462,
      "learning_rate": 9.88490672738054e-06,
      "loss": 0.6089,
      "step": 45
    },
    {
      "epoch": 0.07148407148407149,
      "grad_norm": 1.3800385551010166,
      "learning_rate": 9.879637094476482e-06,
      "loss": 0.555,
      "step": 46
    },
    {
      "epoch": 0.07303807303807304,
      "grad_norm": 0.6934112455074183,
      "learning_rate": 9.874250978059489e-06,
      "loss": 0.5253,
      "step": 47
    },
    {
      "epoch": 0.07459207459207459,
      "grad_norm": 0.728895851371575,
      "learning_rate": 9.86874850670342e-06,
      "loss": 0.5111,
      "step": 48
    },
    {
      "epoch": 0.07614607614607614,
      "grad_norm": 0.705671764219133,
      "learning_rate": 9.863129811759678e-06,
      "loss": 0.5166,
      "step": 49
    },
    {
      "epoch": 0.0777000777000777,
      "grad_norm": 0.9004429060865304,
      "learning_rate": 9.857395027354085e-06,
      "loss": 0.4981,
      "step": 50
    },
    {
      "epoch": 0.07925407925407925,
      "grad_norm": 0.6186440018603123,
      "learning_rate": 9.85154429038367e-06,
      "loss": 0.3806,
      "step": 51
    },
    {
      "epoch": 0.08080808080808081,
      "grad_norm": 0.7725834636904632,
      "learning_rate": 9.845577740513409e-06,
      "loss": 0.4948,
      "step": 52
    },
    {
      "epoch": 0.08236208236208237,
      "grad_norm": 0.9253638464116425,
      "learning_rate": 9.83949552017289e-06,
      "loss": 0.5285,
      "step": 53
    },
    {
      "epoch": 0.08391608391608392,
      "grad_norm": 0.7656584472395311,
      "learning_rate": 9.833297774552905e-06,
      "loss": 0.6342,
      "step": 54
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 0.8998211569830732,
      "learning_rate": 9.826984651601998e-06,
      "loss": 0.6178,
      "step": 55
    },
    {
      "epoch": 0.08702408702408702,
      "grad_norm": 0.8685832029444266,
      "learning_rate": 9.820556302022916e-06,
      "loss": 0.6903,
      "step": 56
    },
    {
      "epoch": 0.08857808857808858,
      "grad_norm": 0.7304781373527222,
      "learning_rate": 9.814012879269031e-06,
      "loss": 0.5054,
      "step": 57
    },
    {
      "epoch": 0.09013209013209013,
      "grad_norm": 0.9941249676710022,
      "learning_rate": 9.80735453954066e-06,
      "loss": 0.5752,
      "step": 58
    },
    {
      "epoch": 0.09168609168609168,
      "grad_norm": 0.8636774782845266,
      "learning_rate": 9.800581441781342e-06,
      "loss": 0.472,
      "step": 59
    },
    {
      "epoch": 0.09324009324009325,
      "grad_norm": 0.8037019830533219,
      "learning_rate": 9.79369374767405e-06,
      "loss": 0.451,
      "step": 60
    },
    {
      "epoch": 0.0947940947940948,
      "grad_norm": 0.7569996171253018,
      "learning_rate": 9.786691621637322e-06,
      "loss": 0.5862,
      "step": 61
    },
    {
      "epoch": 0.09634809634809635,
      "grad_norm": 0.718132158990271,
      "learning_rate": 9.779575230821344e-06,
      "loss": 0.57,
      "step": 62
    },
    {
      "epoch": 0.0979020979020979,
      "grad_norm": 0.8861885775634778,
      "learning_rate": 9.772344745103955e-06,
      "loss": 0.661,
      "step": 63
    },
    {
      "epoch": 0.09945609945609946,
      "grad_norm": 0.7647017941329705,
      "learning_rate": 9.76500033708659e-06,
      "loss": 0.5032,
      "step": 64
    },
    {
      "epoch": 0.10101010101010101,
      "grad_norm": 0.6503958573653673,
      "learning_rate": 9.757542182090165e-06,
      "loss": 0.5113,
      "step": 65
    },
    {
      "epoch": 0.10256410256410256,
      "grad_norm": 0.8514535986961426,
      "learning_rate": 9.749970458150893e-06,
      "loss": 0.4582,
      "step": 66
    },
    {
      "epoch": 0.10411810411810411,
      "grad_norm": 0.8115540624516915,
      "learning_rate": 9.742285346016024e-06,
      "loss": 0.5324,
      "step": 67
    },
    {
      "epoch": 0.10567210567210568,
      "grad_norm": 0.8716261250005453,
      "learning_rate": 9.734487029139544e-06,
      "loss": 0.5622,
      "step": 68
    },
    {
      "epoch": 0.10722610722610723,
      "grad_norm": 0.6674304497161411,
      "learning_rate": 9.726575693677782e-06,
      "loss": 0.4194,
      "step": 69
    },
    {
      "epoch": 0.10878010878010878,
      "grad_norm": 1.0158040848497383,
      "learning_rate": 9.718551528484979e-06,
      "loss": 0.4802,
      "step": 70
    },
    {
      "epoch": 0.11033411033411034,
      "grad_norm": 0.595576459482197,
      "learning_rate": 9.710414725108771e-06,
      "loss": 0.4594,
      "step": 71
    },
    {
      "epoch": 0.11188811188811189,
      "grad_norm": 0.9680714108769358,
      "learning_rate": 9.702165477785618e-06,
      "loss": 0.5109,
      "step": 72
    },
    {
      "epoch": 0.11344211344211344,
      "grad_norm": 0.7314195273632106,
      "learning_rate": 9.69380398343617e-06,
      "loss": 0.4759,
      "step": 73
    },
    {
      "epoch": 0.11499611499611499,
      "grad_norm": 0.8578361036597735,
      "learning_rate": 9.685330441660564e-06,
      "loss": 0.5533,
      "step": 74
    },
    {
      "epoch": 0.11655011655011654,
      "grad_norm": 1.890522114467283,
      "learning_rate": 9.676745054733661e-06,
      "loss": 0.6364,
      "step": 75
    },
    {
      "epoch": 0.11810411810411811,
      "grad_norm": 0.8238570929416833,
      "learning_rate": 9.668048027600217e-06,
      "loss": 0.5262,
      "step": 76
    },
    {
      "epoch": 0.11965811965811966,
      "grad_norm": 0.7000587405463369,
      "learning_rate": 9.659239567869989e-06,
      "loss": 0.5541,
      "step": 77
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 1.0275499560212522,
      "learning_rate": 9.650319885812777e-06,
      "loss": 0.6356,
      "step": 78
    },
    {
      "epoch": 0.12276612276612277,
      "grad_norm": 0.9584707284083808,
      "learning_rate": 9.641289194353418e-06,
      "loss": 0.5393,
      "step": 79
    },
    {
      "epoch": 0.12432012432012432,
      "grad_norm": 0.7661012737973905,
      "learning_rate": 9.632147709066682e-06,
      "loss": 0.5864,
      "step": 80
    },
    {
      "epoch": 0.1258741258741259,
      "grad_norm": 0.8981339358976846,
      "learning_rate": 9.622895648172141e-06,
      "loss": 0.5001,
      "step": 81
    },
    {
      "epoch": 0.12742812742812742,
      "grad_norm": 0.9136632663354124,
      "learning_rate": 9.613533232528956e-06,
      "loss": 0.5989,
      "step": 82
    },
    {
      "epoch": 0.128982128982129,
      "grad_norm": 1.0752006190192398,
      "learning_rate": 9.604060685630608e-06,
      "loss": 0.6551,
      "step": 83
    },
    {
      "epoch": 0.13053613053613053,
      "grad_norm": 0.6746196580403023,
      "learning_rate": 9.594478233599551e-06,
      "loss": 0.5763,
      "step": 84
    },
    {
      "epoch": 0.1320901320901321,
      "grad_norm": 1.0560596133360578,
      "learning_rate": 9.584786105181831e-06,
      "loss": 0.626,
      "step": 85
    },
    {
      "epoch": 0.13364413364413363,
      "grad_norm": 1.9322491414214575,
      "learning_rate": 9.574984531741613e-06,
      "loss": 0.6028,
      "step": 86
    },
    {
      "epoch": 0.1351981351981352,
      "grad_norm": 1.1199274751748922,
      "learning_rate": 9.565073747255665e-06,
      "loss": 0.5536,
      "step": 87
    },
    {
      "epoch": 0.13675213675213677,
      "grad_norm": 1.2142928266989612,
      "learning_rate": 9.555053988307764e-06,
      "loss": 0.5881,
      "step": 88
    },
    {
      "epoch": 0.1383061383061383,
      "grad_norm": 0.886265217836807,
      "learning_rate": 9.544925494083062e-06,
      "loss": 0.4521,
      "step": 89
    },
    {
      "epoch": 0.13986013986013987,
      "grad_norm": 0.7758293999026016,
      "learning_rate": 9.53468850636236e-06,
      "loss": 0.6255,
      "step": 90
    },
    {
      "epoch": 0.1414141414141414,
      "grad_norm": 0.8222144113614714,
      "learning_rate": 9.524343269516354e-06,
      "loss": 0.5643,
      "step": 91
    },
    {
      "epoch": 0.14296814296814297,
      "grad_norm": 0.7971504019254859,
      "learning_rate": 9.513890030499786e-06,
      "loss": 0.5469,
      "step": 92
    },
    {
      "epoch": 0.1445221445221445,
      "grad_norm": 1.0988108202778002,
      "learning_rate": 9.503329038845556e-06,
      "loss": 0.6065,
      "step": 93
    },
    {
      "epoch": 0.14607614607614608,
      "grad_norm": 0.9770953296063096,
      "learning_rate": 9.492660546658771e-06,
      "loss": 0.4678,
      "step": 94
    },
    {
      "epoch": 0.14763014763014762,
      "grad_norm": 0.7001359835404304,
      "learning_rate": 9.481884808610712e-06,
      "loss": 0.5186,
      "step": 95
    },
    {
      "epoch": 0.14918414918414918,
      "grad_norm": 1.028392902002761,
      "learning_rate": 9.471002081932767e-06,
      "loss": 0.5347,
      "step": 96
    },
    {
      "epoch": 0.15073815073815075,
      "grad_norm": 0.6722329878713879,
      "learning_rate": 9.460012626410286e-06,
      "loss": 0.5138,
      "step": 97
    },
    {
      "epoch": 0.1522921522921523,
      "grad_norm": 1.0396069509533319,
      "learning_rate": 9.448916704376384e-06,
      "loss": 0.5388,
      "step": 98
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.788486690631997,
      "learning_rate": 9.437714580705671e-06,
      "loss": 0.4044,
      "step": 99
    },
    {
      "epoch": 0.1554001554001554,
      "grad_norm": 0.7359447373312309,
      "learning_rate": 9.426406522807932e-06,
      "loss": 0.6637,
      "step": 100
    },
    {
      "epoch": 0.15695415695415696,
      "grad_norm": 1.0558555591940144,
      "learning_rate": 9.414992800621749e-06,
      "loss": 0.4842,
      "step": 101
    },
    {
      "epoch": 0.1585081585081585,
      "grad_norm": 0.8538402867666735,
      "learning_rate": 9.40347368660805e-06,
      "loss": 0.51,
      "step": 102
    },
    {
      "epoch": 0.16006216006216006,
      "grad_norm": 0.5016420650271773,
      "learning_rate": 9.39184945574361e-06,
      "loss": 0.3733,
      "step": 103
    },
    {
      "epoch": 0.16161616161616163,
      "grad_norm": 0.8043995407764333,
      "learning_rate": 9.380120385514484e-06,
      "loss": 0.6424,
      "step": 104
    },
    {
      "epoch": 0.16317016317016317,
      "grad_norm": 0.7432969785115889,
      "learning_rate": 9.368286755909383e-06,
      "loss": 0.5918,
      "step": 105
    },
    {
      "epoch": 0.16472416472416473,
      "grad_norm": 0.7800858860573905,
      "learning_rate": 9.356348849412991e-06,
      "loss": 0.5557,
      "step": 106
    },
    {
      "epoch": 0.16627816627816627,
      "grad_norm": 0.8792710300993529,
      "learning_rate": 9.344306950999226e-06,
      "loss": 0.5359,
      "step": 107
    },
    {
      "epoch": 0.16783216783216784,
      "grad_norm": 1.0983876112460325,
      "learning_rate": 9.332161348124426e-06,
      "loss": 0.6833,
      "step": 108
    },
    {
      "epoch": 0.16938616938616938,
      "grad_norm": 0.8026939716617563,
      "learning_rate": 9.319912330720502e-06,
      "loss": 0.5503,
      "step": 109
    },
    {
      "epoch": 0.17094017094017094,
      "grad_norm": 0.7137107427499185,
      "learning_rate": 9.307560191188e-06,
      "loss": 0.4826,
      "step": 110
    },
    {
      "epoch": 0.17249417249417248,
      "grad_norm": 0.8145843383585528,
      "learning_rate": 9.295105224389144e-06,
      "loss": 0.5508,
      "step": 111
    },
    {
      "epoch": 0.17404817404817405,
      "grad_norm": 0.6415967276646262,
      "learning_rate": 9.282547727640767e-06,
      "loss": 0.6075,
      "step": 112
    },
    {
      "epoch": 0.17560217560217561,
      "grad_norm": 0.6695998673267839,
      "learning_rate": 9.269888000707243e-06,
      "loss": 0.4768,
      "step": 113
    },
    {
      "epoch": 0.17715617715617715,
      "grad_norm": 0.692586482200001,
      "learning_rate": 9.25712634579331e-06,
      "loss": 0.5651,
      "step": 114
    },
    {
      "epoch": 0.17871017871017872,
      "grad_norm": 0.9490969710584504,
      "learning_rate": 9.244263067536872e-06,
      "loss": 0.4607,
      "step": 115
    },
    {
      "epoch": 0.18026418026418026,
      "grad_norm": 0.7448331565841488,
      "learning_rate": 9.23129847300171e-06,
      "loss": 0.5287,
      "step": 116
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 0.7568020783154017,
      "learning_rate": 9.218232871670168e-06,
      "loss": 0.5876,
      "step": 117
    },
    {
      "epoch": 0.18337218337218336,
      "grad_norm": 0.703790461691835,
      "learning_rate": 9.205066575435754e-06,
      "loss": 0.5477,
      "step": 118
    },
    {
      "epoch": 0.18492618492618493,
      "grad_norm": 0.7355757023568825,
      "learning_rate": 9.191799898595706e-06,
      "loss": 0.4819,
      "step": 119
    },
    {
      "epoch": 0.1864801864801865,
      "grad_norm": 0.7718259998249563,
      "learning_rate": 9.178433157843474e-06,
      "loss": 0.5529,
      "step": 120
    },
    {
      "epoch": 0.18803418803418803,
      "grad_norm": 0.7325746328877619,
      "learning_rate": 9.164966672261171e-06,
      "loss": 0.471,
      "step": 121
    },
    {
      "epoch": 0.1895881895881896,
      "grad_norm": 0.7807440267754963,
      "learning_rate": 9.151400763311958e-06,
      "loss": 0.5616,
      "step": 122
    },
    {
      "epoch": 0.19114219114219114,
      "grad_norm": 0.6694238548360774,
      "learning_rate": 9.13773575483236e-06,
      "loss": 0.5637,
      "step": 123
    },
    {
      "epoch": 0.1926961926961927,
      "grad_norm": 0.7619298527686099,
      "learning_rate": 9.123971973024543e-06,
      "loss": 0.5222,
      "step": 124
    },
    {
      "epoch": 0.19425019425019424,
      "grad_norm": 0.8077572349157733,
      "learning_rate": 9.110109746448527e-06,
      "loss": 0.4792,
      "step": 125
    },
    {
      "epoch": 0.1958041958041958,
      "grad_norm": 0.7156882422608634,
      "learning_rate": 9.09614940601434e-06,
      "loss": 0.5079,
      "step": 126
    },
    {
      "epoch": 0.19735819735819735,
      "grad_norm": 0.8839790656748958,
      "learning_rate": 9.08209128497412e-06,
      "loss": 0.6167,
      "step": 127
    },
    {
      "epoch": 0.1989121989121989,
      "grad_norm": 0.7361442782777586,
      "learning_rate": 9.06793571891416e-06,
      "loss": 0.573,
      "step": 128
    },
    {
      "epoch": 0.20046620046620048,
      "grad_norm": 0.6874615113965296,
      "learning_rate": 9.053683045746897e-06,
      "loss": 0.5625,
      "step": 129
    },
    {
      "epoch": 0.20202020202020202,
      "grad_norm": 1.1187739918068127,
      "learning_rate": 9.039333605702844e-06,
      "loss": 0.5386,
      "step": 130
    },
    {
      "epoch": 0.20357420357420358,
      "grad_norm": 0.7935185219891059,
      "learning_rate": 9.024887741322475e-06,
      "loss": 0.4354,
      "step": 131
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 0.6755878221667591,
      "learning_rate": 9.010345797448037e-06,
      "loss": 0.4871,
      "step": 132
    },
    {
      "epoch": 0.2066822066822067,
      "grad_norm": 0.8821400973796808,
      "learning_rate": 8.995708121215325e-06,
      "loss": 0.5988,
      "step": 133
    },
    {
      "epoch": 0.20823620823620823,
      "grad_norm": 0.8963468623381899,
      "learning_rate": 8.980975062045398e-06,
      "loss": 0.4808,
      "step": 134
    },
    {
      "epoch": 0.2097902097902098,
      "grad_norm": 0.7605330355025913,
      "learning_rate": 8.96614697163623e-06,
      "loss": 0.5307,
      "step": 135
    },
    {
      "epoch": 0.21134421134421136,
      "grad_norm": 0.8377449496608493,
      "learning_rate": 8.95122420395432e-06,
      "loss": 0.5855,
      "step": 136
    },
    {
      "epoch": 0.2128982128982129,
      "grad_norm": 0.6874533009837056,
      "learning_rate": 8.936207115226242e-06,
      "loss": 0.5025,
      "step": 137
    },
    {
      "epoch": 0.21445221445221446,
      "grad_norm": 1.0615886301023565,
      "learning_rate": 8.921096063930141e-06,
      "loss": 0.5858,
      "step": 138
    },
    {
      "epoch": 0.216006216006216,
      "grad_norm": 0.7440356443459726,
      "learning_rate": 8.905891410787174e-06,
      "loss": 0.4721,
      "step": 139
    },
    {
      "epoch": 0.21756021756021757,
      "grad_norm": 0.9780044181786849,
      "learning_rate": 8.8905935187529e-06,
      "loss": 0.5506,
      "step": 140
    },
    {
      "epoch": 0.2191142191142191,
      "grad_norm": 0.7269242314361574,
      "learning_rate": 8.875202753008614e-06,
      "loss": 0.5308,
      "step": 141
    },
    {
      "epoch": 0.22066822066822067,
      "grad_norm": 0.8667053692132669,
      "learning_rate": 8.859719480952637e-06,
      "loss": 0.5679,
      "step": 142
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.7257081688438616,
      "learning_rate": 8.844144072191537e-06,
      "loss": 0.4858,
      "step": 143
    },
    {
      "epoch": 0.22377622377622378,
      "grad_norm": 0.6542430837557276,
      "learning_rate": 8.828476898531308e-06,
      "loss": 0.5328,
      "step": 144
    },
    {
      "epoch": 0.22533022533022534,
      "grad_norm": 0.7971170503328316,
      "learning_rate": 8.812718333968498e-06,
      "loss": 0.5695,
      "step": 145
    },
    {
      "epoch": 0.22688422688422688,
      "grad_norm": 0.8486913376544397,
      "learning_rate": 8.79686875468128e-06,
      "loss": 0.5336,
      "step": 146
    },
    {
      "epoch": 0.22843822843822845,
      "grad_norm": 1.0063509221057145,
      "learning_rate": 8.780928539020467e-06,
      "loss": 0.4731,
      "step": 147
    },
    {
      "epoch": 0.22999222999222999,
      "grad_norm": 1.084598842228243,
      "learning_rate": 8.764898067500488e-06,
      "loss": 0.5336,
      "step": 148
    },
    {
      "epoch": 0.23154623154623155,
      "grad_norm": 0.8477855123394982,
      "learning_rate": 8.7487777227903e-06,
      "loss": 0.7133,
      "step": 149
    },
    {
      "epoch": 0.2331002331002331,
      "grad_norm": 0.7433328633942683,
      "learning_rate": 8.732567889704253e-06,
      "loss": 0.4906,
      "step": 150
    },
    {
      "epoch": 0.23465423465423466,
      "grad_norm": 1.3157111994706745,
      "learning_rate": 8.716268955192908e-06,
      "loss": 0.4115,
      "step": 151
    },
    {
      "epoch": 0.23620823620823622,
      "grad_norm": 0.7915085431469301,
      "learning_rate": 8.699881308333794e-06,
      "loss": 0.4529,
      "step": 152
    },
    {
      "epoch": 0.23776223776223776,
      "grad_norm": 0.7093953259845076,
      "learning_rate": 8.683405340322123e-06,
      "loss": 0.5795,
      "step": 153
    },
    {
      "epoch": 0.23931623931623933,
      "grad_norm": 0.9148926328978699,
      "learning_rate": 8.666841444461456e-06,
      "loss": 0.5371,
      "step": 154
    },
    {
      "epoch": 0.24087024087024086,
      "grad_norm": 0.8451708726467831,
      "learning_rate": 8.650190016154307e-06,
      "loss": 0.6872,
      "step": 155
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 0.7575098139209872,
      "learning_rate": 8.633451452892707e-06,
      "loss": 0.5262,
      "step": 156
    },
    {
      "epoch": 0.24397824397824397,
      "grad_norm": 0.8039436989228282,
      "learning_rate": 8.616626154248717e-06,
      "loss": 0.5936,
      "step": 157
    },
    {
      "epoch": 0.24553224553224554,
      "grad_norm": 0.9111018315972883,
      "learning_rate": 8.59971452186489e-06,
      "loss": 0.433,
      "step": 158
    },
    {
      "epoch": 0.24708624708624707,
      "grad_norm": 0.7730492291768862,
      "learning_rate": 8.582716959444679e-06,
      "loss": 0.4906,
      "step": 159
    },
    {
      "epoch": 0.24864024864024864,
      "grad_norm": 0.7508532423751979,
      "learning_rate": 8.565633872742803e-06,
      "loss": 0.5951,
      "step": 160
    },
    {
      "epoch": 0.2501942501942502,
      "grad_norm": 0.7979075113160532,
      "learning_rate": 8.548465669555564e-06,
      "loss": 0.5487,
      "step": 161
    },
    {
      "epoch": 0.2517482517482518,
      "grad_norm": 2.822907214340435,
      "learning_rate": 8.531212759711103e-06,
      "loss": 0.5006,
      "step": 162
    },
    {
      "epoch": 0.2533022533022533,
      "grad_norm": 0.7951356785770967,
      "learning_rate": 8.51387555505963e-06,
      "loss": 0.4786,
      "step": 163
    },
    {
      "epoch": 0.25485625485625485,
      "grad_norm": 0.7944890316200554,
      "learning_rate": 8.496454469463583e-06,
      "loss": 0.5428,
      "step": 164
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 0.885097829322448,
      "learning_rate": 8.478949918787746e-06,
      "loss": 0.5559,
      "step": 165
    },
    {
      "epoch": 0.257964257964258,
      "grad_norm": 0.8765537416075171,
      "learning_rate": 8.461362320889338e-06,
      "loss": 0.6328,
      "step": 166
    },
    {
      "epoch": 0.2595182595182595,
      "grad_norm": 0.7600338630468001,
      "learning_rate": 8.443692095608019e-06,
      "loss": 0.4644,
      "step": 167
    },
    {
      "epoch": 0.26107226107226106,
      "grad_norm": 0.7771144701749865,
      "learning_rate": 8.425939664755874e-06,
      "loss": 0.4637,
      "step": 168
    },
    {
      "epoch": 0.26262626262626265,
      "grad_norm": 1.066507788727023,
      "learning_rate": 8.408105452107353e-06,
      "loss": 0.4977,
      "step": 169
    },
    {
      "epoch": 0.2641802641802642,
      "grad_norm": 1.0331827740490165,
      "learning_rate": 8.390189883389143e-06,
      "loss": 0.4936,
      "step": 170
    },
    {
      "epoch": 0.26573426573426573,
      "grad_norm": 0.6598501232172324,
      "learning_rate": 8.37219338627001e-06,
      "loss": 0.4534,
      "step": 171
    },
    {
      "epoch": 0.26728826728826727,
      "grad_norm": 0.7813638132356793,
      "learning_rate": 8.354116390350594e-06,
      "loss": 0.5658,
      "step": 172
    },
    {
      "epoch": 0.26884226884226886,
      "grad_norm": 0.9169947439806584,
      "learning_rate": 8.335959327153148e-06,
      "loss": 0.6649,
      "step": 173
    },
    {
      "epoch": 0.2703962703962704,
      "grad_norm": 0.9173506379895737,
      "learning_rate": 8.317722630111233e-06,
      "loss": 0.6389,
      "step": 174
    },
    {
      "epoch": 0.27195027195027194,
      "grad_norm": 0.9071927842627939,
      "learning_rate": 8.299406734559385e-06,
      "loss": 0.428,
      "step": 175
    },
    {
      "epoch": 0.27350427350427353,
      "grad_norm": 0.7688021245301622,
      "learning_rate": 8.281012077722712e-06,
      "loss": 0.5133,
      "step": 176
    },
    {
      "epoch": 0.27505827505827507,
      "grad_norm": 0.6639447866723064,
      "learning_rate": 8.26253909870646e-06,
      "loss": 0.5077,
      "step": 177
    },
    {
      "epoch": 0.2766122766122766,
      "grad_norm": 0.6947258135733688,
      "learning_rate": 8.24398823848553e-06,
      "loss": 0.5527,
      "step": 178
    },
    {
      "epoch": 0.27816627816627815,
      "grad_norm": 0.8705601245175114,
      "learning_rate": 8.225359939893954e-06,
      "loss": 0.4838,
      "step": 179
    },
    {
      "epoch": 0.27972027972027974,
      "grad_norm": 0.6618526787787719,
      "learning_rate": 8.206654647614323e-06,
      "loss": 0.3804,
      "step": 180
    },
    {
      "epoch": 0.2812742812742813,
      "grad_norm": 1.0338275501153558,
      "learning_rate": 8.18787280816717e-06,
      "loss": 0.5362,
      "step": 181
    },
    {
      "epoch": 0.2828282828282828,
      "grad_norm": 0.7923177578469405,
      "learning_rate": 8.169014869900308e-06,
      "loss": 0.5842,
      "step": 182
    },
    {
      "epoch": 0.28438228438228436,
      "grad_norm": 0.6396399402988676,
      "learning_rate": 8.150081282978139e-06,
      "loss": 0.4271,
      "step": 183
    },
    {
      "epoch": 0.28593628593628595,
      "grad_norm": 0.8865011660122173,
      "learning_rate": 8.131072499370897e-06,
      "loss": 0.5489,
      "step": 184
    },
    {
      "epoch": 0.2874902874902875,
      "grad_norm": 0.9138845420481327,
      "learning_rate": 8.111988972843859e-06,
      "loss": 0.5943,
      "step": 185
    },
    {
      "epoch": 0.289044289044289,
      "grad_norm": 1.1268837902712043,
      "learning_rate": 8.09283115894652e-06,
      "loss": 0.5491,
      "step": 186
    },
    {
      "epoch": 0.2905982905982906,
      "grad_norm": 0.7444814008859322,
      "learning_rate": 8.073599515001713e-06,
      "loss": 0.4997,
      "step": 187
    },
    {
      "epoch": 0.29215229215229216,
      "grad_norm": 0.8044881882512651,
      "learning_rate": 8.054294500094697e-06,
      "loss": 0.5325,
      "step": 188
    },
    {
      "epoch": 0.2937062937062937,
      "grad_norm": 0.8146942609420201,
      "learning_rate": 8.034916575062188e-06,
      "loss": 0.5437,
      "step": 189
    },
    {
      "epoch": 0.29526029526029524,
      "grad_norm": 0.7872597021358523,
      "learning_rate": 8.015466202481371e-06,
      "loss": 0.4712,
      "step": 190
    },
    {
      "epoch": 0.29681429681429683,
      "grad_norm": 0.7193071970265177,
      "learning_rate": 7.995943846658852e-06,
      "loss": 0.5397,
      "step": 191
    },
    {
      "epoch": 0.29836829836829837,
      "grad_norm": 0.8391823684267,
      "learning_rate": 7.976349973619567e-06,
      "loss": 0.5765,
      "step": 192
    },
    {
      "epoch": 0.2999222999222999,
      "grad_norm": 0.6845170782635249,
      "learning_rate": 7.956685051095672e-06,
      "loss": 0.56,
      "step": 193
    },
    {
      "epoch": 0.3014763014763015,
      "grad_norm": 0.7252420419163962,
      "learning_rate": 7.936949548515364e-06,
      "loss": 0.605,
      "step": 194
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 0.7875283859706272,
      "learning_rate": 7.917143936991688e-06,
      "loss": 0.5004,
      "step": 195
    },
    {
      "epoch": 0.3045843045843046,
      "grad_norm": 0.6047235742618552,
      "learning_rate": 7.897268689311278e-06,
      "loss": 0.4562,
      "step": 196
    },
    {
      "epoch": 0.3061383061383061,
      "grad_norm": 0.5825984478127255,
      "learning_rate": 7.877324279923078e-06,
      "loss": 0.451,
      "step": 197
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.7369125343894832,
      "learning_rate": 7.857311184927015e-06,
      "loss": 0.5195,
      "step": 198
    },
    {
      "epoch": 0.30924630924630925,
      "grad_norm": 0.6104305536066545,
      "learning_rate": 7.837229882062638e-06,
      "loss": 0.531,
      "step": 199
    },
    {
      "epoch": 0.3108003108003108,
      "grad_norm": 0.9139389379071885,
      "learning_rate": 7.817080850697705e-06,
      "loss": 0.4996,
      "step": 200
    },
    {
      "epoch": 0.3123543123543124,
      "grad_norm": 0.8513425536888274,
      "learning_rate": 7.796864571816745e-06,
      "loss": 0.3977,
      "step": 201
    },
    {
      "epoch": 0.3139083139083139,
      "grad_norm": 0.7739897197051825,
      "learning_rate": 7.77658152800958e-06,
      "loss": 0.5569,
      "step": 202
    },
    {
      "epoch": 0.31546231546231546,
      "grad_norm": 0.7242668166031772,
      "learning_rate": 7.756232203459794e-06,
      "loss": 0.4652,
      "step": 203
    },
    {
      "epoch": 0.317016317016317,
      "grad_norm": 0.7649888193815342,
      "learning_rate": 7.735817083933189e-06,
      "loss": 0.4699,
      "step": 204
    },
    {
      "epoch": 0.3185703185703186,
      "grad_norm": 1.0274699034423136,
      "learning_rate": 7.715336656766176e-06,
      "loss": 0.5284,
      "step": 205
    },
    {
      "epoch": 0.3201243201243201,
      "grad_norm": 0.6976554827980787,
      "learning_rate": 7.69479141085415e-06,
      "loss": 0.4875,
      "step": 206
    },
    {
      "epoch": 0.32167832167832167,
      "grad_norm": 1.130786196712028,
      "learning_rate": 7.674181836639819e-06,
      "loss": 0.4214,
      "step": 207
    },
    {
      "epoch": 0.32323232323232326,
      "grad_norm": 0.7607350344632271,
      "learning_rate": 7.653508426101488e-06,
      "loss": 0.4803,
      "step": 208
    },
    {
      "epoch": 0.3247863247863248,
      "grad_norm": 0.7396245146929729,
      "learning_rate": 7.632771672741326e-06,
      "loss": 0.5967,
      "step": 209
    },
    {
      "epoch": 0.32634032634032634,
      "grad_norm": 0.9687489401453343,
      "learning_rate": 7.611972071573579e-06,
      "loss": 0.5753,
      "step": 210
    },
    {
      "epoch": 0.3278943278943279,
      "grad_norm": 0.7739827653387099,
      "learning_rate": 7.591110119112757e-06,
      "loss": 0.495,
      "step": 211
    },
    {
      "epoch": 0.32944832944832947,
      "grad_norm": 0.8201270448540147,
      "learning_rate": 7.5701863133617735e-06,
      "loss": 0.562,
      "step": 212
    },
    {
      "epoch": 0.331002331002331,
      "grad_norm": 0.8576029389701612,
      "learning_rate": 7.549201153800073e-06,
      "loss": 0.5381,
      "step": 213
    },
    {
      "epoch": 0.33255633255633255,
      "grad_norm": 0.6841266898837828,
      "learning_rate": 7.528155141371688e-06,
      "loss": 0.467,
      "step": 214
    },
    {
      "epoch": 0.3341103341103341,
      "grad_norm": 0.7802991357810948,
      "learning_rate": 7.507048778473296e-06,
      "loss": 0.4966,
      "step": 215
    },
    {
      "epoch": 0.3356643356643357,
      "grad_norm": 0.7218564126997288,
      "learning_rate": 7.485882568942222e-06,
      "loss": 0.5112,
      "step": 216
    },
    {
      "epoch": 0.3372183372183372,
      "grad_norm": 0.7400107616507341,
      "learning_rate": 7.464657018044411e-06,
      "loss": 0.6446,
      "step": 217
    },
    {
      "epoch": 0.33877233877233875,
      "grad_norm": 0.9014902134428892,
      "learning_rate": 7.443372632462363e-06,
      "loss": 0.5053,
      "step": 218
    },
    {
      "epoch": 0.34032634032634035,
      "grad_norm": 0.7329909522650422,
      "learning_rate": 7.422029920283044e-06,
      "loss": 0.5443,
      "step": 219
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 0.6892541253744358,
      "learning_rate": 7.400629390985753e-06,
      "loss": 0.4837,
      "step": 220
    },
    {
      "epoch": 0.3434343434343434,
      "grad_norm": 0.7723348206252342,
      "learning_rate": 7.379171555429965e-06,
      "loss": 0.5755,
      "step": 221
    },
    {
      "epoch": 0.34498834498834496,
      "grad_norm": 7.664546431285865,
      "learning_rate": 7.357656925843125e-06,
      "loss": 0.4786,
      "step": 222
    },
    {
      "epoch": 0.34654234654234656,
      "grad_norm": 0.7853221971786,
      "learning_rate": 7.336086015808439e-06,
      "loss": 0.4729,
      "step": 223
    },
    {
      "epoch": 0.3480963480963481,
      "grad_norm": 0.7864246657192466,
      "learning_rate": 7.314459340252593e-06,
      "loss": 0.5848,
      "step": 224
    },
    {
      "epoch": 0.34965034965034963,
      "grad_norm": 0.7588019542197073,
      "learning_rate": 7.2927774154334765e-06,
      "loss": 0.4884,
      "step": 225
    },
    {
      "epoch": 0.35120435120435123,
      "grad_norm": 0.6496638579237419,
      "learning_rate": 7.271040758927852e-06,
      "loss": 0.4323,
      "step": 226
    },
    {
      "epoch": 0.35275835275835277,
      "grad_norm": 0.6618044442655401,
      "learning_rate": 7.2492498896190015e-06,
      "loss": 0.5797,
      "step": 227
    },
    {
      "epoch": 0.3543123543123543,
      "grad_norm": 0.8288426526834968,
      "learning_rate": 7.227405327684339e-06,
      "loss": 0.5113,
      "step": 228
    },
    {
      "epoch": 0.35586635586635584,
      "grad_norm": 0.6693277294293136,
      "learning_rate": 7.205507594582994e-06,
      "loss": 0.4158,
      "step": 229
    },
    {
      "epoch": 0.35742035742035744,
      "grad_norm": 0.834098167457632,
      "learning_rate": 7.183557213043365e-06,
      "loss": 0.4826,
      "step": 230
    },
    {
      "epoch": 0.358974358974359,
      "grad_norm": 0.7563577310691308,
      "learning_rate": 7.161554707050637e-06,
      "loss": 0.4767,
      "step": 231
    },
    {
      "epoch": 0.3605283605283605,
      "grad_norm": 0.718266653620817,
      "learning_rate": 7.1395006018342774e-06,
      "loss": 0.5423,
      "step": 232
    },
    {
      "epoch": 0.3620823620823621,
      "grad_norm": 0.7794408706255476,
      "learning_rate": 7.117395423855496e-06,
      "loss": 0.5394,
      "step": 233
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.7269215366898196,
      "learning_rate": 7.09523970079468e-06,
      "loss": 0.5705,
      "step": 234
    },
    {
      "epoch": 0.3651903651903652,
      "grad_norm": 0.9408390684558995,
      "learning_rate": 7.073033961538793e-06,
      "loss": 0.6389,
      "step": 235
    },
    {
      "epoch": 0.3667443667443667,
      "grad_norm": 0.6638828664952492,
      "learning_rate": 7.050778736168757e-06,
      "loss": 0.5542,
      "step": 236
    },
    {
      "epoch": 0.3682983682983683,
      "grad_norm": 0.8460797947106127,
      "learning_rate": 7.028474555946787e-06,
      "loss": 0.5444,
      "step": 237
    },
    {
      "epoch": 0.36985236985236986,
      "grad_norm": 0.7665734815326051,
      "learning_rate": 7.006121953303724e-06,
      "loss": 0.5229,
      "step": 238
    },
    {
      "epoch": 0.3714063714063714,
      "grad_norm": 0.8309718789678989,
      "learning_rate": 6.983721461826312e-06,
      "loss": 0.5784,
      "step": 239
    },
    {
      "epoch": 0.372960372960373,
      "grad_norm": 0.8222921080795965,
      "learning_rate": 6.9612736162444695e-06,
      "loss": 0.4292,
      "step": 240
    },
    {
      "epoch": 0.3745143745143745,
      "grad_norm": 0.7527448252012365,
      "learning_rate": 6.938778952418519e-06,
      "loss": 0.4729,
      "step": 241
    },
    {
      "epoch": 0.37606837606837606,
      "grad_norm": 0.8985906116004181,
      "learning_rate": 6.916238007326399e-06,
      "loss": 0.5655,
      "step": 242
    },
    {
      "epoch": 0.3776223776223776,
      "grad_norm": 0.6113924311221383,
      "learning_rate": 6.893651319050842e-06,
      "loss": 0.5263,
      "step": 243
    },
    {
      "epoch": 0.3791763791763792,
      "grad_norm": 0.7817006095897586,
      "learning_rate": 6.871019426766537e-06,
      "loss": 0.4632,
      "step": 244
    },
    {
      "epoch": 0.38073038073038074,
      "grad_norm": 1.7226586227932226,
      "learning_rate": 6.8483428707272456e-06,
      "loss": 0.5609,
      "step": 245
    },
    {
      "epoch": 0.3822843822843823,
      "grad_norm": 0.8080544757443882,
      "learning_rate": 6.825622192252922e-06,
      "loss": 0.5424,
      "step": 246
    },
    {
      "epoch": 0.3838383838383838,
      "grad_norm": 0.8602129359285826,
      "learning_rate": 6.802857933716774e-06,
      "loss": 0.4986,
      "step": 247
    },
    {
      "epoch": 0.3853923853923854,
      "grad_norm": 0.7014559071633292,
      "learning_rate": 6.7800506385323335e-06,
      "loss": 0.5079,
      "step": 248
    },
    {
      "epoch": 0.38694638694638694,
      "grad_norm": 0.8917239030684251,
      "learning_rate": 6.757200851140468e-06,
      "loss": 0.5281,
      "step": 249
    },
    {
      "epoch": 0.3885003885003885,
      "grad_norm": 0.7447990452878435,
      "learning_rate": 6.734309116996392e-06,
      "loss": 0.6071,
      "step": 250
    },
    {
      "epoch": 0.3900543900543901,
      "grad_norm": 0.7472396252981742,
      "learning_rate": 6.711375982556648e-06,
      "loss": 0.5418,
      "step": 251
    },
    {
      "epoch": 0.3916083916083916,
      "grad_norm": 0.6987079676338328,
      "learning_rate": 6.688401995266061e-06,
      "loss": 0.4444,
      "step": 252
    },
    {
      "epoch": 0.39316239316239315,
      "grad_norm": 0.8025151202499369,
      "learning_rate": 6.665387703544661e-06,
      "loss": 0.5082,
      "step": 253
    },
    {
      "epoch": 0.3947163947163947,
      "grad_norm": 0.8425567278002095,
      "learning_rate": 6.642333656774607e-06,
      "loss": 0.4212,
      "step": 254
    },
    {
      "epoch": 0.3962703962703963,
      "grad_norm": 0.7875874739440923,
      "learning_rate": 6.61924040528706e-06,
      "loss": 0.4969,
      "step": 255
    },
    {
      "epoch": 0.3978243978243978,
      "grad_norm": 0.645057270258725,
      "learning_rate": 6.596108500349054e-06,
      "loss": 0.4857,
      "step": 256
    },
    {
      "epoch": 0.39937839937839936,
      "grad_norm": 0.670409599043594,
      "learning_rate": 6.572938494150332e-06,
      "loss": 0.5544,
      "step": 257
    },
    {
      "epoch": 0.40093240093240096,
      "grad_norm": 0.6198181436713772,
      "learning_rate": 6.54973093979016e-06,
      "loss": 0.4725,
      "step": 258
    },
    {
      "epoch": 0.4024864024864025,
      "grad_norm": 0.7662890791531904,
      "learning_rate": 6.526486391264137e-06,
      "loss": 0.5041,
      "step": 259
    },
    {
      "epoch": 0.40404040404040403,
      "grad_norm": 0.71665149094791,
      "learning_rate": 6.503205403450957e-06,
      "loss": 0.5193,
      "step": 260
    },
    {
      "epoch": 0.40559440559440557,
      "grad_norm": 0.8140997620947699,
      "learning_rate": 6.479888532099175e-06,
      "loss": 0.5243,
      "step": 261
    },
    {
      "epoch": 0.40714840714840717,
      "grad_norm": 0.624858848374034,
      "learning_rate": 6.4565363338139245e-06,
      "loss": 0.3996,
      "step": 262
    },
    {
      "epoch": 0.4087024087024087,
      "grad_norm": 0.7647591367675671,
      "learning_rate": 6.433149366043652e-06,
      "loss": 0.4658,
      "step": 263
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 0.6633892543296002,
      "learning_rate": 6.409728187066789e-06,
      "loss": 0.4452,
      "step": 264
    },
    {
      "epoch": 0.41181041181041184,
      "grad_norm": 0.9241797213745755,
      "learning_rate": 6.386273355978442e-06,
      "loss": 0.5173,
      "step": 265
    },
    {
      "epoch": 0.4133644133644134,
      "grad_norm": 0.9790400835539028,
      "learning_rate": 6.3627854326770326e-06,
      "loss": 0.5611,
      "step": 266
    },
    {
      "epoch": 0.4149184149184149,
      "grad_norm": 0.7010295847973484,
      "learning_rate": 6.339264977850943e-06,
      "loss": 0.6185,
      "step": 267
    },
    {
      "epoch": 0.41647241647241645,
      "grad_norm": 0.9024237646983181,
      "learning_rate": 6.3157125529651205e-06,
      "loss": 0.4375,
      "step": 268
    },
    {
      "epoch": 0.41802641802641805,
      "grad_norm": 0.728576137938516,
      "learning_rate": 6.292128720247692e-06,
      "loss": 0.416,
      "step": 269
    },
    {
      "epoch": 0.4195804195804196,
      "grad_norm": 0.7209803232506808,
      "learning_rate": 6.268514042676519e-06,
      "loss": 0.4931,
      "step": 270
    },
    {
      "epoch": 0.4211344211344211,
      "grad_norm": 0.9852718978800553,
      "learning_rate": 6.244869083965777e-06,
      "loss": 0.6138,
      "step": 271
    },
    {
      "epoch": 0.4226884226884227,
      "grad_norm": 0.8544813586101946,
      "learning_rate": 6.221194408552494e-06,
      "loss": 0.4828,
      "step": 272
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 0.5339139693029649,
      "learning_rate": 6.197490581583078e-06,
      "loss": 0.3763,
      "step": 273
    },
    {
      "epoch": 0.4257964257964258,
      "grad_norm": 0.7029889613516929,
      "learning_rate": 6.173758168899814e-06,
      "loss": 0.5003,
      "step": 274
    },
    {
      "epoch": 0.42735042735042733,
      "grad_norm": 0.6697709635144842,
      "learning_rate": 6.149997737027377e-06,
      "loss": 0.5023,
      "step": 275
    },
    {
      "epoch": 0.4289044289044289,
      "grad_norm": 0.7711140122998004,
      "learning_rate": 6.126209853159293e-06,
      "loss": 0.5458,
      "step": 276
    },
    {
      "epoch": 0.43045843045843046,
      "grad_norm": 0.7959237764492787,
      "learning_rate": 6.102395085144406e-06,
      "loss": 0.5436,
      "step": 277
    },
    {
      "epoch": 0.432012432012432,
      "grad_norm": 0.7725608290998155,
      "learning_rate": 6.078554001473317e-06,
      "loss": 0.5174,
      "step": 278
    },
    {
      "epoch": 0.43356643356643354,
      "grad_norm": 1.0803231102952509,
      "learning_rate": 6.054687171264822e-06,
      "loss": 0.6228,
      "step": 279
    },
    {
      "epoch": 0.43512043512043513,
      "grad_norm": 0.8198164427592709,
      "learning_rate": 6.030795164252321e-06,
      "loss": 0.4489,
      "step": 280
    },
    {
      "epoch": 0.4366744366744367,
      "grad_norm": 0.9826854880546106,
      "learning_rate": 6.006878550770213e-06,
      "loss": 0.4776,
      "step": 281
    },
    {
      "epoch": 0.4382284382284382,
      "grad_norm": 0.6853341082656256,
      "learning_rate": 5.982937901740296e-06,
      "loss": 0.5168,
      "step": 282
    },
    {
      "epoch": 0.4397824397824398,
      "grad_norm": 0.8527642374724453,
      "learning_rate": 5.958973788658115e-06,
      "loss": 0.4427,
      "step": 283
    },
    {
      "epoch": 0.44133644133644134,
      "grad_norm": 0.820049645998347,
      "learning_rate": 5.934986783579349e-06,
      "loss": 0.5325,
      "step": 284
    },
    {
      "epoch": 0.4428904428904429,
      "grad_norm": 0.8635505053006802,
      "learning_rate": 5.91097745910613e-06,
      "loss": 0.5158,
      "step": 285
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.6163515215798748,
      "learning_rate": 5.886946388373387e-06,
      "loss": 0.5613,
      "step": 286
    },
    {
      "epoch": 0.445998445998446,
      "grad_norm": 0.7114308768259194,
      "learning_rate": 5.862894145035158e-06,
      "loss": 0.5812,
      "step": 287
    },
    {
      "epoch": 0.44755244755244755,
      "grad_norm": 0.7695278464891274,
      "learning_rate": 5.83882130325091e-06,
      "loss": 0.5216,
      "step": 288
    },
    {
      "epoch": 0.4491064491064491,
      "grad_norm": 0.7953550029299292,
      "learning_rate": 5.814728437671808e-06,
      "loss": 0.3821,
      "step": 289
    },
    {
      "epoch": 0.4506604506604507,
      "grad_norm": 0.8080489538709861,
      "learning_rate": 5.7906161234270234e-06,
      "loss": 0.4982,
      "step": 290
    },
    {
      "epoch": 0.4522144522144522,
      "grad_norm": 0.7349261540661857,
      "learning_rate": 5.766484936109987e-06,
      "loss": 0.4715,
      "step": 291
    },
    {
      "epoch": 0.45376845376845376,
      "grad_norm": 1.0100168672849925,
      "learning_rate": 5.7423354517646616e-06,
      "loss": 0.4585,
      "step": 292
    },
    {
      "epoch": 0.4553224553224553,
      "grad_norm": 0.7711000819084326,
      "learning_rate": 5.718168246871775e-06,
      "loss": 0.5841,
      "step": 293
    },
    {
      "epoch": 0.4568764568764569,
      "grad_norm": 0.9388643582133742,
      "learning_rate": 5.69398389833508e-06,
      "loss": 0.5857,
      "step": 294
    },
    {
      "epoch": 0.45843045843045843,
      "grad_norm": 0.6391229423057062,
      "learning_rate": 5.669782983467562e-06,
      "loss": 0.5786,
      "step": 295
    },
    {
      "epoch": 0.45998445998445997,
      "grad_norm": 0.7381704493689959,
      "learning_rate": 5.645566079977673e-06,
      "loss": 0.4531,
      "step": 296
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.6427062310992607,
      "learning_rate": 5.621333765955529e-06,
      "loss": 0.4516,
      "step": 297
    },
    {
      "epoch": 0.4630924630924631,
      "grad_norm": 0.6045207793918917,
      "learning_rate": 5.5970866198591235e-06,
      "loss": 0.5904,
      "step": 298
    },
    {
      "epoch": 0.46464646464646464,
      "grad_norm": 0.7486268299657493,
      "learning_rate": 5.572825220500505e-06,
      "loss": 0.5559,
      "step": 299
    },
    {
      "epoch": 0.4662004662004662,
      "grad_norm": 0.8393377063443249,
      "learning_rate": 5.548550147031971e-06,
      "loss": 0.2951,
      "step": 300
    },
    {
      "epoch": 0.4677544677544678,
      "grad_norm": 0.5563021839864102,
      "learning_rate": 5.524261978932234e-06,
      "loss": 0.4705,
      "step": 301
    },
    {
      "epoch": 0.4693084693084693,
      "grad_norm": 0.5244277581252728,
      "learning_rate": 5.4999612959925995e-06,
      "loss": 0.5826,
      "step": 302
    },
    {
      "epoch": 0.47086247086247085,
      "grad_norm": 0.8967222238475693,
      "learning_rate": 5.475648678303112e-06,
      "loss": 0.4326,
      "step": 303
    },
    {
      "epoch": 0.47241647241647244,
      "grad_norm": 0.8597788121495409,
      "learning_rate": 5.451324706238721e-06,
      "loss": 0.5024,
      "step": 304
    },
    {
      "epoch": 0.473970473970474,
      "grad_norm": 0.6219585515878757,
      "learning_rate": 5.426989960445415e-06,
      "loss": 0.4228,
      "step": 305
    },
    {
      "epoch": 0.4755244755244755,
      "grad_norm": 0.87442477445858,
      "learning_rate": 5.402645021826367e-06,
      "loss": 0.5451,
      "step": 306
    },
    {
      "epoch": 0.47707847707847706,
      "grad_norm": 0.7018801509337828,
      "learning_rate": 5.3782904715280705e-06,
      "loss": 0.454,
      "step": 307
    },
    {
      "epoch": 0.47863247863247865,
      "grad_norm": 0.7870901274907686,
      "learning_rate": 5.35392689092646e-06,
      "loss": 0.6209,
      "step": 308
    },
    {
      "epoch": 0.4801864801864802,
      "grad_norm": 0.8404067379482166,
      "learning_rate": 5.329554861613031e-06,
      "loss": 0.4577,
      "step": 309
    },
    {
      "epoch": 0.48174048174048173,
      "grad_norm": 0.8609659868189161,
      "learning_rate": 5.3051749653809685e-06,
      "loss": 0.516,
      "step": 310
    },
    {
      "epoch": 0.48329448329448327,
      "grad_norm": 0.6451369228669567,
      "learning_rate": 5.2807877842112475e-06,
      "loss": 0.529,
      "step": 311
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 0.7089279571799519,
      "learning_rate": 5.256393900258747e-06,
      "loss": 0.55,
      "step": 312
    },
    {
      "epoch": 0.4864024864024864,
      "grad_norm": 0.6009435199914196,
      "learning_rate": 5.231993895838348e-06,
      "loss": 0.4524,
      "step": 313
    },
    {
      "epoch": 0.48795648795648794,
      "grad_norm": 0.7399390807603545,
      "learning_rate": 5.207588353411032e-06,
      "loss": 0.5826,
      "step": 314
    },
    {
      "epoch": 0.48951048951048953,
      "grad_norm": 0.6358169721821743,
      "learning_rate": 5.183177855569989e-06,
      "loss": 0.4686,
      "step": 315
    },
    {
      "epoch": 0.49106449106449107,
      "grad_norm": 0.834159373189627,
      "learning_rate": 5.158762985026694e-06,
      "loss": 0.6194,
      "step": 316
    },
    {
      "epoch": 0.4926184926184926,
      "grad_norm": 0.672932649828031,
      "learning_rate": 5.1343443245970095e-06,
      "loss": 0.5593,
      "step": 317
    },
    {
      "epoch": 0.49417249417249415,
      "grad_norm": 0.7305995766455743,
      "learning_rate": 5.10992245718726e-06,
      "loss": 0.4943,
      "step": 318
    },
    {
      "epoch": 0.49572649572649574,
      "grad_norm": 1.1658079579901208,
      "learning_rate": 5.085497965780335e-06,
      "loss": 0.5031,
      "step": 319
    },
    {
      "epoch": 0.4972804972804973,
      "grad_norm": 0.9841093713953577,
      "learning_rate": 5.061071433421754e-06,
      "loss": 0.5508,
      "step": 320
    },
    {
      "epoch": 0.4988344988344988,
      "grad_norm": 0.7841115815090715,
      "learning_rate": 5.0366434432057624e-06,
      "loss": 0.563,
      "step": 321
    },
    {
      "epoch": 0.5003885003885004,
      "grad_norm": 0.5645683948415432,
      "learning_rate": 5.012214578261402e-06,
      "loss": 0.4011,
      "step": 322
    },
    {
      "epoch": 0.5019425019425019,
      "grad_norm": 0.8438933215498013,
      "learning_rate": 4.987785421738599e-06,
      "loss": 0.5832,
      "step": 323
    },
    {
      "epoch": 0.5034965034965035,
      "grad_norm": 0.5640735635986946,
      "learning_rate": 4.963356556794238e-06,
      "loss": 0.401,
      "step": 324
    },
    {
      "epoch": 0.5050505050505051,
      "grad_norm": 1.2747685452336512,
      "learning_rate": 4.938928566578247e-06,
      "loss": 0.5392,
      "step": 325
    },
    {
      "epoch": 0.5066045066045066,
      "grad_norm": 0.6877059270681696,
      "learning_rate": 4.914502034219667e-06,
      "loss": 0.439,
      "step": 326
    },
    {
      "epoch": 0.5081585081585082,
      "grad_norm": 0.705842825427767,
      "learning_rate": 4.890077542812742e-06,
      "loss": 0.5187,
      "step": 327
    },
    {
      "epoch": 0.5097125097125097,
      "grad_norm": 0.5947821340173784,
      "learning_rate": 4.865655675402993e-06,
      "loss": 0.4178,
      "step": 328
    },
    {
      "epoch": 0.5112665112665112,
      "grad_norm": 0.6757902851130476,
      "learning_rate": 4.841237014973305e-06,
      "loss": 0.493,
      "step": 329
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.6256933230220865,
      "learning_rate": 4.8168221444300124e-06,
      "loss": 0.4499,
      "step": 330
    },
    {
      "epoch": 0.5143745143745144,
      "grad_norm": 0.6390396367290697,
      "learning_rate": 4.7924116465889684e-06,
      "loss": 0.4783,
      "step": 331
    },
    {
      "epoch": 0.515928515928516,
      "grad_norm": 0.7039920639754556,
      "learning_rate": 4.768006104161655e-06,
      "loss": 0.4862,
      "step": 332
    },
    {
      "epoch": 0.5174825174825175,
      "grad_norm": 0.6996136377759004,
      "learning_rate": 4.743606099741255e-06,
      "loss": 0.43,
      "step": 333
    },
    {
      "epoch": 0.519036519036519,
      "grad_norm": 0.5635901754632139,
      "learning_rate": 4.719212215788753e-06,
      "loss": 0.3489,
      "step": 334
    },
    {
      "epoch": 0.5205905205905206,
      "grad_norm": 1.0174320744839043,
      "learning_rate": 4.6948250346190315e-06,
      "loss": 0.4664,
      "step": 335
    },
    {
      "epoch": 0.5221445221445221,
      "grad_norm": 0.7281418344534993,
      "learning_rate": 4.670445138386971e-06,
      "loss": 0.4357,
      "step": 336
    },
    {
      "epoch": 0.5236985236985237,
      "grad_norm": 0.7444913580894218,
      "learning_rate": 4.646073109073542e-06,
      "loss": 0.5164,
      "step": 337
    },
    {
      "epoch": 0.5252525252525253,
      "grad_norm": 0.6453883418849363,
      "learning_rate": 4.621709528471931e-06,
      "loss": 0.4876,
      "step": 338
    },
    {
      "epoch": 0.5268065268065268,
      "grad_norm": 0.6332116311781928,
      "learning_rate": 4.5973549781736335e-06,
      "loss": 0.4512,
      "step": 339
    },
    {
      "epoch": 0.5283605283605284,
      "grad_norm": 0.7839883177892899,
      "learning_rate": 4.573010039554587e-06,
      "loss": 0.4864,
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.5299145299145299, | |
| "grad_norm": 0.574123724971613, | |
| "learning_rate": 4.548675293761281e-06, | |
| "loss": 0.5942, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.5314685314685315, | |
| "grad_norm": 0.7341359415325991, | |
| "learning_rate": 4.524351321696889e-06, | |
| "loss": 0.6479, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.533022533022533, | |
| "grad_norm": 0.7042514483644169, | |
| "learning_rate": 4.500038704007402e-06, | |
| "loss": 0.5163, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.5345765345765345, | |
| "grad_norm": 0.7905795068611852, | |
| "learning_rate": 4.475738021067768e-06, | |
| "loss": 0.4238, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5361305361305362, | |
| "grad_norm": 0.6659802197095699, | |
| "learning_rate": 4.451449852968031e-06, | |
| "loss": 0.5456, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5376845376845377, | |
| "grad_norm": 0.790921006825035, | |
| "learning_rate": 4.427174779499498e-06, | |
| "loss": 0.3841, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5392385392385393, | |
| "grad_norm": 0.7752025731889072, | |
| "learning_rate": 4.402913380140878e-06, | |
| "loss": 0.5737, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5407925407925408, | |
| "grad_norm": 0.69589495472164, | |
| "learning_rate": 4.378666234044471e-06, | |
| "loss": 0.5731, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.5423465423465423, | |
| "grad_norm": 0.6620238929419696, | |
| "learning_rate": 4.354433920022328e-06, | |
| "loss": 0.6002, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.5439005439005439, | |
| "grad_norm": 0.7640658489542932, | |
| "learning_rate": 4.3302170165324385e-06, | |
| "loss": 0.5681, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.5454545454545454, | |
| "grad_norm": 0.8733971833765328, | |
| "learning_rate": 4.306016101664921e-06, | |
| "loss": 0.5128, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.5470085470085471, | |
| "grad_norm": 1.0078351927972078, | |
| "learning_rate": 4.281831753128226e-06, | |
| "loss": 0.5179, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.5485625485625486, | |
| "grad_norm": 0.5776801607508973, | |
| "learning_rate": 4.257664548235341e-06, | |
| "loss": 0.5191, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.5501165501165501, | |
| "grad_norm": 0.7595818677545988, | |
| "learning_rate": 4.233515063890013e-06, | |
| "loss": 0.4912, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.5516705516705517, | |
| "grad_norm": 0.6606204189468279, | |
| "learning_rate": 4.209383876572977e-06, | |
| "loss": 0.5694, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.5532245532245532, | |
| "grad_norm": 0.7228853812417981, | |
| "learning_rate": 4.1852715623281934e-06, | |
| "loss": 0.5009, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.5547785547785548, | |
| "grad_norm": 0.679061167051694, | |
| "learning_rate": 4.161178696749092e-06, | |
| "loss": 0.4821, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.5563325563325563, | |
| "grad_norm": 1.034495766490782, | |
| "learning_rate": 4.1371058549648425e-06, | |
| "loss": 0.5308, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.5578865578865578, | |
| "grad_norm": 0.7686980644117477, | |
| "learning_rate": 4.1130536116266155e-06, | |
| "loss": 0.489, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.5594405594405595, | |
| "grad_norm": 0.9681286219055543, | |
| "learning_rate": 4.089022540893871e-06, | |
| "loss": 0.5282, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.560994560994561, | |
| "grad_norm": 0.8743676508733846, | |
| "learning_rate": 4.0650132164206515e-06, | |
| "loss": 0.5241, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.5625485625485626, | |
| "grad_norm": 0.6062796295682528, | |
| "learning_rate": 4.041026211341886e-06, | |
| "loss": 0.5007, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.5641025641025641, | |
| "grad_norm": 0.8866048922703949, | |
| "learning_rate": 4.017062098259707e-06, | |
| "loss": 0.5478, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.5656565656565656, | |
| "grad_norm": 0.6320850311198043, | |
| "learning_rate": 3.9931214492297875e-06, | |
| "loss": 0.5351, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.5672105672105672, | |
| "grad_norm": 0.6435052023704503, | |
| "learning_rate": 3.969204835747681e-06, | |
| "loss": 0.4535, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.5687645687645687, | |
| "grad_norm": 0.6824957419803489, | |
| "learning_rate": 3.945312828735179e-06, | |
| "loss": 0.4962, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.5703185703185704, | |
| "grad_norm": 0.6630522920673861, | |
| "learning_rate": 3.921445998526684e-06, | |
| "loss": 0.5484, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.5718725718725719, | |
| "grad_norm": 0.6203973778780169, | |
| "learning_rate": 3.897604914855596e-06, | |
| "loss": 0.4307, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.5734265734265734, | |
| "grad_norm": 0.8691644246672308, | |
| "learning_rate": 3.873790146840709e-06, | |
| "loss": 0.4494, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.574980574980575, | |
| "grad_norm": 0.6583038284757291, | |
| "learning_rate": 3.8500022629726246e-06, | |
| "loss": 0.5461, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.5765345765345765, | |
| "grad_norm": 0.6205452786275579, | |
| "learning_rate": 3.8262418311001884e-06, | |
| "loss": 0.441, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.578088578088578, | |
| "grad_norm": 0.8090078519631057, | |
| "learning_rate": 3.8025094184169254e-06, | |
| "loss": 0.4396, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5796425796425796, | |
| "grad_norm": 3.7824694886306442, | |
| "learning_rate": 3.778805591447505e-06, | |
| "loss": 0.5243, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5811965811965812, | |
| "grad_norm": 0.7728659065744149, | |
| "learning_rate": 3.7551309160342233e-06, | |
| "loss": 0.492, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.5827505827505828, | |
| "grad_norm": 0.6144384200403961, | |
| "learning_rate": 3.731485957323483e-06, | |
| "loss": 0.5093, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5843045843045843, | |
| "grad_norm": 0.7022871161510597, | |
| "learning_rate": 3.707871279752309e-06, | |
| "loss": 0.5471, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5858585858585859, | |
| "grad_norm": 0.7563905294320689, | |
| "learning_rate": 3.68428744703488e-06, | |
| "loss": 0.5169, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5874125874125874, | |
| "grad_norm": 0.7546808388382039, | |
| "learning_rate": 3.6607350221490593e-06, | |
| "loss": 0.5465, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.5889665889665889, | |
| "grad_norm": 2.017091506116899, | |
| "learning_rate": 3.6372145673229683e-06, | |
| "loss": 0.5379, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5905205905205905, | |
| "grad_norm": 1.0384918566794876, | |
| "learning_rate": 3.613726644021559e-06, | |
| "loss": 0.5533, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5920745920745921, | |
| "grad_norm": 0.7843829856867381, | |
| "learning_rate": 3.590271812933212e-06, | |
| "loss": 0.5192, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5936285936285937, | |
| "grad_norm": 0.8917403925552857, | |
| "learning_rate": 3.5668506339563502e-06, | |
| "loss": 0.5273, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5951825951825952, | |
| "grad_norm": 0.8080693974342459, | |
| "learning_rate": 3.5434636661860776e-06, | |
| "loss": 0.4502, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.5967365967365967, | |
| "grad_norm": 0.8589025653140446, | |
| "learning_rate": 3.5201114679008286e-06, | |
| "loss": 0.4597, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.5982905982905983, | |
| "grad_norm": 0.8451337336955351, | |
| "learning_rate": 3.4967945965490434e-06, | |
| "loss": 0.5479, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.5998445998445998, | |
| "grad_norm": 0.6022399299860023, | |
| "learning_rate": 3.4735136087358646e-06, | |
| "loss": 0.3617, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.6013986013986014, | |
| "grad_norm": 1.091419134601723, | |
| "learning_rate": 3.450269060209841e-06, | |
| "loss": 0.4422, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.602952602952603, | |
| "grad_norm": 0.7663320014407212, | |
| "learning_rate": 3.42706150584967e-06, | |
| "loss": 0.5793, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.6045066045066045, | |
| "grad_norm": 0.8338372079331721, | |
| "learning_rate": 3.4038914996509464e-06, | |
| "loss": 0.6015, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.6060606060606061, | |
| "grad_norm": 0.8601272441851022, | |
| "learning_rate": 3.3807595947129405e-06, | |
| "loss": 0.6391, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.6076146076146076, | |
| "grad_norm": 0.8548829786482054, | |
| "learning_rate": 3.357666343225396e-06, | |
| "loss": 0.5055, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.6091686091686092, | |
| "grad_norm": 0.7149586625595873, | |
| "learning_rate": 3.3346122964553407e-06, | |
| "loss": 0.4305, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.6107226107226107, | |
| "grad_norm": 0.7734311233562257, | |
| "learning_rate": 3.3115980047339415e-06, | |
| "loss": 0.4676, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.6122766122766122, | |
| "grad_norm": 0.7524053415700651, | |
| "learning_rate": 3.288624017443353e-06, | |
| "loss": 0.6177, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.6138306138306139, | |
| "grad_norm": 0.6577165811691974, | |
| "learning_rate": 3.265690883003609e-06, | |
| "loss": 0.4062, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.6153846153846154, | |
| "grad_norm": 0.7818702560735775, | |
| "learning_rate": 3.2427991488595334e-06, | |
| "loss": 0.7139, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.616938616938617, | |
| "grad_norm": 0.8337417209233989, | |
| "learning_rate": 3.219949361467668e-06, | |
| "loss": 0.3658, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.6184926184926185, | |
| "grad_norm": 0.6408051420484582, | |
| "learning_rate": 3.197142066283225e-06, | |
| "loss": 0.4037, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.62004662004662, | |
| "grad_norm": 0.6924590018612481, | |
| "learning_rate": 3.174377807747079e-06, | |
| "loss": 0.3915, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.6216006216006216, | |
| "grad_norm": 0.7521284979964744, | |
| "learning_rate": 3.1516571292727553e-06, | |
| "loss": 0.5316, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.6231546231546231, | |
| "grad_norm": 0.6902359570029046, | |
| "learning_rate": 3.128980573233465e-06, | |
| "loss": 0.3485, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.6247086247086248, | |
| "grad_norm": 0.8616508180715082, | |
| "learning_rate": 3.1063486809491595e-06, | |
| "loss": 0.5077, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.6262626262626263, | |
| "grad_norm": 0.7958497698850551, | |
| "learning_rate": 3.0837619926736027e-06, | |
| "loss": 0.4225, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.6278166278166278, | |
| "grad_norm": 0.7109428935226295, | |
| "learning_rate": 3.061221047581482e-06, | |
| "loss": 0.6677, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.6293706293706294, | |
| "grad_norm": 0.7470001438392854, | |
| "learning_rate": 3.038726383755531e-06, | |
| "loss": 0.4242, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.6309246309246309, | |
| "grad_norm": 0.6742001176378809, | |
| "learning_rate": 3.0162785381736893e-06, | |
| "loss": 0.4731, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.6324786324786325, | |
| "grad_norm": 0.8148615875421817, | |
| "learning_rate": 2.9938780466962768e-06, | |
| "loss": 0.5124, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.634032634032634, | |
| "grad_norm": 0.629279814363528, | |
| "learning_rate": 2.9715254440532147e-06, | |
| "loss": 0.5542, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.6355866355866356, | |
| "grad_norm": 0.7893368478824807, | |
| "learning_rate": 2.9492212638312458e-06, | |
| "loss": 0.4712, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.6371406371406372, | |
| "grad_norm": 0.700072426705422, | |
| "learning_rate": 2.9269660384612064e-06, | |
| "loss": 0.5445, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.6386946386946387, | |
| "grad_norm": 0.6730347447205193, | |
| "learning_rate": 2.90476029920532e-06, | |
| "loss": 0.569, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.6402486402486403, | |
| "grad_norm": 1.8040910895752915, | |
| "learning_rate": 2.882604576144505e-06, | |
| "loss": 0.5713, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.6418026418026418, | |
| "grad_norm": 0.6061113607030476, | |
| "learning_rate": 2.8604993981657247e-06, | |
| "loss": 0.4493, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.6433566433566433, | |
| "grad_norm": 1.350624074574079, | |
| "learning_rate": 2.8384452929493645e-06, | |
| "loss": 0.4243, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.6449106449106449, | |
| "grad_norm": 1.020418478286547, | |
| "learning_rate": 2.8164427869566367e-06, | |
| "loss": 0.4695, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.6464646464646465, | |
| "grad_norm": 1.073163390573267, | |
| "learning_rate": 2.7944924054170087e-06, | |
| "loss": 0.4878, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.6480186480186481, | |
| "grad_norm": 0.8939568549949852, | |
| "learning_rate": 2.7725946723156626e-06, | |
| "loss": 0.564, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.6495726495726496, | |
| "grad_norm": 0.7664017459877794, | |
| "learning_rate": 2.750750110381001e-06, | |
| "loss": 0.499, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.6511266511266511, | |
| "grad_norm": 0.8801185511706636, | |
| "learning_rate": 2.728959241072149e-06, | |
| "loss": 0.4769, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.6526806526806527, | |
| "grad_norm": 0.645638892422625, | |
| "learning_rate": 2.7072225845665256e-06, | |
| "loss": 0.5266, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.6542346542346542, | |
| "grad_norm": 0.8936830405965801, | |
| "learning_rate": 2.6855406597474098e-06, | |
| "loss": 0.5236, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.6557886557886557, | |
| "grad_norm": 0.8974442709328094, | |
| "learning_rate": 2.6639139841915628e-06, | |
| "loss": 0.5072, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.6573426573426573, | |
| "grad_norm": 0.8176617831786887, | |
| "learning_rate": 2.6423430741568746e-06, | |
| "loss": 0.6122, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.6588966588966589, | |
| "grad_norm": 1.027696072983406, | |
| "learning_rate": 2.6208284445700373e-06, | |
| "loss": 0.5373, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.6604506604506605, | |
| "grad_norm": 1.254318562097477, | |
| "learning_rate": 2.5993706090142484e-06, | |
| "loss": 0.5659, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.662004662004662, | |
| "grad_norm": 0.9412050785188167, | |
| "learning_rate": 2.577970079716959e-06, | |
| "loss": 0.6532, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.6635586635586636, | |
| "grad_norm": 0.669172885578478, | |
| "learning_rate": 2.5566273675376386e-06, | |
| "loss": 0.4321, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.6651126651126651, | |
| "grad_norm": 0.7839761385934717, | |
| "learning_rate": 2.535342981955591e-06, | |
| "loss": 0.5029, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 0.6291916677121909, | |
| "learning_rate": 2.5141174310577774e-06, | |
| "loss": 0.5619, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.6682206682206682, | |
| "grad_norm": 0.6009527872848712, | |
| "learning_rate": 2.492951221526705e-06, | |
| "loss": 0.4838, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.6697746697746698, | |
| "grad_norm": 0.7557317812271531, | |
| "learning_rate": 2.4718448586283126e-06, | |
| "loss": 0.5686, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.6713286713286714, | |
| "grad_norm": 0.6594827809287767, | |
| "learning_rate": 2.4507988461999283e-06, | |
| "loss": 0.4434, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.6728826728826729, | |
| "grad_norm": 0.6721258490052082, | |
| "learning_rate": 2.429813686638227e-06, | |
| "loss": 0.5448, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.6744366744366744, | |
| "grad_norm": 0.6253425210970757, | |
| "learning_rate": 2.408889880887246e-06, | |
| "loss": 0.5399, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.675990675990676, | |
| "grad_norm": 0.7132068704320651, | |
| "learning_rate": 2.38802792842642e-06, | |
| "loss": 0.4131, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.6775446775446775, | |
| "grad_norm": 0.9992884448946338, | |
| "learning_rate": 2.3672283272586745e-06, | |
| "loss": 0.6681, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.679098679098679, | |
| "grad_norm": 0.6736697588900878, | |
| "learning_rate": 2.346491573898513e-06, | |
| "loss": 0.5672, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.6806526806526807, | |
| "grad_norm": 0.8547599476784933, | |
| "learning_rate": 2.3258181633601836e-06, | |
| "loss": 0.5186, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.6822066822066822, | |
| "grad_norm": 0.7608589363389933, | |
| "learning_rate": 2.30520858914585e-06, | |
| "loss": 0.4367, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.6837606837606838, | |
| "grad_norm": 0.6900628130745625, | |
| "learning_rate": 2.2846633432338256e-06, | |
| "loss": 0.5083, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.6853146853146853, | |
| "grad_norm": 0.7581972429614234, | |
| "learning_rate": 2.2641829160668137e-06, | |
| "loss": 0.4485, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.6868686868686869, | |
| "grad_norm": 0.7973753083321776, | |
| "learning_rate": 2.243767796540207e-06, | |
| "loss": 0.4911, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.6884226884226884, | |
| "grad_norm": 0.8430463031780523, | |
| "learning_rate": 2.223418471990421e-06, | |
| "loss": 0.6334, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.6899766899766899, | |
| "grad_norm": 0.6780160144046566, | |
| "learning_rate": 2.2031354281832555e-06, | |
| "loss": 0.4788, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.6915306915306916, | |
| "grad_norm": 0.8261099182920753, | |
| "learning_rate": 2.1829191493022974e-06, | |
| "loss": 0.5132, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.6930846930846931, | |
| "grad_norm": 0.9687007131393034, | |
| "learning_rate": 2.1627701179373645e-06, | |
| "loss": 0.5751, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6946386946386947, | |
| "grad_norm": 0.6461088689056241, | |
| "learning_rate": 2.142688815072986e-06, | |
| "loss": 0.5866, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.6961926961926962, | |
| "grad_norm": 0.8088646308620774, | |
| "learning_rate": 2.1226757200769225e-06, | |
| "loss": 0.6883, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.6977466977466977, | |
| "grad_norm": 1.6821696576244451, | |
| "learning_rate": 2.102731310688723e-06, | |
| "loss": 0.5542, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6993006993006993, | |
| "grad_norm": 0.6856586399011556, | |
| "learning_rate": 2.0828560630083127e-06, | |
| "loss": 0.3613, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.7008547008547008, | |
| "grad_norm": 0.8799640778401495, | |
| "learning_rate": 2.0630504514846372e-06, | |
| "loss": 0.6021, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.7024087024087025, | |
| "grad_norm": 1.0346792136769154, | |
| "learning_rate": 2.0433149489043296e-06, | |
| "loss": 0.6368, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.703962703962704, | |
| "grad_norm": 0.7233064199559888, | |
| "learning_rate": 2.0236500263804355e-06, | |
| "loss": 0.5101, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.7055167055167055, | |
| "grad_norm": 0.6101595125764867, | |
| "learning_rate": 2.0040561533411494e-06, | |
| "loss": 0.4283, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.7070707070707071, | |
| "grad_norm": 0.6838739790158097, | |
| "learning_rate": 1.9845337975186297e-06, | |
| "loss": 0.4044, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.7086247086247086, | |
| "grad_norm": 0.6512670498735073, | |
| "learning_rate": 1.9650834249378125e-06, | |
| "loss": 0.5122, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.7101787101787101, | |
| "grad_norm": 0.7448486863111607, | |
| "learning_rate": 1.945705499905305e-06, | |
| "loss": 0.4664, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.7117327117327117, | |
| "grad_norm": 0.6714430637930826, | |
| "learning_rate": 1.926400484998289e-06, | |
| "loss": 0.4605, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.7132867132867133, | |
| "grad_norm": 0.7034702671220221, | |
| "learning_rate": 1.9071688410534828e-06, | |
| "loss": 0.4332, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.7148407148407149, | |
| "grad_norm": 0.920215915504864, | |
| "learning_rate": 1.8880110271561415e-06, | |
| "loss": 0.582, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.7163947163947164, | |
| "grad_norm": 0.7704916182387226, | |
| "learning_rate": 1.8689275006291035e-06, | |
| "loss": 0.3934, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.717948717948718, | |
| "grad_norm": 0.7275954100685235, | |
| "learning_rate": 1.8499187170218614e-06, | |
| "loss": 0.6786, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.7195027195027195, | |
| "grad_norm": 0.9130677264258454, | |
| "learning_rate": 1.8309851300996934e-06, | |
| "loss": 0.4669, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.721056721056721, | |
| "grad_norm": 0.627613808696502, | |
| "learning_rate": 1.8121271918328314e-06, | |
| "loss": 0.3791, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.7226107226107226, | |
| "grad_norm": 0.7966353111195642, | |
| "learning_rate": 1.793345352385678e-06, | |
| "loss": 0.5131, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.7241647241647242, | |
| "grad_norm": 0.7257869097486319, | |
| "learning_rate": 1.7746400601060476e-06, | |
| "loss": 0.4698, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.7257187257187258, | |
| "grad_norm": 0.8274666640783022, | |
| "learning_rate": 1.7560117615144717e-06, | |
| "loss": 0.5127, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.7272727272727273, | |
| "grad_norm": 0.919410912868769, | |
| "learning_rate": 1.7374609012935412e-06, | |
| "loss": 0.5077, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.7288267288267288, | |
| "grad_norm": 0.8104780902360221, | |
| "learning_rate": 1.7189879222772894e-06, | |
| "loss": 0.4689, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.7303807303807304, | |
| "grad_norm": 0.7461709338199918, | |
| "learning_rate": 1.7005932654406165e-06, | |
| "loss": 0.3507, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.7319347319347319, | |
| "grad_norm": 0.6643952482627672, | |
| "learning_rate": 1.682277369888769e-06, | |
| "loss": 0.4538, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.7334887334887334, | |
| "grad_norm": 0.8395310070807439, | |
| "learning_rate": 1.6640406728468534e-06, | |
| "loss": 0.4401, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.7350427350427351, | |
| "grad_norm": 0.9088380178834886, | |
| "learning_rate": 1.6458836096494046e-06, | |
| "loss": 0.6829, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.7365967365967366, | |
| "grad_norm": 0.8158423552015206, | |
| "learning_rate": 1.6278066137299898e-06, | |
| "loss": 0.5124, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.7381507381507382, | |
| "grad_norm": 0.6628148397288011, | |
| "learning_rate": 1.6098101166108593e-06, | |
| "loss": 0.4556, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.7397047397047397, | |
| "grad_norm": 0.7157818223192728, | |
| "learning_rate": 1.5918945478926484e-06, | |
| "loss": 0.5349, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.7412587412587412, | |
| "grad_norm": 0.9140000377609874, | |
| "learning_rate": 1.5740603352441281e-06, | |
| "loss": 0.6558, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.7428127428127428, | |
| "grad_norm": 1.8219839216709126, | |
| "learning_rate": 1.5563079043919843e-06, | |
| "loss": 0.6314, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.7443667443667443, | |
| "grad_norm": 0.6702786236421273, | |
| "learning_rate": 1.5386376791106627e-06, | |
| "loss": 0.4835, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.745920745920746, | |
| "grad_norm": 0.8589486289983319, | |
| "learning_rate": 1.5210500812122548e-06, | |
| "loss": 0.28, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.7474747474747475, | |
| "grad_norm": 0.6696692189402571, | |
| "learning_rate": 1.5035455305364188e-06, | |
| "loss": 0.4483, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.749028749028749, | |
| "grad_norm": 0.832181293716284, | |
| "learning_rate": 1.4861244449403717e-06, | |
| "loss": 0.5884, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.7505827505827506, | |
| "grad_norm": 0.9644123682005706, | |
| "learning_rate": 1.4687872402888991e-06, | |
| "loss": 0.4757, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.7521367521367521, | |
| "grad_norm": 0.8646333036074157, | |
| "learning_rate": 1.451534330444438e-06, | |
| "loss": 0.4525, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.7536907536907537, | |
| "grad_norm": 0.7543556415051755, | |
| "learning_rate": 1.4343661272571967e-06, | |
| "loss": 0.453, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.7552447552447552, | |
| "grad_norm": 0.7262516356092494, | |
| "learning_rate": 1.4172830405553216e-06, | |
| "loss": 0.3922, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.7567987567987567, | |
| "grad_norm": 0.7925066399718954, | |
| "learning_rate": 1.4002854781351104e-06, | |
| "loss": 0.517, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.7583527583527584, | |
| "grad_norm": 0.7148060275837002, | |
| "learning_rate": 1.3833738457512842e-06, | |
| "loss": 0.3847, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.7599067599067599, | |
| "grad_norm": 0.6309982281142955, | |
| "learning_rate": 1.3665485471072937e-06, | |
| "loss": 0.4778, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.7614607614607615, | |
| "grad_norm": 0.7642090785940528, | |
| "learning_rate": 1.3498099838456947e-06, | |
| "loss": 0.4309, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.763014763014763, | |
| "grad_norm": 0.7640039314162583, | |
| "learning_rate": 1.3331585555385458e-06, | |
| "loss": 0.4696, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.7645687645687645, | |
| "grad_norm": 0.8643751125446875, | |
| "learning_rate": 1.3165946596778773e-06, | |
| "loss": 0.4918, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.7661227661227661, | |
| "grad_norm": 0.6804715835256383, | |
| "learning_rate": 1.3001186916662066e-06, | |
| "loss": 0.4724, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.7676767676767676, | |
| "grad_norm": 1.0793870282462463, | |
| "learning_rate": 1.2837310448070929e-06, | |
| "loss": 0.5909, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.7692307692307693, | |
| "grad_norm": 0.7827368042523198, | |
| "learning_rate": 1.2674321102957476e-06, | |
| "loss": 0.5138, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.7707847707847708, | |
| "grad_norm": 0.847544394369445, | |
| "learning_rate": 1.251222277209702e-06, | |
| "loss": 0.603, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.7723387723387724, | |
| "grad_norm": 0.9723527581718034, | |
| "learning_rate": 1.2351019324995128e-06, | |
| "loss": 0.6304, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.7738927738927739, | |
| "grad_norm": 0.6468908302032275, | |
| "learning_rate": 1.2190714609795334e-06, | |
| "loss": 0.521, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.7754467754467754, | |
| "grad_norm": 0.7114004263355455, | |
| "learning_rate": 1.203131245318721e-06, | |
| "loss": 0.4607, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.777000777000777, | |
| "grad_norm": 0.6224249787588289, | |
| "learning_rate": 1.1872816660315029e-06, | |
| "loss": 0.3998, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.7785547785547785, | |
| "grad_norm": 0.8157807813857092, | |
| "learning_rate": 1.171523101468693e-06, | |
| "loss": 0.5267, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.7801087801087802, | |
| "grad_norm": 0.799774654796459, | |
| "learning_rate": 1.1558559278084647e-06, | |
| "loss": 0.4909, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.7816627816627817, | |
| "grad_norm": 0.712975958103021, | |
| "learning_rate": 1.1402805190473649e-06, | |
| "loss": 0.4833, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.7832167832167832, | |
| "grad_norm": 0.7414921854529274, | |
| "learning_rate": 1.124797246991387e-06, | |
| "loss": 0.4083, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.7847707847707848, | |
| "grad_norm": 1.0834283945062337, | |
| "learning_rate": 1.1094064812471028e-06, | |
| "loss": 0.5327, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.7863247863247863, | |
| "grad_norm": 0.7386660114893862, | |
| "learning_rate": 1.0941085892128272e-06, | |
| "loss": 0.4922, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.7878787878787878, | |
| "grad_norm": 0.5879874025861683, | |
| "learning_rate": 1.07890393606986e-06, | |
| "loss": 0.3765, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.7894327894327894, | |
| "grad_norm": 0.8290075934295431, | |
| "learning_rate": 1.0637928847737594e-06, | |
| "loss": 0.5165, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.790986790986791, | |
| "grad_norm": 0.9222992948806642, | |
| "learning_rate": 1.0487757960456812e-06, | |
| "loss": 0.5239, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.7925407925407926, | |
| "grad_norm": 0.894510374178912, | |
| "learning_rate": 1.0338530283637704e-06, | |
| "loss": 0.5294, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.7940947940947941, | |
| "grad_norm": 0.9067683678931795, | |
| "learning_rate": 1.0190249379546024e-06, | |
| "loss": 0.6271, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.7956487956487956, | |
| "grad_norm": 0.8420348253099458, | |
| "learning_rate": 1.0042918787846757e-06, | |
| "loss": 0.4476, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.7972027972027972, | |
| "grad_norm": 2.2314012317895555, | |
| "learning_rate": 9.896542025519645e-07, | |
| "loss": 0.4875, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.7987567987567987, | |
| "grad_norm": 1.112553561220701, | |
| "learning_rate": 9.751122586775253e-07, | |
| "loss": 0.4668, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.8003108003108003, | |
| "grad_norm": 0.8631165830420672, | |
| "learning_rate": 9.606663942971568e-07, | |
| "loss": 0.4823, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.8018648018648019, | |
| "grad_norm": 0.8259570757220709, | |
| "learning_rate": 9.463169542531059e-07, | |
| "loss": 0.5339, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.8034188034188035, | |
| "grad_norm": 1.166843823225754, | |
| "learning_rate": 9.320642810858421e-07, | |
| "loss": 0.5133, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.804972804972805, | |
| "grad_norm": 0.9069458506573143, | |
| "learning_rate": 9.179087150258814e-07, | |
| "loss": 0.4574, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.8065268065268065, | |
| "grad_norm": 0.6083374762695839, | |
| "learning_rate": 9.038505939856612e-07, | |
| "loss": 0.5352, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.8080808080808081, | |
| "grad_norm": 0.7051533869734491, | |
| "learning_rate": 8.898902535514747e-07, | |
| "loss": 0.486, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.8096348096348096, | |
| "grad_norm": 1.1361715723258188, | |
| "learning_rate": 8.76028026975459e-07, | |
| "loss": 0.5395, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.8111888111888111, | |
| "grad_norm": 0.7251363387868152, | |
| "learning_rate": 8.62264245167641e-07, | |
| "loss": 0.4734, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.8127428127428128, | |
| "grad_norm": 0.7015732791695378, | |
| "learning_rate": 8.485992366880419e-07, | |
| "loss": 0.5281, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.8142968142968143, | |
| "grad_norm": 1.5424433933643296, | |
| "learning_rate": 8.35033327738829e-07, | |
| "loss": 0.4043, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.8158508158508159, | |
| "grad_norm": 0.9218463524060198, | |
| "learning_rate": 8.215668421565276e-07, | |
| "loss": 0.5614, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.8174048174048174, | |
| "grad_norm": 0.764325971788183, | |
| "learning_rate": 8.082001014042945e-07, | |
| "loss": 0.5032, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.818958818958819, | |
| "grad_norm": 0.652486590264396, | |
| "learning_rate": 7.949334245642459e-07, | |
| "loss": 0.4544, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.8205128205128205, | |
| "grad_norm": 0.7176940824971066, | |
| "learning_rate": 7.817671283298345e-07, | |
| "loss": 0.5098, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.822066822066822, | |
| "grad_norm": 1.823594644970129, | |
| "learning_rate": 7.687015269982917e-07, | |
| "loss": 0.4841, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.8236208236208237, | |
| "grad_norm": 0.7669592956201216, | |
| "learning_rate": 7.557369324631303e-07, | |
| "loss": 0.5751, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.8251748251748252, | |
| "grad_norm": 0.7791697796343787, | |
| "learning_rate": 7.4287365420669e-07, | |
| "loss": 0.5334, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.8267288267288267, | |
| "grad_norm": 0.7675193579164078, | |
| "learning_rate": 7.301119992927585e-07, | |
| "loss": 0.4749, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.8282828282828283, | |
| "grad_norm": 0.8274131519826066, | |
| "learning_rate": 7.174522723592342e-07, | |
| "loss": 0.5556, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.8298368298368298, | |
| "grad_norm": 0.6248155156332447, | |
| "learning_rate": 7.048947756108576e-07, | |
| "loss": 0.4449, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.8313908313908314, | |
| "grad_norm": 0.764818832164719, | |
| "learning_rate": 6.924398088119988e-07, | |
| "loss": 0.5908, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.8329448329448329, | |
| "grad_norm": 0.7925584720042309, | |
| "learning_rate": 6.800876692794994e-07, | |
| "loss": 0.5557, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.8344988344988346, | |
| "grad_norm": 0.8639156535546165, | |
| "learning_rate": 6.678386518755747e-07, | |
| "loss": 0.5577, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.8360528360528361, | |
| "grad_norm": 0.8390195478931005, | |
| "learning_rate": 6.556930490007762e-07, | |
| "loss": 0.5484, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.8376068376068376, | |
| "grad_norm": 0.63339201522811, | |
| "learning_rate": 6.436511505870091e-07, | |
| "loss": 0.5014, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.8391608391608392, | |
| "grad_norm": 0.889532359805588, | |
| "learning_rate": 6.317132440906188e-07, | |
| "loss": 0.5266, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.8407148407148407, | |
| "grad_norm": 0.5624920627332638, | |
| "learning_rate": 6.198796144855168e-07, | |
| "loss": 0.4393, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.8422688422688422, | |
| "grad_norm": 0.6994504769690357, | |
| "learning_rate": 6.081505442563912e-07, | |
| "loss": 0.4758, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.8438228438228438, | |
| "grad_norm": 0.677345489523822, | |
| "learning_rate": 5.965263133919508e-07, | |
| "loss": 0.4376, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.8453768453768454, | |
| "grad_norm": 1.1792412849625011, | |
| "learning_rate": 5.850071993782525e-07, | |
| "loss": 0.557, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.846930846930847, | |
| "grad_norm": 0.8383140877354333, | |
| "learning_rate": 5.735934771920704e-07, | |
| "loss": 0.4129, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.8484848484848485, | |
| "grad_norm": 0.7877126864302514, | |
| "learning_rate": 5.622854192943317e-07, | |
| "loss": 0.5008, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.85003885003885, | |
| "grad_norm": 0.9108037190809487, | |
| "learning_rate": 5.510832956236173e-07, | |
| "loss": 0.4783, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.8515928515928516, | |
| "grad_norm": 1.0094511865353648, | |
| "learning_rate": 5.399873735897137e-07, | |
| "loss": 0.4469, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.8531468531468531, | |
| "grad_norm": 0.663812075111233, | |
| "learning_rate": 5.289979180672344e-07, | |
| "loss": 0.6368, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.8547008547008547, | |
| "grad_norm": 0.7540552593487783, | |
| "learning_rate": 5.181151913892896e-07, | |
| "loss": 0.4343, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.8562548562548562, | |
| "grad_norm": 1.0037008402911451, | |
| "learning_rate": 5.073394533412296e-07, | |
| "loss": 0.5469, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.8578088578088578, | |
| "grad_norm": 0.5605077409329356, | |
| "learning_rate": 4.966709611544435e-07, | |
| "loss": 0.4462, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.8593628593628594, | |
| "grad_norm": 0.8732352825291733, | |
| "learning_rate": 4.861099695002158e-07, | |
| "loss": 0.5251, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.8609168609168609, | |
| "grad_norm": 0.7053119794249835, | |
| "learning_rate": 4.7565673048364735e-07, | |
| "loss": 0.4842, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.8624708624708625, | |
| "grad_norm": 0.821971296932637, | |
| "learning_rate": 4.6531149363764126e-07, | |
| "loss": 0.4487, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.864024864024864, | |
| "grad_norm": 1.2794248867833895, | |
| "learning_rate": 4.550745059169398e-07, | |
| "loss": 0.5328, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.8655788655788655, | |
| "grad_norm": 0.7489672108536733, | |
| "learning_rate": 4.4494601169223715e-07, | |
| "loss": 0.5069, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.8671328671328671, | |
| "grad_norm": 0.6659188193571587, | |
| "learning_rate": 4.349262527443371e-07, | |
| "loss": 0.5734, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.8686868686868687, | |
| "grad_norm": 0.7454236276103242, | |
| "learning_rate": 4.2501546825838735e-07, | |
| "loss": 0.358, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.8702408702408703, | |
| "grad_norm": 0.9570678482900348, | |
| "learning_rate": 4.152138948181689e-07, | |
| "loss": 0.538, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.8717948717948718, | |
| "grad_norm": 0.9593262810731289, | |
| "learning_rate": 4.0552176640045017e-07, | |
| "loss": 0.6003, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.8733488733488733, | |
| "grad_norm": 0.6814712625667729, | |
| "learning_rate": 3.959393143693946e-07, | |
| "loss": 0.4296, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.8749028749028749, | |
| "grad_norm": 0.8468228674134194, | |
| "learning_rate": 3.864667674710454e-07, | |
| "loss": 0.478, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.8764568764568764, | |
| "grad_norm": 0.8214210844483506, | |
| "learning_rate": 3.7710435182786053e-07, | |
| "loss": 0.4921, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.878010878010878, | |
| "grad_norm": 0.7775961282959882, | |
| "learning_rate": 3.6785229093331987e-07, | |
| "loss": 0.5158, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.8795648795648796, | |
| "grad_norm": 0.7706689173433338, | |
| "learning_rate": 3.587108056465827e-07, | |
| "loss": 0.4987, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.8811188811188811, | |
| "grad_norm": 0.7115920841531194, | |
| "learning_rate": 3.496801141872225e-07, | |
| "loss": 0.5508, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.8826728826728827, | |
| "grad_norm": 0.7360248498356703, | |
| "learning_rate": 3.407604321300123e-07, | |
| "loss": 0.4622, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.8842268842268842, | |
| "grad_norm": 0.6656952275189396, | |
| "learning_rate": 3.3195197239978384e-07, | |
| "loss": 0.4616, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.8857808857808858, | |
| "grad_norm": 1.0438892609670738, | |
| "learning_rate": 3.232549452663403e-07, | |
| "loss": 0.6725, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.8873348873348873, | |
| "grad_norm": 0.9087915959905298, | |
| "learning_rate": 3.146695583394377e-07, | |
| "loss": 0.5397, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 0.7459812770941501, | |
| "learning_rate": 3.061960165638317e-07, | |
| "loss": 0.4763, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.8904428904428905, | |
| "grad_norm": 0.9368206700984494, | |
| "learning_rate": 2.9783452221438304e-07, | |
| "loss": 0.4894, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.891996891996892, | |
| "grad_norm": 0.7581701388162561, | |
| "learning_rate": 2.895852748912298e-07, | |
| "loss": 0.4285, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.8935508935508936, | |
| "grad_norm": 0.7295548456707804, | |
| "learning_rate": 2.814484715150212e-07, | |
| "loss": 0.4201, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.8951048951048951, | |
| "grad_norm": 0.6762252893935657, | |
| "learning_rate": 2.734243063222181e-07, | |
| "loss": 0.4842, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.8966588966588966, | |
| "grad_norm": 0.7273733934803849, | |
| "learning_rate": 2.655129708604576e-07, | |
| "loss": 0.3651, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.8982128982128982, | |
| "grad_norm": 0.9666761581157529, | |
| "learning_rate": 2.5771465398397757e-07, | |
| "loss": 0.7778, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.8997668997668997, | |
| "grad_norm": 0.6776269970027221, | |
| "learning_rate": 2.5002954184910887e-07, | |
| "loss": 0.5259, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.9013209013209014, | |
| "grad_norm": 0.5934838544460401, | |
| "learning_rate": 2.424578179098358e-07, | |
| "loss": 0.5417, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.9028749028749029, | |
| "grad_norm": 0.8173118127489211, | |
| "learning_rate": 2.3499966291341213e-07, | |
| "loss": 0.4431, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.9044289044289044, | |
| "grad_norm": 0.6926431286616653, | |
| "learning_rate": 2.2765525489604702e-07, | |
| "loss": 0.3946, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.905982905982906, | |
| "grad_norm": 0.8058462854601025, | |
| "learning_rate": 2.2042476917865706e-07, | |
| "loss": 0.4716, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.9075369075369075, | |
| "grad_norm": 0.735345958366895, | |
| "learning_rate": 2.1330837836267882e-07, | |
| "loss": 0.4664, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.9090909090909091, | |
| "grad_norm": 0.6326682057182371, | |
| "learning_rate": 2.0630625232595126e-07, | |
| "loss": 0.395, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.9106449106449106, | |
| "grad_norm": 0.7934491290140667, | |
| "learning_rate": 1.9941855821865918e-07, | |
| "loss": 0.4923, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.9121989121989122, | |
| "grad_norm": 0.7620750821145263, | |
| "learning_rate": 1.9264546045934196e-07, | |
| "loss": 0.4928, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.9137529137529138, | |
| "grad_norm": 0.6744132607863633, | |
| "learning_rate": 1.859871207309688e-07, | |
| "loss": 0.544, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.9153069153069153, | |
| "grad_norm": 0.850939097017279, | |
| "learning_rate": 1.7944369797708362e-07, | |
| "loss": 0.4553, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.9168609168609169, | |
| "grad_norm": 0.6424420190959574, | |
| "learning_rate": 1.7301534839800348e-07, | |
| "loss": 0.3915, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.9184149184149184, | |
| "grad_norm": 0.7046012463166428, | |
| "learning_rate": 1.6670222544709515e-07, | |
| "loss": 0.4708, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.9199689199689199, | |
| "grad_norm": 1.0364283887763577, | |
| "learning_rate": 1.6050447982711214e-07, | |
| "loss": 0.4111, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.9215229215229215, | |
| "grad_norm": 0.7049190344022591, | |
| "learning_rate": 1.5442225948659183e-07, | |
| "loss": 0.5696, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 0.8155103152236225, | |
| "learning_rate": 1.4845570961633192e-07, | |
| "loss": 0.5059, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.9246309246309247, | |
| "grad_norm": 0.5949094353172898, | |
| "learning_rate": 1.426049726459172e-07, | |
| "loss": 0.4638, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.9261849261849262, | |
| "grad_norm": 0.6815208521528887, | |
| "learning_rate": 1.368701882403234e-07, | |
| "loss": 0.4551, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.9277389277389277, | |
| "grad_norm": 0.9296974247938309, | |
| "learning_rate": 1.3125149329658083e-07, | |
| "loss": 0.4026, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.9292929292929293, | |
| "grad_norm": 1.2038986087224606, | |
| "learning_rate": 1.2574902194050996e-07, | |
| "loss": 0.4656, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.9308469308469308, | |
| "grad_norm": 1.0581872023301784, | |
| "learning_rate": 1.2036290552351838e-07, | |
| "loss": 0.6128, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.9324009324009324, | |
| "grad_norm": 0.9158733284924282, | |
| "learning_rate": 1.150932726194609e-07, | |
| "loss": 0.4848, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.933954933954934, | |
| "grad_norm": 0.7543888048930436, | |
| "learning_rate": 1.0994024902157674e-07, | |
| "loss": 0.4701, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.9355089355089355, | |
| "grad_norm": 0.7081866182048879, | |
| "learning_rate": 1.0490395773948336e-07, | |
| "loss": 0.5253, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.9370629370629371, | |
| "grad_norm": 0.6949901814621636, | |
| "learning_rate": 9.998451899624007e-08, | |
| "loss": 0.4287, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.9386169386169386, | |
| "grad_norm": 0.8441421847918561, | |
| "learning_rate": 9.5182050225478e-08, | |
| "loss": 0.5441, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.9401709401709402, | |
| "grad_norm": 0.7034318164083617, | |
| "learning_rate": 9.049666606859852e-08, | |
| "loss": 0.4834, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.9417249417249417, | |
| "grad_norm": 0.8895517754609739, | |
| "learning_rate": 8.592847837203655e-08, | |
| "loss": 0.4564, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.9432789432789432, | |
| "grad_norm": 0.7017208181125264, | |
| "learning_rate": 8.147759618458706e-08, | |
| "loss": 0.4238, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.9448329448329449, | |
| "grad_norm": 0.7595723199577458, | |
| "learning_rate": 7.714412575480556e-08, | |
| "loss": 0.541, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.9463869463869464, | |
| "grad_norm": 0.701349856315337, | |
| "learning_rate": 7.292817052847068e-08, | |
| "loss": 0.5784, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.947940947940948, | |
| "grad_norm": 0.6902057779636126, | |
| "learning_rate": 6.882983114611497e-08, | |
| "loss": 0.5877, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.9494949494949495, | |
| "grad_norm": 0.7053000221431716, | |
| "learning_rate": 6.484920544062245e-08, | |
| "loss": 0.3299, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.951048951048951, | |
| "grad_norm": 0.7668065733213358, | |
| "learning_rate": 6.098638843489213e-08, | |
| "loss": 0.6096, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.9526029526029526, | |
| "grad_norm": 0.9203339601219532, | |
| "learning_rate": 5.7241472339572e-08, | |
| "loss": 0.6287, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.9541569541569541, | |
| "grad_norm": 0.7745409282534764, | |
| "learning_rate": 5.361454655085529e-08, | |
| "loss": 0.4677, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.9557109557109557, | |
| "grad_norm": 0.7385280228665362, | |
| "learning_rate": 5.0105697648347716e-08, | |
| "loss": 0.5869, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.9572649572649573, | |
| "grad_norm": 0.7825960999337372, | |
| "learning_rate": 4.671500939300133e-08, | |
| "loss": 0.498, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.9588189588189588, | |
| "grad_norm": 0.753511461886924, | |
| "learning_rate": 4.344256272511338e-08, | |
| "loss": 0.4964, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.9603729603729604, | |
| "grad_norm": 0.6840524141170308, | |
| "learning_rate": 4.0288435762396164e-08, | |
| "loss": 0.4294, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.9619269619269619, | |
| "grad_norm": 1.035162046106296, | |
| "learning_rate": 3.725270379811019e-08, | |
| "loss": 0.5584, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.9634809634809635, | |
| "grad_norm": 0.9156660131303417, | |
| "learning_rate": 3.4335439299268414e-08, | |
| "loss": 0.4166, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.965034965034965, | |
| "grad_norm": 0.7719462038166263, | |
| "learning_rate": 3.1536711904904816e-08, | |
| "loss": 0.4776, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.9665889665889665, | |
| "grad_norm": 0.6723067728580475, | |
| "learning_rate": 2.8856588424414632e-08, | |
| "loss": 0.4692, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.9681429681429682, | |
| "grad_norm": 0.8273069565821325, | |
| "learning_rate": 2.6295132835956748e-08, | |
| "loss": 0.4465, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.9696969696969697, | |
| "grad_norm": 0.7689357247441089, | |
| "learning_rate": 2.3852406284927687e-08, | |
| "loss": 0.4544, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.9712509712509713, | |
| "grad_norm": 0.7619653211518902, | |
| "learning_rate": 2.152846708250167e-08, | |
| "loss": 0.5586, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.9728049728049728, | |
| "grad_norm": 1.5148882415320697, | |
| "learning_rate": 1.93233707042384e-08, | |
| "loss": 0.4416, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.9743589743589743, | |
| "grad_norm": 0.723885981000404, | |
| "learning_rate": 1.723716978876133e-08, | |
| "loss": 0.366, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.9759129759129759, | |
| "grad_norm": 0.9896178053638355, | |
| "learning_rate": 1.5269914136497033e-08, | |
| "loss": 0.6383, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.9774669774669774, | |
| "grad_norm": 0.9450685315761042, | |
| "learning_rate": 1.3421650708487777e-08, | |
| "loss": 0.4261, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.9790209790209791, | |
| "grad_norm": 0.7776928320411156, | |
| "learning_rate": 1.1692423625273563e-08, | |
| "loss": 0.3718, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.9805749805749806, | |
| "grad_norm": 2.709950931518374, | |
| "learning_rate": 1.0082274165834627e-08, | |
| "loss": 0.4794, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.9821289821289821, | |
| "grad_norm": 0.812744833469983, | |
| "learning_rate": 8.591240766607779e-09, | |
| "loss": 0.5243, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.9836829836829837, | |
| "grad_norm": 0.7580063402130408, | |
| "learning_rate": 7.219359020570471e-09, | |
| "loss": 0.4821, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.9852369852369852, | |
| "grad_norm": 1.012231114852923, | |
| "learning_rate": 5.966661676388152e-09, | |
| "loss": 0.4589, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.9867909867909868, | |
| "grad_norm": 0.7714041439030666, | |
| "learning_rate": 4.833178637633773e-09, | |
| "loss": 0.5265, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.9883449883449883, | |
| "grad_norm": 0.7785451239864247, | |
| "learning_rate": 3.818936962076136e-09, | |
| "loss": 0.5888, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.98989898989899, | |
| "grad_norm": 0.8911993119168611, | |
| "learning_rate": 2.9239608610298618e-09, | |
| "loss": 0.415, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.9914529914529915, | |
| "grad_norm": 0.9270156128048929, | |
| "learning_rate": 2.148271698781401e-09, | |
| "loss": 0.4057, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.993006993006993, | |
| "grad_norm": 0.7334067869030746, | |
| "learning_rate": 1.4918879920750029e-09, | |
| "loss": 0.4826, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.9945609945609946, | |
| "grad_norm": 0.7633076997899867, | |
| "learning_rate": 9.548254096752862e-10, | |
| "loss": 0.6209, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.9961149961149961, | |
| "grad_norm": 1.0315005661760475, | |
| "learning_rate": 5.370967719897646e-10, | |
| "loss": 0.4908, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.9976689976689976, | |
| "grad_norm": 0.8907517067968038, | |
| "learning_rate": 2.387120507629792e-10, | |
| "loss": 0.467, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.9992229992229992, | |
| "grad_norm": 0.9387605136360067, | |
| "learning_rate": 5.967836884168687e-11, | |
| "loss": 0.5826, | |
| "step": 643 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 643, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 70581768388608.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
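
The table above is the complete `trainer_state.json` that the Hugging Face `Trainer` writes alongside a checkpoint: a `log_history` array with one `{epoch, grad_norm, learning_rate, loss, step}` record per logged step (here `logging_steps: 1` over `max_steps: 643`), followed by run-level metadata. Below is a minimal Python sketch for inspecting such a state; the filename and the smoothing window are assumptions for illustration, not anything recorded in the state itself.

```python
import json

# Load the trainer state (assumed path; Trainer writes a file named
# "trainer_state.json" inside each checkpoint directory).
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]  # one dict per logged step
losses = [rec["loss"] for rec in history if "loss" in rec]

# The per-step loss is noisy (train_batch_size is 1 here), so smooth it
# with a trailing moving average. The window size is an arbitrary choice.
window = 25
smoothed = [
    sum(losses[max(0, i - window + 1): i + 1]) / (i - max(0, i - window + 1) + 1)
    for i in range(len(losses))
]

print(f"steps logged:  {len(losses)}")
print(f"first loss:    {losses[0]:.4f}")
print(f"final loss:    {losses[-1]:.4f}")
print(f"smoothed tail: {smoothed[-1]:.4f}  ({window}-step trailing mean)")
```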