| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9952, |
| "eval_steps": 500, |
| "global_step": 585, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.00512, |
| "grad_norm": 7.427512523661066, |
| "learning_rate": 1.3559322033898307e-06, |
| "loss": 1.1833, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.01024, |
| "grad_norm": 7.378805913304633, |
| "learning_rate": 2.7118644067796613e-06, |
| "loss": 1.1693, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.01536, |
| "grad_norm": 7.387359456401948, |
| "learning_rate": 4.067796610169492e-06, |
| "loss": 1.1699, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.02048, |
| "grad_norm": 5.424792613029199, |
| "learning_rate": 5.423728813559323e-06, |
| "loss": 1.112, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0256, |
| "grad_norm": 3.3531117149966567, |
| "learning_rate": 6.779661016949153e-06, |
| "loss": 1.0668, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.03072, |
| "grad_norm": 2.9208579813945645, |
| "learning_rate": 8.135593220338983e-06, |
| "loss": 1.0554, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.03584, |
| "grad_norm": 5.338271857324549, |
| "learning_rate": 9.491525423728815e-06, |
| "loss": 1.0192, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.04096, |
| "grad_norm": 5.570953182353899, |
| "learning_rate": 1.0847457627118645e-05, |
| "loss": 1.0134, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.04608, |
| "grad_norm": 5.770745007584102, |
| "learning_rate": 1.2203389830508477e-05, |
| "loss": 0.9676, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.0512, |
| "grad_norm": 4.652978095614179, |
| "learning_rate": 1.3559322033898305e-05, |
| "loss": 0.9515, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.05632, |
| "grad_norm": 2.8588327705676626, |
| "learning_rate": 1.4915254237288137e-05, |
| "loss": 0.9188, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.06144, |
| "grad_norm": 3.6027109410465066, |
| "learning_rate": 1.6271186440677967e-05, |
| "loss": 0.8979, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.06656, |
| "grad_norm": 2.9981487608394635, |
| "learning_rate": 1.76271186440678e-05, |
| "loss": 0.8804, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.07168, |
| "grad_norm": 1.9815532395129927, |
| "learning_rate": 1.898305084745763e-05, |
| "loss": 0.85, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.0768, |
| "grad_norm": 2.0993713191830765, |
| "learning_rate": 2.033898305084746e-05, |
| "loss": 0.8359, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.08192, |
| "grad_norm": 1.8644702221939118, |
| "learning_rate": 2.169491525423729e-05, |
| "loss": 0.8325, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.08704, |
| "grad_norm": 1.4001838150615264, |
| "learning_rate": 2.3050847457627122e-05, |
| "loss": 0.8145, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.09216, |
| "grad_norm": 1.5105243610460197, |
| "learning_rate": 2.4406779661016954e-05, |
| "loss": 0.8061, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.09728, |
| "grad_norm": 1.508229346305267, |
| "learning_rate": 2.576271186440678e-05, |
| "loss": 0.8019, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.1024, |
| "grad_norm": 1.313209951669769, |
| "learning_rate": 2.711864406779661e-05, |
| "loss": 0.8, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.10752, |
| "grad_norm": 1.173580022336048, |
| "learning_rate": 2.8474576271186442e-05, |
| "loss": 0.7764, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.11264, |
| "grad_norm": 1.293902010777013, |
| "learning_rate": 2.9830508474576274e-05, |
| "loss": 0.7891, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.11776, |
| "grad_norm": 0.9680297788054414, |
| "learning_rate": 3.1186440677966106e-05, |
| "loss": 0.7745, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.12288, |
| "grad_norm": 0.9265872685281233, |
| "learning_rate": 3.2542372881355934e-05, |
| "loss": 0.7678, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.128, |
| "grad_norm": 0.9305803654580205, |
| "learning_rate": 3.389830508474576e-05, |
| "loss": 0.7559, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.13312, |
| "grad_norm": 1.334014536013991, |
| "learning_rate": 3.52542372881356e-05, |
| "loss": 0.7697, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.13824, |
| "grad_norm": 1.7668925981671184, |
| "learning_rate": 3.6610169491525426e-05, |
| "loss": 0.7506, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.14336, |
| "grad_norm": 1.2041359597289543, |
| "learning_rate": 3.796610169491526e-05, |
| "loss": 0.7446, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.14848, |
| "grad_norm": 1.9838038548617862, |
| "learning_rate": 3.932203389830509e-05, |
| "loss": 0.7511, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.1536, |
| "grad_norm": 1.2091908618849432, |
| "learning_rate": 4.067796610169492e-05, |
| "loss": 0.743, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.15872, |
| "grad_norm": 2.013739044410424, |
| "learning_rate": 4.203389830508475e-05, |
| "loss": 0.739, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.16384, |
| "grad_norm": 1.7387033344897997, |
| "learning_rate": 4.338983050847458e-05, |
| "loss": 0.737, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.16896, |
| "grad_norm": 1.7866106146689165, |
| "learning_rate": 4.474576271186441e-05, |
| "loss": 0.7392, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.17408, |
| "grad_norm": 1.5407095248074152, |
| "learning_rate": 4.6101694915254244e-05, |
| "loss": 0.7286, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.1792, |
| "grad_norm": 1.7355594859856742, |
| "learning_rate": 4.745762711864407e-05, |
| "loss": 0.7429, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.18432, |
| "grad_norm": 1.4876420903172791, |
| "learning_rate": 4.881355932203391e-05, |
| "loss": 0.7229, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.18944, |
| "grad_norm": 2.2843469216868098, |
| "learning_rate": 5.016949152542373e-05, |
| "loss": 0.7288, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.19456, |
| "grad_norm": 1.6645490807582124, |
| "learning_rate": 5.152542372881356e-05, |
| "loss": 0.7189, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.19968, |
| "grad_norm": 2.3532899761581096, |
| "learning_rate": 5.288135593220339e-05, |
| "loss": 0.7163, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.2048, |
| "grad_norm": 2.497396548783959, |
| "learning_rate": 5.423728813559322e-05, |
| "loss": 0.7264, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.20992, |
| "grad_norm": 1.4481682870152812, |
| "learning_rate": 5.5593220338983056e-05, |
| "loss": 0.7144, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.21504, |
| "grad_norm": 3.1370809945974587, |
| "learning_rate": 5.6949152542372884e-05, |
| "loss": 0.7002, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.22016, |
| "grad_norm": 2.6339360870235264, |
| "learning_rate": 5.830508474576271e-05, |
| "loss": 0.7403, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.22528, |
| "grad_norm": 2.2084591884302647, |
| "learning_rate": 5.966101694915255e-05, |
| "loss": 0.7236, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.2304, |
| "grad_norm": 2.7378970801102107, |
| "learning_rate": 6.1016949152542376e-05, |
| "loss": 0.6975, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.23552, |
| "grad_norm": 1.8558926790043844, |
| "learning_rate": 6.237288135593221e-05, |
| "loss": 0.7187, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.24064, |
| "grad_norm": 2.2724542031435866, |
| "learning_rate": 6.372881355932204e-05, |
| "loss": 0.7264, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.24576, |
| "grad_norm": 2.8585186655333605, |
| "learning_rate": 6.508474576271187e-05, |
| "loss": 0.7122, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.25088, |
| "grad_norm": 2.2069560445393246, |
| "learning_rate": 6.64406779661017e-05, |
| "loss": 0.727, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.256, |
| "grad_norm": 3.1071789158018173, |
| "learning_rate": 6.779661016949152e-05, |
| "loss": 0.6951, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.26112, |
| "grad_norm": 2.0255083166280694, |
| "learning_rate": 6.915254237288137e-05, |
| "loss": 0.6928, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.26624, |
| "grad_norm": 3.3982026768893427, |
| "learning_rate": 7.05084745762712e-05, |
| "loss": 0.6963, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.27136, |
| "grad_norm": 1.7595270818326945, |
| "learning_rate": 7.186440677966102e-05, |
| "loss": 0.7136, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.27648, |
| "grad_norm": 3.522255480730156, |
| "learning_rate": 7.322033898305085e-05, |
| "loss": 0.701, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.2816, |
| "grad_norm": 2.0471574026263473, |
| "learning_rate": 7.457627118644068e-05, |
| "loss": 0.7015, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.28672, |
| "grad_norm": 4.076903746052479, |
| "learning_rate": 7.593220338983052e-05, |
| "loss": 0.7213, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.29184, |
| "grad_norm": 2.9540886546409, |
| "learning_rate": 7.728813559322035e-05, |
| "loss": 0.7072, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.29696, |
| "grad_norm": 2.930300987052726, |
| "learning_rate": 7.864406779661018e-05, |
| "loss": 0.7075, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.30208, |
| "grad_norm": 2.309566711188816, |
| "learning_rate": 8e-05, |
| "loss": 0.6845, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.3072, |
| "grad_norm": 3.0240560677976482, |
| "learning_rate": 7.999928656081034e-05, |
| "loss": 0.7071, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.31232, |
| "grad_norm": 2.1478418033816076, |
| "learning_rate": 7.999714626869112e-05, |
| "loss": 0.6906, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.31744, |
| "grad_norm": 3.179230462128308, |
| "learning_rate": 7.999357919999074e-05, |
| "loss": 0.7038, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.32256, |
| "grad_norm": 2.4903190109477693, |
| "learning_rate": 7.998858548195353e-05, |
| "loss": 0.7036, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.32768, |
| "grad_norm": 2.5167791307656717, |
| "learning_rate": 7.998216529271523e-05, |
| "loss": 0.7022, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.3328, |
| "grad_norm": 2.1782283151414723, |
| "learning_rate": 7.997431886129654e-05, |
| "loss": 0.7056, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.33792, |
| "grad_norm": 2.4061240485846707, |
| "learning_rate": 7.996504646759507e-05, |
| "loss": 0.7033, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.34304, |
| "grad_norm": 1.8094645315702094, |
| "learning_rate": 7.995434844237524e-05, |
| "loss": 0.703, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.34816, |
| "grad_norm": 2.3445365799928384, |
| "learning_rate": 7.994222516725659e-05, |
| "loss": 0.6953, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.35328, |
| "grad_norm": 1.99410664221794, |
| "learning_rate": 7.99286770747001e-05, |
| "loss": 0.6919, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.3584, |
| "grad_norm": 2.230153791562052, |
| "learning_rate": 7.991370464799278e-05, |
| "loss": 0.6862, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.36352, |
| "grad_norm": 1.9601599085514845, |
| "learning_rate": 7.989730842123042e-05, |
| "loss": 0.7128, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.36864, |
| "grad_norm": 1.5761765132400967, |
| "learning_rate": 7.987948897929855e-05, |
| "loss": 0.6745, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.37376, |
| "grad_norm": 1.679276038030434, |
| "learning_rate": 7.986024695785161e-05, |
| "loss": 0.6796, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.37888, |
| "grad_norm": 1.6649601965609995, |
| "learning_rate": 7.98395830432902e-05, |
| "loss": 0.6745, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.384, |
| "grad_norm": 1.4057964596604813, |
| "learning_rate": 7.981749797273661e-05, |
| "loss": 0.6979, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.38912, |
| "grad_norm": 2.014531329083667, |
| "learning_rate": 7.979399253400862e-05, |
| "loss": 0.7037, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.39424, |
| "grad_norm": 1.7428321466528032, |
| "learning_rate": 7.976906756559127e-05, |
| "loss": 0.6832, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.39936, |
| "grad_norm": 2.167027629573797, |
| "learning_rate": 7.974272395660703e-05, |
| "loss": 0.6879, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.40448, |
| "grad_norm": 1.6144350155950205, |
| "learning_rate": 7.971496264678404e-05, |
| "loss": 0.6877, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.4096, |
| "grad_norm": 1.5708068654881047, |
| "learning_rate": 7.968578462642265e-05, |
| "loss": 0.6989, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.41472, |
| "grad_norm": 1.555445454516115, |
| "learning_rate": 7.965519093636e-05, |
| "loss": 0.6992, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.41984, |
| "grad_norm": 1.8059552941628938, |
| "learning_rate": 7.962318266793294e-05, |
| "loss": 0.6912, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.42496, |
| "grad_norm": 1.0063199261184264, |
| "learning_rate": 7.958976096293916e-05, |
| "loss": 0.6929, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.43008, |
| "grad_norm": 1.7307326550028748, |
| "learning_rate": 7.955492701359636e-05, |
| "loss": 0.6902, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.4352, |
| "grad_norm": 1.205147537826931, |
| "learning_rate": 7.951868206249975e-05, |
| "loss": 0.6943, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.44032, |
| "grad_norm": 1.4160221907669925, |
| "learning_rate": 7.948102740257776e-05, |
| "loss": 0.6976, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.44544, |
| "grad_norm": 2.001829250494979, |
| "learning_rate": 7.944196437704593e-05, |
| "loss": 0.7038, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.45056, |
| "grad_norm": 1.1653512775535024, |
| "learning_rate": 7.940149437935887e-05, |
| "loss": 0.6849, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.45568, |
| "grad_norm": 1.6823929061601692, |
| "learning_rate": 7.935961885316074e-05, |
| "loss": 0.6925, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.4608, |
| "grad_norm": 1.533381264168228, |
| "learning_rate": 7.93163392922336e-05, |
| "loss": 0.7078, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.46592, |
| "grad_norm": 1.3854602682735884, |
| "learning_rate": 7.927165724044418e-05, |
| "loss": 0.6905, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.47104, |
| "grad_norm": 1.6672169257948595, |
| "learning_rate": 7.922557429168884e-05, |
| "loss": 0.6868, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.47616, |
| "grad_norm": 1.3374855328007862, |
| "learning_rate": 7.917809208983667e-05, |
| "loss": 0.6872, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.48128, |
| "grad_norm": 1.1062013903355805, |
| "learning_rate": 7.912921232867082e-05, |
| "loss": 0.6829, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.4864, |
| "grad_norm": 1.2577822175567075, |
| "learning_rate": 7.907893675182816e-05, |
| "loss": 0.6851, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.49152, |
| "grad_norm": 1.1436105339986973, |
| "learning_rate": 7.902726715273704e-05, |
| "loss": 0.6801, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.49664, |
| "grad_norm": 1.3288774009958406, |
| "learning_rate": 7.89742053745533e-05, |
| "loss": 0.7013, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.50176, |
| "grad_norm": 1.2793250945243777, |
| "learning_rate": 7.891975331009454e-05, |
| "loss": 0.6742, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.50688, |
| "grad_norm": 1.3915794984714676, |
| "learning_rate": 7.886391290177259e-05, |
| "loss": 0.692, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.512, |
| "grad_norm": 1.683470719946437, |
| "learning_rate": 7.880668614152426e-05, |
| "loss": 0.6903, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.51712, |
| "grad_norm": 1.1736596909100243, |
| "learning_rate": 7.874807507074019e-05, |
| "loss": 0.6769, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.52224, |
| "grad_norm": 1.7187295695085814, |
| "learning_rate": 7.868808178019215e-05, |
| "loss": 0.6904, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.52736, |
| "grad_norm": 0.9916974084040032, |
| "learning_rate": 7.862670840995836e-05, |
| "loss": 0.6788, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.53248, |
| "grad_norm": 1.7935336303746816, |
| "learning_rate": 7.856395714934718e-05, |
| "loss": 0.6952, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.5376, |
| "grad_norm": 1.4946062420453348, |
| "learning_rate": 7.849983023681905e-05, |
| "loss": 0.6853, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.54272, |
| "grad_norm": 1.245185249022715, |
| "learning_rate": 7.84343299599066e-05, |
| "loss": 0.6729, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.54784, |
| "grad_norm": 2.0094790105419738, |
| "learning_rate": 7.836745865513304e-05, |
| "loss": 0.682, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.55296, |
| "grad_norm": 1.065814539722233, |
| "learning_rate": 7.829921870792886e-05, |
| "loss": 0.6764, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.55808, |
| "grad_norm": 1.9579572631250968, |
| "learning_rate": 7.822961255254669e-05, |
| "loss": 0.6756, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.5632, |
| "grad_norm": 1.52170370675673, |
| "learning_rate": 7.815864267197448e-05, |
| "loss": 0.6816, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.56832, |
| "grad_norm": 1.704295073143993, |
| "learning_rate": 7.808631159784693e-05, |
| "loss": 0.6807, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.57344, |
| "grad_norm": 1.0971860063652723, |
| "learning_rate": 7.801262191035518e-05, |
| "loss": 0.6775, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.57856, |
| "grad_norm": 1.2658364692181785, |
| "learning_rate": 7.79375762381548e-05, |
| "loss": 0.6677, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.58368, |
| "grad_norm": 1.1136078326808156, |
| "learning_rate": 7.786117725827195e-05, |
| "loss": 0.6706, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.5888, |
| "grad_norm": 1.2432841023033496, |
| "learning_rate": 7.778342769600796e-05, |
| "loss": 0.6604, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.59392, |
| "grad_norm": 0.9447874229879114, |
| "learning_rate": 7.770433032484204e-05, |
| "loss": 0.6674, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.59904, |
| "grad_norm": 1.1728862828599786, |
| "learning_rate": 7.762388796633243e-05, |
| "loss": 0.6743, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.60416, |
| "grad_norm": 1.0379645631823264, |
| "learning_rate": 7.754210349001568e-05, |
| "loss": 0.6746, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.60928, |
| "grad_norm": 0.6784319178622393, |
| "learning_rate": 7.74589798133043e-05, |
| "loss": 0.6613, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.6144, |
| "grad_norm": 1.2334508195450562, |
| "learning_rate": 7.737451990138275e-05, |
| "loss": 0.6832, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.61952, |
| "grad_norm": 1.390023864901241, |
| "learning_rate": 7.728872676710155e-05, |
| "loss": 0.667, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.62464, |
| "grad_norm": 1.0288050461695384, |
| "learning_rate": 7.720160347086994e-05, |
| "loss": 0.6823, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.62976, |
| "grad_norm": 1.144163750515486, |
| "learning_rate": 7.711315312054659e-05, |
| "loss": 0.6692, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.63488, |
| "grad_norm": 0.9460143556927034, |
| "learning_rate": 7.702337887132883e-05, |
| "loss": 0.6608, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 1.617773004869984, |
| "learning_rate": 7.693228392564003e-05, |
| "loss": 0.6912, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.64512, |
| "grad_norm": 0.8796030638057838, |
| "learning_rate": 7.68398715330154e-05, |
| "loss": 0.6545, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.65024, |
| "grad_norm": 1.4530036523303451, |
| "learning_rate": 7.674614498998608e-05, |
| "loss": 0.667, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.65536, |
| "grad_norm": 1.6001652553788648, |
| "learning_rate": 7.66511076399615e-05, |
| "loss": 0.686, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.66048, |
| "grad_norm": 0.9295070052125337, |
| "learning_rate": 7.655476287311017e-05, |
| "loss": 0.6726, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.6656, |
| "grad_norm": 1.981385007424261, |
| "learning_rate": 7.645711412623871e-05, |
| "loss": 0.6955, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.67072, |
| "grad_norm": 1.1198337635879976, |
| "learning_rate": 7.635816488266925e-05, |
| "loss": 0.6747, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.67584, |
| "grad_norm": 1.8403136092865524, |
| "learning_rate": 7.62579186721152e-05, |
| "loss": 0.6817, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.68096, |
| "grad_norm": 1.211708500347191, |
| "learning_rate": 7.615637907055533e-05, |
| "loss": 0.6673, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.68608, |
| "grad_norm": 1.619702658720517, |
| "learning_rate": 7.60535497001062e-05, |
| "loss": 0.6659, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.6912, |
| "grad_norm": 1.3880946828869691, |
| "learning_rate": 7.594943422889293e-05, |
| "loss": 0.6876, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.69632, |
| "grad_norm": 1.3898444786546138, |
| "learning_rate": 7.584403637091839e-05, |
| "loss": 0.6697, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.70144, |
| "grad_norm": 1.1550848443588233, |
| "learning_rate": 7.57373598859307e-05, |
| "loss": 0.6708, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.70656, |
| "grad_norm": 1.2208384976092193, |
| "learning_rate": 7.562940857928914e-05, |
| "loss": 0.6547, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.71168, |
| "grad_norm": 0.7766104942706619, |
| "learning_rate": 7.552018630182829e-05, |
| "loss": 0.6548, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.7168, |
| "grad_norm": 1.2980119993455839, |
| "learning_rate": 7.540969694972086e-05, |
| "loss": 0.6642, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.72192, |
| "grad_norm": 1.1032988591515491, |
| "learning_rate": 7.529794446433853e-05, |
| "loss": 0.6567, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.72704, |
| "grad_norm": 1.0687712643010066, |
| "learning_rate": 7.51849328321114e-05, |
| "loss": 0.659, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.73216, |
| "grad_norm": 0.9993909705876705, |
| "learning_rate": 7.507066608438587e-05, |
| "loss": 0.6653, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.73728, |
| "grad_norm": 1.4634026159707303, |
| "learning_rate": 7.495514829728072e-05, |
| "loss": 0.6633, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.7424, |
| "grad_norm": 0.9096724763021085, |
| "learning_rate": 7.483838359154175e-05, |
| "loss": 0.6735, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.74752, |
| "grad_norm": 1.359529554918389, |
| "learning_rate": 7.472037613239487e-05, |
| "loss": 0.6678, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.75264, |
| "grad_norm": 0.989415493913379, |
| "learning_rate": 7.460113012939733e-05, |
| "loss": 0.6637, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.75776, |
| "grad_norm": 1.3223608862498835, |
| "learning_rate": 7.448064983628775e-05, |
| "loss": 0.6645, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.76288, |
| "grad_norm": 1.1620124411680315, |
| "learning_rate": 7.435893955083422e-05, |
| "loss": 0.6513, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.768, |
| "grad_norm": 1.0648774531623415, |
| "learning_rate": 7.423600361468117e-05, |
| "loss": 0.662, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.77312, |
| "grad_norm": 1.3007725912379944, |
| "learning_rate": 7.411184641319429e-05, |
| "loss": 0.6631, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.77824, |
| "grad_norm": 0.8104819966143042, |
| "learning_rate": 7.398647237530426e-05, |
| "loss": 0.64, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.78336, |
| "grad_norm": 1.0085673778184652, |
| "learning_rate": 7.385988597334867e-05, |
| "loss": 0.6574, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.78848, |
| "grad_norm": 1.2945419726019807, |
| "learning_rate": 7.373209172291253e-05, |
| "loss": 0.6475, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.7936, |
| "grad_norm": 1.0304441022051967, |
| "learning_rate": 7.360309418266715e-05, |
| "loss": 0.6517, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.79872, |
| "grad_norm": 0.9586021230391207, |
| "learning_rate": 7.347289795420759e-05, |
| "loss": 0.6582, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.80384, |
| "grad_norm": 1.1290408208864031, |
| "learning_rate": 7.334150768188839e-05, |
| "loss": 0.6609, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.80896, |
| "grad_norm": 1.1884226665875801, |
| "learning_rate": 7.320892805265807e-05, |
| "loss": 0.6699, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.81408, |
| "grad_norm": 0.8502814476612911, |
| "learning_rate": 7.307516379589177e-05, |
| "loss": 0.6493, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.8192, |
| "grad_norm": 0.9952888696907723, |
| "learning_rate": 7.294021968322261e-05, |
| "loss": 0.6484, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.82432, |
| "grad_norm": 1.2290779564393681, |
| "learning_rate": 7.280410052837156e-05, |
| "loss": 0.6582, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.82944, |
| "grad_norm": 0.8035227492015825, |
| "learning_rate": 7.266681118697555e-05, |
| "loss": 0.6699, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.83456, |
| "grad_norm": 1.1834030000315219, |
| "learning_rate": 7.252835655641445e-05, |
| "loss": 0.6625, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.83968, |
| "grad_norm": 0.8159782777745277, |
| "learning_rate": 7.23887415756362e-05, |
| "loss": 0.6474, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.8448, |
| "grad_norm": 1.5925994868655784, |
| "learning_rate": 7.224797122498074e-05, |
| "loss": 0.6568, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.84992, |
| "grad_norm": 0.7327801301614796, |
| "learning_rate": 7.210605052600233e-05, |
| "loss": 0.6535, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.85504, |
| "grad_norm": 1.3897449798624986, |
| "learning_rate": 7.196298454129037e-05, |
| "loss": 0.6586, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.86016, |
| "grad_norm": 0.9296756247599517, |
| "learning_rate": 7.18187783742889e-05, |
| "loss": 0.6493, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.86528, |
| "grad_norm": 1.4033864221213181, |
| "learning_rate": 7.167343716911446e-05, |
| "loss": 0.6535, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.8704, |
| "grad_norm": 1.2953910254634462, |
| "learning_rate": 7.15269661103726e-05, |
| "loss": 0.6522, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.87552, |
| "grad_norm": 0.7630866652770001, |
| "learning_rate": 7.137937042297303e-05, |
| "loss": 0.6533, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.88064, |
| "grad_norm": 1.6581576394049042, |
| "learning_rate": 7.123065537194311e-05, |
| "loss": 0.6466, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.88576, |
| "grad_norm": 0.894168267427839, |
| "learning_rate": 7.108082626224014e-05, |
| "loss": 0.6749, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.89088, |
| "grad_norm": 1.2060587657486854, |
| "learning_rate": 7.092988843856202e-05, |
| "loss": 0.6644, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.896, |
| "grad_norm": 0.9959417990440771, |
| "learning_rate": 7.077784728515668e-05, |
| "loss": 0.6578, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.90112, |
| "grad_norm": 1.0821074322876068, |
| "learning_rate": 7.062470822563003e-05, |
| "loss": 0.6543, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.90624, |
| "grad_norm": 0.7740564812790837, |
| "learning_rate": 7.047047672275235e-05, |
| "loss": 0.6595, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.91136, |
| "grad_norm": 0.7885588166965639, |
| "learning_rate": 7.031515827826356e-05, |
| "loss": 0.6677, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.91648, |
| "grad_norm": 0.8964594506190875, |
| "learning_rate": 7.015875843267696e-05, |
| "loss": 0.668, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.9216, |
| "grad_norm": 0.7922996726063177, |
| "learning_rate": 7.000128276508146e-05, |
| "loss": 0.6613, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.92672, |
| "grad_norm": 0.7656175088558786, |
| "learning_rate": 6.984273689294272e-05, |
| "loss": 0.6474, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.93184, |
| "grad_norm": 0.9214614344976504, |
| "learning_rate": 6.968312647190267e-05, |
| "loss": 0.6326, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.93696, |
| "grad_norm": 1.2164859593883526, |
| "learning_rate": 6.952245719557777e-05, |
| "loss": 0.6465, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.94208, |
| "grad_norm": 0.6778217095702366, |
| "learning_rate": 6.936073479535592e-05, |
| "loss": 0.657, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.9472, |
| "grad_norm": 0.8101497456748098, |
| "learning_rate": 6.919796504019208e-05, |
| "loss": 0.6762, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.95232, |
| "grad_norm": 0.8876944556782685, |
| "learning_rate": 6.903415373640231e-05, |
| "loss": 0.6475, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.95744, |
| "grad_norm": 1.0000115870935409, |
| "learning_rate": 6.886930672745684e-05, |
| "loss": 0.6561, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.96256, |
| "grad_norm": 1.0242604450962167, |
| "learning_rate": 6.870342989377146e-05, |
| "loss": 0.6434, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.96768, |
| "grad_norm": 0.9780023684498255, |
| "learning_rate": 6.853652915249786e-05, |
| "loss": 0.6545, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.9728, |
| "grad_norm": 0.9266467661951784, |
| "learning_rate": 6.836861045731254e-05, |
| "loss": 0.6501, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.97792, |
| "grad_norm": 0.9727642879091534, |
| "learning_rate": 6.819967979820441e-05, |
| "loss": 0.6457, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.98304, |
| "grad_norm": 0.6002892901190532, |
| "learning_rate": 6.802974320126105e-05, |
| "loss": 0.6535, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.98816, |
| "grad_norm": 0.7042555907925779, |
| "learning_rate": 6.785880672845391e-05, |
| "loss": 0.6497, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.99328, |
| "grad_norm": 1.0659679020014914, |
| "learning_rate": 6.768687647742188e-05, |
| "loss": 0.6594, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.9984, |
| "grad_norm": 0.919967416151866, |
| "learning_rate": 6.751395858125393e-05, |
| "loss": 0.6454, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.00352, |
| "grad_norm": 0.832989636341993, |
| "learning_rate": 6.734005920827023e-05, |
| "loss": 0.6229, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.00864, |
| "grad_norm": 0.9268003420050892, |
| "learning_rate": 6.716518456180219e-05, |
| "loss": 0.6067, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.01376, |
| "grad_norm": 0.9264505307476663, |
| "learning_rate": 6.69893408799711e-05, |
| "loss": 0.594, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.01888, |
| "grad_norm": 0.992069998033644, |
| "learning_rate": 6.681253443546566e-05, |
| "loss": 0.6027, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.024, |
| "grad_norm": 0.9820410181623107, |
| "learning_rate": 6.663477153531819e-05, |
| "loss": 0.6091, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.02912, |
| "grad_norm": 0.7956602066339694, |
| "learning_rate": 6.645605852067966e-05, |
| "loss": 0.6016, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.03424, |
| "grad_norm": 1.1030274948771646, |
| "learning_rate": 6.627640176659349e-05, |
| "loss": 0.6128, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.03936, |
| "grad_norm": 0.8027746768903161, |
| "learning_rate": 6.609580768176813e-05, |
| "loss": 0.6202, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.04448, |
| "grad_norm": 0.6182011917758258, |
| "learning_rate": 6.591428270834848e-05, |
| "loss": 0.6197, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.0496, |
| "grad_norm": 0.6933362237087946, |
| "learning_rate": 6.573183332168599e-05, |
| "loss": 0.5973, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.05472, |
| "grad_norm": 0.7261413202596659, |
| "learning_rate": 6.554846603010783e-05, |
| "loss": 0.6033, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.05984, |
| "grad_norm": 0.8175867268516304, |
| "learning_rate": 6.536418737468458e-05, |
| "loss": 0.5962, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.06496, |
| "grad_norm": 0.8967324447987707, |
| "learning_rate": 6.517900392899696e-05, |
| "loss": 0.6138, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.07008, |
| "grad_norm": 0.8644025442427714, |
| "learning_rate": 6.499292229890136e-05, |
| "loss": 0.6143, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.0752, |
| "grad_norm": 0.8340587352046295, |
| "learning_rate": 6.480594912229415e-05, |
| "loss": 0.5932, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.08032, |
| "grad_norm": 0.7992015971845043, |
| "learning_rate": 6.46180910688749e-05, |
| "loss": 0.5946, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.08544, |
| "grad_norm": 0.7883742168020988, |
| "learning_rate": 6.442935483990848e-05, |
| "loss": 0.5912, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.09056, |
| "grad_norm": 0.7062624710725375, |
| "learning_rate": 6.4239747167986e-05, |
| "loss": 0.6006, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.09568, |
| "grad_norm": 0.4879826112399788, |
| "learning_rate": 6.404927481678465e-05, |
| "loss": 0.5908, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.1008, |
| "grad_norm": 0.46707410887787343, |
| "learning_rate": 6.385794458082645e-05, |
| "loss": 0.6016, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.10592, |
| "grad_norm": 0.46215479735805276, |
| "learning_rate": 6.366576328523579e-05, |
| "loss": 0.6059, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.11104, |
| "grad_norm": 0.4872892344668909, |
| "learning_rate": 6.347273778549609e-05, |
| "loss": 0.5979, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.11616, |
| "grad_norm": 0.4117850505120705, |
| "learning_rate": 6.327887496720513e-05, |
| "loss": 0.6042, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.12128, |
| "grad_norm": 0.6094192942803528, |
| "learning_rate": 6.308418174582952e-05, |
| "loss": 0.6098, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.1264, |
| "grad_norm": 0.47370404685867984, |
| "learning_rate": 6.288866506645799e-05, |
| "loss": 0.5967, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.13152, |
| "grad_norm": 0.5812822920441131, |
| "learning_rate": 6.269233190355355e-05, |
| "loss": 0.589, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.13664, |
| "grad_norm": 0.581689390915006, |
| "learning_rate": 6.249518926070488e-05, |
| "loss": 0.6033, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.14176, |
| "grad_norm": 0.7708083440150924, |
| "learning_rate": 6.229724417037632e-05, |
| "loss": 0.6008, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.14688, |
| "grad_norm": 0.9437745661416408, |
| "learning_rate": 6.209850369365714e-05, |
| "loss": 0.5872, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.152, |
| "grad_norm": 1.0285174187897348, |
| "learning_rate": 6.189897492000953e-05, |
| "loss": 0.6034, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.15712, |
| "grad_norm": 0.9256911501210775, |
| "learning_rate": 6.169866496701586e-05, |
| "loss": 0.6065, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.16224, |
| "grad_norm": 0.8931346889464913, |
| "learning_rate": 6.149758098012464e-05, |
| "loss": 0.6162, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.16736, |
| "grad_norm": 0.906249956782627, |
| "learning_rate": 6.129573013239571e-05, |
| "loss": 0.609, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.17248, |
| "grad_norm": 0.9031799418470178, |
| "learning_rate": 6.109311962424433e-05, |
| "loss": 0.6043, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.1776, |
| "grad_norm": 0.7749991050050256, |
| "learning_rate": 6.0889756683184324e-05, |
| "loss": 0.609, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.18272, |
| "grad_norm": 0.6113291239741755, |
| "learning_rate": 6.068564856357031e-05, |
| "loss": 0.6041, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.18784, |
| "grad_norm": 0.4610907845132779, |
| "learning_rate": 6.048080254633885e-05, |
| "loss": 0.6032, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.19296, |
| "grad_norm": 0.7535273976467541, |
| "learning_rate": 6.0275225938748764e-05, |
| "loss": 0.5856, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.19808, |
| "grad_norm": 0.7357293529763054, |
| "learning_rate": 6.006892607412047e-05, |
| "loss": 0.5871, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.2032, |
| "grad_norm": 0.7850706730871837, |
| "learning_rate": 5.98619103115744e-05, |
| "loss": 0.6022, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.20832, |
| "grad_norm": 0.9520874763788266, |
| "learning_rate": 5.965418603576842e-05, |
| "loss": 0.6029, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.21344, |
| "grad_norm": 0.6234215842388057, |
| "learning_rate": 5.944576065663449e-05, |
| "loss": 0.5989, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.21856, |
| "grad_norm": 0.5581618761087828, |
| "learning_rate": 5.9236641609114295e-05, |
| "loss": 0.5852, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.2236799999999999, |
| "grad_norm": 0.8280934815331484, |
| "learning_rate": 5.902683635289403e-05, |
| "loss": 0.6065, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.2288000000000001, |
| "grad_norm": 0.8871893394061044, |
| "learning_rate": 5.881635237213829e-05, |
| "loss": 0.5901, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.23392, |
| "grad_norm": 0.6767238651749208, |
| "learning_rate": 5.8605197175223095e-05, |
| "loss": 0.6073, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.23904, |
| "grad_norm": 0.688896399179518, |
| "learning_rate": 5.839337829446809e-05, |
| "loss": 0.6049, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.24416, |
| "grad_norm": 0.5804470630643502, |
| "learning_rate": 5.8180903285867804e-05, |
| "loss": 0.6036, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.24928, |
| "grad_norm": 0.6457016614930509, |
| "learning_rate": 5.7967779728822134e-05, |
| "loss": 0.5981, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.2544, |
| "grad_norm": 0.8414381704157707, |
| "learning_rate": 5.775401522586596e-05, |
| "loss": 0.6109, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.25952, |
| "grad_norm": 0.8374711125166566, |
| "learning_rate": 5.753961740239799e-05, |
| "loss": 0.6005, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.26464, |
| "grad_norm": 0.5542646630500834, |
| "learning_rate": 5.732459390640868e-05, |
| "loss": 0.6103, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.26976, |
| "grad_norm": 0.5875945873220082, |
| "learning_rate": 5.7108952408207485e-05, |
| "loss": 0.6128, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.27488, |
| "grad_norm": 0.5087534889785712, |
| "learning_rate": 5.689270060014919e-05, |
| "loss": 0.5958, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.28, |
| "grad_norm": 0.4031024078366132, |
| "learning_rate": 5.66758461963595e-05, |
| "loss": 0.589, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.28512, |
| "grad_norm": 0.4918002116220245, |
| "learning_rate": 5.645839693245995e-05, |
| "loss": 0.5982, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.29024, |
| "grad_norm": 0.48513251035233335, |
| "learning_rate": 5.6240360565291874e-05, |
| "loss": 0.5981, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.29536, |
| "grad_norm": 0.41091372229331263, |
| "learning_rate": 5.602174487263971e-05, |
| "loss": 0.5902, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.30048, |
| "grad_norm": 0.5989492893332392, |
| "learning_rate": 5.580255765295358e-05, |
| "loss": 0.6129, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.3056, |
| "grad_norm": 0.5776722852203174, |
| "learning_rate": 5.5582806725071135e-05, |
| "loss": 0.606, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.3107199999999999, |
| "grad_norm": 0.4466289358616809, |
| "learning_rate": 5.536249992793856e-05, |
| "loss": 0.5992, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.3158400000000001, |
| "grad_norm": 0.4281414323403905, |
| "learning_rate": 5.514164512033099e-05, |
| "loss": 0.5865, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.32096, |
| "grad_norm": 0.3767339434891749, |
| "learning_rate": 5.4920250180572164e-05, |
| "loss": 0.6049, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.32608, |
| "grad_norm": 0.4541584747628375, |
| "learning_rate": 5.469832300625343e-05, |
| "loss": 0.5891, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.3312, |
| "grad_norm": 0.5852329624714224, |
| "learning_rate": 5.447587151395193e-05, |
| "loss": 0.6013, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.33632, |
| "grad_norm": 0.5470836134042647, |
| "learning_rate": 5.4252903638948304e-05, |
| "loss": 0.5871, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.34144, |
| "grad_norm": 0.3981165895490368, |
| "learning_rate": 5.402942733494355e-05, |
| "loss": 0.5988, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.34656, |
| "grad_norm": 0.8090553651844287, |
| "learning_rate": 5.380545057377533e-05, |
| "loss": 0.5919, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.35168, |
| "grad_norm": 0.6973495560228751, |
| "learning_rate": 5.358098134513359e-05, |
| "loss": 0.6099, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.3568, |
| "grad_norm": 0.4439863252173243, |
| "learning_rate": 5.335602765627556e-05, |
| "loss": 0.6112, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.36192, |
| "grad_norm": 0.45728264462579415, |
| "learning_rate": 5.313059753174011e-05, |
| "loss": 0.5999, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.36704, |
| "grad_norm": 0.5814907075742051, |
| "learning_rate": 5.2904699013061525e-05, |
| "loss": 0.5996, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.37216, |
| "grad_norm": 0.5358642327581291, |
| "learning_rate": 5.2678340158482605e-05, |
| "loss": 0.6085, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.37728, |
| "grad_norm": 0.5406775247232574, |
| "learning_rate": 5.245152904266722e-05, |
| "loss": 0.6234, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.3824, |
| "grad_norm": 0.7178735700752464, |
| "learning_rate": 5.2224273756412335e-05, |
| "loss": 0.6273, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.3875199999999999, |
| "grad_norm": 0.7039970725546744, |
| "learning_rate": 5.1996582406359286e-05, |
| "loss": 0.5899, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.39264, |
| "grad_norm": 0.5122004385795155, |
| "learning_rate": 5.17684631147047e-05, |
| "loss": 0.5772, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.39776, |
| "grad_norm": 0.5292102951542996, |
| "learning_rate": 5.1539924018910705e-05, |
| "loss": 0.5963, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.4028800000000001, |
| "grad_norm": 0.5393146339840262, |
| "learning_rate": 5.131097327141467e-05, |
| "loss": 0.6078, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.408, |
| "grad_norm": 0.708939654981038, |
| "learning_rate": 5.108161903933837e-05, |
| "loss": 0.6005, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.41312, |
| "grad_norm": 0.7037903511384231, |
| "learning_rate": 5.085186950419671e-05, |
| "loss": 0.5942, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.41824, |
| "grad_norm": 0.4165090417742424, |
| "learning_rate": 5.0621732861605754e-05, |
| "loss": 0.5906, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.42336, |
| "grad_norm": 0.6642875595643233, |
| "learning_rate": 5.039121732099052e-05, |
| "loss": 0.5891, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.42848, |
| "grad_norm": 0.7547032791125439, |
| "learning_rate": 5.0160331105292036e-05, |
| "loss": 0.6008, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.4336, |
| "grad_norm": 0.733986554958704, |
| "learning_rate": 4.992908245067403e-05, |
| "loss": 0.5915, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.43872, |
| "grad_norm": 0.47614304475036945, |
| "learning_rate": 4.969747960622912e-05, |
| "loss": 0.6011, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.44384, |
| "grad_norm": 0.585876239848498, |
| "learning_rate": 4.9465530833684614e-05, |
| "loss": 0.5912, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.44896, |
| "grad_norm": 0.6945418578556748, |
| "learning_rate": 4.923324440710773e-05, |
| "loss": 0.6106, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.45408, |
| "grad_norm": 0.6261522164548919, |
| "learning_rate": 4.900062861261044e-05, |
| "loss": 0.5941, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.4592, |
| "grad_norm": 0.5342894182102067, |
| "learning_rate": 4.8767691748053955e-05, |
| "loss": 0.5922, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.46432, |
| "grad_norm": 0.5838783944360918, |
| "learning_rate": 4.8534442122752674e-05, |
| "loss": 0.6075, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.46944, |
| "grad_norm": 0.6486938746583781, |
| "learning_rate": 4.830088805717779e-05, |
| "loss": 0.5954, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.4745599999999999, |
| "grad_norm": 0.5719218459201613, |
| "learning_rate": 4.806703788266045e-05, |
| "loss": 0.6018, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.47968, |
| "grad_norm": 0.5120146948133751, |
| "learning_rate": 4.783289994109459e-05, |
| "loss": 0.5912, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.4848, |
| "grad_norm": 0.6804527806275119, |
| "learning_rate": 4.759848258463942e-05, |
| "loss": 0.5896, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.4899200000000001, |
| "grad_norm": 0.5487294767485589, |
| "learning_rate": 4.736379417542134e-05, |
| "loss": 0.5882, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.49504, |
| "grad_norm": 0.4853965765407641, |
| "learning_rate": 4.712884308523579e-05, |
| "loss": 0.5888, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.5001600000000002, |
| "grad_norm": 0.586848411445172, |
| "learning_rate": 4.689363769524854e-05, |
| "loss": 0.5959, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.50528, |
| "grad_norm": 0.49095884598520845, |
| "learning_rate": 4.6658186395696735e-05, |
| "loss": 0.6054, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.5104, |
| "grad_norm": 0.5564665878511703, |
| "learning_rate": 4.64224975855896e-05, |
| "loss": 0.6059, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.51552, |
| "grad_norm": 0.46245666700892735, |
| "learning_rate": 4.618657967240879e-05, |
| "loss": 0.5891, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.52064, |
| "grad_norm": 0.533299845495822, |
| "learning_rate": 4.595044107180857e-05, |
| "loss": 0.6044, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.52576, |
| "grad_norm": 0.42525913312635544, |
| "learning_rate": 4.571409020731554e-05, |
| "loss": 0.5869, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.53088, |
| "grad_norm": 0.5215704284954067, |
| "learning_rate": 4.547753551002814e-05, |
| "loss": 0.5923, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.536, |
| "grad_norm": 0.47973640340231194, |
| "learning_rate": 4.524078541831597e-05, |
| "loss": 0.5891, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.54112, |
| "grad_norm": 0.4327598342601823, |
| "learning_rate": 4.500384837751868e-05, |
| "loss": 0.5958, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.54624, |
| "grad_norm": 0.5990121963555108, |
| "learning_rate": 4.476673283964481e-05, |
| "loss": 0.5856, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.5513599999999999, |
| "grad_norm": 0.7131944725290759, |
| "learning_rate": 4.4529447263070215e-05, |
| "loss": 0.5942, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.55648, |
| "grad_norm": 0.5329359342241601, |
| "learning_rate": 4.429200011223638e-05, |
| "loss": 0.597, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.5615999999999999, |
| "grad_norm": 0.4477137886623582, |
| "learning_rate": 4.4054399857348426e-05, |
| "loss": 0.5905, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.5667200000000001, |
| "grad_norm": 0.7235716501912173, |
| "learning_rate": 4.381665497407304e-05, |
| "loss": 0.6044, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.57184, |
| "grad_norm": 0.5656431568517662, |
| "learning_rate": 4.3578773943236066e-05, |
| "loss": 0.5935, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.5769600000000001, |
| "grad_norm": 0.5443076040573943, |
| "learning_rate": 4.334076525051998e-05, |
| "loss": 0.6085, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.58208, |
| "grad_norm": 0.44142667616814085, |
| "learning_rate": 4.3102637386161214e-05, |
| "loss": 0.5974, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.5872000000000002, |
| "grad_norm": 0.4881724554496324, |
| "learning_rate": 4.2864398844647333e-05, |
| "loss": 0.5982, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.59232, |
| "grad_norm": 0.48224923855420715, |
| "learning_rate": 4.2626058124413934e-05, |
| "loss": 0.5957, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.59744, |
| "grad_norm": 0.5087339085522424, |
| "learning_rate": 4.23876237275415e-05, |
| "loss": 0.5919, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.60256, |
| "grad_norm": 0.4533960557571388, |
| "learning_rate": 4.2149104159452194e-05, |
| "loss": 0.6004, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.60768, |
| "grad_norm": 0.511203081310295, |
| "learning_rate": 4.191050792860641e-05, |
| "loss": 0.5994, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.6128, |
| "grad_norm": 0.4223027905990951, |
| "learning_rate": 4.1671843546199195e-05, |
| "loss": 0.5887, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.61792, |
| "grad_norm": 0.4494541281616636, |
| "learning_rate": 4.143311952585673e-05, |
| "loss": 0.5775, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.62304, |
| "grad_norm": 0.49316734261443734, |
| "learning_rate": 4.119434438333261e-05, |
| "loss": 0.5937, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.62816, |
| "grad_norm": 0.36392011844676875, |
| "learning_rate": 4.095552663620403e-05, |
| "loss": 0.5962, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.63328, |
| "grad_norm": 0.4968107231897248, |
| "learning_rate": 4.0716674803568005e-05, |
| "loss": 0.5919, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.6383999999999999, |
| "grad_norm": 0.37525246925036004, |
| "learning_rate": 4.0477797405737414e-05, |
| "loss": 0.5969, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.64352, |
| "grad_norm": 0.5908101591078128, |
| "learning_rate": 4.023890296393714e-05, |
| "loss": 0.6099, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.6486399999999999, |
| "grad_norm": 0.3984607947668425, |
| "learning_rate": 4e-05, |
| "loss": 0.6004, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.6537600000000001, |
| "grad_norm": 0.5149940417738129, |
| "learning_rate": 3.976109703606288e-05, |
| "loss": 0.6006, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.65888, |
| "grad_norm": 0.4005964486620091, |
| "learning_rate": 3.952220259426259e-05, |
| "loss": 0.5803, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.6640000000000001, |
| "grad_norm": 0.35086767512207107, |
| "learning_rate": 3.928332519643201e-05, |
| "loss": 0.585, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.66912, |
| "grad_norm": 0.42885330848275394, |
| "learning_rate": 3.9044473363796e-05, |
| "loss": 0.5958, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.67424, |
| "grad_norm": 0.38751578060305375, |
| "learning_rate": 3.8805655616667406e-05, |
| "loss": 0.6014, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.67936, |
| "grad_norm": 0.3603759776041056, |
| "learning_rate": 3.8566880474143284e-05, |
| "loss": 0.6001, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.68448, |
| "grad_norm": 0.4283886804732866, |
| "learning_rate": 3.832815645380083e-05, |
| "loss": 0.5999, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.6896, |
| "grad_norm": 0.3337168354335494, |
| "learning_rate": 3.80894920713936e-05, |
| "loss": 0.5918, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.69472, |
| "grad_norm": 0.383147123590219, |
| "learning_rate": 3.785089584054781e-05, |
| "loss": 0.604, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.69984, |
| "grad_norm": 0.39090517742398523, |
| "learning_rate": 3.761237627245851e-05, |
| "loss": 0.5784, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.70496, |
| "grad_norm": 0.41160894437025963, |
| "learning_rate": 3.737394187558608e-05, |
| "loss": 0.5899, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.71008, |
| "grad_norm": 0.4348994266327126, |
| "learning_rate": 3.713560115535268e-05, |
| "loss": 0.584, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.7151999999999998, |
| "grad_norm": 0.44033711538569453, |
| "learning_rate": 3.689736261383879e-05, |
| "loss": 0.6019, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.72032, |
| "grad_norm": 0.46185741930953395, |
| "learning_rate": 3.6659234749480043e-05, |
| "loss": 0.5979, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.7254399999999999, |
| "grad_norm": 0.4557337264056941, |
| "learning_rate": 3.642122605676396e-05, |
| "loss": 0.6102, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.73056, |
| "grad_norm": 0.6447579696011232, |
| "learning_rate": 3.6183345025926966e-05, |
| "loss": 0.6094, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.73568, |
| "grad_norm": 0.35483221584671387, |
| "learning_rate": 3.594560014265159e-05, |
| "loss": 0.5859, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.7408000000000001, |
| "grad_norm": 0.5619949022675687, |
| "learning_rate": 3.570799988776364e-05, |
| "loss": 0.602, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.74592, |
| "grad_norm": 0.4808833851628103, |
| "learning_rate": 3.5470552736929785e-05, |
| "loss": 0.59, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.7510400000000002, |
| "grad_norm": 0.4280664310256486, |
| "learning_rate": 3.5233267160355206e-05, |
| "loss": 0.5973, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.75616, |
| "grad_norm": 0.34606786385637417, |
| "learning_rate": 3.4996151622481336e-05, |
| "loss": 0.5987, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.76128, |
| "grad_norm": 0.47886946752668064, |
| "learning_rate": 3.475921458168404e-05, |
| "loss": 0.5974, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.7664, |
| "grad_norm": 0.35895795443414463, |
| "learning_rate": 3.452246448997187e-05, |
| "loss": 0.5825, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.77152, |
| "grad_norm": 0.6237524007460633, |
| "learning_rate": 3.428590979268448e-05, |
| "loss": 0.593, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.77664, |
| "grad_norm": 0.5291377288153889, |
| "learning_rate": 3.4049558928191435e-05, |
| "loss": 0.5885, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.78176, |
| "grad_norm": 0.5075647920106248, |
| "learning_rate": 3.381342032759122e-05, |
| "loss": 0.5982, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.78688, |
| "grad_norm": 0.49287865419478344, |
| "learning_rate": 3.357750241441041e-05, |
| "loss": 0.5802, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.792, |
| "grad_norm": 0.543103939185933, |
| "learning_rate": 3.334181360430327e-05, |
| "loss": 0.5936, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.79712, |
| "grad_norm": 0.3894840623083752, |
| "learning_rate": 3.310636230475148e-05, |
| "loss": 0.588, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.8022399999999998, |
| "grad_norm": 0.43995147863147344, |
| "learning_rate": 3.287115691476422e-05, |
| "loss": 0.6006, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.80736, |
| "grad_norm": 0.3921709111241505, |
| "learning_rate": 3.263620582457868e-05, |
| "loss": 0.5939, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.8124799999999999, |
| "grad_norm": 0.31433310266314246, |
| "learning_rate": 3.24015174153606e-05, |
| "loss": 0.5893, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.8176, |
| "grad_norm": 0.43895209082760966, |
| "learning_rate": 3.2167100058905415e-05, |
| "loss": 0.5899, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.82272, |
| "grad_norm": 0.35289039516589493, |
| "learning_rate": 3.1932962117339566e-05, |
| "loss": 0.6034, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.8278400000000001, |
| "grad_norm": 0.3361411963291542, |
| "learning_rate": 3.169911194282222e-05, |
| "loss": 0.5794, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.83296, |
| "grad_norm": 0.35346529955825, |
| "learning_rate": 3.1465557877247325e-05, |
| "loss": 0.5872, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.8380800000000002, |
| "grad_norm": 0.3031581900611866, |
| "learning_rate": 3.123230825194606e-05, |
| "loss": 0.5771, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.8432, |
| "grad_norm": 0.4329864321858136, |
| "learning_rate": 3.099937138738958e-05, |
| "loss": 0.5927, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.84832, |
| "grad_norm": 0.35326273185796286, |
| "learning_rate": 3.076675559289228e-05, |
| "loss": 0.5996, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.85344, |
| "grad_norm": 0.35925676205986545, |
| "learning_rate": 3.05344691663154e-05, |
| "loss": 0.5781, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.85856, |
| "grad_norm": 0.3536221477128384, |
| "learning_rate": 3.0302520393770894e-05, |
| "loss": 0.5934, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.86368, |
| "grad_norm": 0.42419086729757743, |
| "learning_rate": 3.0070917549325983e-05, |
| "loss": 0.592, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.8688, |
| "grad_norm": 0.3833286680510395, |
| "learning_rate": 2.9839668894707974e-05, |
| "loss": 0.5858, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.87392, |
| "grad_norm": 0.3817012337576983, |
| "learning_rate": 2.960878267900948e-05, |
| "loss": 0.5816, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.87904, |
| "grad_norm": 0.400413399573491, |
| "learning_rate": 2.937826713839426e-05, |
| "loss": 0.5839, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.88416, |
| "grad_norm": 0.36874490386425984, |
| "learning_rate": 2.9148130495803307e-05, |
| "loss": 0.5854, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.8892799999999998, |
| "grad_norm": 0.39202924423336344, |
| "learning_rate": 2.8918380960661624e-05, |
| "loss": 0.5941, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.8944, |
| "grad_norm": 0.35104738229454596, |
| "learning_rate": 2.8689026728585338e-05, |
| "loss": 0.5845, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.8995199999999999, |
| "grad_norm": 0.42429376258538937, |
| "learning_rate": 2.8460075981089305e-05, |
| "loss": 0.5827, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.90464, |
| "grad_norm": 0.36436129019616925, |
| "learning_rate": 2.82315368852953e-05, |
| "loss": 0.5754, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.90976, |
| "grad_norm": 0.42556325640476184, |
| "learning_rate": 2.8003417593640727e-05, |
| "loss": 0.599, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.9148800000000001, |
| "grad_norm": 0.3851458476612929, |
| "learning_rate": 2.7775726243587685e-05, |
| "loss": 0.5931, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.92, |
| "grad_norm": 0.3737224724659946, |
| "learning_rate": 2.754847095733278e-05, |
| "loss": 0.6015, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.9251200000000002, |
| "grad_norm": 0.36520508052939255, |
| "learning_rate": 2.732165984151741e-05, |
| "loss": 0.5961, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.93024, |
| "grad_norm": 0.3088144098356228, |
| "learning_rate": 2.709530098693849e-05, |
| "loss": 0.5898, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.93536, |
| "grad_norm": 0.4046047102015038, |
| "learning_rate": 2.686940246825989e-05, |
| "loss": 0.6059, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.94048, |
| "grad_norm": 0.3815617952147954, |
| "learning_rate": 2.6643972343724453e-05, |
| "loss": 0.5785, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.9456, |
| "grad_norm": 0.3351980518182547, |
| "learning_rate": 2.6419018654866415e-05, |
| "loss": 0.585, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.95072, |
| "grad_norm": 0.4441803690810445, |
| "learning_rate": 2.6194549426224684e-05, |
| "loss": 0.5955, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.95584, |
| "grad_norm": 0.2725524980139723, |
| "learning_rate": 2.5970572665056465e-05, |
| "loss": 0.5871, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.96096, |
| "grad_norm": 0.4789800834824601, |
| "learning_rate": 2.57470963610517e-05, |
| "loss": 0.597, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.96608, |
| "grad_norm": 0.3196138403431241, |
| "learning_rate": 2.5524128486048073e-05, |
| "loss": 0.5948, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.9712, |
| "grad_norm": 0.4883702808240898, |
| "learning_rate": 2.5301676993746592e-05, |
| "loss": 0.588, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.9763199999999999, |
| "grad_norm": 0.4304710776740572, |
| "learning_rate": 2.5079749819427842e-05, |
| "loss": 0.5884, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.98144, |
| "grad_norm": 0.364646635973235, |
| "learning_rate": 2.4858354879669025e-05, |
| "loss": 0.5776, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.9865599999999999, |
| "grad_norm": 0.385522115127419, |
| "learning_rate": 2.463750007206146e-05, |
| "loss": 0.5842, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.9916800000000001, |
| "grad_norm": 0.3482195171706265, |
| "learning_rate": 2.441719327492887e-05, |
| "loss": 0.5886, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.9968, |
| "grad_norm": 0.4205224902290012, |
| "learning_rate": 2.4197442347046432e-05, |
| "loss": 0.5969, |
| "step": 390 |
| }, |
| { |
| "epoch": 2.00192, |
| "grad_norm": 0.40631741625288587, |
| "learning_rate": 2.397825512736032e-05, |
| "loss": 0.5662, |
| "step": 391 |
| }, |
| { |
| "epoch": 2.00704, |
| "grad_norm": 0.4013123221753668, |
| "learning_rate": 2.3759639434708142e-05, |
| "loss": 0.5248, |
| "step": 392 |
| }, |
| { |
| "epoch": 2.01216, |
| "grad_norm": 0.4323654857095571, |
| "learning_rate": 2.3541603067540063e-05, |
| "loss": 0.5389, |
| "step": 393 |
| }, |
| { |
| "epoch": 2.01728, |
| "grad_norm": 0.4365874888455653, |
| "learning_rate": 2.332415380364052e-05, |
| "loss": 0.5172, |
| "step": 394 |
| }, |
| { |
| "epoch": 2.0224, |
| "grad_norm": 0.38461821215764164, |
| "learning_rate": 2.3107299399850822e-05, |
| "loss": 0.5262, |
| "step": 395 |
| }, |
| { |
| "epoch": 2.02752, |
| "grad_norm": 0.47903986310634056, |
| "learning_rate": 2.2891047591792532e-05, |
| "loss": 0.5212, |
| "step": 396 |
| }, |
| { |
| "epoch": 2.03264, |
| "grad_norm": 0.35779675128884125, |
| "learning_rate": 2.2675406093591316e-05, |
| "loss": 0.5293, |
| "step": 397 |
| }, |
| { |
| "epoch": 2.03776, |
| "grad_norm": 0.4808543092900177, |
| "learning_rate": 2.2460382597602016e-05, |
| "loss": 0.5352, |
| "step": 398 |
| }, |
| { |
| "epoch": 2.04288, |
| "grad_norm": 0.3753156744242789, |
| "learning_rate": 2.2245984774134063e-05, |
| "loss": 0.5214, |
| "step": 399 |
| }, |
| { |
| "epoch": 2.048, |
| "grad_norm": 0.39281547945021766, |
| "learning_rate": 2.2032220271177876e-05, |
| "loss": 0.5287, |
| "step": 400 |
| }, |
| { |
| "epoch": 2.05312, |
| "grad_norm": 0.40725978791424594, |
| "learning_rate": 2.1819096714132206e-05, |
| "loss": 0.526, |
| "step": 401 |
| }, |
| { |
| "epoch": 2.05824, |
| "grad_norm": 0.36597234139560436, |
| "learning_rate": 2.1606621705531934e-05, |
| "loss": 0.5073, |
| "step": 402 |
| }, |
| { |
| "epoch": 2.06336, |
| "grad_norm": 0.4434308159144889, |
| "learning_rate": 2.139480282477691e-05, |
| "loss": 0.5259, |
| "step": 403 |
| }, |
| { |
| "epoch": 2.06848, |
| "grad_norm": 0.36571996113644073, |
| "learning_rate": 2.1183647627861736e-05, |
| "loss": 0.5375, |
| "step": 404 |
| }, |
| { |
| "epoch": 2.0736, |
| "grad_norm": 0.5190659641286463, |
| "learning_rate": 2.097316364710599e-05, |
| "loss": 0.5204, |
| "step": 405 |
| }, |
| { |
| "epoch": 2.07872, |
| "grad_norm": 0.34838410996336344, |
| "learning_rate": 2.0763358390885705e-05, |
| "loss": 0.5183, |
| "step": 406 |
| }, |
| { |
| "epoch": 2.08384, |
| "grad_norm": 0.464065008178319, |
| "learning_rate": 2.0554239343365524e-05, |
| "loss": 0.5181, |
| "step": 407 |
| }, |
| { |
| "epoch": 2.08896, |
| "grad_norm": 0.3876057469692389, |
| "learning_rate": 2.0345813964231604e-05, |
| "loss": 0.5283, |
| "step": 408 |
| }, |
| { |
| "epoch": 2.09408, |
| "grad_norm": 0.38233945304862516, |
| "learning_rate": 2.013808968842561e-05, |
| "loss": 0.5253, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.0992, |
| "grad_norm": 0.38531045185458873, |
| "learning_rate": 1.9931073925879544e-05, |
| "loss": 0.5286, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.10432, |
| "grad_norm": 0.3441646148982092, |
| "learning_rate": 1.9724774061251253e-05, |
| "loss": 0.5289, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.10944, |
| "grad_norm": 0.3059332693194638, |
| "learning_rate": 1.9519197453661166e-05, |
| "loss": 0.5268, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.11456, |
| "grad_norm": 0.34251732288777326, |
| "learning_rate": 1.9314351436429703e-05, |
| "loss": 0.535, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.11968, |
| "grad_norm": 0.27043155273405406, |
| "learning_rate": 1.9110243316815672e-05, |
| "loss": 0.519, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.1248, |
| "grad_norm": 0.3287810309710835, |
| "learning_rate": 1.8906880375755686e-05, |
| "loss": 0.5123, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.12992, |
| "grad_norm": 0.31017397419120324, |
| "learning_rate": 1.87042698676043e-05, |
| "loss": 0.5271, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.13504, |
| "grad_norm": 0.27534025617214347, |
| "learning_rate": 1.8502419019875357e-05, |
| "loss": 0.5148, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.14016, |
| "grad_norm": 0.34713006182861084, |
| "learning_rate": 1.8301335032984157e-05, |
| "loss": 0.5263, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.14528, |
| "grad_norm": 0.2944738685945991, |
| "learning_rate": 1.8101025079990485e-05, |
| "loss": 0.5082, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.1504, |
| "grad_norm": 0.2795225009877447, |
| "learning_rate": 1.7901496306342886e-05, |
| "loss": 0.5162, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.15552, |
| "grad_norm": 0.2792843641392462, |
| "learning_rate": 1.7702755829623696e-05, |
| "loss": 0.516, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.16064, |
| "grad_norm": 0.31863845049393985, |
| "learning_rate": 1.750481073929514e-05, |
| "loss": 0.5294, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.16576, |
| "grad_norm": 0.2926787760079869, |
| "learning_rate": 1.730766809644646e-05, |
| "loss": 0.5436, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.17088, |
| "grad_norm": 0.26756179726965906, |
| "learning_rate": 1.711133493354203e-05, |
| "loss": 0.5391, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.176, |
| "grad_norm": 0.26829916228856365, |
| "learning_rate": 1.6915818254170485e-05, |
| "loss": 0.5293, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.18112, |
| "grad_norm": 0.2916809848264421, |
| "learning_rate": 1.672112503279488e-05, |
| "loss": 0.5239, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.18624, |
| "grad_norm": 0.2847780874837289, |
| "learning_rate": 1.6527262214503917e-05, |
| "loss": 0.5206, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.19136, |
| "grad_norm": 0.31205176032409004, |
| "learning_rate": 1.6334236714764215e-05, |
| "loss": 0.5219, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.19648, |
| "grad_norm": 0.28653984101417374, |
| "learning_rate": 1.6142055419173556e-05, |
| "loss": 0.5396, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.2016, |
| "grad_norm": 0.29293525576131924, |
| "learning_rate": 1.595072518321535e-05, |
| "loss": 0.5186, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.20672, |
| "grad_norm": 0.32036644737076314, |
| "learning_rate": 1.576025283201401e-05, |
| "loss": 0.5293, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.21184, |
| "grad_norm": 0.2746440977694007, |
| "learning_rate": 1.5570645160091534e-05, |
| "loss": 0.5238, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.21696, |
| "grad_norm": 0.2763614067422842, |
| "learning_rate": 1.5381908931125112e-05, |
| "loss": 0.5315, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.22208, |
| "grad_norm": 0.3206600626128733, |
| "learning_rate": 1.519405087770586e-05, |
| "loss": 0.5155, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.2272, |
| "grad_norm": 0.28546354885077907, |
| "learning_rate": 1.5007077701098643e-05, |
| "loss": 0.5153, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.23232, |
| "grad_norm": 0.32183823115025384, |
| "learning_rate": 1.4820996071003042e-05, |
| "loss": 0.5244, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.23744, |
| "grad_norm": 0.2833366198667468, |
| "learning_rate": 1.4635812625315428e-05, |
| "loss": 0.5291, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.24256, |
| "grad_norm": 0.35259963599910704, |
| "learning_rate": 1.445153396989218e-05, |
| "loss": 0.5334, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.24768, |
| "grad_norm": 0.31168802605945267, |
| "learning_rate": 1.4268166678314029e-05, |
| "loss": 0.522, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.2528, |
| "grad_norm": 0.32274340867941653, |
| "learning_rate": 1.4085717291651534e-05, |
| "loss": 0.5291, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.25792, |
| "grad_norm": 0.2948948221905903, |
| "learning_rate": 1.390419231823187e-05, |
| "loss": 0.5394, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.26304, |
| "grad_norm": 0.2743662929170965, |
| "learning_rate": 1.3723598233406525e-05, |
| "loss": 0.5271, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.26816, |
| "grad_norm": 0.2556644467949261, |
| "learning_rate": 1.3543941479320344e-05, |
| "loss": 0.5253, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.27328, |
| "grad_norm": 0.2611595534203661, |
| "learning_rate": 1.3365228464681814e-05, |
| "loss": 0.5262, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.2784, |
| "grad_norm": 0.26247038877870327, |
| "learning_rate": 1.3187465564534359e-05, |
| "loss": 0.5257, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.28352, |
| "grad_norm": 0.2536877324922943, |
| "learning_rate": 1.30106591200289e-05, |
| "loss": 0.5174, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.28864, |
| "grad_norm": 0.2691257976797504, |
| "learning_rate": 1.2834815438197827e-05, |
| "loss": 0.5421, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.29376, |
| "grad_norm": 0.21048332739110232, |
| "learning_rate": 1.2659940791729773e-05, |
| "loss": 0.5183, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.29888, |
| "grad_norm": 0.26799152242066077, |
| "learning_rate": 1.248604141874608e-05, |
| "loss": 0.5347, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.304, |
| "grad_norm": 0.23183888170912065, |
| "learning_rate": 1.2313123522578141e-05, |
| "loss": 0.528, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.30912, |
| "grad_norm": 0.23153879138735126, |
| "learning_rate": 1.2141193271546104e-05, |
| "loss": 0.5402, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.31424, |
| "grad_norm": 0.2365803046413853, |
| "learning_rate": 1.1970256798738946e-05, |
| "loss": 0.524, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.31936, |
| "grad_norm": 0.23790371050570536, |
| "learning_rate": 1.1800320201795609e-05, |
| "loss": 0.5236, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.32448, |
| "grad_norm": 0.21840868912845707, |
| "learning_rate": 1.1631389542687455e-05, |
| "loss": 0.5158, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.3296, |
| "grad_norm": 0.22811000289018, |
| "learning_rate": 1.1463470847502154e-05, |
| "loss": 0.5491, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.33472, |
| "grad_norm": 0.2441771477812667, |
| "learning_rate": 1.1296570106228568e-05, |
| "loss": 0.5119, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.33984, |
| "grad_norm": 0.2504770922206427, |
| "learning_rate": 1.1130693272543174e-05, |
| "loss": 0.5259, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.34496, |
| "grad_norm": 0.22307013304308804, |
| "learning_rate": 1.0965846263597704e-05, |
| "loss": 0.5229, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.35008, |
| "grad_norm": 0.20777825026632868, |
| "learning_rate": 1.0802034959807934e-05, |
| "loss": 0.5327, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.3552, |
| "grad_norm": 0.2496892379371564, |
| "learning_rate": 1.063926520464407e-05, |
| "loss": 0.5173, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.3603199999999998, |
| "grad_norm": 0.2710443329473512, |
| "learning_rate": 1.047754280442225e-05, |
| "loss": 0.5093, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.36544, |
| "grad_norm": 0.2221839293934009, |
| "learning_rate": 1.0316873528097333e-05, |
| "loss": 0.5339, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.3705600000000002, |
| "grad_norm": 0.21317614214755715, |
| "learning_rate": 1.0157263107057291e-05, |
| "loss": 0.5284, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.37568, |
| "grad_norm": 0.22721711775906023, |
| "learning_rate": 9.998717234918557e-06, |
| "loss": 0.5351, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.3808, |
| "grad_norm": 0.22893893378395325, |
| "learning_rate": 9.84124156732305e-06, |
| "loss": 0.5173, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.38592, |
| "grad_norm": 0.2312282069514675, |
| "learning_rate": 9.684841721736449e-06, |
| "loss": 0.5099, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.39104, |
| "grad_norm": 0.23235918033484193, |
| "learning_rate": 9.529523277247672e-06, |
| "loss": 0.5353, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.39616, |
| "grad_norm": 0.2122801075559491, |
| "learning_rate": 9.37529177436998e-06, |
| "loss": 0.524, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.40128, |
| "grad_norm": 0.2281610275331807, |
| "learning_rate": 9.222152714843324e-06, |
| "loss": 0.5198, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.4064, |
| "grad_norm": 0.2276209528821846, |
| "learning_rate": 9.070111561437994e-06, |
| "loss": 0.5125, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.41152, |
| "grad_norm": 0.25863996249108634, |
| "learning_rate": 8.919173737759878e-06, |
| "loss": 0.5278, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.41664, |
| "grad_norm": 0.24519777588872835, |
| "learning_rate": 8.769344628056893e-06, |
| "loss": 0.548, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.42176, |
| "grad_norm": 0.22910426740880335, |
| "learning_rate": 8.62062957702698e-06, |
| "loss": 0.5186, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.42688, |
| "grad_norm": 0.2172444686293662, |
| "learning_rate": 8.473033889627404e-06, |
| "loss": 0.5316, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.432, |
| "grad_norm": 0.21789541593913744, |
| "learning_rate": 8.326562830885559e-06, |
| "loss": 0.4964, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.43712, |
| "grad_norm": 0.26419477579272155, |
| "learning_rate": 8.181221625711102e-06, |
| "loss": 0.5285, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.44224, |
| "grad_norm": 0.22975957761205745, |
| "learning_rate": 8.03701545870963e-06, |
| "loss": 0.5323, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.4473599999999998, |
| "grad_norm": 0.2192599105025437, |
| "learning_rate": 7.893949473997682e-06, |
| "loss": 0.513, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.45248, |
| "grad_norm": 0.21383791397200727, |
| "learning_rate": 7.752028775019264e-06, |
| "loss": 0.5287, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.4576000000000002, |
| "grad_norm": 0.23011437791676428, |
| "learning_rate": 7.61125842436381e-06, |
| "loss": 0.5158, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.46272, |
| "grad_norm": 0.23089368890405654, |
| "learning_rate": 7.471643443585561e-06, |
| "loss": 0.5286, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.46784, |
| "grad_norm": 0.21550660720139894, |
| "learning_rate": 7.33318881302445e-06, |
| "loss": 0.5315, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.47296, |
| "grad_norm": 0.21786834523919976, |
| "learning_rate": 7.1958994716284556e-06, |
| "loss": 0.5344, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.47808, |
| "grad_norm": 0.23493881832666266, |
| "learning_rate": 7.059780316777396e-06, |
| "loss": 0.5298, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.4832, |
| "grad_norm": 0.23179568273154502, |
| "learning_rate": 6.9248362041082514e-06, |
| "loss": 0.5395, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.48832, |
| "grad_norm": 0.1997326200659906, |
| "learning_rate": 6.791071947341939e-06, |
| "loss": 0.5344, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.49344, |
| "grad_norm": 0.18863063393495233, |
| "learning_rate": 6.658492318111611e-06, |
| "loss": 0.528, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.49856, |
| "grad_norm": 0.2042454987848812, |
| "learning_rate": 6.527102045792424e-06, |
| "loss": 0.5057, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.50368, |
| "grad_norm": 0.21633481485073822, |
| "learning_rate": 6.39690581733285e-06, |
| "loss": 0.5231, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.5088, |
| "grad_norm": 0.20671940715964057, |
| "learning_rate": 6.267908277087489e-06, |
| "loss": 0.519, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.51392, |
| "grad_norm": 0.20567854945985983, |
| "learning_rate": 6.140114026651338e-06, |
| "loss": 0.5246, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.51904, |
| "grad_norm": 0.21668723148852223, |
| "learning_rate": 6.01352762469575e-06, |
| "loss": 0.5234, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.52416, |
| "grad_norm": 0.1955709273332618, |
| "learning_rate": 5.888153586805723e-06, |
| "loss": 0.4833, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.52928, |
| "grad_norm": 0.19018764122168655, |
| "learning_rate": 5.763996385318838e-06, |
| "loss": 0.5241, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.5343999999999998, |
| "grad_norm": 0.21895623789510293, |
| "learning_rate": 5.641060449165774e-06, |
| "loss": 0.5022, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.53952, |
| "grad_norm": 0.23471379305483125, |
| "learning_rate": 5.5193501637122605e-06, |
| "loss": 0.527, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.5446400000000002, |
| "grad_norm": 0.2119274689582658, |
| "learning_rate": 5.39886987060267e-06, |
| "loss": 0.5376, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.54976, |
| "grad_norm": 0.19216483721783073, |
| "learning_rate": 5.279623867605144e-06, |
| "loss": 0.5221, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.55488, |
| "grad_norm": 0.2154169515261133, |
| "learning_rate": 5.161616408458239e-06, |
| "loss": 0.5188, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.56, |
| "grad_norm": 0.20441281420775267, |
| "learning_rate": 5.044851702719289e-06, |
| "loss": 0.5303, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.56512, |
| "grad_norm": 0.2107906688631621, |
| "learning_rate": 4.929333915614147e-06, |
| "loss": 0.511, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.57024, |
| "grad_norm": 0.21514247844377765, |
| "learning_rate": 4.815067167888603e-06, |
| "loss": 0.5245, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.57536, |
| "grad_norm": 0.2061450148487434, |
| "learning_rate": 4.702055535661481e-06, |
| "loss": 0.5079, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.58048, |
| "grad_norm": 0.19909839216020486, |
| "learning_rate": 4.590303050279144e-06, |
| "loss": 0.5192, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.5856, |
| "grad_norm": 0.20208792009677243, |
| "learning_rate": 4.479813698171702e-06, |
| "loss": 0.5102, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.59072, |
| "grad_norm": 0.2120494653558063, |
| "learning_rate": 4.370591420710879e-06, |
| "loss": 0.5276, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.59584, |
| "grad_norm": 0.19182746478809978, |
| "learning_rate": 4.262640114069303e-06, |
| "loss": 0.5297, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.60096, |
| "grad_norm": 0.2049595817249684, |
| "learning_rate": 4.1559636290816165e-06, |
| "loss": 0.5089, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.60608, |
| "grad_norm": 0.20334632758136215, |
| "learning_rate": 4.050565771107086e-06, |
| "loss": 0.5341, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.6112, |
| "grad_norm": 0.21626882759385432, |
| "learning_rate": 3.946450299893813e-06, |
| "loss": 0.5356, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.61632, |
| "grad_norm": 0.22272362239503654, |
| "learning_rate": 3.843620929444667e-06, |
| "loss": 0.5264, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.6214399999999998, |
| "grad_norm": 0.20142676114788888, |
| "learning_rate": 3.74208132788481e-06, |
| "loss": 0.5277, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.62656, |
| "grad_norm": 0.19564873650299325, |
| "learning_rate": 3.641835117330761e-06, |
| "loss": 0.532, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.6316800000000002, |
| "grad_norm": 0.21279829206795126, |
| "learning_rate": 3.542885873761308e-06, |
| "loss": 0.5163, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.6368, |
| "grad_norm": 0.20724605046419126, |
| "learning_rate": 3.4452371268898444e-06, |
| "loss": 0.5074, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.64192, |
| "grad_norm": 0.2140560895573128, |
| "learning_rate": 3.3488923600385028e-06, |
| "loss": 0.5331, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.64704, |
| "grad_norm": 0.19348656997795682, |
| "learning_rate": 3.2538550100139353e-06, |
| "loss": 0.5273, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.65216, |
| "grad_norm": 0.1959351210860576, |
| "learning_rate": 3.160128466984609e-06, |
| "loss": 0.5189, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.65728, |
| "grad_norm": 0.2043908489285227, |
| "learning_rate": 3.0677160743599788e-06, |
| "loss": 0.5349, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.6624, |
| "grad_norm": 0.20288115925706235, |
| "learning_rate": 2.9766211286711868e-06, |
| "loss": 0.5248, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.66752, |
| "grad_norm": 0.20731847438641798, |
| "learning_rate": 2.8868468794534243e-06, |
| "loss": 0.5181, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.67264, |
| "grad_norm": 0.19303823605826137, |
| "learning_rate": 2.7983965291300765e-06, |
| "loss": 0.5309, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.67776, |
| "grad_norm": 0.2049514957880367, |
| "learning_rate": 2.7112732328984594e-06, |
| "loss": 0.5301, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.68288, |
| "grad_norm": 0.19908648001763588, |
| "learning_rate": 2.6254800986172635e-06, |
| "loss": 0.5278, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.6879999999999997, |
| "grad_norm": 0.20892226869833805, |
| "learning_rate": 2.5410201866957042e-06, |
| "loss": 0.5236, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.69312, |
| "grad_norm": 0.18255526452984, |
| "learning_rate": 2.457896509984332e-06, |
| "loss": 0.525, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.69824, |
| "grad_norm": 0.19075543504463546, |
| "learning_rate": 2.376112033667579e-06, |
| "loss": 0.5139, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.70336, |
| "grad_norm": 0.19978943687563103, |
| "learning_rate": 2.29566967515797e-06, |
| "loss": 0.523, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.7084799999999998, |
| "grad_norm": 0.17850021044317485, |
| "learning_rate": 2.216572303992055e-06, |
| "loss": 0.5327, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.7136, |
| "grad_norm": 0.1854277104912946, |
| "learning_rate": 2.1388227417280527e-06, |
| "loss": 0.5233, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.7187200000000002, |
| "grad_norm": 0.18348286998112376, |
| "learning_rate": 2.0624237618452047e-06, |
| "loss": 0.5396, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.72384, |
| "grad_norm": 0.1883434076908113, |
| "learning_rate": 1.98737808964482e-06, |
| "loss": 0.5197, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.72896, |
| "grad_norm": 0.1841837424394637, |
| "learning_rate": 1.913688402153082e-06, |
| "loss": 0.5297, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.73408, |
| "grad_norm": 0.17622787049259134, |
| "learning_rate": 1.8413573280255326e-06, |
| "loss": 0.5285, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.7392, |
| "grad_norm": 0.17806753215393287, |
| "learning_rate": 1.7703874474533167e-06, |
| "loss": 0.5278, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.74432, |
| "grad_norm": 0.1958676559056442, |
| "learning_rate": 1.7007812920711408e-06, |
| "loss": 0.5044, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.74944, |
| "grad_norm": 0.16605984676027447, |
| "learning_rate": 1.6325413448669625e-06, |
| "loss": 0.514, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.75456, |
| "grad_norm": 0.18195390968020672, |
| "learning_rate": 1.5656700400934121e-06, |
| "loss": 0.5445, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.75968, |
| "grad_norm": 0.16824479934145423, |
| "learning_rate": 1.500169763180961e-06, |
| "loss": 0.5212, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.7648, |
| "grad_norm": 0.1819730086116406, |
| "learning_rate": 1.4360428506528323e-06, |
| "loss": 0.5232, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.76992, |
| "grad_norm": 0.17926267467820442, |
| "learning_rate": 1.373291590041661e-06, |
| "loss": 0.5273, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.7750399999999997, |
| "grad_norm": 0.21059680680377105, |
| "learning_rate": 1.3119182198078596e-06, |
| "loss": 0.5131, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.78016, |
| "grad_norm": 0.18407921454967086, |
| "learning_rate": 1.2519249292598112e-06, |
| "loss": 0.4969, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.78528, |
| "grad_norm": 0.18395693454820505, |
| "learning_rate": 1.1933138584757508e-06, |
| "loss": 0.5188, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.7904, |
| "grad_norm": 0.19315285688390138, |
| "learning_rate": 1.1360870982274118e-06, |
| "loss": 0.5186, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.79552, |
| "grad_norm": 0.1887790134903703, |
| "learning_rate": 1.080246689905473e-06, |
| "loss": 0.5271, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.80064, |
| "grad_norm": 0.17959548964956856, |
| "learning_rate": 1.0257946254467145e-06, |
| "loss": 0.5243, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.8057600000000003, |
| "grad_norm": 0.1817474574269339, |
| "learning_rate": 9.727328472629716e-07, |
| "loss": 0.5287, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.81088, |
| "grad_norm": 0.17642219828487612, |
| "learning_rate": 9.21063248171854e-07, |
| "loss": 0.526, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.816, |
| "grad_norm": 0.1904246025654594, |
| "learning_rate": 8.707876713291941e-07, |
| "loss": 0.5135, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.82112, |
| "grad_norm": 0.17957762634349514, |
| "learning_rate": 8.219079101633443e-07, |
| "loss": 0.5215, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.82624, |
| "grad_norm": 0.16263065954048853, |
| "learning_rate": 7.744257083111662e-07, |
| "loss": 0.5193, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.83136, |
| "grad_norm": 0.17484720518211333, |
| "learning_rate": 7.283427595558224e-07, |
| "loss": 0.5296, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.83648, |
| "grad_norm": 0.1867914151335944, |
| "learning_rate": 6.836607077664115e-07, |
| "loss": 0.5383, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.8416, |
| "grad_norm": 0.18028174989209536, |
| "learning_rate": 6.403811468392685e-07, |
| "loss": 0.5453, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.84672, |
| "grad_norm": 0.1730198700904891, |
| "learning_rate": 5.9850562064113e-07, |
| "loss": 0.5272, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.85184, |
| "grad_norm": 0.17377782938839437, |
| "learning_rate": 5.580356229540851e-07, |
| "loss": 0.5298, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.85696, |
| "grad_norm": 0.16971911427369651, |
| "learning_rate": 5.189725974222448e-07, |
| "loss": 0.5239, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.8620799999999997, |
| "grad_norm": 0.1853393873016306, |
| "learning_rate": 4.81317937500263e-07, |
| "loss": 0.5358, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.8672, |
| "grad_norm": 0.16321076208370755, |
| "learning_rate": 4.450729864036607e-07, |
| "loss": 0.511, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.87232, |
| "grad_norm": 0.17617659485628395, |
| "learning_rate": 4.1023903706084666e-07, |
| "loss": 0.5251, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.87744, |
| "grad_norm": 0.17229617450371348, |
| "learning_rate": 3.768173320670654e-07, |
| "loss": 0.516, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.88256, |
| "grad_norm": 0.18627902011447772, |
| "learning_rate": 3.448090636400192e-07, |
| "loss": 0.527, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.88768, |
| "grad_norm": 0.1692917592955478, |
| "learning_rate": 3.1421537357735567e-07, |
| "loss": 0.5203, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.8928000000000003, |
| "grad_norm": 0.1742890382438682, |
| "learning_rate": 2.850373532159578e-07, |
| "loss": 0.5099, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.89792, |
| "grad_norm": 0.18401122824627672, |
| "learning_rate": 2.5727604339297996e-07, |
| "loss": 0.5303, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.90304, |
| "grad_norm": 0.17215703579697378, |
| "learning_rate": 2.309324344087349e-07, |
| "loss": 0.5248, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.90816, |
| "grad_norm": 0.1744660017717841, |
| "learning_rate": 2.060074659913891e-07, |
| "loss": 0.5067, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.91328, |
| "grad_norm": 0.1753887233423339, |
| "learning_rate": 1.8250202726339815e-07, |
| "loss": 0.5142, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.9184, |
| "grad_norm": 0.17222558777930064, |
| "learning_rate": 1.6041695670981684e-07, |
| "loss": 0.5271, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.92352, |
| "grad_norm": 0.1749876224664464, |
| "learning_rate": 1.397530421483939e-07, |
| "loss": 0.5215, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.92864, |
| "grad_norm": 0.17369587300943137, |
| "learning_rate": 1.2051102070144816e-07, |
| "loss": 0.5248, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.93376, |
| "grad_norm": 0.1720447918581037, |
| "learning_rate": 1.0269157876959146e-07, |
| "loss": 0.5198, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.93888, |
| "grad_norm": 0.18006932981535365, |
| "learning_rate": 8.62953520072285e-08, |
| "loss": 0.5081, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.944, |
| "grad_norm": 0.16965260368509588, |
| "learning_rate": 7.132292529990814e-08, |
| "loss": 0.5227, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.9491199999999997, |
| "grad_norm": 0.17833147445276212, |
| "learning_rate": 5.7774832743415776e-08, |
| "loss": 0.5269, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.95424, |
| "grad_norm": 0.17095515499148287, |
| "learning_rate": 4.565155762477069e-08, |
| "loss": 0.5416, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.95936, |
| "grad_norm": 0.17471514998398122, |
| "learning_rate": 3.4953532404942146e-08, |
| "loss": 0.5026, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.96448, |
| "grad_norm": 0.1721020510585504, |
| "learning_rate": 2.568113870346167e-08, |
| "loss": 0.5092, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.9696, |
| "grad_norm": 0.18037480336003978, |
| "learning_rate": 1.783470728477621e-08, |
| "loss": 0.514, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.97472, |
| "grad_norm": 0.17100517198442003, |
| "learning_rate": 1.1414518046470868e-08, |
| "loss": 0.5405, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.9798400000000003, |
| "grad_norm": 0.1702692183309079, |
| "learning_rate": 6.4208000092769174e-09, |
| "loss": 0.5228, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.98496, |
| "grad_norm": 0.17252007004331688, |
| "learning_rate": 2.8537313088961015e-09, |
| "loss": 0.5295, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.99008, |
| "grad_norm": 0.18840800940002203, |
| "learning_rate": 7.134391896679305e-10, |
| "loss": 0.5159, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.9952, |
| "grad_norm": 0.1870617408251541, |
| "learning_rate": 0.0, |
| "loss": 0.4924, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.9952, |
| "step": 585, |
| "total_flos": 4.2089030343416873e+18, |
| "train_loss": 0.0, |
| "train_runtime": 5.9478, |
| "train_samples_per_second": 50438.911, |
| "train_steps_per_second": 98.356 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 585, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4.2089030343416873e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
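The block below is a small illustrative sketch and is not part of the trainer output. Assuming the JSON above is saved as `trainer_state.json` (the path is an assumption), it loads the `log_history` entries and plots the `loss` and `learning_rate` columns against `step`, which is a quick way to inspect the curves recorded in this file.

```python
# Minimal sketch (not part of the original log): load the trainer state shown
# above and plot its per-step loss and learning-rate history.
# The filename "trainer_state.json" is an assumption.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step entries; the final summary record has no "loss" key.
history = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
plt.show()
```

The `learning_rate` column appears consistent with a brief linear warmup followed by a decay to zero at the final step (585); the plot makes that shape, and the corresponding flattening of the loss curve, easy to confirm at a glance.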