|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 710, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.3505215219626858e-06, |
|
"loss": 1.5844, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7010430439253716e-06, |
|
"loss": 1.1717, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.051564565888058e-06, |
|
"loss": 1.1708, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.402086087850743e-06, |
|
"loss": 1.5208, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.175260760981343e-05, |
|
"loss": 1.2781, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4103129131776116e-05, |
|
"loss": 1.3885, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.64536506537388e-05, |
|
"loss": 1.4596, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.8804172175701486e-05, |
|
"loss": 1.2032, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.1154693697664172e-05, |
|
"loss": 1.3789, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.350521521962686e-05, |
|
"loss": 1.1589, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.585573674158954e-05, |
|
"loss": 1.3547, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.820625826355223e-05, |
|
"loss": 1.6605, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.055677978551492e-05, |
|
"loss": 1.2712, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.29073013074776e-05, |
|
"loss": 1.2314, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.525782282944028e-05, |
|
"loss": 1.3365, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.760834435140297e-05, |
|
"loss": 1.3434, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 3.9958865873365655e-05, |
|
"loss": 1.4921, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.2309387395328345e-05, |
|
"loss": 1.3429, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.4659908917291035e-05, |
|
"loss": 1.3414, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.701043043925372e-05, |
|
"loss": 1.3829, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.93609519612164e-05, |
|
"loss": 1.0409, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.171147348317908e-05, |
|
"loss": 1.4251, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.406199500514177e-05, |
|
"loss": 1.3312, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.641251652710446e-05, |
|
"loss": 1.5272, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 5.8763038049067145e-05, |
|
"loss": 1.3403, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.111355957102983e-05, |
|
"loss": 1.2074, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.346408109299252e-05, |
|
"loss": 1.2367, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.58146026149552e-05, |
|
"loss": 1.0457, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.816512413691789e-05, |
|
"loss": 1.2032, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.051564565888057e-05, |
|
"loss": 1.2936, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.286616718084326e-05, |
|
"loss": 1.4325, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.521668870280595e-05, |
|
"loss": 1.1163, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.756721022476862e-05, |
|
"loss": 1.2707, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.991773174673131e-05, |
|
"loss": 1.2504, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.2268253268694e-05, |
|
"loss": 1.3381, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.461877479065669e-05, |
|
"loss": 1.4586, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 8.696929631261938e-05, |
|
"loss": 1.3332, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 8.931981783458207e-05, |
|
"loss": 1.2071, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.167033935654475e-05, |
|
"loss": 1.2606, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.402086087850743e-05, |
|
"loss": 1.2386, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.637138240047012e-05, |
|
"loss": 1.163, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.87219039224328e-05, |
|
"loss": 1.2538, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.999988438329202e-05, |
|
"loss": 1.1698, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.999882216302589e-05, |
|
"loss": 1.3658, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.999664914761532e-05, |
|
"loss": 1.0131, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.999336538533694e-05, |
|
"loss": 1.4838, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.998897094914424e-05, |
|
"loss": 1.1761, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.998346593666587e-05, |
|
"loss": 1.4351, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.997685047020353e-05, |
|
"loss": 1.3387, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.996912469672925e-05, |
|
"loss": 1.2005, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.99602887878821e-05, |
|
"loss": 1.3915, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.995034293996442e-05, |
|
"loss": 1.2307, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.993928737393738e-05, |
|
"loss": 1.1234, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.992712233541616e-05, |
|
"loss": 1.2151, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.991384809466445e-05, |
|
"loss": 1.0929, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.989946494658839e-05, |
|
"loss": 1.4025, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.98839732107302e-05, |
|
"loss": 1.1941, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.986737323126084e-05, |
|
"loss": 1.2325, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.984966537697254e-05, |
|
"loss": 1.2219, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.983085004127055e-05, |
|
"loss": 1.256, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.981092764216437e-05, |
|
"loss": 1.3723, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.978989862225853e-05, |
|
"loss": 1.1166, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.976776344874268e-05, |
|
"loss": 1.2826, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.974452261338128e-05, |
|
"loss": 1.1347, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.972017663250264e-05, |
|
"loss": 1.1066, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.969472604698742e-05, |
|
"loss": 1.3199, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.96681714222567e-05, |
|
"loss": 1.3186, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.964051334825937e-05, |
|
"loss": 1.3577, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.961175243945893e-05, |
|
"loss": 1.1913, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.958188933482003e-05, |
|
"loss": 1.3416, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.95509246977941e-05, |
|
"loss": 1.4361, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.951885921630475e-05, |
|
"loss": 1.2034, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.948569360273236e-05, |
|
"loss": 1.147, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.945142859389835e-05, |
|
"loss": 1.5043, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.941606495104877e-05, |
|
"loss": 1.2877, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.937960345983736e-05, |
|
"loss": 1.2024, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.934204493030816e-05, |
|
"loss": 1.2718, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.930339019687746e-05, |
|
"loss": 1.1279, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.92636401183153e-05, |
|
"loss": 1.1138, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.922279557772639e-05, |
|
"loss": 1.2727, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.918085748253044e-05, |
|
"loss": 1.1073, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.9137826764442e-05, |
|
"loss": 1.1801, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.909370437944993e-05, |
|
"loss": 1.4271, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.904849130779588e-05, |
|
"loss": 1.45, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.900218855395276e-05, |
|
"loss": 1.3001, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.895479714660226e-05, |
|
"loss": 1.3452, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.890631813861207e-05, |
|
"loss": 1.2141, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.885675260701254e-05, |
|
"loss": 1.213, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.880610165297262e-05, |
|
"loss": 1.3351, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.87543664017755e-05, |
|
"loss": 1.063, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.870154800279354e-05, |
|
"loss": 1.0482, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.864764762946285e-05, |
|
"loss": 1.3549, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.859266647925709e-05, |
|
"loss": 1.2104, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.853660577366092e-05, |
|
"loss": 1.3416, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 9.847946675814286e-05, |
|
"loss": 1.0623, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.842125070212765e-05, |
|
"loss": 1.0585, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.8361958898968e-05, |
|
"loss": 1.123, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.830159266591589e-05, |
|
"loss": 1.2343, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.824015334409325e-05, |
|
"loss": 1.2386, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.817764229846228e-05, |
|
"loss": 0.9396, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.811406091779501e-05, |
|
"loss": 1.0644, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.804941061464246e-05, |
|
"loss": 1.2164, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.798369282530334e-05, |
|
"loss": 1.2597, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.79169090097921e-05, |
|
"loss": 1.0657, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.784906065180639e-05, |
|
"loss": 1.3115, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.77801492586943e-05, |
|
"loss": 1.1609, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.771017636142069e-05, |
|
"loss": 1.2352, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.763914351453328e-05, |
|
"loss": 1.2551, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.756705229612806e-05, |
|
"loss": 1.1841, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.749390430781428e-05, |
|
"loss": 1.4215, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.741970117467885e-05, |
|
"loss": 1.3392, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.734444454525015e-05, |
|
"loss": 1.2668, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.726813609146156e-05, |
|
"loss": 1.2107, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.719077750861422e-05, |
|
"loss": 1.0586, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.711237051533939e-05, |
|
"loss": 1.2463, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.703291685356022e-05, |
|
"loss": 1.0728, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.695241828845314e-05, |
|
"loss": 1.0965, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.68708766084086e-05, |
|
"loss": 1.4591, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.678829362499131e-05, |
|
"loss": 1.0267, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.670467117290003e-05, |
|
"loss": 1.2758, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.662001110992679e-05, |
|
"loss": 1.2481, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.653431531691564e-05, |
|
"loss": 1.1977, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.644758569772084e-05, |
|
"loss": 1.4683, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.635982417916459e-05, |
|
"loss": 1.2774, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.627103271099418e-05, |
|
"loss": 1.1035, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.618121326583874e-05, |
|
"loss": 1.1808, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.609036783916529e-05, |
|
"loss": 1.1988, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.59984984492346e-05, |
|
"loss": 1.3405, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.590560713705619e-05, |
|
"loss": 1.31, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.581169596634302e-05, |
|
"loss": 1.2651, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.571676702346571e-05, |
|
"loss": 1.2317, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.56208224174061e-05, |
|
"loss": 1.2728, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.552386427971049e-05, |
|
"loss": 1.2959, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.542589476444215e-05, |
|
"loss": 1.2706, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.53269160481336e-05, |
|
"loss": 1.1602, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.52269303297382e-05, |
|
"loss": 1.2293, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.512593983058129e-05, |
|
"loss": 1.2822, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.502394679431084e-05, |
|
"loss": 1.0499, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.49209534868476e-05, |
|
"loss": 1.3138, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.481696219633481e-05, |
|
"loss": 1.4097, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.47119752330873e-05, |
|
"loss": 1.1332, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.460599492954018e-05, |
|
"loss": 1.1116, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.449902364019708e-05, |
|
"loss": 1.3985, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.439106374157773e-05, |
|
"loss": 1.0012, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.428211763216526e-05, |
|
"loss": 0.9964, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.417218773235286e-05, |
|
"loss": 1.339, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.406127648439008e-05, |
|
"loss": 1.0616, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.394938635232844e-05, |
|
"loss": 1.2071, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.383651982196682e-05, |
|
"loss": 1.289, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.372267940079617e-05, |
|
"loss": 1.0109, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.36078676179438e-05, |
|
"loss": 1.1774, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.349208702411722e-05, |
|
"loss": 0.9881, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.337534019154745e-05, |
|
"loss": 1.1558, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.32576297139319e-05, |
|
"loss": 1.468, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.313895820637669e-05, |
|
"loss": 1.1095, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.301932830533862e-05, |
|
"loss": 1.0915, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.289874266856655e-05, |
|
"loss": 1.1427, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.277720397504243e-05, |
|
"loss": 1.2019, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.265471492492163e-05, |
|
"loss": 1.3101, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.253127823947311e-05, |
|
"loss": 1.1997, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.240689666101892e-05, |
|
"loss": 1.1938, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.228157295287317e-05, |
|
"loss": 1.2199, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.215530989928081e-05, |
|
"loss": 0.9386, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.202811030535565e-05, |
|
"loss": 1.2752, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 9.189997699701807e-05, |
|
"loss": 1.2073, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.177091282093231e-05, |
|
"loss": 1.3951, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.164092064444306e-05, |
|
"loss": 1.1749, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.151000335551194e-05, |
|
"loss": 1.0863, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.137816386265327e-05, |
|
"loss": 1.1223, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.124540509486939e-05, |
|
"loss": 0.948, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.111173000158566e-05, |
|
"loss": 1.1039, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.097714155258497e-05, |
|
"loss": 1.1733, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.084164273794163e-05, |
|
"loss": 1.3096, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.070523656795508e-05, |
|
"loss": 0.995, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.056792607308293e-05, |
|
"loss": 1.1716, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 9.042971430387366e-05, |
|
"loss": 1.1152, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.029060433089885e-05, |
|
"loss": 1.2403, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.015059924468494e-05, |
|
"loss": 1.3455, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.000970215564462e-05, |
|
"loss": 1.1923, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 8.986791619400768e-05, |
|
"loss": 1.1189, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 8.972524450975143e-05, |
|
"loss": 1.166, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 8.958169027253087e-05, |
|
"loss": 1.1154, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 8.94372566716081e-05, |
|
"loss": 1.0369, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 8.929194691578158e-05, |
|
"loss": 1.1153, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 8.914576423331475e-05, |
|
"loss": 1.0748, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 8.89987118718644e-05, |
|
"loss": 1.2589, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 8.885079309840848e-05, |
|
"loss": 0.9202, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 8.870201119917354e-05, |
|
"loss": 1.3762, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 8.855236947956161e-05, |
|
"loss": 1.0431, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 8.840187126407695e-05, |
|
"loss": 1.2946, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 8.825051989625206e-05, |
|
"loss": 1.1731, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 8.809831873857344e-05, |
|
"loss": 1.0521, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 8.794527117240688e-05, |
|
"loss": 1.2539, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 8.779138059792232e-05, |
|
"loss": 1.1325, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 8.76366504340184e-05, |
|
"loss": 0.9917, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 8.748108411824636e-05, |
|
"loss": 1.0688, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 8.732468510673379e-05, |
|
"loss": 0.882, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 8.716745687410779e-05, |
|
"loss": 1.2479, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 8.700940291341782e-05, |
|
"loss": 1.0746, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 8.685052673605798e-05, |
|
"loss": 1.0906, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 8.66908318716892e-05, |
|
"loss": 1.0399, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 8.653032186816062e-05, |
|
"loss": 1.1061, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 8.636900029143091e-05, |
|
"loss": 1.2413, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 8.620687072548899e-05, |
|
"loss": 0.9968, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 8.604393677227436e-05, |
|
"loss": 1.0685, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 8.588020205159724e-05, |
|
"loss": 1.0171, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 8.57156702010579e-05, |
|
"loss": 0.9696, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 8.555034487596612e-05, |
|
"loss": 1.1729, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.538422974925975e-05, |
|
"loss": 1.1588, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.521732851142327e-05, |
|
"loss": 1.2008, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.50496448704057e-05, |
|
"loss": 1.0552, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.488118255153831e-05, |
|
"loss": 1.1742, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 8.471194529745176e-05, |
|
"loss": 1.2478, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 8.454193686799306e-05, |
|
"loss": 1.0792, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 8.437116104014187e-05, |
|
"loss": 0.9578, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 8.419962160792683e-05, |
|
"loss": 1.3535, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 8.402732238234105e-05, |
|
"loss": 1.0785, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 8.385426719125759e-05, |
|
"loss": 1.0145, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 8.368045987934432e-05, |
|
"loss": 1.1375, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 8.350590430797858e-05, |
|
"loss": 0.976, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 8.333060435516134e-05, |
|
"loss": 0.9385, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 8.315456391543106e-05, |
|
"loss": 1.1096, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 8.297778689977719e-05, |
|
"loss": 0.9635, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 8.280027723555327e-05, |
|
"loss": 1.0106, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 8.262203886638964e-05, |
|
"loss": 1.2063, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 8.24430757521059e-05, |
|
"loss": 1.2897, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 8.226339186862286e-05, |
|
"loss": 1.1298, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 8.208299120787429e-05, |
|
"loss": 1.1559, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 8.190187777771812e-05, |
|
"loss": 1.0587, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 8.172005560184755e-05, |
|
"loss": 1.0188, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 8.153752871970151e-05, |
|
"loss": 1.1215, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 8.135430118637504e-05, |
|
"loss": 0.9491, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.117037707252911e-05, |
|
"loss": 0.9292, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.098576046430023e-05, |
|
"loss": 1.1974, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.080045546320963e-05, |
|
"loss": 1.0007, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.061446618607222e-05, |
|
"loss": 1.1207, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.042779676490506e-05, |
|
"loss": 0.8835, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.024045134683559e-05, |
|
"loss": 0.8961, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.005243409400945e-05, |
|
"loss": 0.9905, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 7.986374918349804e-05, |
|
"loss": 1.0613, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 7.967440080720581e-05, |
|
"loss": 1.0781, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 7.948439317177696e-05, |
|
"loss": 0.7686, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 7.929373049850214e-05, |
|
"loss": 0.941, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 7.910241702322452e-05, |
|
"loss": 1.0858, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 7.891045699624586e-05, |
|
"loss": 1.0956, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 7.87178546822319e-05, |
|
"loss": 0.9023, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.852461436011777e-05, |
|
"loss": 1.1251, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.833074032301279e-05, |
|
"loss": 1.0181, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.813623687810528e-05, |
|
"loss": 1.0848, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.79411083465666e-05, |
|
"loss": 1.0787, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.774535906345541e-05, |
|
"loss": 1.0356, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.754899337762122e-05, |
|
"loss": 1.1425, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.73520156516078e-05, |
|
"loss": 1.1107, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.715443026155628e-05, |
|
"loss": 1.1205, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.695624159710788e-05, |
|
"loss": 1.0453, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.675745406130647e-05, |
|
"loss": 0.8335, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.655807207050066e-05, |
|
"loss": 1.0367, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.635810005424575e-05, |
|
"loss": 0.9137, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.615754245520526e-05, |
|
"loss": 0.9815, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.595640372905235e-05, |
|
"loss": 1.1748, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.575468834437065e-05, |
|
"loss": 0.8648, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.555240078255512e-05, |
|
"loss": 1.0501, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.53495455377125e-05, |
|
"loss": 1.0069, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.514612711656132e-05, |
|
"loss": 1.0022, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.494215003833197e-05, |
|
"loss": 1.2586, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.473761883466614e-05, |
|
"loss": 1.1238, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.453253804951623e-05, |
|
"loss": 0.9192, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.432691223904439e-05, |
|
"loss": 1.0445, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.412074597152125e-05, |
|
"loss": 1.0032, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.39140438272245e-05, |
|
"loss": 1.147, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.370681039833706e-05, |
|
"loss": 1.1687, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.349905028884516e-05, |
|
"loss": 1.1281, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.329076811443589e-05, |
|
"loss": 1.0926, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.308196850239487e-05, |
|
"loss": 1.0286, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 7.287265609150325e-05, |
|
"loss": 1.1251, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 7.266283553193475e-05, |
|
"loss": 1.0484, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.245251148515237e-05, |
|
"loss": 0.9548, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.224168862380482e-05, |
|
"loss": 1.0533, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.203037163162258e-05, |
|
"loss": 1.1724, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 7.181856520331407e-05, |
|
"loss": 0.8868, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.160627404446115e-05, |
|
"loss": 1.1498, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 7.13935028714147e-05, |
|
"loss": 1.2512, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 7.118025641118981e-05, |
|
"loss": 0.9648, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 7.096653940136069e-05, |
|
"loss": 0.8932, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 7.075235658995558e-05, |
|
"loss": 1.1335, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 7.053771273535108e-05, |
|
"loss": 0.8604, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 7.032261260616654e-05, |
|
"loss": 0.858, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 7.010706098115812e-05, |
|
"loss": 1.152, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 6.989106264911262e-05, |
|
"loss": 0.8461, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 6.967462240874104e-05, |
|
"loss": 1.0286, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 6.9457745068572e-05, |
|
"loss": 1.1287, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 6.924043544684494e-05, |
|
"loss": 0.828, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 6.902269837140304e-05, |
|
"loss": 0.9707, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 6.880453867958599e-05, |
|
"loss": 0.8406, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 6.858596121812247e-05, |
|
"loss": 0.9251, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 6.836697084302253e-05, |
|
"loss": 1.1901, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 6.814757241946969e-05, |
|
"loss": 0.9653, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 6.792777082171286e-05, |
|
"loss": 0.9213, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 6.770757093295798e-05, |
|
"loss": 0.8884, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 6.74869776452597e-05, |
|
"loss": 1.0057, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 6.726599585941247e-05, |
|
"loss": 1.0599, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 6.704463048484184e-05, |
|
"loss": 1.0271, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 6.682288643949534e-05, |
|
"loss": 0.9883, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 6.66007686497332e-05, |
|
"loss": 1.0209, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 6.637828205021891e-05, |
|
"loss": 0.8159, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 6.615543158380956e-05, |
|
"loss": 1.0447, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 6.593222220144617e-05, |
|
"loss": 1.0394, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 6.570865886204347e-05, |
|
"loss": 1.146, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 6.54847465323799e-05, |
|
"loss": 0.9289, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 6.526049018698721e-05, |
|
"loss": 0.8888, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 6.503589480803998e-05, |
|
"loss": 0.9346, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 6.481096538524487e-05, |
|
"loss": 0.7988, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 6.458570691572979e-05, |
|
"loss": 0.9219, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 6.436012440393294e-05, |
|
"loss": 0.9391, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 6.413422286149154e-05, |
|
"loss": 1.0681, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 6.390800730713055e-05, |
|
"loss": 0.7624, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 6.368148276655113e-05, |
|
"loss": 0.9216, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 6.345465427231904e-05, |
|
"loss": 0.8041, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 6.32275268637528e-05, |
|
"loss": 1.0059, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 6.300010558681171e-05, |
|
"loss": 1.0607, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 6.27723954939838e-05, |
|
"loss": 0.8981, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.254440164417356e-05, |
|
"loss": 0.9132, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.231612910258952e-05, |
|
"loss": 0.9141, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 6.208758294063176e-05, |
|
"loss": 0.8164, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 6.185876823577923e-05, |
|
"loss": 0.8132, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 6.162969007147697e-05, |
|
"loss": 0.8809, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 6.14003535370231e-05, |
|
"loss": 0.8769, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 6.117076372745579e-05, |
|
"loss": 1.0372, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 6.094092574344017e-05, |
|
"loss": 0.7325, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 6.0710844691154824e-05, |
|
"loss": 1.1814, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 6.0480525682178455e-05, |
|
"loss": 0.8201, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 6.024997383337637e-05, |
|
"loss": 1.0517, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 6.0019194266786715e-05, |
|
"loss": 0.8776, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.97881921095067e-05, |
|
"loss": 0.8195, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 5.955697249357869e-05, |
|
"loss": 0.9821, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.932554055587624e-05, |
|
"loss": 0.9625, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.909390143798995e-05, |
|
"loss": 0.7567, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 5.8862060286113165e-05, |
|
"loss": 0.8004, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 5.8630022250927716e-05, |
|
"loss": 0.5626, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 5.839779248748954e-05, |
|
"loss": 1.015, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 5.816537615511404e-05, |
|
"loss": 0.8443, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 5.79327784172615e-05, |
|
"loss": 0.8472, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 5.7700004441422395e-05, |
|
"loss": 0.7691, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 5.746705939900262e-05, |
|
"loss": 0.8565, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 5.723394846520852e-05, |
|
"loss": 1.0142, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 5.7000676818931944e-05, |
|
"loss": 0.7965, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 5.676724964263521e-05, |
|
"loss": 0.8013, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 5.653367212223599e-05, |
|
"loss": 0.8015, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 5.6299949446992026e-05, |
|
"loss": 0.742, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 5.6066086809385873e-05, |
|
"loss": 0.9467, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 5.583208940500959e-05, |
|
"loss": 0.8903, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 5.559796243244926e-05, |
|
"loss": 0.9451, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 5.536371109316947e-05, |
|
"loss": 0.8239, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 5.5129340591397804e-05, |
|
"loss": 0.9108, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 5.489485613400929e-05, |
|
"loss": 0.9606, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 5.466026293041053e-05, |
|
"loss": 0.8744, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 5.442556619242414e-05, |
|
"loss": 0.7085, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 5.419077113417288e-05, |
|
"loss": 1.0677, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 5.3955882971963856e-05, |
|
"loss": 0.8227, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 5.3720906924172596e-05, |
|
"loss": 0.7129, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 5.348584821112711e-05, |
|
"loss": 0.8936, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 5.325071205499198e-05, |
|
"loss": 0.7626, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 5.301550367965228e-05, |
|
"loss": 0.7006, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 5.278022831059751e-05, |
|
"loss": 0.8868, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 5.254489117480556e-05, |
|
"loss": 0.7274, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 5.230949750062655e-05, |
|
"loss": 0.7568, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 5.2074052517666685e-05, |
|
"loss": 0.8734, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 5.183856145667204e-05, |
|
"loss": 1.0261, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 5.160302954941241e-05, |
|
"loss": 0.9876, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 5.136746202856506e-05, |
|
"loss": 0.9113, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 5.113186412759842e-05, |
|
"loss": 0.8247, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 5.089624108065587e-05, |
|
"loss": 0.7352, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 5.066059812243946e-05, |
|
"loss": 0.8319, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 5.042494048809361e-05, |
|
"loss": 0.7573, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.018927341308874e-05, |
|
"loss": 0.729, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.9953602133105056e-05, |
|
"loss": 0.9483, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.971793188391615e-05, |
|
"loss": 0.6948, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.948226790127273e-05, |
|
"loss": 0.7836, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.9246615420786315e-05, |
|
"loss": 0.647, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.901097967781281e-05, |
|
"loss": 0.6558, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.8775365907336345e-05, |
|
"loss": 0.7926, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.8539779343852884e-05, |
|
"loss": 0.7926, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.8304225221253895e-05, |
|
"loss": 0.818, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.80687087727102e-05, |
|
"loss": 0.5522, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.783323523055559e-05, |
|
"loss": 0.7327, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.7597809826170646e-05, |
|
"loss": 0.8745, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.736243778986651e-05, |
|
"loss": 0.8599, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.7127124350768646e-05, |
|
"loss": 0.6869, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.689187473670073e-05, |
|
"loss": 0.8579, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.665669417406847e-05, |
|
"loss": 0.7942, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.642158788774345e-05, |
|
"loss": 0.8683, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.618656110094717e-05, |
|
"loss": 0.8248, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.5951619035134914e-05, |
|
"loss": 0.8052, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.571676690987972e-05, |
|
"loss": 0.7724, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.548200994275652e-05, |
|
"loss": 0.8052, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.524735334922615e-05, |
|
"loss": 0.8825, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.501280234251952e-05, |
|
"loss": 0.8237, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.477836213352178e-05, |
|
"loss": 0.5574, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.4544037930656526e-05, |
|
"loss": 0.7633, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.430983493977012e-05, |
|
"loss": 0.6846, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.407575836401605e-05, |
|
"loss": 0.7732, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.384181340373927e-05, |
|
"loss": 0.8842, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.360800525636076e-05, |
|
"loss": 0.618, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.337433911626198e-05, |
|
"loss": 0.7508, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.314082017466947e-05, |
|
"loss": 0.6947, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.2907453619539596e-05, |
|
"loss": 0.7075, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.267424463544318e-05, |
|
"loss": 0.9874, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.244119840345045e-05, |
|
"loss": 0.8875, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.2208320101015816e-05, |
|
"loss": 0.6921, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.197561490186291e-05, |
|
"loss": 0.8342, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.174308797586961e-05, |
|
"loss": 0.7022, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.151074448895324e-05, |
|
"loss": 0.8656, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.1278589602955706e-05, |
|
"loss": 0.9449, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.1046628475528936e-05, |
|
"loss": 0.9049, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.081486626002021e-05, |
|
"loss": 0.8814, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.058330810535766e-05, |
|
"loss": 0.7045, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.035195915593598e-05, |
|
"loss": 0.8558, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.012082455150201e-05, |
|
"loss": 0.772, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.988990942704066e-05, |
|
"loss": 0.7281, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.9659218912660766e-05, |
|
"loss": 0.7919, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.9428758133481106e-05, |
|
"loss": 0.9592, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.919853220951658e-05, |
|
"loss": 0.6567, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.896854625556448e-05, |
|
"loss": 0.9045, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.873880538109073e-05, |
|
"loss": 0.9526, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.850931469011656e-05, |
|
"loss": 0.6942, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.8280079281104996e-05, |
|
"loss": 0.604, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.805110424684754e-05, |
|
"loss": 0.7876, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.782239467435119e-05, |
|
"loss": 0.6762, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.7593955644725263e-05, |
|
"loss": 0.6553, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.736579223306863e-05, |
|
"loss": 0.9052, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 3.713790950835691e-05, |
|
"loss": 0.6197, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 3.691031253332981e-05, |
|
"loss": 0.8027, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 3.6683006364378745e-05, |
|
"loss": 0.8929, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.64559960514345e-05, |
|
"loss": 0.5936, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.6229286637854865e-05, |
|
"loss": 0.6819, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.6002883160312865e-05, |
|
"loss": 0.6546, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.577679064868466e-05, |
|
"loss": 0.6387, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.555101412593781e-05, |
|
"loss": 0.8402, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 3.532555860801982e-05, |
|
"loss": 0.7592, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.5100429103746547e-05, |
|
"loss": 0.7132, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.487563061469102e-05, |
|
"loss": 0.6006, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.465116813507232e-05, |
|
"loss": 0.758, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 3.4427046651644523e-05, |
|
"loss": 0.7745, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.420327114358606e-05, |
|
"loss": 0.8207, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.397984658238902e-05, |
|
"loss": 0.7432, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 3.375677793174862e-05, |
|
"loss": 0.7818, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.3534070147453135e-05, |
|
"loss": 0.6546, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.331172817727363e-05, |
|
"loss": 0.8025, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 3.308975696085405e-05, |
|
"loss": 0.854, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 3.2868161429601584e-05, |
|
"loss": 0.8708, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.264694650657698e-05, |
|
"loss": 0.6904, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.242611710638526e-05, |
|
"loss": 0.6629, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 3.220567813506653e-05, |
|
"loss": 0.7047, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.198563448998691e-05, |
|
"loss": 0.6103, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.176599105972982e-05, |
|
"loss": 0.7088, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 3.154675272398734e-05, |
|
"loss": 0.692, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.132792435345173e-05, |
|
"loss": 0.7956, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.1109510809707395e-05, |
|
"loss": 0.5122, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.0891516945122735e-05, |
|
"loss": 0.6636, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.06739476027423e-05, |
|
"loss": 0.5402, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.0456807616179385e-05, |
|
"loss": 0.7909, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.0240101809508447e-05, |
|
"loss": 0.7802, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.0023834997158084e-05, |
|
"loss": 0.6254, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 2.9808011983803974e-05, |
|
"loss": 0.7085, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 2.9592637564262155e-05, |
|
"loss": 0.6821, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 2.9377716523382548e-05, |
|
"loss": 0.5261, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 2.916325363594261e-05, |
|
"loss": 0.6004, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 2.894925366654125e-05, |
|
"loss": 0.7009, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 2.8735721369492998e-05, |
|
"loss": 0.6891, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 2.8522661488722402e-05, |
|
"loss": 0.7982, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 2.8310078757658552e-05, |
|
"loss": 0.5411, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 2.809797789913004e-05, |
|
"loss": 0.9861, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 2.788636362525995e-05, |
|
"loss": 0.6231, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 2.7675240637361127e-05, |
|
"loss": 0.8455, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 2.7464613625831947e-05, |
|
"loss": 0.6272, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 2.725448727005181e-05, |
|
"loss": 0.6168, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 2.704486623827742e-05, |
|
"loss": 0.7557, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 2.683575518753899e-05, |
|
"loss": 0.8038, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 2.662715876353672e-05, |
|
"loss": 0.5731, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 2.6419081600537676e-05, |
|
"loss": 0.5925, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 2.6211528321272815e-05, |
|
"loss": 0.3563, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 2.600450353683426e-05, |
|
"loss": 0.8047, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 2.5798011846572845e-05, |
|
"loss": 0.6462, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 2.5592057837996008e-05, |
|
"loss": 0.6358, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.5386646086665756e-05, |
|
"loss": 0.5355, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.518178115609714e-05, |
|
"loss": 0.6574, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 2.4977467597656733e-05, |
|
"loss": 0.8254, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 2.4773709950461665e-05, |
|
"loss": 0.6084, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 2.4570512741278672e-05, |
|
"loss": 0.5879, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.436788048442356e-05, |
|
"loss": 0.5913, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.4165817681660923e-05, |
|
"loss": 0.5537, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.3964328822104127e-05, |
|
"loss": 0.7386, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 2.3763418382115522e-05, |
|
"loss": 0.6689, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 2.356309082520709e-05, |
|
"loss": 0.72, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.3363350601941254e-05, |
|
"loss": 0.6096, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.3164202149831875e-05, |
|
"loss": 0.6886, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 2.2965649893245943e-05, |
|
"loss": 0.75, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.276769824330496e-05, |
|
"loss": 0.7009, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.2570351597787166e-05, |
|
"loss": 0.527, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.2373614341029776e-05, |
|
"loss": 0.8298, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 2.217749084383151e-05, |
|
"loss": 0.6151, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.1981985463355587e-05, |
|
"loss": 0.4804, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.178710254303288e-05, |
|
"loss": 0.6926, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 2.1592846412465408e-05, |
|
"loss": 0.5709, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.139922138733016e-05, |
|
"loss": 0.4891, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.1206231769283253e-05, |
|
"loss": 0.7093, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 2.101388184586426e-05, |
|
"loss": 0.5319, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.0822175890401102e-05, |
|
"loss": 0.5793, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.0631118161914965e-05, |
|
"loss": 0.6307, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 2.0440712905025788e-05, |
|
"loss": 0.7734, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 2.025096434985793e-05, |
|
"loss": 0.7496, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 2.006187671194616e-05, |
|
"loss": 0.7035, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.9873454192142038e-05, |
|
"loss": 0.6277, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.9685700976520605e-05, |
|
"loss": 0.504, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.9498621236287275e-05, |
|
"loss": 0.6165, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.931221912768532e-05, |
|
"loss": 0.5932, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.9126498791903464e-05, |
|
"loss": 0.5616, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.894146435498378e-05, |
|
"loss": 0.7417, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.8757119927730266e-05, |
|
"loss": 0.4589, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.8573469605617234e-05, |
|
"loss": 0.5317, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.839051746869854e-05, |
|
"loss": 0.4569, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.8208267581516853e-05, |
|
"loss": 0.4805, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.8026723993013317e-05, |
|
"loss": 0.6394, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.7845890736437686e-05, |
|
"loss": 0.5725, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.766577182925867e-05, |
|
"loss": 0.6262, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.748637127307468e-05, |
|
"loss": 0.4061, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.7307693053524948e-05, |
|
"loss": 0.5609, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.7129741140200966e-05, |
|
"loss": 0.6813, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.6952519486558255e-05, |
|
"loss": 0.6705, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.6776032029828652e-05, |
|
"loss": 0.5283, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.6600282690932685e-05, |
|
"loss": 0.647, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.642527537439259e-05, |
|
"loss": 0.6204, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.625101396824551e-05, |
|
"loss": 0.7142, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.6077502343957117e-05, |
|
"loss": 0.6346, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.5904744356335615e-05, |
|
"loss": 0.6174, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.5732743843446112e-05, |
|
"loss": 0.5119, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.5561504626525274e-05, |
|
"loss": 0.5757, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.539103050989656e-05, |
|
"loss": 0.6991, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.5221325280885606e-05, |
|
"loss": 0.6397, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.5052392709736101e-05, |
|
"loss": 0.3825, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.488423654952606e-05, |
|
"loss": 0.5771, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.4716860536084403e-05, |
|
"loss": 0.5234, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.4550268387907996e-05, |
|
"loss": 0.6009, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.4384463806079024e-05, |
|
"loss": 0.7015, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.4219450474182722e-05, |
|
"loss": 0.4498, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.4055232058225609e-05, |
|
"loss": 0.553, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.3891812206554023e-05, |
|
"loss": 0.503, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 1.3729194549773039e-05, |
|
"loss": 0.5165, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.3567382700665826e-05, |
|
"loss": 0.8116, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.3406380254113421e-05, |
|
"loss": 0.7193, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.3246190787014767e-05, |
|
"loss": 0.5503, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.3086817858207368e-05, |
|
"loss": 0.6821, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.2928265008388119e-05, |
|
"loss": 0.5011, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.2770535760034718e-05, |
|
"loss": 0.6798, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.2613633617327386e-05, |
|
"loss": 0.7772, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.245756206607101e-05, |
|
"loss": 0.7508, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.2302324573617674e-05, |
|
"loss": 0.7464, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.21479245887897e-05, |
|
"loss": 0.494, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.1994365541802938e-05, |
|
"loss": 0.6598, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.1841650844190639e-05, |
|
"loss": 0.6079, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.1689783888727612e-05, |
|
"loss": 0.5854, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.1538768049354886e-05, |
|
"loss": 0.6023, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.1388606681104701e-05, |
|
"loss": 0.8083, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.1239303120026045e-05, |
|
"loss": 0.5125, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 1.1090860683110432e-05, |
|
"loss": 0.7413, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.094328266821833e-05, |
|
"loss": 0.7644, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.0796572354005836e-05, |
|
"loss": 0.5222, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 1.0650732999851793e-05, |
|
"loss": 0.4381, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.050576784578547e-05, |
|
"loss": 0.5834, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.0361680112414518e-05, |
|
"loss": 0.5592, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.0218473000853435e-05, |
|
"loss": 0.54, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.007614969265247e-05, |
|
"loss": 0.7499, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 9.93471334972687e-06, |
|
"loss": 0.5057, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 9.794167114286728e-06, |
|
"loss": 0.6646, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 9.654514108767122e-06, |
|
"loss": 0.7452, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 9.515757435758743e-06, |
|
"loss": 0.4576, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 9.377900177938981e-06, |
|
"loss": 0.5076, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 9.240945398003448e-06, |
|
"loss": 0.5335, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 9.104896138597884e-06, |
|
"loss": 0.4748, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 8.969755422250642e-06, |
|
"loss": 0.6551, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 8.835526251305487e-06, |
|
"loss": 0.6218, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 8.702211607854871e-06, |
|
"loss": 0.5859, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 8.569814453673747e-06, |
|
"loss": 0.4408, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 8.438337730153728e-06, |
|
"loss": 0.6113, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 8.307784358237736e-06, |
|
"loss": 0.6236, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 8.178157238355149e-06, |
|
"loss": 0.6943, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 8.049459250357284e-06, |
|
"loss": 0.6044, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 7.92169325345351e-06, |
|
"loss": 0.6424, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 7.794862086147686e-06, |
|
"loss": 0.5592, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 7.668968566175067e-06, |
|
"loss": 0.6441, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 7.544015490439743e-06, |
|
"loss": 0.7385, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 7.420005634952548e-06, |
|
"loss": 0.7177, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 7.296941754769243e-06, |
|
"loss": 0.5613, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 7.174826583929478e-06, |
|
"loss": 0.5203, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 7.053662835395908e-06, |
|
"loss": 0.5635, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 6.933453200994033e-06, |
|
"loss": 0.4943, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 6.814200351352323e-06, |
|
"loss": 0.5714, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 6.695906935842916e-06, |
|
"loss": 0.5431, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 6.5785755825227554e-06, |
|
"loss": 0.6152, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 6.462208898075201e-06, |
|
"loss": 0.3967, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 6.346809467752101e-06, |
|
"loss": 0.51, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 6.232379855316395e-06, |
|
"loss": 0.3982, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 6.118922602985133e-06, |
|
"loss": 0.6577, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 6.006440231372973e-06, |
|
"loss": 0.6096, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 5.894935239436239e-06, |
|
"loss": 0.4768, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 5.78441010441736e-06, |
|
"loss": 0.582, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 5.674867281789842e-06, |
|
"loss": 0.5339, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 5.566309205203735e-06, |
|
"loss": 0.3802, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 5.458738286431525e-06, |
|
"loss": 0.4993, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 5.352156915314594e-06, |
|
"loss": 0.5956, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 5.2465674597101275e-06, |
|
"loss": 0.5637, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 5.141972265438455e-06, |
|
"loss": 0.6436, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 5.0383736562310105e-06, |
|
"loss": 0.4381, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.935773933678645e-06, |
|
"loss": 0.8673, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 4.8341753771805275e-06, |
|
"loss": 0.5128, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 4.733580243893509e-06, |
|
"loss": 0.7217, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 4.633990768681928e-06, |
|
"loss": 0.5057, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 4.535409164068022e-06, |
|
"loss": 0.4874, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 4.437837620182745e-06, |
|
"loss": 0.5992, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 4.341278304717078e-06, |
|
"loss": 0.7009, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 4.2457333628739605e-06, |
|
"loss": 0.4728, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 4.1512049173205445e-06, |
|
"loss": 0.4825, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 4.057695068141043e-06, |
|
"loss": 0.264, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.9652058927901415e-06, |
|
"loss": 0.6728, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.873739446046787e-06, |
|
"loss": 0.5413, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.7832977599685236e-06, |
|
"loss": 0.5163, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.693882843846419e-06, |
|
"loss": 0.4204, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.605496684160348e-06, |
|
"loss": 0.5551, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.518141244534917e-06, |
|
"loss": 0.7123, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.4318184656958074e-06, |
|
"loss": 0.5059, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.3465302654266616e-06, |
|
"loss": 0.4831, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.262278538526492e-06, |
|
"loss": 0.4672, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.17906515676758e-06, |
|
"loss": 0.4423, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.0968919688538656e-06, |
|
"loss": 0.6304, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.0157608003799355e-06, |
|
"loss": 0.547, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 2.9356734537903997e-06, |
|
"loss": 0.6087, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 2.8566317083398907e-06, |
|
"loss": 0.5036, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 2.7786373200535175e-06, |
|
"loss": 0.5607, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 2.701692021687846e-06, |
|
"loss": 0.6446, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.625797522692425e-06, |
|
"loss": 0.6112, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.550955509171804e-06, |
|
"loss": 0.4442, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 2.477167643848033e-06, |
|
"loss": 0.7012, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.404435566023794e-06, |
|
"loss": 0.4942, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 2.3327608915459378e-06, |
|
"loss": 0.3784, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 2.262145212769562e-06, |
|
"loss": 0.5823, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 2.192590098522701e-06, |
|
"loss": 0.4819, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.1240970940714146e-06, |
|
"loss": 0.3844, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.056667721085481e-06, |
|
"loss": 0.622, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.9903034776045935e-06, |
|
"loss": 0.438, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.9250058380050674e-06, |
|
"loss": 0.5024, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.8607762529671059e-06, |
|
"loss": 0.5313, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.7976161494425493e-06, |
|
"loss": 0.6647, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.7355269306231602e-06, |
|
"loss": 0.6355, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.6745099759094996e-06, |
|
"loss": 0.6087, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.6145666408802496e-06, |
|
"loss": 0.5331, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.5556982572620783e-06, |
|
"loss": 0.419, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.49790613290009e-06, |
|
"loss": 0.5193, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.4411915517287543e-06, |
|
"loss": 0.5199, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.38555577374338e-06, |
|
"loss": 0.4913, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.3310000349721297e-06, |
|
"loss": 0.6488, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.2775255474485436e-06, |
|
"loss": 0.3613, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 1.225133499184633e-06, |
|
"loss": 0.4291, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.1738250541444728e-06, |
|
"loss": 0.3886, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.1236013522183341e-06, |
|
"loss": 0.4071, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.0744635091974098e-06, |
|
"loss": 0.5659, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.0264126167489508e-06, |
|
"loss": 0.4816, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 9.794497423920635e-07, |
|
"loss": 0.5466, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 9.335759294739899e-07, |
|
"loss": 0.3509, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 8.887921971469037e-07, |
|
"loss": 0.485, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 8.450995403452955e-07, |
|
"loss": 0.6001, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 8.024989297638452e-07, |
|
"loss": 0.5909, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 7.609913118358791e-07, |
|
"loss": 0.4665, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 7.205776087123362e-07, |
|
"loss": 0.5521, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 6.812587182412622e-07, |
|
"loss": 0.5523, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 6.430355139478871e-07, |
|
"loss": 0.6478, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 6.059088450152128e-07, |
|
"loss": 0.5499, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 5.698795362651443e-07, |
|
"loss": 0.5434, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 5.349483881401496e-07, |
|
"loss": 0.4236, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 5.011161766855177e-07, |
|
"loss": 0.4867, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 4.683836535320674e-07, |
|
"loss": 0.6196, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 4.3675154587949907e-07, |
|
"loss": 0.5592, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 4.0622055648018573e-07, |
|
"loss": 0.3231, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 3.7679136362360757e-07, |
|
"loss": 0.5, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 3.484646211212528e-07, |
|
"loss": 0.4595, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 3.212409582921128e-07, |
|
"loss": 0.5326, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.951209799486765e-07, |
|
"loss": 0.6091, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 2.7010526638351906e-07, |
|
"loss": 0.3778, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.4619437335640647e-07, |
|
"loss": 0.4767, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.233888320819111e-07, |
|
"loss": 0.4401, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 2.0168914921767644e-07, |
|
"loss": 0.4482, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.8109580685309324e-07, |
|
"loss": 0.7395, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.616092624986354e-07, |
|
"loss": 0.6509, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.4322994907568498e-07, |
|
"loss": 0.495, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.259582749068955e-07, |
|
"loss": 0.6137, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.0979462370714344e-07, |
|
"loss": 0.4285, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 9.473935457500194e-08, |
|
"loss": 0.5902, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 8.079280198473038e-08, |
|
"loss": 0.702, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 6.79552757789026e-08, |
|
"loss": 0.6918, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 5.622706116146237e-08, |
|
"loss": 0.6861, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.560841869142851e-08, |
|
"loss": 0.4192, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 3.609958427708282e-08, |
|
"loss": 0.5755, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.770076917075204e-08, |
|
"loss": 0.5318, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 2.0412159964083853e-08, |
|
"loss": 0.5324, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 1.4233918583939077e-08, |
|
"loss": 0.5296, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 9.166182288750103e-09, |
|
"loss": 0.7441, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 5.209063665506664e-09, |
|
"loss": 0.4587, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.362650627252272e-09, |
|
"loss": 0.671, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 6.270064111024709e-10, |
|
"loss": 0.6712, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.169576873711776e-12, |
|
"loss": 0.4572, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 4.881540062118273e-10, |
|
"loss": 0.3704, |
|
"step": 710 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 710, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"total_flos": 5.732084547059712e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |