{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.980744544287548,
  "eval_steps": 500,
  "global_step": 485,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.6114,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.5742,
      "step": 2
    },
    {
      "epoch": 0.03,
      "learning_rate": 6e-06,
      "loss": 2.4717,
      "step": 3
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.3307,
      "step": 4
    },
    {
      "epoch": 0.05,
      "learning_rate": 1e-05,
      "loss": 2.5579,
      "step": 5
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.999892908320647e-06,
      "loss": 2.1376,
      "step": 6
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.999571637870035e-06,
      "loss": 2.5183,
      "step": 7
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.999036202410324e-06,
      "loss": 2.6324,
      "step": 8
    },
    {
      "epoch": 0.09,
      "learning_rate": 9.998286624877786e-06,
      "loss": 2.6522,
      "step": 9
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.997322937381829e-06,
      "loss": 2.8157,
      "step": 10
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.996145181203616e-06,
      "loss": 2.2681,
      "step": 11
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.994753406794303e-06,
      "loss": 2.4257,
      "step": 12
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.993147673772869e-06,
      "loss": 2.9611,
      "step": 13
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.99132805092358e-06,
      "loss": 2.6448,
      "step": 14
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.989294616193018e-06,
      "loss": 2.1739,
      "step": 15
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.98704745668676e-06,
      "loss": 2.3803,
      "step": 16
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.984586668665641e-06,
      "loss": 3.0124,
      "step": 17
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.981912357541628e-06,
      "loss": 1.8866,
      "step": 18
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.979024637873309e-06,
      "loss": 2.8247,
      "step": 19
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.975923633360985e-06,
      "loss": 2.3442,
      "step": 20
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.972609476841368e-06,
      "loss": 2.2966,
      "step": 21
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.96908231028189e-06,
      "loss": 2.5517,
      "step": 22
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.965342284774633e-06,
      "loss": 1.9759,
      "step": 23
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.961389560529835e-06,
      "loss": 2.2761,
      "step": 24
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.957224306869053e-06,
      "loss": 2.3277,
      "step": 25
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.952846702217886e-06,
      "loss": 2.5624,
      "step": 26
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.948256934098353e-06,
      "loss": 2.379,
      "step": 27
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.943455199120836e-06,
      "loss": 2.3016,
      "step": 28
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.938441702975689e-06,
      "loss": 2.2362,
      "step": 29
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.933216660424396e-06,
      "loss": 2.0744,
      "step": 30
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.92778029529039e-06,
      "loss": 2.6486,
      "step": 31
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.922132840449459e-06,
      "loss": 2.2122,
      "step": 32
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.916274537819774e-06,
      "loss": 2.3736,
      "step": 33
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.91020563835152e-06,
      "loss": 2.0414,
      "step": 34
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.903926402016153e-06,
      "loss": 2.222,
      "step": 35
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.897437097795257e-06,
      "loss": 2.6075,
      "step": 36
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.890738003669029e-06,
      "loss": 2.3274,
      "step": 37
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.883829406604363e-06,
      "loss": 2.3744,
      "step": 38
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.876711602542564e-06,
      "loss": 2.7945,
      "step": 39
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.869384896386669e-06,
      "loss": 2.3775,
      "step": 40
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.861849601988384e-06,
      "loss": 1.9743,
      "step": 41
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.854106042134642e-06,
      "loss": 2.6625,
      "step": 42
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.846154548533773e-06,
      "loss": 2.3661,
      "step": 43
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.8379954618013e-06,
      "loss": 2.2979,
      "step": 44
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.829629131445342e-06,
      "loss": 2.3176,
      "step": 45
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.821055915851647e-06,
      "loss": 2.2117,
      "step": 46
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.812276182268236e-06,
      "loss": 1.9806,
      "step": 47
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.803290306789676e-06,
      "loss": 2.2496,
      "step": 48
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.794098674340966e-06,
      "loss": 2.3943,
      "step": 49
    },
    {
      "epoch": 0.51,
      "learning_rate": 9.784701678661045e-06,
      "loss": 2.0787,
      "step": 50
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.775099722285934e-06,
      "loss": 2.1421,
      "step": 51
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.765293216531486e-06,
      "loss": 2.4088,
      "step": 52
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.755282581475769e-06,
      "loss": 2.1875,
      "step": 53
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.745068245941071e-06,
      "loss": 2.0356,
      "step": 54
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.73465064747553e-06,
      "loss": 2.0821,
      "step": 55
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.72403023233439e-06,
      "loss": 1.8047,
      "step": 56
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.713207455460893e-06,
      "loss": 2.3556,
      "step": 57
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.702182780466775e-06,
      "loss": 2.2028,
      "step": 58
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.690956679612422e-06,
      "loss": 2.5292,
      "step": 59
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.67952963378663e-06,
      "loss": 2.0529,
      "step": 60
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.667902132486009e-06,
      "loss": 2.3667,
      "step": 61
    },
    {
      "epoch": 0.64,
      "learning_rate": 9.656074673794018e-06,
      "loss": 2.2068,
      "step": 62
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.644047764359623e-06,
      "loss": 2.3572,
      "step": 63
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.63182191937559e-06,
      "loss": 1.9575,
      "step": 64
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.619397662556434e-06,
      "loss": 2.1685,
      "step": 65
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.606775526115963e-06,
      "loss": 2.2896,
      "step": 66
    },
    {
      "epoch": 0.69,
      "learning_rate": 9.593956050744493e-06,
      "loss": 2.3626,
      "step": 67
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.58093978558568e-06,
      "loss": 2.0103,
      "step": 68
    },
    {
      "epoch": 0.71,
      "learning_rate": 9.567727288213005e-06,
      "loss": 2.1916,
      "step": 69
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.55431912460588e-06,
      "loss": 2.2239,
      "step": 70
    },
    {
      "epoch": 0.73,
      "learning_rate": 9.540715869125407e-06,
      "loss": 1.8908,
      "step": 71
    },
    {
      "epoch": 0.74,
      "learning_rate": 9.526918104489777e-06,
      "loss": 2.2892,
      "step": 72
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.512926421749305e-06,
      "loss": 1.9166,
      "step": 73
    },
    {
      "epoch": 0.76,
      "learning_rate": 9.498741420261109e-06,
      "loss": 2.0043,
      "step": 74
    },
    {
      "epoch": 0.77,
      "learning_rate": 9.484363707663443e-06,
      "loss": 2.3196,
      "step": 75
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.469793899849663e-06,
      "loss": 2.2653,
      "step": 76
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.45503262094184e-06,
      "loss": 2.034,
      "step": 77
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.440080503264038e-06,
      "loss": 2.1738,
      "step": 78
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.42493818731521e-06,
      "loss": 2.1786,
      "step": 79
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.409606321741776e-06,
      "loss": 1.896,
      "step": 80
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.394085563309827e-06,
      "loss": 2.1103,
      "step": 81
    },
    {
      "epoch": 0.84,
      "learning_rate": 9.378376576876999e-06,
      "loss": 1.8544,
      "step": 82
    },
    {
      "epoch": 0.85,
      "learning_rate": 9.362480035363987e-06,
      "loss": 1.9015,
      "step": 83
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.34639661972572e-06,
      "loss": 2.3878,
      "step": 84
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.330127018922195e-06,
      "loss": 1.9042,
      "step": 85
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.31367192988896e-06,
      "loss": 2.297,
      "step": 86
    },
    {
      "epoch": 0.89,
      "learning_rate": 9.297032057507264e-06,
      "loss": 2.1189,
      "step": 87
    },
    {
      "epoch": 0.9,
      "learning_rate": 9.280208114573859e-06,
      "loss": 2.156,
      "step": 88
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.263200821770462e-06,
      "loss": 2.0641,
      "step": 89
    },
    {
      "epoch": 0.92,
      "learning_rate": 9.246010907632894e-06,
      "loss": 2.3262,
      "step": 90
    },
    {
      "epoch": 0.93,
      "learning_rate": 9.228639108519867e-06,
      "loss": 2.0385,
      "step": 91
    },
    {
      "epoch": 0.94,
      "learning_rate": 9.211086168581433e-06,
      "loss": 2.0262,
      "step": 92
    },
    {
      "epoch": 0.96,
      "learning_rate": 9.193352839727122e-06,
      "loss": 2.4263,
      "step": 93
    },
    {
      "epoch": 0.97,
      "learning_rate": 9.175439881593716e-06,
      "loss": 1.9041,
      "step": 94
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.157348061512728e-06,
      "loss": 2.1657,
      "step": 95
    },
    {
      "epoch": 0.99,
      "learning_rate": 9.139078154477512e-06,
      "loss": 2.2347,
      "step": 96
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.120630943110078e-06,
      "loss": 2.1755,
      "step": 97
    },
    {
      "epoch": 1.01,
      "learning_rate": 9.102007217627568e-06,
      "loss": 2.0328,
      "step": 98
    },
    {
      "epoch": 1.02,
      "learning_rate": 9.083207775808395e-06,
      "loss": 2.149,
      "step": 99
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.064233422958078e-06,
      "loss": 2.1247,
      "step": 100
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.045084971874738e-06,
      "loss": 1.9357,
      "step": 101
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.025763242814291e-06,
      "loss": 1.8592,
      "step": 102
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.006269063455305e-06,
      "loss": 2.0261,
      "step": 103
    },
    {
      "epoch": 1.07,
      "learning_rate": 8.986603268863536e-06,
      "loss": 1.9487,
      "step": 104
    },
    {
      "epoch": 1.08,
      "learning_rate": 8.966766701456177e-06,
      "loss": 2.1715,
      "step": 105
    },
    {
      "epoch": 1.09,
      "learning_rate": 8.94676021096575e-06,
      "loss": 2.1337,
      "step": 106
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.926584654403725e-06,
      "loss": 2.3317,
      "step": 107
    },
    {
      "epoch": 1.11,
      "learning_rate": 8.906240896023794e-06,
      "loss": 2.0257,
      "step": 108
    },
    {
      "epoch": 1.12,
      "learning_rate": 8.885729807284855e-06,
      "loss": 2.0842,
      "step": 109
    },
    {
      "epoch": 1.13,
      "learning_rate": 8.865052266813686e-06,
      "loss": 2.2196,
      "step": 110
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.844209160367298e-06,
      "loss": 2.0957,
      "step": 111
    },
    {
      "epoch": 1.15,
      "learning_rate": 8.823201380795003e-06,
      "loss": 2.1453,
      "step": 112
    },
    {
      "epoch": 1.16,
      "learning_rate": 8.802029828000157e-06,
      "loss": 2.1887,
      "step": 113
    },
    {
      "epoch": 1.17,
      "learning_rate": 8.780695408901613e-06,
      "loss": 2.032,
      "step": 114
    },
    {
      "epoch": 1.18,
      "learning_rate": 8.759199037394888e-06,
      "loss": 2.2083,
      "step": 115
    },
    {
      "epoch": 1.19,
      "learning_rate": 8.737541634312985e-06,
      "loss": 2.3168,
      "step": 116
    },
    {
      "epoch": 1.2,
      "learning_rate": 8.715724127386971e-06,
      "loss": 1.9182,
      "step": 117
    },
    {
      "epoch": 1.21,
      "learning_rate": 8.693747451206231e-06,
      "loss": 1.8499,
      "step": 118
    },
    {
      "epoch": 1.22,
      "learning_rate": 8.671612547178428e-06,
      "loss": 2.1667,
      "step": 119
    },
    {
      "epoch": 1.23,
      "learning_rate": 8.649320363489178e-06,
      "loss": 1.9357,
      "step": 120
    },
    {
      "epoch": 1.24,
      "learning_rate": 8.626871855061438e-06,
      "loss": 2.0429,
      "step": 121
    },
    {
      "epoch": 1.25,
      "learning_rate": 8.604267983514595e-06,
      "loss": 1.8081,
      "step": 122
    },
    {
      "epoch": 1.26,
      "learning_rate": 8.581509717123272e-06,
      "loss": 1.6956,
      "step": 123
    },
    {
      "epoch": 1.27,
      "learning_rate": 8.558598030775857e-06,
      "loss": 2.2946,
      "step": 124
    },
    {
      "epoch": 1.28,
      "learning_rate": 8.535533905932739e-06,
      "loss": 2.1193,
      "step": 125
    },
    {
      "epoch": 1.29,
      "learning_rate": 8.51231833058426e-06,
      "loss": 2.327,
      "step": 126
    },
    {
      "epoch": 1.3,
      "learning_rate": 8.488952299208402e-06,
      "loss": 1.911,
      "step": 127
    },
    {
      "epoch": 1.31,
      "learning_rate": 8.465436812728181e-06,
      "loss": 2.2266,
      "step": 128
    },
    {
      "epoch": 1.32,
      "learning_rate": 8.44177287846877e-06,
      "loss": 2.1731,
      "step": 129
    },
    {
      "epoch": 1.34,
      "learning_rate": 8.417961510114357e-06,
      "loss": 2.2541,
      "step": 130
    },
    {
      "epoch": 1.35,
      "learning_rate": 8.39400372766471e-06,
      "loss": 2.0563,
      "step": 131
    },
    {
      "epoch": 1.36,
      "learning_rate": 8.36990055739149e-06,
      "loss": 1.8764,
      "step": 132
    },
    {
      "epoch": 1.37,
      "learning_rate": 8.345653031794292e-06,
      "loss": 2.3466,
      "step": 133
    },
    {
      "epoch": 1.38,
      "learning_rate": 8.32126218955641e-06,
      "loss": 2.2691,
      "step": 134
    },
    {
      "epoch": 1.39,
      "learning_rate": 8.296729075500345e-06,
      "loss": 2.1656,
      "step": 135
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.272054740543053e-06,
      "loss": 2.2326,
      "step": 136
    },
    {
      "epoch": 1.41,
      "learning_rate": 8.247240241650918e-06,
      "loss": 1.8526,
      "step": 137
    },
    {
      "epoch": 1.42,
      "learning_rate": 8.222286641794488e-06,
      "loss": 1.6012,
      "step": 138
    },
    {
      "epoch": 1.43,
      "learning_rate": 8.197195009902924e-06,
      "loss": 1.947,
      "step": 139
    },
    {
      "epoch": 1.44,
      "learning_rate": 8.171966420818227e-06,
      "loss": 1.8734,
      "step": 140
    },
    {
      "epoch": 1.45,
      "learning_rate": 8.146601955249187e-06,
      "loss": 1.756,
      "step": 141
    },
    {
      "epoch": 1.46,
      "learning_rate": 8.12110269972509e-06,
      "loss": 1.7878,
      "step": 142
    },
    {
      "epoch": 1.47,
      "learning_rate": 8.095469746549172e-06,
      "loss": 2.0159,
      "step": 143
    },
    {
      "epoch": 1.48,
      "learning_rate": 8.069704193751834e-06,
      "loss": 2.1754,
      "step": 144
    },
    {
      "epoch": 1.49,
      "learning_rate": 8.043807145043604e-06,
      "loss": 2.0156,
      "step": 145
    },
    {
      "epoch": 1.5,
      "learning_rate": 8.017779709767857e-06,
      "loss": 1.938,
      "step": 146
    },
    {
      "epoch": 1.51,
      "learning_rate": 7.991623002853296e-06,
      "loss": 1.9589,
      "step": 147
    },
    {
      "epoch": 1.52,
      "learning_rate": 7.965338144766186e-06,
      "loss": 2.0535,
      "step": 148
    },
    {
      "epoch": 1.53,
      "learning_rate": 7.938926261462366e-06,
      "loss": 2.0857,
      "step": 149
    },
    {
      "epoch": 1.54,
      "learning_rate": 7.912388484339012e-06,
      "loss": 2.2401,
      "step": 150
    },
    {
      "epoch": 1.55,
      "learning_rate": 7.88572595018617e-06,
      "loss": 1.9925,
      "step": 151
    },
    {
      "epoch": 1.56,
      "learning_rate": 7.858939801138061e-06,
      "loss": 2.2118,
      "step": 152
    },
    {
      "epoch": 1.57,
      "learning_rate": 7.832031184624165e-06,
      "loss": 1.8856,
      "step": 153
    },
    {
      "epoch": 1.58,
      "learning_rate": 7.80500125332005e-06,
      "loss": 1.8032,
      "step": 154
    },
    {
      "epoch": 1.59,
      "learning_rate": 7.777851165098012e-06,
      "loss": 2.0556,
      "step": 155
    },
    {
      "epoch": 1.6,
      "learning_rate": 7.750582082977468e-06,
      "loss": 2.1473,
      "step": 156
    },
    {
      "epoch": 1.61,
      "learning_rate": 7.723195175075136e-06,
      "loss": 1.767,
      "step": 157
    },
    {
      "epoch": 1.62,
      "learning_rate": 7.695691614555002e-06,
      "loss": 1.8623,
      "step": 158
    },
    {
      "epoch": 1.63,
      "learning_rate": 7.66807257957806e-06,
      "loss": 1.9875,
      "step": 159
    },
    {
      "epoch": 1.64,
      "learning_rate": 7.64033925325184e-06,
      "loss": 2.0664,
      "step": 160
    },
    {
      "epoch": 1.65,
      "learning_rate": 7.612492823579744e-06,
      "loss": 1.9954,
      "step": 161
    },
    {
      "epoch": 1.66,
      "learning_rate": 7.584534483410137e-06,
      "loss": 1.7128,
      "step": 162
    },
    {
      "epoch": 1.67,
      "learning_rate": 7.55646543038526e-06,
      "loss": 2.4184,
      "step": 163
    },
    {
      "epoch": 1.68,
      "learning_rate": 7.528286866889924e-06,
      "loss": 2.2349,
      "step": 164
    },
    {
      "epoch": 1.69,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.9001,
      "step": 165
    },
    {
      "epoch": 1.7,
      "learning_rate": 7.471606041430724e-06,
      "loss": 1.8973,
      "step": 166
    },
    {
      "epoch": 1.72,
      "learning_rate": 7.443106207484776e-06,
      "loss": 2.0085,
      "step": 167
    },
    {
      "epoch": 1.73,
      "learning_rate": 7.414501719000187e-06,
      "loss": 1.5986,
      "step": 168
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.3857938012980425e-06,
      "loss": 2.2827,
      "step": 169
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.3569836841299905e-06,
      "loss": 1.8377,
      "step": 170
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.328072601625558e-06,
      "loss": 1.9012,
      "step": 171
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.2990617922393e-06,
      "loss": 1.8906,
      "step": 172
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.269952498697734e-06,
      "loss": 2.0498,
      "step": 173
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.240745967946113e-06,
      "loss": 2.1313,
      "step": 174
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.211443451095007e-06,
      "loss": 1.8483,
      "step": 175
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.18204620336671e-06,
      "loss": 2.0565,
      "step": 176
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.1525554840414765e-06,
      "loss": 1.8597,
      "step": 177
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.1229725564035665e-06,
      "loss": 2.0648,
      "step": 178
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.093298687687141e-06,
      "loss": 2.0188,
      "step": 179
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.063535149021974e-06,
      "loss": 1.8016,
      "step": 180
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.033683215379002e-06,
      "loss": 2.2722,
      "step": 181
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.0037441655157045e-06,
      "loss": 2.044,
      "step": 182
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.973719281921336e-06,
      "loss": 2.0889,
      "step": 183
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.943609850761979e-06,
      "loss": 1.9273,
      "step": 184
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.913417161825449e-06,
      "loss": 1.8093,
      "step": 185
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.883142508466054e-06,
      "loss": 1.8979,
      "step": 186
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.852787187549182e-06,
      "loss": 1.5268,
      "step": 187
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.822352499395751e-06,
      "loss": 2.2526,
      "step": 188
    },
    {
      "epoch": 1.94,
      "learning_rate": 6.7918397477265e-06,
      "loss": 2.0474,
      "step": 189
    },
    {
      "epoch": 1.95,
      "learning_rate": 6.7612502396061685e-06,
      "loss": 2.0026,
      "step": 190
    },
    {
      "epoch": 1.96,
      "learning_rate": 6.730585285387465e-06,
      "loss": 2.0033,
      "step": 191
    },
    {
      "epoch": 1.97,
      "learning_rate": 6.6998461986549715e-06,
      "loss": 1.719,
      "step": 192
    },
    {
      "epoch": 1.98,
      "learning_rate": 6.669034296168855e-06,
      "loss": 1.829,
      "step": 193
    },
    {
      "epoch": 1.99,
      "learning_rate": 6.638150897808469e-06,
      "loss": 2.0211,
      "step": 194
    },
    {
      "epoch": 2.0,
      "learning_rate": 6.607197326515808e-06,
      "loss": 2.052,
      "step": 195
    },
    {
      "epoch": 2.01,
      "learning_rate": 6.57617490823885e-06,
      "loss": 1.9236,
      "step": 196
    },
    {
      "epoch": 2.02,
      "learning_rate": 6.545084971874738e-06,
      "loss": 1.989,
      "step": 197
    },
    {
      "epoch": 2.03,
      "learning_rate": 6.513928849212874e-06,
      "loss": 1.7597,
      "step": 198
    },
    {
      "epoch": 2.04,
      "learning_rate": 6.482707874877855e-06,
      "loss": 2.0161,
      "step": 199
    },
    {
      "epoch": 2.05,
      "learning_rate": 6.451423386272312e-06,
      "loss": 2.167,
      "step": 200
    },
    {
      "epoch": 2.06,
      "learning_rate": 6.420076723519615e-06,
      "loss": 2.1043,
      "step": 201
    },
    {
      "epoch": 2.07,
      "learning_rate": 6.388669229406462e-06,
      "loss": 1.7945,
      "step": 202
    },
    {
      "epoch": 2.08,
      "learning_rate": 6.3572022493253715e-06,
      "loss": 1.8759,
      "step": 203
    },
    {
      "epoch": 2.09,
      "learning_rate": 6.325677131217041e-06,
      "loss": 1.9408,
      "step": 204
    },
    {
      "epoch": 2.11,
      "learning_rate": 6.294095225512604e-06,
      "loss": 1.9382,
      "step": 205
    },
    {
      "epoch": 2.12,
      "learning_rate": 6.26245788507579e-06,
      "loss": 1.953,
      "step": 206
    },
    {
      "epoch": 2.13,
      "learning_rate": 6.230766465144966e-06,
      "loss": 1.9079,
      "step": 207
    },
    {
      "epoch": 2.14,
      "learning_rate": 6.199022323275083e-06,
      "loss": 2.141,
      "step": 208
    },
    {
      "epoch": 2.15,
      "learning_rate": 6.1672268192795285e-06,
      "loss": 1.7766,
      "step": 209
    },
    {
      "epoch": 2.16,
      "learning_rate": 6.135381315171867e-06,
      "loss": 1.8225,
      "step": 210
    },
    {
      "epoch": 2.17,
      "learning_rate": 6.103487175107508e-06,
      "loss": 1.9764,
      "step": 211
    },
    {
      "epoch": 2.18,
      "learning_rate": 6.071545765325254e-06,
      "loss": 1.815,
      "step": 212
    },
    {
      "epoch": 2.19,
      "learning_rate": 6.039558454088796e-06,
      "loss": 2.0872,
      "step": 213
    },
    {
      "epoch": 2.2,
      "learning_rate": 6.0075266116280865e-06,
      "loss": 2.0516,
      "step": 214
    },
    {
      "epoch": 2.21,
      "learning_rate": 5.975451610080643e-06,
      "loss": 1.9711,
      "step": 215
    },
    {
      "epoch": 2.22,
      "learning_rate": 5.943334823432777e-06,
      "loss": 2.0024,
      "step": 216
    },
    {
      "epoch": 2.23,
      "learning_rate": 5.911177627460739e-06,
      "loss": 2.0042,
      "step": 217
    },
    {
      "epoch": 2.24,
      "learning_rate": 5.878981399671774e-06,
      "loss": 1.7748,
      "step": 218
    },
    {
      "epoch": 2.25,
      "learning_rate": 5.846747519245123e-06,
      "loss": 2.0583,
      "step": 219
    },
    {
      "epoch": 2.26,
      "learning_rate": 5.814477366972945e-06,
      "loss": 2.1047,
      "step": 220
    },
    {
      "epoch": 2.27,
      "learning_rate": 5.782172325201155e-06,
      "loss": 1.9893,
      "step": 221
    },
    {
      "epoch": 2.28,
      "learning_rate": 5.749833777770225e-06,
      "loss": 1.8167,
      "step": 222
    },
    {
      "epoch": 2.29,
      "learning_rate": 5.717463109955896e-06,
      "loss": 1.8661,
      "step": 223
    },
    {
      "epoch": 2.3,
      "learning_rate": 5.6850617084098416e-06,
      "loss": 2.0974,
      "step": 224
    },
    {
      "epoch": 2.31,
      "learning_rate": 5.65263096110026e-06,
      "loss": 1.8873,
      "step": 225
    },
    {
      "epoch": 2.32,
      "learning_rate": 5.620172257252427e-06,
      "loss": 1.7506,
      "step": 226
    },
    {
      "epoch": 2.33,
      "learning_rate": 5.587686987289189e-06,
      "loss": 1.9003,
      "step": 227
    },
    {
      "epoch": 2.34,
      "learning_rate": 5.555176542771389e-06,
      "loss": 1.906,
      "step": 228
    },
    {
      "epoch": 2.35,
      "learning_rate": 5.522642316338268e-06,
      "loss": 1.935,
      "step": 229
    },
    {
      "epoch": 2.36,
      "learning_rate": 5.490085701647805e-06,
      "loss": 1.7954,
      "step": 230
    },
    {
      "epoch": 2.37,
      "learning_rate": 5.457508093317013e-06,
      "loss": 1.9815,
      "step": 231
    },
    {
      "epoch": 2.38,
      "learning_rate": 5.4249108868622095e-06,
      "loss": 1.6584,
      "step": 232
    },
    {
      "epoch": 2.39,
      "learning_rate": 5.392295478639226e-06,
      "loss": 1.8019,
      "step": 233
    },
    {
      "epoch": 2.4,
      "learning_rate": 5.3596632657835975e-06,
      "loss": 2.1204,
      "step": 234
    },
    {
      "epoch": 2.41,
      "learning_rate": 5.327015646150716e-06,
      "loss": 2.1109,
      "step": 235
    },
    {
      "epoch": 2.42,
      "learning_rate": 5.294354018255945e-06,
      "loss": 1.9626,
      "step": 236
    },
    {
      "epoch": 2.43,
      "learning_rate": 5.2616797812147205e-06,
      "loss": 1.8431,
      "step": 237
    },
    {
      "epoch": 2.44,
      "learning_rate": 5.228994334682605e-06,
      "loss": 1.818,
      "step": 238
    },
    {
      "epoch": 2.45,
      "learning_rate": 5.1962990787953436e-06,
      "loss": 1.8086,
      "step": 239
    },
    {
      "epoch": 2.46,
      "learning_rate": 5.1635954141088815e-06,
      "loss": 1.8096,
      "step": 240
    },
    {
      "epoch": 2.47,
      "learning_rate": 5.130884741539367e-06,
      "loss": 1.9406,
      "step": 241
    },
    {
      "epoch": 2.49,
      "learning_rate": 5.098168462303141e-06,
      "loss": 2.0109,
      "step": 242
    },
    {
      "epoch": 2.5,
      "learning_rate": 5.065447977856723e-06,
      "loss": 1.8903,
      "step": 243
    },
    {
      "epoch": 2.51,
      "learning_rate": 5.0327246898367595e-06,
      "loss": 1.9648,
      "step": 244
    },
    {
      "epoch": 2.52,
      "learning_rate": 5e-06,
      "loss": 1.9892,
      "step": 245
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.967275310163241e-06,
      "loss": 2.0007,
      "step": 246
    },
    {
      "epoch": 2.54,
      "learning_rate": 4.934552022143279e-06,
      "loss": 1.7414,
      "step": 247
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.90183153769686e-06,
      "loss": 1.701,
      "step": 248
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.869115258460636e-06,
      "loss": 2.3587,
      "step": 249
    },
    {
      "epoch": 2.57,
      "learning_rate": 4.83640458589112e-06,
      "loss": 1.8817,
      "step": 250
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.803700921204659e-06,
      "loss": 1.9942,
      "step": 251
    },
    {
      "epoch": 2.59,
      "learning_rate": 4.771005665317398e-06,
      "loss": 2.2835,
      "step": 252
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.738320218785281e-06,
      "loss": 1.4821,
      "step": 253
    },
    {
      "epoch": 2.61,
      "learning_rate": 4.705645981744055e-06,
      "loss": 1.8867,
      "step": 254
    },
    {
      "epoch": 2.62,
      "learning_rate": 4.672984353849285e-06,
      "loss": 2.2334,
      "step": 255
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.640336734216403e-06,
      "loss": 1.8587,
      "step": 256
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.6077045213607765e-06,
      "loss": 1.8544,
      "step": 257
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.575089113137792e-06,
      "loss": 2.0754,
      "step": 258
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.542491906682988e-06,
      "loss": 2.0149,
      "step": 259
    },
    {
      "epoch": 2.67,
      "learning_rate": 4.509914298352197e-06,
      "loss": 1.9791,
      "step": 260
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.477357683661734e-06,
      "loss": 1.6395,
      "step": 261
    },
    {
      "epoch": 2.69,
      "learning_rate": 4.4448234572286126e-06,
      "loss": 2.11,
      "step": 262
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.4123130127108125e-06,
      "loss": 2.041,
      "step": 263
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.379827742747575e-06,
      "loss": 2.2184,
      "step": 264
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.347369038899744e-06,
      "loss": 1.7426,
      "step": 265
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.314938291590161e-06,
      "loss": 1.9714,
      "step": 266
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.282536890044105e-06,
      "loss": 2.0392,
      "step": 267
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.250166222229775e-06,
      "loss": 2.2633,
      "step": 268
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.217827674798845e-06,
      "loss": 2.0275,
      "step": 269
    },
    {
      "epoch": 2.77,
      "learning_rate": 4.185522633027057e-06,
      "loss": 1.8916,
      "step": 270
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.1532524807548776e-06,
      "loss": 1.7847,
      "step": 271
    },
    {
      "epoch": 2.79,
      "learning_rate": 4.1210186003282275e-06,
      "loss": 1.9785,
      "step": 272
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.088822372539263e-06,
      "loss": 1.858,
      "step": 273
    },
    {
      "epoch": 2.81,
      "learning_rate": 4.056665176567225e-06,
      "loss": 2.0106,
      "step": 274
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.02454838991936e-06,
      "loss": 1.9469,
      "step": 275
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.992473388371914e-06,
      "loss": 2.327,
      "step": 276
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.960441545911205e-06,
      "loss": 1.9751,
      "step": 277
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.928454234674748e-06,
      "loss": 1.7995,
      "step": 278
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.8965128248924956e-06,
      "loss": 1.853,
      "step": 279
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.864618684828135e-06,
      "loss": 2.0151,
      "step": 280
    },
    {
      "epoch": 2.89,
      "learning_rate": 3.832773180720475e-06,
      "loss": 2.1437,
      "step": 281
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.800977676724919e-06,
      "loss": 2.1601,
      "step": 282
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.769233534855035e-06,
      "loss": 1.9736,
      "step": 283
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.7375421149242102e-06,
      "loss": 1.7847,
      "step": 284
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.705904774487396e-06,
      "loss": 2.1208,
      "step": 285
    },
    {
      "epoch": 2.94,
      "learning_rate": 3.6743228687829596e-06,
      "loss": 2.009,
      "step": 286
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.6427977506746293e-06,
      "loss": 1.6825,
      "step": 287
    },
    {
      "epoch": 2.96,
      "learning_rate": 3.6113307705935398e-06,
      "loss": 1.6349,
      "step": 288
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.579923276480387e-06,
      "loss": 2.0538,
      "step": 289
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.5485766137276894e-06,
      "loss": 2.0987,
      "step": 290
    },
    {
      "epoch": 2.99,
      "learning_rate": 3.517292125122146e-06,
      "loss": 1.9249,
      "step": 291
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.486071150787128e-06,
      "loss": 2.1965,
      "step": 292
    },
    {
      "epoch": 3.01,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 1.8131,
      "step": 293
    },
    {
      "epoch": 3.02,
      "learning_rate": 3.4238250917611533e-06,
      "loss": 1.7838,
      "step": 294
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.3928026734841935e-06,
      "loss": 1.7282,
      "step": 295
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.3618491021915334e-06,
      "loss": 1.9375,
      "step": 296
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.330965703831146e-06,
      "loss": 1.6664,
      "step": 297
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.3001538013450285e-06,
      "loss": 2.164,
      "step": 298
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.269414714612534e-06,
      "loss": 2.0269,
      "step": 299
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.2387497603938327e-06,
      "loss": 1.9625,
      "step": 300
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.2081602522734987e-06,
      "loss": 2.2313,
      "step": 301
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.177647500604252e-06,
      "loss": 2.1034,
      "step": 302
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.147212812450819e-06,
      "loss": 2.0628,
      "step": 303
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.1168574915339465e-06,
      "loss": 2.2198,
      "step": 304
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.0865828381745515e-06,
      "loss": 2.3057,
      "step": 305
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.056390149238022e-06,
      "loss": 1.8151,
      "step": 306
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.0262807180786647e-06,
      "loss": 2.0671,
      "step": 307
    },
    {
      "epoch": 3.16,
      "learning_rate": 2.9962558344842963e-06,
      "loss": 2.1226,
      "step": 308
    },
    {
      "epoch": 3.17,
      "learning_rate": 2.966316784621e-06,
      "loss": 2.0534,
      "step": 309
    },
    {
      "epoch": 3.18,
      "learning_rate": 2.936464850978027e-06,
      "loss": 2.1771,
      "step": 310
    },
    {
      "epoch": 3.19,
      "learning_rate": 2.906701312312861e-06,
      "loss": 2.1316,
      "step": 311
    },
    {
      "epoch": 3.2,
      "learning_rate": 2.8770274435964356e-06,
      "loss": 1.8236,
      "step": 312
    },
    {
      "epoch": 3.21,
      "learning_rate": 2.8474445159585235e-06,
      "loss": 1.8514,
      "step": 313
    },
    {
      "epoch": 3.22,
      "learning_rate": 2.817953796633289e-06,
      "loss": 2.1408,
      "step": 314
    },
    {
      "epoch": 3.23,
      "learning_rate": 2.7885565489049948e-06,
      "loss": 1.7615,
      "step": 315
    },
    {
      "epoch": 3.25,
      "learning_rate": 2.759254032053888e-06,
      "loss": 1.8991,
      "step": 316
    },
    {
      "epoch": 3.26,
      "learning_rate": 2.7300475013022666e-06,
      "loss": 1.8208,
      "step": 317
    },
    {
      "epoch": 3.27,
      "learning_rate": 2.700938207760701e-06,
      "loss": 1.6085,
      "step": 318
    },
    {
      "epoch": 3.28,
      "learning_rate": 2.671927398374443e-06,
      "loss": 1.8248,
      "step": 319
    },
    {
      "epoch": 3.29,
      "learning_rate": 2.6430163158700116e-06,
      "loss": 1.95,
      "step": 320
    },
    {
      "epoch": 3.3,
      "learning_rate": 2.614206198701958e-06,
      "loss": 1.7459,
      "step": 321
    },
    {
      "epoch": 3.31,
      "learning_rate": 2.5854982809998154e-06,
      "loss": 2.2413,
      "step": 322
    },
    {
      "epoch": 3.32,
      "learning_rate": 2.5568937925152272e-06,
      "loss": 2.181,
      "step": 323
    },
    {
      "epoch": 3.33,
      "learning_rate": 2.5283939585692787e-06,
      "loss": 1.6875,
      "step": 324
    },
    {
      "epoch": 3.34,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 1.692,
      "step": 325
    },
    {
      "epoch": 3.35,
      "learning_rate": 2.471713133110078e-06,
      "loss": 1.8894,
      "step": 326
    },
    {
      "epoch": 3.36,
      "learning_rate": 2.4435345696147404e-06,
      "loss": 2.2015,
      "step": 327
    },
    {
      "epoch": 3.37,
      "learning_rate": 2.4154655165898626e-06,
      "loss": 1.8859,
      "step": 328
    },
    {
      "epoch": 3.38,
      "learning_rate": 2.387507176420256e-06,
      "loss": 2.0344,
      "step": 329
    },
    {
      "epoch": 3.39,
      "learning_rate": 2.3596607467481602e-06,
      "loss": 1.8513,
      "step": 330
    },
    {
      "epoch": 3.4,
      "learning_rate": 2.3319274204219427e-06,
      "loss": 1.9896,
      "step": 331
    },
    {
      "epoch": 3.41,
      "learning_rate": 2.304308385444999e-06,
      "loss": 1.6932,
      "step": 332
    },
    {
      "epoch": 3.42,
      "learning_rate": 2.2768048249248648e-06,
      "loss": 2.0101,
      "step": 333
    },
    {
      "epoch": 3.43,
      "learning_rate": 2.2494179170225333e-06,
      "loss": 1.793,
      "step": 334
    },
    {
      "epoch": 3.44,
      "learning_rate": 2.2221488349019903e-06,
      "loss": 1.8287,
      "step": 335
    },
    {
      "epoch": 3.45,
      "learning_rate": 2.1949987466799524e-06,
      "loss": 2.2081,
      "step": 336
    },
    {
      "epoch": 3.46,
      "learning_rate": 2.1679688153758373e-06,
      "loss": 1.9535,
      "step": 337
    },
    {
      "epoch": 3.47,
      "learning_rate": 2.1410601988619394e-06,
      "loss": 1.9921,
      "step": 338
    },
    {
      "epoch": 3.48,
      "learning_rate": 2.1142740498138327e-06,
      "loss": 2.0291,
      "step": 339
    },
    {
      "epoch": 3.49,
      "learning_rate": 2.08761151566099e-06,
      "loss": 2.063,
      "step": 340
    },
    {
      "epoch": 3.5,
      "learning_rate": 2.061073738537635e-06,
      "loss": 1.5788,
      "step": 341
    },
    {
      "epoch": 3.51,
      "learning_rate": 2.034661855233815e-06,
      "loss": 2.2809,
      "step": 342
    },
    {
      "epoch": 3.52,
      "learning_rate": 2.008376997146705e-06,
      "loss": 1.9602,
      "step": 343
    },
    {
      "epoch": 3.53,
      "learning_rate": 1.982220290232143e-06,
      "loss": 1.507,
      "step": 344
    },
    {
      "epoch": 3.54,
      "learning_rate": 1.956192854956397e-06,
      "loss": 1.5588,
      "step": 345
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.9302958062481673e-06,
      "loss": 1.9348,
      "step": 346
    },
    {
      "epoch": 3.56,
      "learning_rate": 1.9045302534508298e-06,
      "loss": 1.8372,
      "step": 347
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.8788973002749112e-06,
      "loss": 1.672,
      "step": 348
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.8533980447508138e-06,
      "loss": 1.8803,
      "step": 349
    },
    {
      "epoch": 3.59,
      "learning_rate": 1.8280335791817733e-06,
      "loss": 2.133,
      "step": 350
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.8028049900970768e-06,
      "loss": 1.7064,
      "step": 351
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.777713358205514e-06,
      "loss": 1.8624,
      "step": 352
    },
    {
      "epoch": 3.63,
      "learning_rate": 1.7527597583490825e-06,
      "loss": 1.7757,
      "step": 353
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.7279452594569484e-06,
      "loss": 1.9208,
      "step": 354
    },
    {
      "epoch": 3.65,
      "learning_rate": 1.7032709244996559e-06,
      "loss": 1.9518,
      "step": 355
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.6787378104435931e-06,
      "loss": 1.6618,
      "step": 356
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.6543469682057105e-06,
      "loss": 2.1538,
      "step": 357
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.6300994426085103e-06,
      "loss": 1.9981,
      "step": 358
    },
    {
      "epoch": 3.69,
      "learning_rate": 1.6059962723352912e-06,
      "loss": 1.9644,
      "step": 359
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.5820384898856433e-06,
      "loss": 2.0848,
      "step": 360
    },
    {
      "epoch": 3.71,
      "learning_rate": 1.5582271215312294e-06,
      "loss": 2.0623,
      "step": 361
    },
    {
      "epoch": 3.72,
      "learning_rate": 1.5345631872718214e-06,
      "loss": 1.9082,
      "step": 362
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.5110477007916002e-06,
      "loss": 1.944,
      "step": 363
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.487681669415742e-06,
      "loss": 1.7845,
      "step": 364
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 2.0057,
      "step": 365
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.4414019692241437e-06,
      "loss": 2.2701,
      "step": 366
    },
    {
      "epoch": 3.77,
      "learning_rate": 1.4184902828767288e-06,
      "loss": 1.8745,
      "step": 367
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.395732016485406e-06,
      "loss": 1.8174,
      "step": 368
    },
    {
      "epoch": 3.79,
      "learning_rate": 1.373128144938563e-06,
      "loss": 1.9715,
      "step": 369
    },
    {
      "epoch": 3.8,
      "learning_rate": 1.3506796365108232e-06,
      "loss": 1.8625,
      "step": 370
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.3283874528215735e-06,
      "loss": 2.1635,
      "step": 371
    },
    {
      "epoch": 3.82,
      "learning_rate": 1.30625254879377e-06,
      "loss": 1.9431,
      "step": 372
    },
    {
      "epoch": 3.83,
      "learning_rate": 1.2842758726130283e-06,
      "loss": 1.7817,
      "step": 373
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.2624583656870153e-06,
      "loss": 2.1267,
      "step": 374
    },
    {
      "epoch": 3.85,
      "learning_rate": 1.2408009626051137e-06,
      "loss": 2.2342,
      "step": 375
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.2193045910983864e-06,
      "loss": 1.8949,
      "step": 376
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.1979701719998454e-06,
      "loss": 1.8478,
      "step": 377
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.1767986192049986e-06,
      "loss": 1.927,
      "step": 378
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.1557908396327028e-06,
      "loss": 1.7817,
      "step": 379
    },
    {
      "epoch": 3.9,
      "learning_rate": 1.134947733186315e-06,
      "loss": 2.1291,
      "step": 380
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.1142701927151456e-06,
      "loss": 2.0033,
      "step": 381
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.0937591039762086e-06,
      "loss": 1.8841,
      "step": 382
    },
    {
      "epoch": 3.93,
      "learning_rate": 1.0734153455962765e-06,
      "loss": 1.4088,
      "step": 383
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.0532397890342506e-06,
      "loss": 1.9065,
      "step": 384
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.0332332985438248e-06,
      "loss": 1.9917,
      "step": 385
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.013396731136465e-06,
      "loss": 1.7053,
      "step": 386
    },
    {
      "epoch": 3.97,
      "learning_rate": 9.937309365446973e-07,
      "loss": 1.7789,
      "step": 387
    },
    {
      "epoch": 3.98,
      "learning_rate": 9.742367571857092e-07,
      "loss": 1.9505,
      "step": 388
    },
    {
      "epoch": 3.99,
      "learning_rate": 9.549150281252633e-07,
      "loss": 1.6999,
      "step": 389
    },
    {
      "epoch": 4.01,
      "learning_rate": 9.357665770419244e-07,
      "loss": 1.8436,
      "step": 390
    },
    {
      "epoch": 4.02,
      "learning_rate": 9.167922241916055e-07,
      "loss": 1.9749,
      "step": 391
    },
    {
      "epoch": 4.03,
      "learning_rate": 8.979927823724321e-07,
      "loss": 2.0322,
      "step": 392
    },
    {
      "epoch": 4.04,
      "learning_rate": 8.793690568899216e-07,
      "loss": 2.0703,
      "step": 393
    },
    {
      "epoch": 4.05,
      "learning_rate": 8.609218455224893e-07,
      "loss": 1.9611,
      "step": 394
    },
    {
      "epoch": 4.06,
      "learning_rate": 8.426519384872733e-07,
      "loss": 2.1701,
      "step": 395
    },
    {
      "epoch": 4.07,
      "learning_rate": 8.245601184062851e-07,
      "loss": 2.0095,
      "step": 396
    },
    {
      "epoch": 4.08,
      "learning_rate": 8.066471602728804e-07,
      "loss": 1.8908,
      "step": 397
    },
    {
      "epoch": 4.09,
      "learning_rate": 7.88913831418568e-07,
      "loss": 2.1547,
      "step": 398
    },
    {
      "epoch": 4.1,
      "learning_rate": 7.71360891480134e-07,
      "loss": 1.9623,
      "step": 399
    },
    {
      "epoch": 4.11,
      "learning_rate": 7.539890923671061e-07,
      "loss": 1.8405,
      "step": 400
    },
    {
      "epoch": 4.12,
      "learning_rate": 7.367991782295392e-07,
      "loss": 1.926,
      "step": 401
    },
    {
      "epoch": 4.13,
      "learning_rate": 7.197918854261432e-07,
      "loss": 2.2612,
      "step": 402
    },
    {
      "epoch": 4.14,
      "learning_rate": 7.029679424927366e-07,
      "loss": 2.0554,
      "step": 403
    },
    {
      "epoch": 4.15,
      "learning_rate": 6.863280701110409e-07,
      "loss": 1.7727,
      "step": 404
    },
    {
      "epoch": 4.16,
      "learning_rate": 6.698729810778065e-07,
      "loss": 1.8858,
      "step": 405
    },
    {
      "epoch": 4.17,
      "learning_rate": 6.536033802742814e-07,
      "loss": 1.8505,
      "step": 406
    },
    {
      "epoch": 4.18,
      "learning_rate": 6.375199646360142e-07,
      "loss": 2.0033,
      "step": 407
    },
    {
      "epoch": 4.19,
      "learning_rate": 6.216234231230012e-07,
      "loss": 2.0853,
      "step": 408
    },
    {
      "epoch": 4.2,
      "learning_rate": 6.059144366901737e-07,
      "loss": 1.684,
      "step": 409
    },
    {
      "epoch": 4.21,
      "learning_rate": 5.903936782582253e-07,
      "loss": 2.0125,
      "step": 410
    },
    {
      "epoch": 4.22,
      "learning_rate": 5.750618126847912e-07,
      "loss": 1.7071,
      "step": 411
    },
    {
      "epoch": 4.23,
      "learning_rate": 5.599194967359639e-07,
      "loss": 2.0058,
      "step": 412
    },
    {
      "epoch": 4.24,
      "learning_rate": 5.449673790581611e-07,
      "loss": 1.8514,
      "step": 413
    },
    {
      "epoch": 4.25,
      "learning_rate": 5.302061001503395e-07,
      "loss": 1.7583,
      "step": 414
    },
    {
      "epoch": 4.26,
      "learning_rate": 5.156362923365587e-07,
      "loss": 1.649,
      "step": 415
    },
    {
      "epoch": 4.27,
      "learning_rate": 5.012585797388936e-07,
      "loss": 2.2002,
      "step": 416
    },
    {
      "epoch": 4.28,
      "learning_rate": 4.87073578250698e-07,
      "loss": 2.2455,
      "step": 417
    },
    {
      "epoch": 4.29,
      "learning_rate": 4.730818955102234e-07,
      "loss": 1.6698,
      "step": 418
    },
    {
      "epoch": 4.3,
      "learning_rate": 4.5928413087459325e-07,
      "loss": 1.9811,
      "step": 419
    },
    {
      "epoch": 4.31,
      "learning_rate": 4.456808753941205e-07,
      "loss": 1.9963,
      "step": 420
    },
    {
      "epoch": 4.32,
      "learning_rate": 4.322727117869951e-07,
      "loss": 1.5793,
      "step": 421
    },
    {
      "epoch": 4.33,
      "learning_rate": 4.1906021441432074e-07,
      "loss": 1.7237,
      "step": 422
    },
    {
      "epoch": 4.34,
      "learning_rate": 4.0604394925550906e-07,
      "loss": 1.6936,
      "step": 423
    },
    {
      "epoch": 4.35,
      "learning_rate": 3.9322447388403796e-07,
      "loss": 2.0876,
      "step": 424
    },
    {
      "epoch": 4.36,
      "learning_rate": 3.8060233744356634e-07,
      "loss": 1.8514,
      "step": 425
    },
    {
      "epoch": 4.37,
      "learning_rate": 3.6817808062440953e-07,
      "loss": 2.3487,
      "step": 426
    },
    {
      "epoch": 4.39,
      "learning_rate": 3.5595223564037884e-07,
      "loss": 1.8364,
      "step": 427
    },
    {
      "epoch": 4.4,
      "learning_rate": 3.439253262059822e-07,
      "loss": 1.9163,
      "step": 428
    },
    {
      "epoch": 4.41,
      "learning_rate": 3.320978675139919e-07,
      "loss": 1.9985,
      "step": 429
    },
    {
      "epoch": 4.42,
      "learning_rate": 3.204703662133724e-07,
      "loss": 1.9434,
      "step": 430
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.0904332038757977e-07,
      "loss": 1.9705,
      "step": 431
    },
    {
      "epoch": 4.44,
      "learning_rate": 2.9781721953322627e-07,
      "loss": 2.2868,
      "step": 432
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.867925445391079e-07,
      "loss": 1.8121,
      "step": 433
    },
    {
      "epoch": 4.46,
      "learning_rate": 2.7596976766560977e-07,
      "loss": 1.6713,
      "step": 434
    },
    {
      "epoch": 4.47,
      "learning_rate": 2.653493525244721e-07,
      "loss": 1.8284,
      "step": 435
    },
    {
      "epoch": 4.48,
      "learning_rate": 2.5493175405893076e-07,
      "loss": 2.011,
      "step": 436
    },
    {
      "epoch": 4.49,
      "learning_rate": 2.447174185242324e-07,
      "loss": 2.1148,
      "step": 437
    },
    {
      "epoch": 4.5,
      "learning_rate": 2.3470678346851517e-07,
      "loss": 2.0046,
      "step": 438
    },
    {
      "epoch": 4.51,
      "learning_rate": 2.2490027771406686e-07,
      "loss": 1.9366,
      "step": 439
    },
    {
      "epoch": 4.52,
      "learning_rate": 2.152983213389559e-07,
      "loss": 2.1379,
      "step": 440
    },
    {
      "epoch": 4.53,
      "learning_rate": 2.0590132565903475e-07,
      "loss": 1.9382,
      "step": 441
    },
    {
      "epoch": 4.54,
      "learning_rate": 1.9670969321032407e-07,
      "loss": 1.8226,
      "step": 442
    },
    {
      "epoch": 4.55,
      "learning_rate": 1.8772381773176417e-07,
      "loss": 2.2887,
      "step": 443
    },
    {
      "epoch": 4.56,
      "learning_rate": 1.7894408414835362e-07,
      "loss": 1.7401,
      "step": 444
    },
    {
      "epoch": 4.57,
      "learning_rate": 1.7037086855465902e-07,
      "loss": 1.713,
      "step": 445
    },
    {
      "epoch": 4.58,
      "learning_rate": 1.6200453819870122e-07,
      "loss": 1.9873,
      "step": 446
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.5384545146622854e-07,
      "loss": 2.2339,
      "step": 447
    },
    {
      "epoch": 4.6,
      "learning_rate": 1.4589395786535954e-07,
      "loss": 2.4256,
      "step": 448
    },
    {
      "epoch": 4.61,
      "learning_rate": 1.3815039801161723e-07,
      "loss": 2.1304,
      "step": 449
    },
    {
      "epoch": 4.62,
      "learning_rate": 1.3061510361333186e-07,
      "loss": 1.829,
      "step": 450
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.232883974574367e-07,
      "loss": 1.9966,
      "step": 451
    },
    {
      "epoch": 4.64,
      "learning_rate": 1.1617059339563807e-07,
      "loss": 1.8668,
      "step": 452
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.0926199633097156e-07,
      "loss": 1.7622,
      "step": 453
    },
    {
      "epoch": 4.66,
      "learning_rate": 1.0256290220474308e-07,
      "loss": 1.9253,
      "step": 454
    },
    {
      "epoch": 4.67,
      "learning_rate": 9.607359798384785e-08,
      "loss": 1.8139,
      "step": 455
    },
    {
      "epoch": 4.68,
      "learning_rate": 8.979436164848088e-08,
      "loss": 1.9552,
      "step": 456
    },
    {
      "epoch": 4.69,
      "learning_rate": 8.372546218022747e-08,
      "loss": 1.5488,
      "step": 457
    },
    {
      "epoch": 4.7,
      "learning_rate": 7.786715955054202e-08,
      "loss": 2.1687,
      "step": 458
    },
    {
      "epoch": 4.71,
      "learning_rate": 7.221970470961125e-08,
      "loss": 1.7,
      "step": 459
    },
    {
      "epoch": 4.72,
      "learning_rate": 6.678333957560513e-08,
      "loss": 2.0648,
      "step": 460
    },
    {
      "epoch": 4.73,
      "learning_rate": 6.15582970243117e-08,
      "loss": 1.8835,
      "step": 461
    },
    {
      "epoch": 4.74,
      "learning_rate": 5.654480087916303e-08,
      "loss": 1.8804,
      "step": 462
    },
    {
      "epoch": 4.75,
      "learning_rate": 5.174306590164879e-08,
      "loss": 2.2831,
      "step": 463
    },
    {
      "epoch": 4.77,
      "learning_rate": 4.715329778211375e-08,
      "loss": 1.9674,
      "step": 464
    },
    {
      "epoch": 4.78,
      "learning_rate": 4.2775693130948094e-08,
      "loss": 2.0009,
      "step": 465
    },
    {
      "epoch": 4.79,
      "learning_rate": 3.861043947016474e-08,
      "loss": 1.6058,
      "step": 466
    },
    {
      "epoch": 4.8,
      "learning_rate": 3.465771522536854e-08,
      "loss": 1.7313,
      "step": 467
    },
    {
      "epoch": 4.81,
      "learning_rate": 3.09176897181096e-08,
      "loss": 1.8912,
      "step": 468
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.7390523158633552e-08,
      "loss": 2.0388,
      "step": 469
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.4076366639015914e-08,
      "loss": 1.8365,
      "step": 470
    },
    {
      "epoch": 4.84,
      "learning_rate": 2.097536212669171e-08,
      "loss": 1.9795,
      "step": 471
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.8087642458373135e-08,
      "loss": 1.9468,
      "step": 472
    },
    {
      "epoch": 4.86,
      "learning_rate": 1.541333133436018e-08,
      "loss": 1.7218,
      "step": 473
    },
    {
      "epoch": 4.87,
      "learning_rate": 1.2952543313240474e-08,
      "loss": 1.3056,
      "step": 474
    },
    {
      "epoch": 4.88,
      "learning_rate": 1.0705383806982606e-08,
      "loss": 1.6465,
      "step": 475
    },
    {
      "epoch": 4.89,
      "learning_rate": 8.671949076420883e-09,
      "loss": 1.6287,
      "step": 476
    },
    {
      "epoch": 4.9,
      "learning_rate": 6.852326227130835e-09,
      "loss": 1.9142,
      "step": 477
    },
    {
      "epoch": 4.91,
      "learning_rate": 5.246593205699424e-09,
      "loss": 2.4245,
      "step": 478
    },
    {
      "epoch": 4.92,
      "learning_rate": 3.854818796385495e-09,
      "loss": 1.9302,
      "step": 479
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.6770626181715776e-09,
      "loss": 1.5904,
      "step": 480
    },
    {
      "epoch": 4.94,
      "learning_rate": 1.7133751222137007e-09,
      "loss": 1.9541,
      "step": 481
    },
    {
      "epoch": 4.95,
      "learning_rate": 9.637975896759077e-10,
      "loss": 1.7176,
      "step": 482
    },
    {
      "epoch": 4.96,
      "learning_rate": 4.283621299649987e-10,
      "loss": 1.9053,
      "step": 483
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.0709167935385456e-10,
      "loss": 2.0116,
      "step": 484
    },
    {
      "epoch": 4.98,
      "learning_rate": 0.0,
      "loss": 1.9722,
      "step": 485
    },
    {
      "epoch": 4.98,
      "step": 485,
      "total_flos": 4.3609614855438336e+17,
      "train_loss": 2.0201566543775735,
      "train_runtime": 8786.7992,
      "train_samples_per_second": 0.443,
      "train_steps_per_second": 0.055
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 485,
  "num_train_epochs": 5,
  "save_steps": 1000,
  "total_flos": 4.3609614855438336e+17,
  "trial_name": null,
  "trial_params": null
}