|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 10.0, |
|
"global_step": 1730, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.890173410404624e-07, |
|
"loss": 3.8145, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.780346820809248e-07, |
|
"loss": 3.7852, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.670520231213873e-07, |
|
"loss": 3.8008, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.1560693641618497e-06, |
|
"loss": 3.7793, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.4450867052023122e-06, |
|
"loss": 3.8008, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.7341040462427746e-06, |
|
"loss": 3.7832, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.023121387283237e-06, |
|
"loss": 3.832, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.3121387283236993e-06, |
|
"loss": 3.7871, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.601156069364162e-06, |
|
"loss": 3.8164, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.8901734104046244e-06, |
|
"loss": 3.8027, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.1791907514450866e-06, |
|
"loss": 3.7695, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.468208092485549e-06, |
|
"loss": 3.8301, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 3.757225433526012e-06, |
|
"loss": 3.8301, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.046242774566474e-06, |
|
"loss": 3.8066, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.3352601156069365e-06, |
|
"loss": 3.7871, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.624277456647399e-06, |
|
"loss": 3.7988, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.913294797687862e-06, |
|
"loss": 3.8184, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 5.202312138728324e-06, |
|
"loss": 3.793, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 5.491329479768787e-06, |
|
"loss": 3.8008, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 5.780346820809249e-06, |
|
"loss": 3.7988, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 6.069364161849711e-06, |
|
"loss": 3.7754, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 6.358381502890173e-06, |
|
"loss": 3.7793, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 6.647398843930635e-06, |
|
"loss": 3.7871, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 6.936416184971098e-06, |
|
"loss": 3.7812, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 7.225433526011561e-06, |
|
"loss": 3.832, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 7.514450867052024e-06, |
|
"loss": 3.7852, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 7.803468208092486e-06, |
|
"loss": 3.8125, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.092485549132949e-06, |
|
"loss": 3.8398, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.38150289017341e-06, |
|
"loss": 3.791, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.670520231213873e-06, |
|
"loss": 3.7734, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.959537572254335e-06, |
|
"loss": 3.8105, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.248554913294797e-06, |
|
"loss": 3.8574, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.53757225433526e-06, |
|
"loss": 3.793, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.826589595375723e-06, |
|
"loss": 3.7852, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.0115606936416185e-05, |
|
"loss": 3.7988, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.0404624277456647e-05, |
|
"loss": 3.8301, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.0693641618497111e-05, |
|
"loss": 3.8105, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.0982658959537573e-05, |
|
"loss": 3.7656, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.1271676300578036e-05, |
|
"loss": 3.8262, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.1560693641618498e-05, |
|
"loss": 3.8164, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.184971098265896e-05, |
|
"loss": 3.7441, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.2138728323699422e-05, |
|
"loss": 3.6875, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.2427745664739884e-05, |
|
"loss": 3.6914, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.2716763005780346e-05, |
|
"loss": 3.6641, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.3005780346820809e-05, |
|
"loss": 3.8535, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.329479768786127e-05, |
|
"loss": 3.6719, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.3583815028901733e-05, |
|
"loss": 3.8555, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.3872832369942197e-05, |
|
"loss": 3.8047, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.416184971098266e-05, |
|
"loss": 3.7461, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.4450867052023123e-05, |
|
"loss": 3.8848, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.4739884393063585e-05, |
|
"loss": 3.707, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.5028901734104049e-05, |
|
"loss": 3.8203, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.531791907514451e-05, |
|
"loss": 3.6758, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.5606936416184973e-05, |
|
"loss": 3.8066, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5895953757225435e-05, |
|
"loss": 3.8438, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.6184971098265897e-05, |
|
"loss": 3.8008, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.647398843930636e-05, |
|
"loss": 3.7988, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.676300578034682e-05, |
|
"loss": 3.6484, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7052023121387284e-05, |
|
"loss": 3.7012, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7341040462427746e-05, |
|
"loss": 3.6719, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7630057803468208e-05, |
|
"loss": 3.7695, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.791907514450867e-05, |
|
"loss": 3.7656, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8208092485549132e-05, |
|
"loss": 3.7539, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8497109826589594e-05, |
|
"loss": 3.7148, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8786127167630057e-05, |
|
"loss": 3.7559, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.907514450867052e-05, |
|
"loss": 3.748, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.936416184971098e-05, |
|
"loss": 3.832, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9653179190751446e-05, |
|
"loss": 3.8457, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.994219653179191e-05, |
|
"loss": 3.7754, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.023121387283237e-05, |
|
"loss": 3.6875, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.0520231213872833e-05, |
|
"loss": 3.7578, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.0809248554913295e-05, |
|
"loss": 3.6797, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.1098265895953757e-05, |
|
"loss": 3.6953, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.1387283236994223e-05, |
|
"loss": 3.6074, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.1676300578034685e-05, |
|
"loss": 3.7793, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.1965317919075147e-05, |
|
"loss": 3.8164, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.225433526011561e-05, |
|
"loss": 3.7832, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.254335260115607e-05, |
|
"loss": 3.7988, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2832369942196533e-05, |
|
"loss": 3.4785, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.3121387283236996e-05, |
|
"loss": 3.6484, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.3410404624277458e-05, |
|
"loss": 3.8516, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.369942196531792e-05, |
|
"loss": 3.8262, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.3988439306358382e-05, |
|
"loss": 3.7285, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.4277456647398844e-05, |
|
"loss": 3.7773, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.4566473988439306e-05, |
|
"loss": 3.5781, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.485549132947977e-05, |
|
"loss": 3.7598, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5144508670520234e-05, |
|
"loss": 3.9316, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.5433526011560693e-05, |
|
"loss": 3.5234, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.5722543352601158e-05, |
|
"loss": 3.6504, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.6011560693641617e-05, |
|
"loss": 3.7988, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.6300578034682083e-05, |
|
"loss": 3.6309, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.658959537572254e-05, |
|
"loss": 3.4141, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.6878612716763007e-05, |
|
"loss": 3.8125, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.7167630057803466e-05, |
|
"loss": 3.4297, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.745664739884393e-05, |
|
"loss": 4.0391, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.7745664739884393e-05, |
|
"loss": 3.7617, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.8034682080924855e-05, |
|
"loss": 3.6523, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 2.832369942196532e-05, |
|
"loss": 3.5996, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 2.861271676300578e-05, |
|
"loss": 3.7695, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.8901734104046245e-05, |
|
"loss": 3.6758, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.9190751445086707e-05, |
|
"loss": 3.6582, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 2.947976878612717e-05, |
|
"loss": 3.7969, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 2.9768786127167632e-05, |
|
"loss": 3.7559, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.0057803468208097e-05, |
|
"loss": 3.5879, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.0346820809248556e-05, |
|
"loss": 3.7832, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.063583815028902e-05, |
|
"loss": 3.6191, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.092485549132948e-05, |
|
"loss": 3.6992, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.1213872832369946e-05, |
|
"loss": 3.625, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.1502890173410405e-05, |
|
"loss": 3.7461, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.179190751445087e-05, |
|
"loss": 3.7793, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.208092485549133e-05, |
|
"loss": 3.8594, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.2369942196531794e-05, |
|
"loss": 3.7559, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.265895953757225e-05, |
|
"loss": 3.6875, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.294797687861272e-05, |
|
"loss": 3.791, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.323699421965318e-05, |
|
"loss": 3.6172, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.352601156069364e-05, |
|
"loss": 3.5801, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.381502890173411e-05, |
|
"loss": 3.6484, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.410404624277457e-05, |
|
"loss": 3.5664, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.439306358381503e-05, |
|
"loss": 3.5938, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.468208092485549e-05, |
|
"loss": 3.6289, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.497109826589596e-05, |
|
"loss": 3.8945, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.5260115606936416e-05, |
|
"loss": 3.7188, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.554913294797688e-05, |
|
"loss": 3.2852, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.583815028901734e-05, |
|
"loss": 3.7109, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.6127167630057806e-05, |
|
"loss": 3.791, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6416184971098265e-05, |
|
"loss": 3.4883, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.670520231213873e-05, |
|
"loss": 3.8301, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.699421965317919e-05, |
|
"loss": 3.6797, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7283236994219654e-05, |
|
"loss": 4.0547, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.757225433526011e-05, |
|
"loss": 3.5664, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.786127167630058e-05, |
|
"loss": 3.3965, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.815028901734104e-05, |
|
"loss": 3.5488, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.84393063583815e-05, |
|
"loss": 3.6367, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.872832369942196e-05, |
|
"loss": 3.5117, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.901734104046243e-05, |
|
"loss": 3.4551, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.930635838150289e-05, |
|
"loss": 3.7344, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.959537572254335e-05, |
|
"loss": 3.5859, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.988439306358382e-05, |
|
"loss": 3.4102, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.0173410404624276e-05, |
|
"loss": 3.9043, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.046242774566474e-05, |
|
"loss": 3.6445, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.07514450867052e-05, |
|
"loss": 3.3105, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.1040462427745666e-05, |
|
"loss": 3.8223, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.132947976878613e-05, |
|
"loss": 3.7578, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.161849710982659e-05, |
|
"loss": 3.957, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.1907514450867055e-05, |
|
"loss": 3.8027, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.2196531791907514e-05, |
|
"loss": 3.4688, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.248554913294798e-05, |
|
"loss": 3.3691, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.2774566473988445e-05, |
|
"loss": 3.7266, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.3063583815028904e-05, |
|
"loss": 3.5117, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.335260115606937e-05, |
|
"loss": 3.7031, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.364161849710983e-05, |
|
"loss": 3.6777, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.3930635838150294e-05, |
|
"loss": 3.5352, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.421965317919075e-05, |
|
"loss": 3.6387, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.450867052023122e-05, |
|
"loss": 3.5117, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.4797687861271684e-05, |
|
"loss": 3.5586, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.508670520231214e-05, |
|
"loss": 3.6445, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.537572254335261e-05, |
|
"loss": 3.7578, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.566473988439307e-05, |
|
"loss": 3.4922, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.595375722543353e-05, |
|
"loss": 3.1094, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.624277456647399e-05, |
|
"loss": 3.3203, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.653179190751446e-05, |
|
"loss": 3.5625, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.6820809248554915e-05, |
|
"loss": 3.8477, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.710982658959538e-05, |
|
"loss": 3.4609, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.739884393063584e-05, |
|
"loss": 3.3984, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.7687861271676305e-05, |
|
"loss": 3.291, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.7976878612716764e-05, |
|
"loss": 3.4043, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.826589595375723e-05, |
|
"loss": 3.2461, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.855491329479769e-05, |
|
"loss": 3.3008, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.8843930635838154e-05, |
|
"loss": 3.2188, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.913294797687861e-05, |
|
"loss": 3.6191, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.942196531791908e-05, |
|
"loss": 3.3047, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.971098265895954e-05, |
|
"loss": 3.6602, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5e-05, |
|
"loss": 3.6875, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 5.028901734104047e-05, |
|
"loss": 3.7637, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 5.0578034682080933e-05, |
|
"loss": 3.3535, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 5.0867052023121385e-05, |
|
"loss": 3.3828, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 5.115606936416185e-05, |
|
"loss": 3.8594, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 5.1445086705202317e-05, |
|
"loss": 3.2793, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 5.173410404624278e-05, |
|
"loss": 2.8145, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 5.2023121387283234e-05, |
|
"loss": 3.5508, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 5.23121387283237e-05, |
|
"loss": 3.8457, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 5.2601156069364165e-05, |
|
"loss": 3.291, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 5.289017341040463e-05, |
|
"loss": 3.418, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 5.317919075144508e-05, |
|
"loss": 3.3281, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 5.346820809248555e-05, |
|
"loss": 3.1738, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 5.3757225433526014e-05, |
|
"loss": 3.7246, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 5.404624277456648e-05, |
|
"loss": 3.6602, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 5.433526011560693e-05, |
|
"loss": 3.5723, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 5.46242774566474e-05, |
|
"loss": 3.666, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 5.491329479768786e-05, |
|
"loss": 3.6445, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 5.520231213872833e-05, |
|
"loss": 3.2383, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 5.5491329479768787e-05, |
|
"loss": 3.6875, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 5.5780346820809245e-05, |
|
"loss": 3.7266, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 5.606936416184971e-05, |
|
"loss": 3.4688, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 5.6358381502890176e-05, |
|
"loss": 3.5273, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 5.664739884393064e-05, |
|
"loss": 3.7109, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 5.69364161849711e-05, |
|
"loss": 3.2871, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 5.722543352601156e-05, |
|
"loss": 3.4297, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 5.7514450867052025e-05, |
|
"loss": 3.3496, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 5.780346820809249e-05, |
|
"loss": 3.2344, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 5.809248554913295e-05, |
|
"loss": 2.9121, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 5.8381502890173415e-05, |
|
"loss": 3.9609, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 5.8670520231213874e-05, |
|
"loss": 3.6348, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 5.895953757225434e-05, |
|
"loss": 3.5859, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 5.92485549132948e-05, |
|
"loss": 2.9785, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 5.9537572254335263e-05, |
|
"loss": 3.4844, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 5.982658959537573e-05, |
|
"loss": 3.166, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.0115606936416195e-05, |
|
"loss": 3.3711, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.0404624277456646e-05, |
|
"loss": 2.8223, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.069364161849711e-05, |
|
"loss": 4.2969, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.098265895953758e-05, |
|
"loss": 3.0547, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.127167630057804e-05, |
|
"loss": 3.5566, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.156069364161851e-05, |
|
"loss": 3.8164, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.184971098265896e-05, |
|
"loss": 3.5566, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.213872832369943e-05, |
|
"loss": 2.6309, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.242774566473989e-05, |
|
"loss": 3.291, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.271676300578036e-05, |
|
"loss": 3.4062, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.300578034682081e-05, |
|
"loss": 3.7715, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 6.329479768786127e-05, |
|
"loss": 2.6523, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 6.358381502890174e-05, |
|
"loss": 3.3418, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 6.38728323699422e-05, |
|
"loss": 3.0469, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 6.416184971098266e-05, |
|
"loss": 3.5352, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 6.445086705202312e-05, |
|
"loss": 2.959, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 6.473988439306359e-05, |
|
"loss": 2.5117, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 6.502890173410405e-05, |
|
"loss": 2.832, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 6.53179190751445e-05, |
|
"loss": 2.9121, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 6.560693641618497e-05, |
|
"loss": 3.4961, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.589595375722544e-05, |
|
"loss": 3.3203, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.61849710982659e-05, |
|
"loss": 3.1758, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.647398843930635e-05, |
|
"loss": 3.0273, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.676300578034682e-05, |
|
"loss": 2.543, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.705202312138729e-05, |
|
"loss": 2.9844, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.734104046242775e-05, |
|
"loss": 3.0137, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.763005780346822e-05, |
|
"loss": 2.709, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.791907514450867e-05, |
|
"loss": 3.4805, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.820809248554913e-05, |
|
"loss": 3.8672, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.84971098265896e-05, |
|
"loss": 3.3477, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.878612716763007e-05, |
|
"loss": 3.0762, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.907514450867052e-05, |
|
"loss": 2.5781, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.936416184971098e-05, |
|
"loss": 3.6328, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.965317919075145e-05, |
|
"loss": 3.2188, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.994219653179191e-05, |
|
"loss": 3.6328, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 7.023121387283237e-05, |
|
"loss": 3.4766, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 7.052023121387283e-05, |
|
"loss": 3.373, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 7.08092485549133e-05, |
|
"loss": 3.0312, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 7.109826589595376e-05, |
|
"loss": 4.2695, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 7.138728323699421e-05, |
|
"loss": 3.0273, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 7.167630057803468e-05, |
|
"loss": 3.709, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 7.196531791907515e-05, |
|
"loss": 3.1328, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 7.225433526011561e-05, |
|
"loss": 3.3145, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 7.254335260115608e-05, |
|
"loss": 3.4922, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 7.283236994219653e-05, |
|
"loss": 3.0547, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 7.3121387283237e-05, |
|
"loss": 2.0879, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 7.341040462427746e-05, |
|
"loss": 3.2148, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 7.369942196531793e-05, |
|
"loss": 3.3164, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 7.398843930635838e-05, |
|
"loss": 3.291, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 7.427745664739884e-05, |
|
"loss": 3.2148, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 7.456647398843931e-05, |
|
"loss": 2.6348, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.485549132947977e-05, |
|
"loss": 3.3555, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.514450867052023e-05, |
|
"loss": 3.2227, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.543352601156069e-05, |
|
"loss": 3.1035, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.572254335260116e-05, |
|
"loss": 3.6172, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.601156069364162e-05, |
|
"loss": 3.207, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.630057803468207e-05, |
|
"loss": 3.0391, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.658959537572254e-05, |
|
"loss": 3.1328, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 7.6878612716763e-05, |
|
"loss": 2.3301, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 7.716763005780347e-05, |
|
"loss": 4.0, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 7.745664739884392e-05, |
|
"loss": 2.5586, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 7.774566473988439e-05, |
|
"loss": 2.7559, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 7.803468208092485e-05, |
|
"loss": 3.0293, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 7.832369942196532e-05, |
|
"loss": 3.459, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 7.861271676300579e-05, |
|
"loss": 2.1367, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 7.890173410404624e-05, |
|
"loss": 2.9531, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 7.91907514450867e-05, |
|
"loss": 3.207, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 7.947976878612717e-05, |
|
"loss": 2.8477, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 7.976878612716763e-05, |
|
"loss": 3.0352, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 8.005780346820809e-05, |
|
"loss": 3.2246, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 8.034682080924855e-05, |
|
"loss": 2.8008, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 8.063583815028902e-05, |
|
"loss": 3.0918, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 8.092485549132948e-05, |
|
"loss": 2.8828, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 8.121387283236995e-05, |
|
"loss": 3.0664, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 8.15028901734104e-05, |
|
"loss": 3.0762, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.179190751445087e-05, |
|
"loss": 2.3008, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.208092485549133e-05, |
|
"loss": 3.5859, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.23699421965318e-05, |
|
"loss": 2.4805, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.265895953757226e-05, |
|
"loss": 2.6543, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.294797687861271e-05, |
|
"loss": 2.9824, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.323699421965318e-05, |
|
"loss": 2.1426, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.352601156069365e-05, |
|
"loss": 3.3086, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.381502890173411e-05, |
|
"loss": 2.4414, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.410404624277458e-05, |
|
"loss": 2.3789, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.439306358381503e-05, |
|
"loss": 2.6992, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.46820809248555e-05, |
|
"loss": 1.8965, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.497109826589596e-05, |
|
"loss": 3.5527, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.526011560693643e-05, |
|
"loss": 2.875, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.554913294797689e-05, |
|
"loss": 2.1914, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.583815028901736e-05, |
|
"loss": 2.4492, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.612716763005781e-05, |
|
"loss": 2.7461, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.641618497109827e-05, |
|
"loss": 2.3281, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.670520231213874e-05, |
|
"loss": 2.8887, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.69942196531792e-05, |
|
"loss": 2.625, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.728323699421966e-05, |
|
"loss": 2.7031, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.757225433526012e-05, |
|
"loss": 1.4668, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.786127167630059e-05, |
|
"loss": 2.6074, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.815028901734105e-05, |
|
"loss": 2.1035, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.84393063583815e-05, |
|
"loss": 2.3281, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.872832369942197e-05, |
|
"loss": 3.4531, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.901734104046244e-05, |
|
"loss": 2.1699, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.93063583815029e-05, |
|
"loss": 2.5996, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.959537572254337e-05, |
|
"loss": 2.3086, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.988439306358382e-05, |
|
"loss": 3.748, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 9.017341040462428e-05, |
|
"loss": 2.9141, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 9.046242774566475e-05, |
|
"loss": 2.7031, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 9.075144508670522e-05, |
|
"loss": 2.082, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 9.104046242774567e-05, |
|
"loss": 3.7324, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.132947976878613e-05, |
|
"loss": 2.4766, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.16184971098266e-05, |
|
"loss": 2.7109, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 9.190751445086706e-05, |
|
"loss": 2.834, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 9.219653179190752e-05, |
|
"loss": 3.127, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 9.248554913294798e-05, |
|
"loss": 2.293, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 9.277456647398845e-05, |
|
"loss": 2.6465, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 9.306358381502891e-05, |
|
"loss": 2.166, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 9.335260115606937e-05, |
|
"loss": 1.9834, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 9.364161849710983e-05, |
|
"loss": 3.2656, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 9.39306358381503e-05, |
|
"loss": 2.5859, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 9.421965317919076e-05, |
|
"loss": 2.5938, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 9.450867052023123e-05, |
|
"loss": 2.2461, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 9.479768786127168e-05, |
|
"loss": 2.6406, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 9.508670520231214e-05, |
|
"loss": 2.9961, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.537572254335261e-05, |
|
"loss": 2.541, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.566473988439308e-05, |
|
"loss": 2.0059, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 9.595375722543353e-05, |
|
"loss": 3.9473, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 9.6242774566474e-05, |
|
"loss": 2.6875, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 9.653179190751446e-05, |
|
"loss": 2.5527, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 9.682080924855492e-05, |
|
"loss": 2.1738, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 9.710982658959538e-05, |
|
"loss": 3.1914, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 9.739884393063584e-05, |
|
"loss": 4.4922, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 9.768786127167631e-05, |
|
"loss": 1.8311, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 9.797687861271677e-05, |
|
"loss": 3.0078, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 9.826589595375723e-05, |
|
"loss": 1.9141, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 9.855491329479769e-05, |
|
"loss": 1.8818, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 9.884393063583816e-05, |
|
"loss": 3.1504, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 9.913294797687862e-05, |
|
"loss": 1.9385, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 9.942196531791907e-05, |
|
"loss": 2.8203, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 9.971098265895954e-05, |
|
"loss": 2.3594, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.0001, |
|
"loss": 2.9375, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.99277456647399e-05, |
|
"loss": 1.8301, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.985549132947977e-05, |
|
"loss": 2.127, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.978323699421965e-05, |
|
"loss": 2.0527, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.971098265895954e-05, |
|
"loss": 1.7529, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 9.963872832369943e-05, |
|
"loss": 1.9395, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 9.95664739884393e-05, |
|
"loss": 2.873, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 9.94942196531792e-05, |
|
"loss": 2.0312, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 9.942196531791907e-05, |
|
"loss": 1.7881, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 9.934971098265896e-05, |
|
"loss": 1.7637, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.927745664739884e-05, |
|
"loss": 3.1562, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.920520231213873e-05, |
|
"loss": 2.0059, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.913294797687862e-05, |
|
"loss": 2.7695, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.906069364161851e-05, |
|
"loss": 1.2988, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.898843930635839e-05, |
|
"loss": 1.9062, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.891618497109827e-05, |
|
"loss": 2.748, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.884393063583816e-05, |
|
"loss": 1.0225, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.877167630057805e-05, |
|
"loss": 2.0723, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.869942196531792e-05, |
|
"loss": 2.6523, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.86271676300578e-05, |
|
"loss": 2.5371, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.855491329479769e-05, |
|
"loss": 1.8691, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.848265895953758e-05, |
|
"loss": 2.0273, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.841040462427746e-05, |
|
"loss": 1.7139, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.833815028901735e-05, |
|
"loss": 2.4238, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.826589595375723e-05, |
|
"loss": 1.499, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.819364161849712e-05, |
|
"loss": 1.6475, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.812138728323699e-05, |
|
"loss": 1.7852, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.804913294797688e-05, |
|
"loss": 2.0703, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.797687861271677e-05, |
|
"loss": 1.4629, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.790462427745665e-05, |
|
"loss": 3.1895, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.783236994219653e-05, |
|
"loss": 3.0273, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.776011560693642e-05, |
|
"loss": 2.4062, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.768786127167631e-05, |
|
"loss": 0.98, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.76156069364162e-05, |
|
"loss": 1.3408, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.754335260115607e-05, |
|
"loss": 2.043, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.747109826589595e-05, |
|
"loss": 1.6006, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 9.739884393063584e-05, |
|
"loss": 2.209, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 9.732658959537573e-05, |
|
"loss": 3.5352, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 9.725433526011561e-05, |
|
"loss": 1.4072, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.71820809248555e-05, |
|
"loss": 2.2773, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.710982658959538e-05, |
|
"loss": 1.9326, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.703757225433527e-05, |
|
"loss": 1.8008, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.696531791907514e-05, |
|
"loss": 2.6582, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 9.689306358381503e-05, |
|
"loss": 2.9941, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 9.682080924855492e-05, |
|
"loss": 2.1348, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 9.67485549132948e-05, |
|
"loss": 1.9922, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 9.667630057803468e-05, |
|
"loss": 1.8955, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 9.660404624277457e-05, |
|
"loss": 3.209, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 9.653179190751446e-05, |
|
"loss": 2.3867, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 9.645953757225435e-05, |
|
"loss": 2.252, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 9.638728323699421e-05, |
|
"loss": 1.5752, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 9.63150289017341e-05, |
|
"loss": 2.6055, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 9.6242774566474e-05, |
|
"loss": 2.1348, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 9.617052023121388e-05, |
|
"loss": 2.4121, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 9.609826589595376e-05, |
|
"loss": 3.168, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 9.602601156069365e-05, |
|
"loss": 2.459, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 9.595375722543353e-05, |
|
"loss": 2.0488, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 9.588150289017342e-05, |
|
"loss": 1.5723, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 9.58092485549133e-05, |
|
"loss": 0.7979, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 9.573699421965319e-05, |
|
"loss": 2.0664, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 9.566473988439308e-05, |
|
"loss": 2.8223, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 9.559248554913295e-05, |
|
"loss": 2.0898, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 9.552023121387283e-05, |
|
"loss": 2.709, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 9.544797687861272e-05, |
|
"loss": 2.6562, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 9.537572254335261e-05, |
|
"loss": 1.4248, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 9.53034682080925e-05, |
|
"loss": 2.5039, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 9.523121387283236e-05, |
|
"loss": 1.709, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 9.515895953757225e-05, |
|
"loss": 1.4287, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 9.508670520231214e-05, |
|
"loss": 1.5596, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.501445086705204e-05, |
|
"loss": 3.4746, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.494219653179191e-05, |
|
"loss": 2.2285, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.486994219653179e-05, |
|
"loss": 1.7188, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.479768786127168e-05, |
|
"loss": 3.0742, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.472543352601157e-05, |
|
"loss": 1.6416, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.465317919075145e-05, |
|
"loss": 2.2012, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.458092485549134e-05, |
|
"loss": 2.1211, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.450867052023123e-05, |
|
"loss": 1.4551, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.44364161849711e-05, |
|
"loss": 2.6055, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.436416184971098e-05, |
|
"loss": 1.0186, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.429190751445087e-05, |
|
"loss": 2.0977, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.421965317919076e-05, |
|
"loss": 1.7656, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 9.414739884393064e-05, |
|
"loss": 3.1582, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 9.407514450867052e-05, |
|
"loss": 0.9648, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 9.40028901734104e-05, |
|
"loss": 2.6426, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 9.39306358381503e-05, |
|
"loss": 2.1328, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 9.385838150289019e-05, |
|
"loss": 1.8848, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 9.378612716763006e-05, |
|
"loss": 2.1133, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 9.371387283236994e-05, |
|
"loss": 2.0391, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 9.364161849710983e-05, |
|
"loss": 1.4375, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 9.356936416184972e-05, |
|
"loss": 1.7705, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 9.34971098265896e-05, |
|
"loss": 1.7529, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 9.342485549132949e-05, |
|
"loss": 2.875, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 9.335260115606937e-05, |
|
"loss": 2.4785, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 9.328034682080926e-05, |
|
"loss": 2.4375, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 9.320809248554913e-05, |
|
"loss": 1.959, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 9.313583815028902e-05, |
|
"loss": 2.4629, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 9.306358381502891e-05, |
|
"loss": 2.1309, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 9.299132947976879e-05, |
|
"loss": 1.25, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 9.291907514450867e-05, |
|
"loss": 2.8906, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 9.284682080924856e-05, |
|
"loss": 1.8184, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.277456647398845e-05, |
|
"loss": 3.1348, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.270231213872832e-05, |
|
"loss": 1.6465, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.263005780346821e-05, |
|
"loss": 3.2148, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.255780346820809e-05, |
|
"loss": 2.3438, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.248554913294798e-05, |
|
"loss": 2.0488, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.241329479768787e-05, |
|
"loss": 2.375, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.234104046242775e-05, |
|
"loss": 2.9824, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.226878612716764e-05, |
|
"loss": 2.0176, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.219653179190752e-05, |
|
"loss": 0.6484, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.212427745664741e-05, |
|
"loss": 1.8838, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.205202312138728e-05, |
|
"loss": 1.4443, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.197976878612717e-05, |
|
"loss": 2.127, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.190751445086706e-05, |
|
"loss": 1.957, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.183526011560694e-05, |
|
"loss": 2.3984, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.176300578034682e-05, |
|
"loss": 1.1777, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.169075144508671e-05, |
|
"loss": 1.5605, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.16184971098266e-05, |
|
"loss": 1.6924, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.154624277456648e-05, |
|
"loss": 1.915, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.147398843930635e-05, |
|
"loss": 2.2949, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.140173410404624e-05, |
|
"loss": 2.8555, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.132947976878613e-05, |
|
"loss": 2.4082, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.125722543352601e-05, |
|
"loss": 1.4141, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.11849710982659e-05, |
|
"loss": 3.4844, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.111271676300579e-05, |
|
"loss": 0.6567, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.104046242774567e-05, |
|
"loss": 1.0703, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.096820809248556e-05, |
|
"loss": 1.584, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.089595375722544e-05, |
|
"loss": 1.6572, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.082369942196533e-05, |
|
"loss": 2.7852, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.075144508670522e-05, |
|
"loss": 1.6104, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 9.067919075144509e-05, |
|
"loss": 1.7295, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 9.060693641618497e-05, |
|
"loss": 1.2539, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 9.053468208092486e-05, |
|
"loss": 2.0, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 9.046242774566475e-05, |
|
"loss": 3.1328, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 9.039017341040463e-05, |
|
"loss": 1.7354, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 9.03179190751445e-05, |
|
"loss": 2.2734, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 9.02456647398844e-05, |
|
"loss": 1.6748, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 9.017341040462428e-05, |
|
"loss": 2.3848, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 9.010115606936416e-05, |
|
"loss": 2.2539, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 9.002890173410405e-05, |
|
"loss": 1.8789, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 8.995664739884393e-05, |
|
"loss": 1.1904, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 8.988439306358382e-05, |
|
"loss": 3.0625, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 8.98121387283237e-05, |
|
"loss": 1.4922, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 8.973988439306359e-05, |
|
"loss": 1.3135, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 8.966763005780348e-05, |
|
"loss": 1.332, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 8.959537572254337e-05, |
|
"loss": 1.8145, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.952312138728323e-05, |
|
"loss": 2.957, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.945086705202312e-05, |
|
"loss": 0.8486, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 8.937861271676301e-05, |
|
"loss": 2.1836, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 8.93063583815029e-05, |
|
"loss": 2.207, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 8.923410404624278e-05, |
|
"loss": 1.2178, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.916184971098266e-05, |
|
"loss": 2.7207, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.908959537572255e-05, |
|
"loss": 2.2285, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.901734104046244e-05, |
|
"loss": 1.5977, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.894508670520231e-05, |
|
"loss": 1.0723, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.88728323699422e-05, |
|
"loss": 2.0098, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 8.880057803468208e-05, |
|
"loss": 3.8672, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 8.872832369942197e-05, |
|
"loss": 1.8477, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 8.865606936416185e-05, |
|
"loss": 1.9775, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 8.858381502890174e-05, |
|
"loss": 2.502, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 8.851156069364163e-05, |
|
"loss": 2.2969, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 8.84393063583815e-05, |
|
"loss": 1.0859, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 8.836705202312138e-05, |
|
"loss": 2.1113, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 8.829479768786127e-05, |
|
"loss": 2.5781, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 8.822254335260116e-05, |
|
"loss": 0.7017, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.815028901734105e-05, |
|
"loss": 0.915, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.807803468208093e-05, |
|
"loss": 2.8047, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.800578034682081e-05, |
|
"loss": 1.0518, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 8.79335260115607e-05, |
|
"loss": 2.2129, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 8.786127167630059e-05, |
|
"loss": 1.6592, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 8.778901734104046e-05, |
|
"loss": 0.7651, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 8.771676300578036e-05, |
|
"loss": 1.3008, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 8.764450867052023e-05, |
|
"loss": 1.8633, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 8.757225433526012e-05, |
|
"loss": 1.1982, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.75e-05, |
|
"loss": 1.5439, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.742774566473989e-05, |
|
"loss": 2.0391, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.735549132947978e-05, |
|
"loss": 0.2642, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.728323699421966e-05, |
|
"loss": 1.2285, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.721098265895953e-05, |
|
"loss": 0.647, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.713872832369942e-05, |
|
"loss": 0.9629, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.706647398843931e-05, |
|
"loss": 0.77, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.69942196531792e-05, |
|
"loss": 0.8516, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 8.692196531791907e-05, |
|
"loss": 3.4238, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 8.684971098265896e-05, |
|
"loss": 1.3691, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 8.677745664739885e-05, |
|
"loss": 0.771, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 8.670520231213874e-05, |
|
"loss": 1.542, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 8.663294797687862e-05, |
|
"loss": 1.8145, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 8.65606936416185e-05, |
|
"loss": 2.127, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 8.648843930635838e-05, |
|
"loss": 1.8428, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 8.641618497109827e-05, |
|
"loss": 1.2607, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 8.634393063583815e-05, |
|
"loss": 0.8179, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 8.627167630057804e-05, |
|
"loss": 2.1562, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 8.619942196531793e-05, |
|
"loss": 0.6484, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 8.612716763005781e-05, |
|
"loss": 1.251, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 8.605491329479768e-05, |
|
"loss": 0.9678, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 8.598265895953758e-05, |
|
"loss": 2.209, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 8.591040462427747e-05, |
|
"loss": 0.7319, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 8.583815028901736e-05, |
|
"loss": 1.3066, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 8.576589595375722e-05, |
|
"loss": 2.2129, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 8.569364161849711e-05, |
|
"loss": 2.1289, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 8.5621387283237e-05, |
|
"loss": 2.1406, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 8.554913294797689e-05, |
|
"loss": 0.7563, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 8.547687861271677e-05, |
|
"loss": 0.7163, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 8.540462427745664e-05, |
|
"loss": 1.7539, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 8.533236994219653e-05, |
|
"loss": 1.0635, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 8.526011560693643e-05, |
|
"loss": 2.2539, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 8.51878612716763e-05, |
|
"loss": 1.1982, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 8.511560693641619e-05, |
|
"loss": 0.2034, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 8.504335260115608e-05, |
|
"loss": 1.2529, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 8.497109826589596e-05, |
|
"loss": 1.2021, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 8.489884393063584e-05, |
|
"loss": 1.3154, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 8.482658959537573e-05, |
|
"loss": 0.5991, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 8.475433526011562e-05, |
|
"loss": 1.1367, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 8.46820809248555e-05, |
|
"loss": 1.4717, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 8.460982658959537e-05, |
|
"loss": 1.9199, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 8.453757225433526e-05, |
|
"loss": 0.96, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 8.446531791907515e-05, |
|
"loss": 1.4648, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 8.439306358381503e-05, |
|
"loss": 1.2793, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 8.432080924855492e-05, |
|
"loss": 2.1484, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 8.42485549132948e-05, |
|
"loss": 0.3433, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 8.417630057803469e-05, |
|
"loss": 0.6904, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 8.410404624277458e-05, |
|
"loss": 1.1885, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 8.403179190751445e-05, |
|
"loss": 0.8369, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 8.395953757225434e-05, |
|
"loss": 0.8232, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 8.388728323699422e-05, |
|
"loss": 0.7954, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 8.381502890173411e-05, |
|
"loss": 0.7241, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 8.374277456647399e-05, |
|
"loss": 0.7163, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 8.367052023121388e-05, |
|
"loss": 0.6465, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 8.359826589595377e-05, |
|
"loss": 0.4871, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 8.352601156069365e-05, |
|
"loss": 0.8379, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 8.345375722543352e-05, |
|
"loss": 1.5342, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 8.338150289017341e-05, |
|
"loss": 1.4932, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 8.33092485549133e-05, |
|
"loss": 1.0342, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 8.323699421965318e-05, |
|
"loss": 2.0918, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 8.316473988439307e-05, |
|
"loss": 2.8633, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 8.309248554913295e-05, |
|
"loss": 1.1895, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 8.302023121387284e-05, |
|
"loss": 1.4795, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 8.294797687861271e-05, |
|
"loss": 0.9106, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 8.28757225433526e-05, |
|
"loss": 0.9492, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 8.28034682080925e-05, |
|
"loss": 1.3086, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 8.273121387283237e-05, |
|
"loss": 0.5005, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 8.265895953757226e-05, |
|
"loss": 1.166, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 8.258670520231214e-05, |
|
"loss": 0.8462, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 8.251445086705203e-05, |
|
"loss": 0.9277, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 8.244219653179192e-05, |
|
"loss": 1.0859, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 8.23699421965318e-05, |
|
"loss": 1.167, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 8.229768786127167e-05, |
|
"loss": 0.8462, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 8.222543352601156e-05, |
|
"loss": 1.1865, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 8.215317919075145e-05, |
|
"loss": 1.2061, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 8.208092485549133e-05, |
|
"loss": 0.6431, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 8.200867052023122e-05, |
|
"loss": 0.7578, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 8.19364161849711e-05, |
|
"loss": 2.7637, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 8.186416184971099e-05, |
|
"loss": 0.5347, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 8.179190751445087e-05, |
|
"loss": 0.6401, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 8.171965317919076e-05, |
|
"loss": 0.6567, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 8.164739884393065e-05, |
|
"loss": 1.0566, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 8.157514450867052e-05, |
|
"loss": 1.0703, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 8.15028901734104e-05, |
|
"loss": 0.9473, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.143063583815029e-05, |
|
"loss": 1.3223, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.135838150289018e-05, |
|
"loss": 1.4961, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.128612716763007e-05, |
|
"loss": 1.9814, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.121387283236995e-05, |
|
"loss": 1.2295, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.114161849710983e-05, |
|
"loss": 1.6816, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.106936416184972e-05, |
|
"loss": 1.6738, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 8.09971098265896e-05, |
|
"loss": 0.978, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 8.092485549132948e-05, |
|
"loss": 1.21, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 8.085260115606936e-05, |
|
"loss": 0.6855, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 8.078034682080925e-05, |
|
"loss": 1.6504, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 8.070809248554914e-05, |
|
"loss": 1.7666, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 8.063583815028902e-05, |
|
"loss": 0.5835, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 8.056358381502891e-05, |
|
"loss": 1.1113, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 8.049132947976878e-05, |
|
"loss": 0.793, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 8.041907514450867e-05, |
|
"loss": 1.6406, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 8.034682080924855e-05, |
|
"loss": 0.6562, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 8.027456647398844e-05, |
|
"loss": 0.6675, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 8.020231213872833e-05, |
|
"loss": 0.3252, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 8.013005780346822e-05, |
|
"loss": 0.8599, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 8.005780346820809e-05, |
|
"loss": 1.0771, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 7.998554913294798e-05, |
|
"loss": 0.6504, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 7.991329479768787e-05, |
|
"loss": 2.4629, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 7.984104046242776e-05, |
|
"loss": 0.5884, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 7.976878612716763e-05, |
|
"loss": 0.6709, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 7.969653179190751e-05, |
|
"loss": 1.2275, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 7.96242774566474e-05, |
|
"loss": 0.9023, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 7.955202312138729e-05, |
|
"loss": 0.8711, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 7.947976878612717e-05, |
|
"loss": 2.6641, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 7.940751445086706e-05, |
|
"loss": 1.0176, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 7.933526011560694e-05, |
|
"loss": 0.645, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 7.926300578034683e-05, |
|
"loss": 1.4189, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 7.91907514450867e-05, |
|
"loss": 0.7056, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 7.91184971098266e-05, |
|
"loss": 1.7725, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 7.904624277456648e-05, |
|
"loss": 0.6953, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 7.897398843930636e-05, |
|
"loss": 1.1865, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 7.890173410404624e-05, |
|
"loss": 1.3799, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 7.882947976878613e-05, |
|
"loss": 0.4417, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 7.875722543352602e-05, |
|
"loss": 0.5322, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 7.868497109826591e-05, |
|
"loss": 0.957, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 7.861271676300579e-05, |
|
"loss": 0.3308, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 7.854046242774566e-05, |
|
"loss": 0.8237, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 7.846820809248555e-05, |
|
"loss": 0.8208, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 7.839595375722544e-05, |
|
"loss": 0.3904, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 7.832369942196532e-05, |
|
"loss": 1.6475, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 7.825144508670521e-05, |
|
"loss": 0.5352, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 7.817919075144509e-05, |
|
"loss": 0.9473, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 7.810693641618498e-05, |
|
"loss": 1.3525, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 7.803468208092485e-05, |
|
"loss": 1.5947, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 7.796242774566474e-05, |
|
"loss": 0.8428, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 7.789017341040464e-05, |
|
"loss": 0.6968, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 7.781791907514451e-05, |
|
"loss": 1.5352, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 7.774566473988439e-05, |
|
"loss": 1.1396, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 7.767341040462428e-05, |
|
"loss": 1.7617, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 7.760115606936417e-05, |
|
"loss": 1.1445, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 7.752890173410406e-05, |
|
"loss": 1.9502, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 7.745664739884392e-05, |
|
"loss": 0.9048, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 7.738439306358381e-05, |
|
"loss": 1.7373, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 7.73121387283237e-05, |
|
"loss": 0.2493, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 7.72398843930636e-05, |
|
"loss": 0.5571, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 7.716763005780347e-05, |
|
"loss": 0.9624, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 7.709537572254336e-05, |
|
"loss": 1.0078, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 7.702312138728324e-05, |
|
"loss": 0.543, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 7.695086705202313e-05, |
|
"loss": 0.6665, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 7.6878612716763e-05, |
|
"loss": 0.5049, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 7.68063583815029e-05, |
|
"loss": 0.9136, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 7.673410404624279e-05, |
|
"loss": 1.6494, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 7.666184971098266e-05, |
|
"loss": 1.6484, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 7.658959537572254e-05, |
|
"loss": 2.1992, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 7.651734104046243e-05, |
|
"loss": 0.7026, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 7.644508670520232e-05, |
|
"loss": 1.8701, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 7.63728323699422e-05, |
|
"loss": 1.2461, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 7.630057803468207e-05, |
|
"loss": 1.8457, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 7.622832369942197e-05, |
|
"loss": 1.3818, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 7.615606936416186e-05, |
|
"loss": 1.3438, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 7.608381502890175e-05, |
|
"loss": 0.5293, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 7.601156069364162e-05, |
|
"loss": 1.4971, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 7.59393063583815e-05, |
|
"loss": 0.7163, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 7.586705202312139e-05, |
|
"loss": 1.1074, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 7.579479768786128e-05, |
|
"loss": 0.7866, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 7.572254335260116e-05, |
|
"loss": 3.1758, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 7.565028901734105e-05, |
|
"loss": 0.8877, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 7.557803468208094e-05, |
|
"loss": 0.8252, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 7.550578034682081e-05, |
|
"loss": 0.874, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 7.543352601156069e-05, |
|
"loss": 0.2052, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 7.536127167630058e-05, |
|
"loss": 0.5786, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 7.528901734104047e-05, |
|
"loss": 0.5962, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 7.521676300578035e-05, |
|
"loss": 1.3105, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 7.514450867052023e-05, |
|
"loss": 0.9766, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 7.507225433526012e-05, |
|
"loss": 0.6572, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.4854, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 7.492774566473988e-05, |
|
"loss": 0.7168, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 7.485549132947977e-05, |
|
"loss": 0.8154, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 7.478323699421965e-05, |
|
"loss": 0.1877, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 7.471098265895954e-05, |
|
"loss": 0.2507, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 7.463872832369942e-05, |
|
"loss": 0.7983, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 7.456647398843931e-05, |
|
"loss": 0.7524, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 7.44942196531792e-05, |
|
"loss": 0.7378, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 7.442196531791908e-05, |
|
"loss": 0.5991, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 7.434971098265897e-05, |
|
"loss": 0.5728, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 7.427745664739884e-05, |
|
"loss": 0.9297, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 7.420520231213873e-05, |
|
"loss": 0.4419, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 7.413294797687862e-05, |
|
"loss": 0.7456, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 7.40606936416185e-05, |
|
"loss": 0.2932, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 7.398843930635838e-05, |
|
"loss": 1.54, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 7.391618497109827e-05, |
|
"loss": 1.5947, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 7.384393063583816e-05, |
|
"loss": 0.936, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 7.377167630057804e-05, |
|
"loss": 0.9072, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 7.369942196531793e-05, |
|
"loss": 0.9575, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 7.36271676300578e-05, |
|
"loss": 0.0718, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 7.355491329479769e-05, |
|
"loss": 0.3481, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 7.348265895953757e-05, |
|
"loss": 0.7949, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 7.341040462427746e-05, |
|
"loss": 0.8638, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 7.333815028901735e-05, |
|
"loss": 0.7446, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 7.326589595375723e-05, |
|
"loss": 0.3567, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 7.31936416184971e-05, |
|
"loss": 0.2634, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 7.3121387283237e-05, |
|
"loss": 0.9561, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 7.304913294797688e-05, |
|
"loss": 0.4019, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 7.297687861271678e-05, |
|
"loss": 0.1997, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 7.290462427745665e-05, |
|
"loss": 0.2766, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 7.283236994219653e-05, |
|
"loss": 1.3291, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 7.276011560693642e-05, |
|
"loss": 0.9097, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 7.268786127167631e-05, |
|
"loss": 0.3206, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 7.261560693641619e-05, |
|
"loss": 0.79, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 7.254335260115608e-05, |
|
"loss": 1.4492, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 7.247109826589595e-05, |
|
"loss": 0.4182, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 7.239884393063584e-05, |
|
"loss": 0.6021, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 7.232658959537572e-05, |
|
"loss": 1.6348, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 7.225433526011561e-05, |
|
"loss": 2.4414, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 7.21820809248555e-05, |
|
"loss": 1.3262, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 7.210982658959538e-05, |
|
"loss": 0.0728, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 7.203757225433526e-05, |
|
"loss": 1.4277, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 7.196531791907515e-05, |
|
"loss": 0.978, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 7.189306358381504e-05, |
|
"loss": 0.6304, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 7.182080924855493e-05, |
|
"loss": 0.2275, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 7.174855491329479e-05, |
|
"loss": 0.3618, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 7.167630057803468e-05, |
|
"loss": 2.9316, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 7.160404624277457e-05, |
|
"loss": 0.3352, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 7.153179190751446e-05, |
|
"loss": 1.3799, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 7.145953757225434e-05, |
|
"loss": 0.3552, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 7.138728323699421e-05, |
|
"loss": 1.5254, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 7.13150289017341e-05, |
|
"loss": 1.5742, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 7.1242774566474e-05, |
|
"loss": 0.8462, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 7.117052023121387e-05, |
|
"loss": 1.2773, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 7.109826589595376e-05, |
|
"loss": 0.3086, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 7.102601156069365e-05, |
|
"loss": 0.3745, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 7.095375722543353e-05, |
|
"loss": 1.3057, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 7.088150289017341e-05, |
|
"loss": 0.5737, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 7.08092485549133e-05, |
|
"loss": 1.208, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 7.073699421965319e-05, |
|
"loss": 1.082, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 7.066473988439308e-05, |
|
"loss": 0.5708, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 7.059248554913294e-05, |
|
"loss": 0.8008, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 7.052023121387283e-05, |
|
"loss": 3.1895, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 7.044797687861272e-05, |
|
"loss": 0.7192, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 7.037572254335261e-05, |
|
"loss": 0.246, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 7.030346820809249e-05, |
|
"loss": 1.1982, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 7.023121387283237e-05, |
|
"loss": 0.3928, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 7.015895953757226e-05, |
|
"loss": 0.1206, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 7.008670520231215e-05, |
|
"loss": 0.5464, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 7.001445086705202e-05, |
|
"loss": 0.4485, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 6.994219653179191e-05, |
|
"loss": 0.3047, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 6.986994219653179e-05, |
|
"loss": 1.0312, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 6.979768786127168e-05, |
|
"loss": 0.3481, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 6.972543352601156e-05, |
|
"loss": 0.5161, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.965317919075145e-05, |
|
"loss": 0.6694, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.958092485549134e-05, |
|
"loss": 0.5723, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 6.950867052023122e-05, |
|
"loss": 0.5503, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.943641618497109e-05, |
|
"loss": 0.2603, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.936416184971098e-05, |
|
"loss": 0.1652, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.929190751445087e-05, |
|
"loss": 0.7349, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.921965317919076e-05, |
|
"loss": 0.9585, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 6.914739884393064e-05, |
|
"loss": 0.5176, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 6.907514450867052e-05, |
|
"loss": 0.6919, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 6.900289017341041e-05, |
|
"loss": 0.3958, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 6.89306358381503e-05, |
|
"loss": 0.915, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 6.885838150289018e-05, |
|
"loss": 0.2267, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 6.878612716763007e-05, |
|
"loss": 0.2448, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 6.871387283236994e-05, |
|
"loss": 2.1348, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 6.864161849710983e-05, |
|
"loss": 0.1556, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 6.856936416184971e-05, |
|
"loss": 0.604, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 6.84971098265896e-05, |
|
"loss": 0.447, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 6.842485549132949e-05, |
|
"loss": 1.1621, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 6.835260115606937e-05, |
|
"loss": 1.0186, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 6.828034682080924e-05, |
|
"loss": 1.3984, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 6.820809248554913e-05, |
|
"loss": 0.2722, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 6.813583815028903e-05, |
|
"loss": 0.3308, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 6.80635838150289e-05, |
|
"loss": 1.0127, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 6.799132947976878e-05, |
|
"loss": 0.3582, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 6.791907514450867e-05, |
|
"loss": 0.9277, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 6.784682080924856e-05, |
|
"loss": 0.1014, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 6.777456647398845e-05, |
|
"loss": 0.0942, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 6.770231213872833e-05, |
|
"loss": 0.0748, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 6.763005780346822e-05, |
|
"loss": 1.7529, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 6.75578034682081e-05, |
|
"loss": 0.1633, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 6.748554913294798e-05, |
|
"loss": 0.3679, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 6.741329479768786e-05, |
|
"loss": 0.3796, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 6.734104046242775e-05, |
|
"loss": 0.3879, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 6.726878612716764e-05, |
|
"loss": 0.1392, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 6.719653179190752e-05, |
|
"loss": 0.6958, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 6.71242774566474e-05, |
|
"loss": 0.7627, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 6.705202312138729e-05, |
|
"loss": 1.3213, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 6.697976878612718e-05, |
|
"loss": 0.7412, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 6.690751445086705e-05, |
|
"loss": 1.9873, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 6.683526011560693e-05, |
|
"loss": 2.0957, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 6.676300578034682e-05, |
|
"loss": 0.645, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 6.669075144508671e-05, |
|
"loss": 0.1798, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 6.661849710982659e-05, |
|
"loss": 0.2683, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 6.654624277456648e-05, |
|
"loss": 0.0817, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 6.647398843930635e-05, |
|
"loss": 2.6328, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 6.640173410404625e-05, |
|
"loss": 0.1882, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 6.632947976878614e-05, |
|
"loss": 0.4387, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 6.625722543352601e-05, |
|
"loss": 0.179, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 6.61849710982659e-05, |
|
"loss": 1.6758, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 6.61127167630058e-05, |
|
"loss": 1.0859, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 6.604046242774567e-05, |
|
"loss": 0.2534, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 6.596820809248555e-05, |
|
"loss": 0.282, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 6.589595375722544e-05, |
|
"loss": 1.0127, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 6.582369942196533e-05, |
|
"loss": 1.1582, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 6.57514450867052e-05, |
|
"loss": 0.1227, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 6.567919075144508e-05, |
|
"loss": 0.132, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 6.560693641618497e-05, |
|
"loss": 0.0927, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 6.553468208092486e-05, |
|
"loss": 0.8086, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 6.546242774566474e-05, |
|
"loss": 0.3208, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 6.539017341040463e-05, |
|
"loss": 0.3301, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 6.53179190751445e-05, |
|
"loss": 0.9961, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 6.52456647398844e-05, |
|
"loss": 0.135, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 6.517341040462427e-05, |
|
"loss": 0.7505, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 6.510115606936416e-05, |
|
"loss": 0.707, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 6.502890173410405e-05, |
|
"loss": 0.2096, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 6.495664739884393e-05, |
|
"loss": 0.313, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 6.488439306358381e-05, |
|
"loss": 0.3015, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 6.48121387283237e-05, |
|
"loss": 0.3948, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 6.473988439306359e-05, |
|
"loss": 0.1351, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 6.466763005780348e-05, |
|
"loss": 0.397, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 6.459537572254336e-05, |
|
"loss": 0.7754, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 6.452312138728323e-05, |
|
"loss": 0.4358, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 6.445086705202312e-05, |
|
"loss": 0.3955, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 6.437861271676301e-05, |
|
"loss": 0.3838, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 6.430635838150289e-05, |
|
"loss": 0.5825, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 6.423410404624278e-05, |
|
"loss": 0.1262, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 6.416184971098266e-05, |
|
"loss": 1.1172, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 6.408959537572255e-05, |
|
"loss": 0.5957, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 6.401734104046243e-05, |
|
"loss": 0.126, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 6.394508670520232e-05, |
|
"loss": 1.1279, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 6.38728323699422e-05, |
|
"loss": 0.2252, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 6.380057803468208e-05, |
|
"loss": 1.0674, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 6.372832369942196e-05, |
|
"loss": 0.8145, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 6.365606936416185e-05, |
|
"loss": 0.3667, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 6.358381502890174e-05, |
|
"loss": 0.4365, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 6.351156069364163e-05, |
|
"loss": 0.6084, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 6.34393063583815e-05, |
|
"loss": 0.343, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 6.336705202312138e-05, |
|
"loss": 1.5518, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 6.329479768786127e-05, |
|
"loss": 2.1113, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 6.322254335260117e-05, |
|
"loss": 1.542, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 6.315028901734104e-05, |
|
"loss": 2.2168, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 6.307803468208093e-05, |
|
"loss": 0.4385, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 6.300578034682081e-05, |
|
"loss": 1.1543, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 6.29335260115607e-05, |
|
"loss": 0.1339, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 6.286127167630058e-05, |
|
"loss": 0.1678, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 6.278901734104047e-05, |
|
"loss": 2.0801, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 6.271676300578036e-05, |
|
"loss": 1.417, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 6.264450867052023e-05, |
|
"loss": 0.793, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 6.257225433526011e-05, |
|
"loss": 0.1017, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 6.25e-05, |
|
"loss": 0.9209, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 6.242774566473989e-05, |
|
"loss": 0.6675, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 6.235549132947978e-05, |
|
"loss": 0.5308, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 6.228323699421965e-05, |
|
"loss": 0.1527, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 6.221098265895954e-05, |
|
"loss": 1.4238, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 6.213872832369943e-05, |
|
"loss": 0.1262, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 6.206647398843932e-05, |
|
"loss": 0.1543, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 6.19942196531792e-05, |
|
"loss": 1.1572, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 6.192196531791907e-05, |
|
"loss": 0.2355, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 6.184971098265896e-05, |
|
"loss": 0.2046, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 6.177745664739885e-05, |
|
"loss": 0.1271, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 6.170520231213873e-05, |
|
"loss": 0.2119, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 6.163294797687862e-05, |
|
"loss": 0.2289, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 6.156069364161851e-05, |
|
"loss": 0.1641, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 6.148843930635839e-05, |
|
"loss": 0.0983, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 6.141618497109826e-05, |
|
"loss": 0.6348, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 6.134393063583815e-05, |
|
"loss": 0.1884, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 6.127167630057804e-05, |
|
"loss": 1.1025, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 6.119942196531793e-05, |
|
"loss": 0.1639, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 6.11271676300578e-05, |
|
"loss": 0.3169, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 6.105491329479769e-05, |
|
"loss": 0.0669, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 6.098265895953758e-05, |
|
"loss": 0.4001, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 6.091040462427746e-05, |
|
"loss": 0.1901, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 6.083815028901735e-05, |
|
"loss": 0.7515, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 6.076589595375722e-05, |
|
"loss": 1.0029, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 6.069364161849711e-05, |
|
"loss": 1.4229, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 6.0621387283236996e-05, |
|
"loss": 0.405, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 6.0549132947976886e-05, |
|
"loss": 0.0482, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 6.047687861271677e-05, |
|
"loss": 0.1689, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 6.0404624277456646e-05, |
|
"loss": 0.1427, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 6.033236994219653e-05, |
|
"loss": 0.2035, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 6.026011560693642e-05, |
|
"loss": 0.2183, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 6.0187861271676304e-05, |
|
"loss": 0.2076, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 6.0115606936416195e-05, |
|
"loss": 0.0494, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 6.004335260115608e-05, |
|
"loss": 0.4253, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 5.9971098265895955e-05, |
|
"loss": 0.1998, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 5.989884393063584e-05, |
|
"loss": 0.0816, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 5.982658959537573e-05, |
|
"loss": 0.119, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 5.975433526011561e-05, |
|
"loss": 0.4331, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 5.9682080924855496e-05, |
|
"loss": 1.5527, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 5.960982658959537e-05, |
|
"loss": 0.4216, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 5.9537572254335263e-05, |
|
"loss": 0.5083, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 5.946531791907515e-05, |
|
"loss": 0.6611, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 5.939306358381503e-05, |
|
"loss": 0.4089, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 5.932080924855492e-05, |
|
"loss": 0.2084, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 5.92485549132948e-05, |
|
"loss": 0.0862, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 5.917630057803468e-05, |
|
"loss": 0.0308, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 5.910404624277457e-05, |
|
"loss": 1.04, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 5.9031791907514456e-05, |
|
"loss": 0.5933, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 5.895953757225434e-05, |
|
"loss": 0.3857, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 5.8887283236994216e-05, |
|
"loss": 0.3113, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 5.8815028901734106e-05, |
|
"loss": 1.0283, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 5.874277456647399e-05, |
|
"loss": 0.1606, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 5.8670520231213874e-05, |
|
"loss": 0.1316, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 5.8598265895953764e-05, |
|
"loss": 0.1326, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 5.852601156069365e-05, |
|
"loss": 0.1426, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 5.8453757225433524e-05, |
|
"loss": 0.0912, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 5.8381502890173415e-05, |
|
"loss": 0.1223, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 5.83092485549133e-05, |
|
"loss": 0.4419, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 5.823699421965318e-05, |
|
"loss": 0.0721, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 5.816473988439307e-05, |
|
"loss": 0.087, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 5.809248554913295e-05, |
|
"loss": 1.0381, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 5.802023121387283e-05, |
|
"loss": 0.0871, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 5.7947976878612717e-05, |
|
"loss": 0.2406, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 5.787572254335261e-05, |
|
"loss": 0.0811, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 5.780346820809249e-05, |
|
"loss": 0.1663, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 5.773121387283237e-05, |
|
"loss": 0.4058, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 5.765895953757226e-05, |
|
"loss": 0.0524, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 5.758670520231214e-05, |
|
"loss": 0.3625, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 5.7514450867052025e-05, |
|
"loss": 0.4182, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 5.7442196531791915e-05, |
|
"loss": 0.1842, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 5.736994219653179e-05, |
|
"loss": 0.2188, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 5.7297687861271676e-05, |
|
"loss": 0.0976, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 5.722543352601156e-05, |
|
"loss": 0.4324, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 5.715317919075145e-05, |
|
"loss": 0.6494, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 5.7080924855491333e-05, |
|
"loss": 1.0283, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 5.7008670520231224e-05, |
|
"loss": 0.1549, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 5.69364161849711e-05, |
|
"loss": 0.0479, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 5.6864161849710984e-05, |
|
"loss": 0.6104, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 5.679190751445087e-05, |
|
"loss": 0.1044, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 5.671965317919076e-05, |
|
"loss": 1.0371, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 5.664739884393064e-05, |
|
"loss": 0.0698, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 5.657514450867052e-05, |
|
"loss": 1.0039, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 5.65028901734104e-05, |
|
"loss": 0.665, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 5.643063583815029e-05, |
|
"loss": 1.8359, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 5.6358381502890176e-05, |
|
"loss": 0.1306, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 5.628612716763007e-05, |
|
"loss": 0.2312, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 5.6213872832369944e-05, |
|
"loss": 0.5083, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 5.614161849710983e-05, |
|
"loss": 0.1721, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 5.606936416184971e-05, |
|
"loss": 0.1969, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 5.59971098265896e-05, |
|
"loss": 0.2544, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 5.5924855491329485e-05, |
|
"loss": 0.5366, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 5.585260115606936e-05, |
|
"loss": 0.1372, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 5.5780346820809245e-05, |
|
"loss": 0.3704, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 5.5708092485549136e-05, |
|
"loss": 0.9263, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 5.563583815028902e-05, |
|
"loss": 0.7959, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 5.556358381502891e-05, |
|
"loss": 0.2479, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 5.5491329479768787e-05, |
|
"loss": 0.752, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 5.541907514450867e-05, |
|
"loss": 0.5405, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 5.5346820809248554e-05, |
|
"loss": 0.5664, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 5.5274566473988444e-05, |
|
"loss": 0.1226, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 5.520231213872833e-05, |
|
"loss": 0.1069, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 5.513005780346822e-05, |
|
"loss": 0.1293, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 5.505780346820809e-05, |
|
"loss": 0.5967, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 5.498554913294798e-05, |
|
"loss": 1.249, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 5.491329479768786e-05, |
|
"loss": 0.1059, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 5.484104046242775e-05, |
|
"loss": 0.0981, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 5.4768786127167636e-05, |
|
"loss": 0.2263, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 5.469653179190751e-05, |
|
"loss": 0.0743, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 5.46242774566474e-05, |
|
"loss": 0.6899, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 5.455202312138729e-05, |
|
"loss": 0.5317, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 5.447976878612717e-05, |
|
"loss": 0.0978, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 5.440751445086706e-05, |
|
"loss": 0.6572, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 5.433526011560693e-05, |
|
"loss": 0.0288, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 5.426300578034682e-05, |
|
"loss": 0.1472, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 5.4190751445086705e-05, |
|
"loss": 0.0677, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 5.4118497109826596e-05, |
|
"loss": 0.1234, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 5.404624277456648e-05, |
|
"loss": 0.2278, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 5.3973988439306356e-05, |
|
"loss": 0.0499, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 5.390173410404624e-05, |
|
"loss": 0.3049, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 5.382947976878613e-05, |
|
"loss": 0.118, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 5.3757225433526014e-05, |
|
"loss": 1.0068, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 5.3684971098265904e-05, |
|
"loss": 0.5439, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 5.361271676300579e-05, |
|
"loss": 0.7544, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 5.3540462427745665e-05, |
|
"loss": 0.1864, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 5.346820809248555e-05, |
|
"loss": 0.0692, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 5.339595375722544e-05, |
|
"loss": 0.1442, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 5.332369942196532e-05, |
|
"loss": 0.3025, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 5.325144508670521e-05, |
|
"loss": 1.2441, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 5.317919075144508e-05, |
|
"loss": 0.3381, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 5.310693641618497e-05, |
|
"loss": 1.0391, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 5.303468208092486e-05, |
|
"loss": 0.2793, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 5.296242774566475e-05, |
|
"loss": 0.051, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 5.289017341040463e-05, |
|
"loss": 0.0452, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 5.281791907514451e-05, |
|
"loss": 0.0527, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 5.274566473988439e-05, |
|
"loss": 0.0787, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 5.267341040462428e-05, |
|
"loss": 0.2859, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 5.2601156069364165e-05, |
|
"loss": 0.1048, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 5.2528901734104056e-05, |
|
"loss": 0.3464, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 5.2456647398843926e-05, |
|
"loss": 0.0637, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 5.2384393063583816e-05, |
|
"loss": 0.468, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 5.23121387283237e-05, |
|
"loss": 0.8721, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 5.223988439306359e-05, |
|
"loss": 0.248, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 5.2167630057803474e-05, |
|
"loss": 1.4512, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 5.209537572254336e-05, |
|
"loss": 0.2844, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 5.2023121387283234e-05, |
|
"loss": 0.0779, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 5.1950867052023124e-05, |
|
"loss": 0.8091, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 5.187861271676301e-05, |
|
"loss": 0.3804, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 5.18063583815029e-05, |
|
"loss": 0.498, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 5.173410404624278e-05, |
|
"loss": 0.29, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 5.166184971098266e-05, |
|
"loss": 0.4446, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 5.158959537572254e-05, |
|
"loss": 0.2842, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 5.151734104046243e-05, |
|
"loss": 0.2937, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 5.1445086705202317e-05, |
|
"loss": 0.0732, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 5.13728323699422e-05, |
|
"loss": 0.0523, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 5.130057803468208e-05, |
|
"loss": 0.283, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 5.122832369942197e-05, |
|
"loss": 0.0865, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 5.115606936416185e-05, |
|
"loss": 0.71, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 5.108381502890174e-05, |
|
"loss": 0.4834, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 5.1011560693641625e-05, |
|
"loss": 0.5327, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 5.09393063583815e-05, |
|
"loss": 0.583, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 5.0867052023121385e-05, |
|
"loss": 0.6245, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 5.0794797687861276e-05, |
|
"loss": 0.1218, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 5.072254335260116e-05, |
|
"loss": 0.3779, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 5.065028901734104e-05, |
|
"loss": 0.0681, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 5.0578034682080933e-05, |
|
"loss": 0.5293, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 5.050578034682081e-05, |
|
"loss": 0.2615, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 5.0433526011560694e-05, |
|
"loss": 0.1216, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 5.0361271676300584e-05, |
|
"loss": 0.1024, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 5.028901734104047e-05, |
|
"loss": 0.4194, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 5.021676300578035e-05, |
|
"loss": 0.0668, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 5.014450867052023e-05, |
|
"loss": 0.2603, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 5.007225433526012e-05, |
|
"loss": 0.236, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1864, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 4.9927745664739886e-05, |
|
"loss": 0.5059, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 4.985549132947977e-05, |
|
"loss": 0.1103, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 4.978323699421965e-05, |
|
"loss": 1.1426, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 4.971098265895954e-05, |
|
"loss": 0.0718, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 4.963872832369942e-05, |
|
"loss": 0.0851, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 4.956647398843931e-05, |
|
"loss": 0.1479, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 4.9494219653179194e-05, |
|
"loss": 0.3003, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 4.942196531791908e-05, |
|
"loss": 1.4795, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 4.934971098265896e-05, |
|
"loss": 0.4636, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 4.9277456647398845e-05, |
|
"loss": 0.2698, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 4.920520231213873e-05, |
|
"loss": 0.4043, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 4.913294797687861e-05, |
|
"loss": 0.0864, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 4.9060693641618496e-05, |
|
"loss": 0.3259, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 4.8988439306358387e-05, |
|
"loss": 0.4578, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 4.8916184971098263e-05, |
|
"loss": 0.7275, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 4.8843930635838154e-05, |
|
"loss": 0.1146, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 4.877167630057804e-05, |
|
"loss": 0.0964, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 4.869942196531792e-05, |
|
"loss": 1.2549, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 4.8627167630057805e-05, |
|
"loss": 0.1674, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 4.855491329479769e-05, |
|
"loss": 0.5093, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 4.848265895953757e-05, |
|
"loss": 0.0292, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 4.841040462427746e-05, |
|
"loss": 0.2324, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 4.833815028901734e-05, |
|
"loss": 0.0645, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 4.826589595375723e-05, |
|
"loss": 0.1932, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 4.8193641618497106e-05, |
|
"loss": 0.5078, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 4.8121387283237e-05, |
|
"loss": 0.1044, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 4.804913294797688e-05, |
|
"loss": 0.0692, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 4.7976878612716764e-05, |
|
"loss": 0.1609, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 4.790462427745665e-05, |
|
"loss": 0.2805, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 4.783236994219654e-05, |
|
"loss": 0.0351, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 4.7760115606936415e-05, |
|
"loss": 0.0269, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 4.7687861271676305e-05, |
|
"loss": 0.0909, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 4.761560693641618e-05, |
|
"loss": 0.3835, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 4.754335260115607e-05, |
|
"loss": 0.1043, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 4.7471098265895956e-05, |
|
"loss": 0.1198, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 4.739884393063584e-05, |
|
"loss": 0.0962, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 4.732658959537572e-05, |
|
"loss": 0.0391, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 4.7254335260115614e-05, |
|
"loss": 0.0626, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 4.718208092485549e-05, |
|
"loss": 0.2886, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 4.710982658959538e-05, |
|
"loss": 0.3933, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 4.703757225433526e-05, |
|
"loss": 0.0483, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 4.696531791907515e-05, |
|
"loss": 0.5444, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 4.689306358381503e-05, |
|
"loss": 0.2971, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 4.6820809248554915e-05, |
|
"loss": 0.0339, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 4.67485549132948e-05, |
|
"loss": 0.1923, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 4.667630057803468e-05, |
|
"loss": 0.3647, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 4.6604046242774566e-05, |
|
"loss": 0.261, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 4.653179190751446e-05, |
|
"loss": 0.5928, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 4.6459537572254333e-05, |
|
"loss": 0.2321, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 4.6387283236994224e-05, |
|
"loss": 0.4026, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 4.631502890173411e-05, |
|
"loss": 0.1016, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 4.624277456647399e-05, |
|
"loss": 0.1064, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 4.6170520231213875e-05, |
|
"loss": 0.142, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 4.609826589595376e-05, |
|
"loss": 0.1277, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 4.602601156069364e-05, |
|
"loss": 0.129, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 4.595375722543353e-05, |
|
"loss": 0.3604, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 4.588150289017341e-05, |
|
"loss": 0.1687, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 4.58092485549133e-05, |
|
"loss": 0.0752, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 4.5736994219653176e-05, |
|
"loss": 0.2542, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 4.566473988439307e-05, |
|
"loss": 0.0568, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 4.559248554913295e-05, |
|
"loss": 0.4595, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 4.5520231213872834e-05, |
|
"loss": 0.0514, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 4.544797687861272e-05, |
|
"loss": 0.1414, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 4.537572254335261e-05, |
|
"loss": 0.1995, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 4.5303468208092485e-05, |
|
"loss": 0.7603, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 4.5231213872832375e-05, |
|
"loss": 0.4648, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 4.515895953757225e-05, |
|
"loss": 0.0458, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 4.508670520231214e-05, |
|
"loss": 0.2793, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 4.5014450867052026e-05, |
|
"loss": 0.0698, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 4.494219653179191e-05, |
|
"loss": 0.5542, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 4.486994219653179e-05, |
|
"loss": 0.1351, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 4.4797687861271684e-05, |
|
"loss": 0.1388, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 4.472543352601156e-05, |
|
"loss": 0.2688, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 4.465317919075145e-05, |
|
"loss": 0.2279, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 4.458092485549133e-05, |
|
"loss": 0.8984, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 4.450867052023122e-05, |
|
"loss": 0.1462, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 4.44364161849711e-05, |
|
"loss": 0.1348, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 4.4364161849710985e-05, |
|
"loss": 0.9751, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 4.429190751445087e-05, |
|
"loss": 0.8076, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 4.421965317919075e-05, |
|
"loss": 0.0341, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 4.4147398843930636e-05, |
|
"loss": 0.0415, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 4.407514450867053e-05, |
|
"loss": 0.8198, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 4.4002890173410404e-05, |
|
"loss": 0.0991, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 4.3930635838150294e-05, |
|
"loss": 0.27, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 4.385838150289018e-05, |
|
"loss": 0.1782, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 4.378612716763006e-05, |
|
"loss": 0.0435, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 4.3713872832369945e-05, |
|
"loss": 1.3643, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 4.364161849710983e-05, |
|
"loss": 0.1512, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 4.356936416184971e-05, |
|
"loss": 0.3682, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 4.34971098265896e-05, |
|
"loss": 0.2361, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 4.342485549132948e-05, |
|
"loss": 0.1429, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 4.335260115606937e-05, |
|
"loss": 0.0474, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 4.328034682080925e-05, |
|
"loss": 0.1757, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 4.320809248554914e-05, |
|
"loss": 0.1002, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 4.313583815028902e-05, |
|
"loss": 0.0467, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 4.3063583815028904e-05, |
|
"loss": 0.3843, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 4.299132947976879e-05, |
|
"loss": 0.1632, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 4.291907514450868e-05, |
|
"loss": 0.0807, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 4.2846820809248555e-05, |
|
"loss": 0.0778, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 4.2774566473988445e-05, |
|
"loss": 0.0561, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 4.270231213872832e-05, |
|
"loss": 0.1298, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 4.263005780346821e-05, |
|
"loss": 0.0257, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 4.2557803468208096e-05, |
|
"loss": 0.3228, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 4.248554913294798e-05, |
|
"loss": 0.2866, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 4.2413294797687863e-05, |
|
"loss": 0.0176, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 4.234104046242775e-05, |
|
"loss": 0.501, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 4.226878612716763e-05, |
|
"loss": 0.4587, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 4.2196531791907514e-05, |
|
"loss": 0.0371, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 4.21242774566474e-05, |
|
"loss": 0.261, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 4.205202312138729e-05, |
|
"loss": 0.1233, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 4.197976878612717e-05, |
|
"loss": 0.1749, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 4.1907514450867055e-05, |
|
"loss": 0.5347, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 4.183526011560694e-05, |
|
"loss": 1.168, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 4.176300578034682e-05, |
|
"loss": 0.0302, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 4.1690751445086706e-05, |
|
"loss": 1.5771, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 4.161849710982659e-05, |
|
"loss": 0.0875, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 4.1546242774566474e-05, |
|
"loss": 0.4185, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 4.147398843930636e-05, |
|
"loss": 0.0222, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 4.140173410404625e-05, |
|
"loss": 0.0307, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 4.132947976878613e-05, |
|
"loss": 0.0816, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 4.1257225433526015e-05, |
|
"loss": 0.7207, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 4.11849710982659e-05, |
|
"loss": 0.1414, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 4.111271676300578e-05, |
|
"loss": 0.4385, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 4.1040462427745666e-05, |
|
"loss": 0.0222, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 4.096820809248555e-05, |
|
"loss": 0.1887, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 4.089595375722543e-05, |
|
"loss": 0.915, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 4.082369942196532e-05, |
|
"loss": 0.2737, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 4.07514450867052e-05, |
|
"loss": 0.0715, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 4.067919075144509e-05, |
|
"loss": 0.0723, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 4.0606936416184974e-05, |
|
"loss": 0.4812, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 4.053468208092486e-05, |
|
"loss": 0.0267, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 4.046242774566474e-05, |
|
"loss": 0.1722, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 4.0390173410404625e-05, |
|
"loss": 0.2179, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 4.031791907514451e-05, |
|
"loss": 0.1813, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 4.024566473988439e-05, |
|
"loss": 0.0319, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 4.0173410404624276e-05, |
|
"loss": 0.3083, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 4.0101156069364166e-05, |
|
"loss": 0.1238, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 4.002890173410404e-05, |
|
"loss": 0.0918, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 3.9956647398843933e-05, |
|
"loss": 0.1139, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 3.988439306358382e-05, |
|
"loss": 0.0865, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 3.98121387283237e-05, |
|
"loss": 0.0482, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 3.9739884393063584e-05, |
|
"loss": 0.1104, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 3.966763005780347e-05, |
|
"loss": 0.0356, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 3.959537572254335e-05, |
|
"loss": 1.3721, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 3.952312138728324e-05, |
|
"loss": 0.3274, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 3.945086705202312e-05, |
|
"loss": 1.5762, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 3.937861271676301e-05, |
|
"loss": 0.8618, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 3.930635838150289e-05, |
|
"loss": 0.0488, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 3.9234104046242776e-05, |
|
"loss": 0.0551, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 3.916184971098266e-05, |
|
"loss": 0.1334, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 3.9089595375722544e-05, |
|
"loss": 0.0598, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 3.901734104046243e-05, |
|
"loss": 0.1169, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 3.894508670520232e-05, |
|
"loss": 0.0349, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 3.8872832369942194e-05, |
|
"loss": 0.0823, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 3.8800578034682085e-05, |
|
"loss": 0.1059, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 3.872832369942196e-05, |
|
"loss": 0.2155, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 3.865606936416185e-05, |
|
"loss": 0.2937, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 3.8583815028901736e-05, |
|
"loss": 0.1575, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 3.851156069364162e-05, |
|
"loss": 0.0522, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 3.84393063583815e-05, |
|
"loss": 0.0586, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 3.836705202312139e-05, |
|
"loss": 0.1429, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 3.829479768786127e-05, |
|
"loss": 0.0614, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 3.822254335260116e-05, |
|
"loss": 0.1046, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.815028901734104e-05, |
|
"loss": 0.2932, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.807803468208093e-05, |
|
"loss": 0.1882, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 3.800578034682081e-05, |
|
"loss": 0.335, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 3.7933526011560695e-05, |
|
"loss": 0.0467, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 3.786127167630058e-05, |
|
"loss": 0.0432, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.778901734104047e-05, |
|
"loss": 0.0472, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.7716763005780346e-05, |
|
"loss": 0.1842, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 3.7644508670520236e-05, |
|
"loss": 0.125, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 3.757225433526011e-05, |
|
"loss": 0.3108, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.0698, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 3.742774566473989e-05, |
|
"loss": 0.0881, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 3.735549132947977e-05, |
|
"loss": 0.1825, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 3.7283236994219654e-05, |
|
"loss": 0.0883, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 3.721098265895954e-05, |
|
"loss": 0.3293, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 3.713872832369942e-05, |
|
"loss": 0.0947, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 3.706647398843931e-05, |
|
"loss": 0.2255, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 3.699421965317919e-05, |
|
"loss": 0.0228, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 3.692196531791908e-05, |
|
"loss": 0.1392, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 3.684971098265896e-05, |
|
"loss": 0.1849, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 3.6777456647398846e-05, |
|
"loss": 0.1449, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 3.670520231213873e-05, |
|
"loss": 0.0373, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 3.6632947976878614e-05, |
|
"loss": 0.3066, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 3.65606936416185e-05, |
|
"loss": 0.1376, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 3.648843930635839e-05, |
|
"loss": 0.0597, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 3.6416184971098265e-05, |
|
"loss": 0.3782, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 3.6343930635838155e-05, |
|
"loss": 0.3362, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 3.627167630057804e-05, |
|
"loss": 0.079, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 3.619942196531792e-05, |
|
"loss": 0.0582, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 3.6127167630057806e-05, |
|
"loss": 0.0238, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 3.605491329479769e-05, |
|
"loss": 0.0421, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 3.598265895953757e-05, |
|
"loss": 0.0196, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 3.591040462427746e-05, |
|
"loss": 0.0848, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 3.583815028901734e-05, |
|
"loss": 0.04, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 3.576589595375723e-05, |
|
"loss": 0.0686, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 3.569364161849711e-05, |
|
"loss": 0.0565, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 3.5621387283237e-05, |
|
"loss": 0.0416, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 3.554913294797688e-05, |
|
"loss": 0.0447, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 3.5476878612716765e-05, |
|
"loss": 0.3757, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 3.540462427745665e-05, |
|
"loss": 0.0224, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 3.533236994219654e-05, |
|
"loss": 0.0817, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 3.5260115606936416e-05, |
|
"loss": 0.1531, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 3.5187861271676306e-05, |
|
"loss": 0.0746, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 3.511560693641618e-05, |
|
"loss": 0.9326, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 3.5043352601156074e-05, |
|
"loss": 0.2896, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 3.497109826589596e-05, |
|
"loss": 0.0511, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.489884393063584e-05, |
|
"loss": 0.0388, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.4826589595375724e-05, |
|
"loss": 0.1256, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 3.475433526011561e-05, |
|
"loss": 0.025, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 3.468208092485549e-05, |
|
"loss": 0.0149, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 3.460982658959538e-05, |
|
"loss": 0.1381, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 3.453757225433526e-05, |
|
"loss": 0.0465, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 3.446531791907515e-05, |
|
"loss": 0.0399, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 3.439306358381503e-05, |
|
"loss": 0.082, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 3.4320809248554916e-05, |
|
"loss": 0.0382, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 3.42485549132948e-05, |
|
"loss": 0.0133, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 3.4176300578034684e-05, |
|
"loss": 0.1542, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 3.410404624277457e-05, |
|
"loss": 0.0593, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 3.403179190751445e-05, |
|
"loss": 0.0452, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 3.3959537572254335e-05, |
|
"loss": 0.0316, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 3.3887283236994225e-05, |
|
"loss": 0.0949, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 3.381502890173411e-05, |
|
"loss": 0.0709, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 3.374277456647399e-05, |
|
"loss": 0.0396, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.3670520231213876e-05, |
|
"loss": 0.0512, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.359826589595376e-05, |
|
"loss": 0.4504, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 3.352601156069364e-05, |
|
"loss": 0.0439, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 3.345375722543353e-05, |
|
"loss": 0.0438, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 3.338150289017341e-05, |
|
"loss": 0.0262, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.3309248554913294e-05, |
|
"loss": 0.0635, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.323699421965318e-05, |
|
"loss": 0.0672, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 3.316473988439307e-05, |
|
"loss": 0.2834, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 3.309248554913295e-05, |
|
"loss": 0.1714, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 3.3020231213872835e-05, |
|
"loss": 0.0411, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 3.294797687861272e-05, |
|
"loss": 0.1542, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 3.28757225433526e-05, |
|
"loss": 0.0361, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 3.2803468208092486e-05, |
|
"loss": 0.1713, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 3.273121387283237e-05, |
|
"loss": 0.0525, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 3.265895953757225e-05, |
|
"loss": 0.1263, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 3.258670520231214e-05, |
|
"loss": 0.0382, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 3.251445086705203e-05, |
|
"loss": 0.0246, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 3.2442196531791904e-05, |
|
"loss": 0.2162, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 3.2369942196531794e-05, |
|
"loss": 0.1227, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 3.229768786127168e-05, |
|
"loss": 0.4153, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 3.222543352601156e-05, |
|
"loss": 0.0349, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 3.2153179190751445e-05, |
|
"loss": 0.0902, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 3.208092485549133e-05, |
|
"loss": 0.0881, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 3.200867052023121e-05, |
|
"loss": 0.0595, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 3.19364161849711e-05, |
|
"loss": 0.0819, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 3.186416184971098e-05, |
|
"loss": 0.03, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 3.179190751445087e-05, |
|
"loss": 0.7544, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 3.171965317919075e-05, |
|
"loss": 0.0212, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 3.164739884393064e-05, |
|
"loss": 0.0339, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 3.157514450867052e-05, |
|
"loss": 0.0273, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 3.1502890173410405e-05, |
|
"loss": 0.0856, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 3.143063583815029e-05, |
|
"loss": 0.124, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 3.135838150289018e-05, |
|
"loss": 0.0275, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 3.1286127167630055e-05, |
|
"loss": 0.0222, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 3.1213872832369946e-05, |
|
"loss": 0.0211, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 3.114161849710982e-05, |
|
"loss": 0.0296, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 3.106936416184971e-05, |
|
"loss": 0.1104, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 3.09971098265896e-05, |
|
"loss": 0.2375, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 3.092485549132948e-05, |
|
"loss": 0.3435, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 3.0852601156069364e-05, |
|
"loss": 0.0267, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 3.0780346820809254e-05, |
|
"loss": 0.9507, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 3.070809248554913e-05, |
|
"loss": 0.0515, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 3.063583815028902e-05, |
|
"loss": 0.0495, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 3.05635838150289e-05, |
|
"loss": 0.2415, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 3.049132947976879e-05, |
|
"loss": 0.0771, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 3.0419075144508676e-05, |
|
"loss": 0.0344, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 3.0346820809248556e-05, |
|
"loss": 0.0413, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 3.0274566473988443e-05, |
|
"loss": 0.1548, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 3.0202312138728323e-05, |
|
"loss": 0.1913, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 3.013005780346821e-05, |
|
"loss": 0.0509, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 3.0057803468208097e-05, |
|
"loss": 0.0396, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 2.9985549132947977e-05, |
|
"loss": 0.0523, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 2.9913294797687864e-05, |
|
"loss": 0.9546, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 2.9841040462427748e-05, |
|
"loss": 0.0198, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 2.9768786127167632e-05, |
|
"loss": 0.7148, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 2.9696531791907515e-05, |
|
"loss": 0.4314, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 2.96242774566474e-05, |
|
"loss": 0.3684, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 2.9552023121387286e-05, |
|
"loss": 0.439, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 2.947976878612717e-05, |
|
"loss": 0.1277, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 2.9407514450867053e-05, |
|
"loss": 0.0875, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 2.9335260115606937e-05, |
|
"loss": 0.2046, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 2.9263005780346824e-05, |
|
"loss": 0.046, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 2.9190751445086707e-05, |
|
"loss": 0.193, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 2.911849710982659e-05, |
|
"loss": 0.2615, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 2.9046242774566475e-05, |
|
"loss": 0.0159, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 2.8973988439306358e-05, |
|
"loss": 0.3022, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 2.8901734104046245e-05, |
|
"loss": 0.092, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 2.882947976878613e-05, |
|
"loss": 0.1024, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 2.8757225433526013e-05, |
|
"loss": 0.0329, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 2.8684971098265896e-05, |
|
"loss": 0.0205, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 2.861271676300578e-05, |
|
"loss": 0.1547, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 2.8540462427745667e-05, |
|
"loss": 0.1194, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 2.846820809248555e-05, |
|
"loss": 0.0185, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 2.8395953757225434e-05, |
|
"loss": 0.0756, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 2.832369942196532e-05, |
|
"loss": 0.1338, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 2.82514450867052e-05, |
|
"loss": 0.1195, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 2.8179190751445088e-05, |
|
"loss": 0.0305, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 2.8106936416184972e-05, |
|
"loss": 0.0224, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 2.8034682080924855e-05, |
|
"loss": 0.0414, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 2.7962427745664742e-05, |
|
"loss": 0.1261, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 2.7890173410404623e-05, |
|
"loss": 0.0228, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 2.781791907514451e-05, |
|
"loss": 0.0641, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 2.7745664739884393e-05, |
|
"loss": 0.3105, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 2.7673410404624277e-05, |
|
"loss": 1.2959, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 2.7601156069364164e-05, |
|
"loss": 0.0487, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 2.7528901734104044e-05, |
|
"loss": 0.0652, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 2.745664739884393e-05, |
|
"loss": 0.0761, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 2.7384393063583818e-05, |
|
"loss": 0.0232, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 2.73121387283237e-05, |
|
"loss": 0.0774, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 2.7239884393063585e-05, |
|
"loss": 0.0276, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 2.7167630057803466e-05, |
|
"loss": 0.0746, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 2.7095375722543353e-05, |
|
"loss": 0.688, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 2.702312138728324e-05, |
|
"loss": 0.8516, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 2.695086705202312e-05, |
|
"loss": 0.0777, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 2.6878612716763007e-05, |
|
"loss": 0.1597, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 2.6806358381502894e-05, |
|
"loss": 0.0302, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 2.6734104046242774e-05, |
|
"loss": 0.1183, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 2.666184971098266e-05, |
|
"loss": 0.2192, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 2.658959537572254e-05, |
|
"loss": 0.1561, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 2.651734104046243e-05, |
|
"loss": 0.0732, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 2.6445086705202315e-05, |
|
"loss": 0.1304, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 2.6372832369942196e-05, |
|
"loss": 0.3911, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 2.6300578034682083e-05, |
|
"loss": 0.2898, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 2.6228323699421963e-05, |
|
"loss": 0.2178, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 2.615606936416185e-05, |
|
"loss": 0.0424, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 2.6083815028901737e-05, |
|
"loss": 0.0442, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 2.6011560693641617e-05, |
|
"loss": 0.1331, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 2.5939306358381504e-05, |
|
"loss": 0.1705, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 2.586705202312139e-05, |
|
"loss": 0.0554, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 2.579479768786127e-05, |
|
"loss": 0.0732, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 2.5722543352601158e-05, |
|
"loss": 0.1725, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 2.565028901734104e-05, |
|
"loss": 0.0399, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 2.5578034682080925e-05, |
|
"loss": 0.2034, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 2.5505780346820812e-05, |
|
"loss": 0.0613, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.5433526011560693e-05, |
|
"loss": 0.1105, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.536127167630058e-05, |
|
"loss": 0.1292, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 2.5289017341040467e-05, |
|
"loss": 0.0261, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 2.5216763005780347e-05, |
|
"loss": 0.2015, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 2.5144508670520234e-05, |
|
"loss": 0.0551, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 2.5072254335260114e-05, |
|
"loss": 0.0399, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.0609, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 2.4927745664739885e-05, |
|
"loss": 0.0462, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 2.485549132947977e-05, |
|
"loss": 0.0491, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 2.4783236994219655e-05, |
|
"loss": 0.2063, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 2.471098265895954e-05, |
|
"loss": 0.0217, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 2.4638728323699423e-05, |
|
"loss": 0.055, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 2.4566473988439306e-05, |
|
"loss": 0.5132, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 2.4494219653179193e-05, |
|
"loss": 0.0756, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 2.4421965317919077e-05, |
|
"loss": 0.5049, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 2.434971098265896e-05, |
|
"loss": 0.0509, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 2.4277456647398844e-05, |
|
"loss": 0.0416, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 2.420520231213873e-05, |
|
"loss": 0.4929, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 2.4132947976878615e-05, |
|
"loss": 0.1276, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 2.40606936416185e-05, |
|
"loss": 0.0826, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 2.3988439306358382e-05, |
|
"loss": 0.0464, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 2.391618497109827e-05, |
|
"loss": 0.0181, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 2.3843930635838153e-05, |
|
"loss": 0.098, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 2.3771676300578036e-05, |
|
"loss": 0.1777, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 2.369942196531792e-05, |
|
"loss": 0.0375, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 2.3627167630057807e-05, |
|
"loss": 0.0615, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 2.355491329479769e-05, |
|
"loss": 0.027, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 2.3482658959537574e-05, |
|
"loss": 0.304, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 2.3410404624277458e-05, |
|
"loss": 0.021, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 2.333815028901734e-05, |
|
"loss": 0.1176, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 2.326589595375723e-05, |
|
"loss": 0.0246, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 2.3193641618497112e-05, |
|
"loss": 0.0887, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 2.3121387283236996e-05, |
|
"loss": 0.0623, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 2.304913294797688e-05, |
|
"loss": 0.0132, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 2.2976878612716766e-05, |
|
"loss": 0.0682, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 2.290462427745665e-05, |
|
"loss": 0.6489, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 2.2832369942196533e-05, |
|
"loss": 0.1528, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 2.2760115606936417e-05, |
|
"loss": 0.0308, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 2.2687861271676304e-05, |
|
"loss": 0.0777, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 2.2615606936416188e-05, |
|
"loss": 0.0204, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 2.254335260115607e-05, |
|
"loss": 0.161, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 2.2471098265895955e-05, |
|
"loss": 0.1113, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 2.2398843930635842e-05, |
|
"loss": 0.0415, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 2.2326589595375725e-05, |
|
"loss": 0.0702, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 2.225433526011561e-05, |
|
"loss": 0.49, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 2.2182080924855493e-05, |
|
"loss": 0.2705, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 2.2109826589595376e-05, |
|
"loss": 0.0251, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 2.2037572254335263e-05, |
|
"loss": 0.0559, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 2.1965317919075147e-05, |
|
"loss": 0.1243, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 2.189306358381503e-05, |
|
"loss": 0.0339, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 2.1820809248554914e-05, |
|
"loss": 0.0378, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 2.17485549132948e-05, |
|
"loss": 0.0256, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 2.1676300578034685e-05, |
|
"loss": 0.0397, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 2.160404624277457e-05, |
|
"loss": 0.0274, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 2.1531791907514452e-05, |
|
"loss": 0.1582, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 2.145953757225434e-05, |
|
"loss": 0.0289, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 2.1387283236994223e-05, |
|
"loss": 0.0604, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 2.1315028901734106e-05, |
|
"loss": 0.1317, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 2.124277456647399e-05, |
|
"loss": 0.0428, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 2.1170520231213874e-05, |
|
"loss": 0.0729, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 2.1098265895953757e-05, |
|
"loss": 0.0305, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 2.1026011560693644e-05, |
|
"loss": 0.0807, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 2.0953757225433528e-05, |
|
"loss": 0.023, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 2.088150289017341e-05, |
|
"loss": 0.106, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 2.0809248554913295e-05, |
|
"loss": 0.0979, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 2.073699421965318e-05, |
|
"loss": 0.1731, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 2.0664739884393066e-05, |
|
"loss": 0.3472, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 2.059248554913295e-05, |
|
"loss": 0.2023, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 2.0520231213872833e-05, |
|
"loss": 0.0451, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 2.0447976878612716e-05, |
|
"loss": 0.0191, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 2.03757225433526e-05, |
|
"loss": 0.0496, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 2.0303468208092487e-05, |
|
"loss": 0.051, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 2.023121387283237e-05, |
|
"loss": 0.0271, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 2.0158959537572254e-05, |
|
"loss": 0.118, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 2.0086705202312138e-05, |
|
"loss": 0.0345, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 2.001445086705202e-05, |
|
"loss": 0.0218, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 1.994219653179191e-05, |
|
"loss": 0.1061, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 1.9869942196531792e-05, |
|
"loss": 0.0587, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 1.9797687861271676e-05, |
|
"loss": 0.0661, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 1.972543352601156e-05, |
|
"loss": 0.0892, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 1.9653179190751446e-05, |
|
"loss": 0.0189, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 1.958092485549133e-05, |
|
"loss": 0.0526, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 1.9508670520231214e-05, |
|
"loss": 0.0432, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 1.9436416184971097e-05, |
|
"loss": 0.0303, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 1.936416184971098e-05, |
|
"loss": 0.1111, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 1.9291907514450868e-05, |
|
"loss": 0.0666, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 1.921965317919075e-05, |
|
"loss": 0.0798, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 1.9147398843930635e-05, |
|
"loss": 0.0916, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 1.907514450867052e-05, |
|
"loss": 0.4644, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 1.9002890173410406e-05, |
|
"loss": 0.0297, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 1.893063583815029e-05, |
|
"loss": 0.0386, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 1.8858381502890173e-05, |
|
"loss": 0.0203, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 1.8786127167630057e-05, |
|
"loss": 0.0457, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 1.8713872832369944e-05, |
|
"loss": 0.0369, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 1.8641618497109827e-05, |
|
"loss": 0.0294, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 1.856936416184971e-05, |
|
"loss": 0.0279, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 1.8497109826589594e-05, |
|
"loss": 0.0315, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 1.842485549132948e-05, |
|
"loss": 0.0154, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 1.8352601156069365e-05, |
|
"loss": 0.0484, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 1.828034682080925e-05, |
|
"loss": 0.2974, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 1.8208092485549132e-05, |
|
"loss": 0.0248, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 1.813583815028902e-05, |
|
"loss": 0.0406, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 1.8063583815028903e-05, |
|
"loss": 0.077, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 1.7991329479768786e-05, |
|
"loss": 0.0266, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 1.791907514450867e-05, |
|
"loss": 0.0296, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 1.7846820809248554e-05, |
|
"loss": 0.1115, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 1.777456647398844e-05, |
|
"loss": 0.074, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 1.7702312138728324e-05, |
|
"loss": 0.0756, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 1.7630057803468208e-05, |
|
"loss": 0.0334, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 1.755780346820809e-05, |
|
"loss": 0.0178, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 1.748554913294798e-05, |
|
"loss": 0.0671, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 1.7413294797687862e-05, |
|
"loss": 0.344, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 1.7341040462427746e-05, |
|
"loss": 0.2012, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 1.726878612716763e-05, |
|
"loss": 0.0569, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 1.7196531791907516e-05, |
|
"loss": 0.1346, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 1.71242774566474e-05, |
|
"loss": 0.1506, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 1.7052023121387284e-05, |
|
"loss": 0.1271, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 1.6979768786127167e-05, |
|
"loss": 0.0223, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 1.6907514450867054e-05, |
|
"loss": 0.2837, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 1.6835260115606938e-05, |
|
"loss": 0.0215, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 1.676300578034682e-05, |
|
"loss": 0.1266, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 1.6690751445086705e-05, |
|
"loss": 0.1578, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 1.661849710982659e-05, |
|
"loss": 0.036, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 1.6546242774566476e-05, |
|
"loss": 0.0279, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 1.647398843930636e-05, |
|
"loss": 0.0256, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 1.6401734104046243e-05, |
|
"loss": 0.0388, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 1.6329479768786127e-05, |
|
"loss": 0.0486, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 1.6257225433526014e-05, |
|
"loss": 0.0488, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 1.6184971098265897e-05, |
|
"loss": 0.0513, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 1.611271676300578e-05, |
|
"loss": 0.037, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 1.6040462427745664e-05, |
|
"loss": 0.1323, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 1.596820809248555e-05, |
|
"loss": 0.0687, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 1.5895953757225435e-05, |
|
"loss": 0.0646, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 1.582369942196532e-05, |
|
"loss": 0.0289, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 1.5751445086705202e-05, |
|
"loss": 0.0191, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 1.567919075144509e-05, |
|
"loss": 0.0551, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 1.5606936416184973e-05, |
|
"loss": 0.0269, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 1.5534682080924857e-05, |
|
"loss": 0.0853, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 1.546242774566474e-05, |
|
"loss": 0.0433, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 1.5390173410404627e-05, |
|
"loss": 0.0286, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 1.531791907514451e-05, |
|
"loss": 0.0332, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 1.5245664739884394e-05, |
|
"loss": 0.0547, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 1.5173410404624278e-05, |
|
"loss": 0.0748, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 1.5101156069364162e-05, |
|
"loss": 0.1246, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 1.5028901734104049e-05, |
|
"loss": 0.0483, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 1.4956647398843932e-05, |
|
"loss": 0.0229, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 1.4884393063583816e-05, |
|
"loss": 0.0593, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 1.48121387283237e-05, |
|
"loss": 0.5259, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 1.4739884393063585e-05, |
|
"loss": 0.0922, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 1.4667630057803468e-05, |
|
"loss": 0.1433, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 1.4595375722543354e-05, |
|
"loss": 0.0159, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 1.4523121387283237e-05, |
|
"loss": 0.0137, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 1.4450867052023123e-05, |
|
"loss": 0.025, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 1.4378612716763006e-05, |
|
"loss": 0.2031, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 1.430635838150289e-05, |
|
"loss": 0.0135, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 1.4234104046242775e-05, |
|
"loss": 0.0573, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 1.416184971098266e-05, |
|
"loss": 0.0684, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 1.4089595375722544e-05, |
|
"loss": 0.5547, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 1.4017341040462428e-05, |
|
"loss": 0.0906, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 1.3945086705202311e-05, |
|
"loss": 0.0323, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 1.3872832369942197e-05, |
|
"loss": 0.0905, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 1.3800578034682082e-05, |
|
"loss": 0.032, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 1.3728323699421966e-05, |
|
"loss": 0.0259, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 1.365606936416185e-05, |
|
"loss": 0.4873, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 1.3583815028901733e-05, |
|
"loss": 0.0556, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 1.351156069364162e-05, |
|
"loss": 0.0514, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 1.3439306358381503e-05, |
|
"loss": 0.0388, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 1.3367052023121387e-05, |
|
"loss": 0.0911, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 1.329479768786127e-05, |
|
"loss": 0.1078, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 1.3222543352601158e-05, |
|
"loss": 0.0186, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 1.3150289017341041e-05, |
|
"loss": 0.0295, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 1.3078034682080925e-05, |
|
"loss": 0.0221, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 1.3005780346820809e-05, |
|
"loss": 0.0116, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 1.2933526011560696e-05, |
|
"loss": 0.02, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 1.2861271676300579e-05, |
|
"loss": 0.023, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 1.2789017341040463e-05, |
|
"loss": 0.0559, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 1.2716763005780346e-05, |
|
"loss": 0.0979, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 1.2644508670520233e-05, |
|
"loss": 0.1414, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 1.2572254335260117e-05, |
|
"loss": 0.1943, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.0967, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 1.2427745664739884e-05, |
|
"loss": 0.5552, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 1.235549132947977e-05, |
|
"loss": 0.0372, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 1.2283236994219653e-05, |
|
"loss": 0.0811, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 1.2210982658959538e-05, |
|
"loss": 0.0554, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 1.2138728323699422e-05, |
|
"loss": 0.0517, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 1.2066473988439307e-05, |
|
"loss": 0.0318, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 1.1994219653179191e-05, |
|
"loss": 0.0148, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 1.1921965317919076e-05, |
|
"loss": 0.0638, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 1.184971098265896e-05, |
|
"loss": 0.0417, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 1.1777456647398845e-05, |
|
"loss": 0.0251, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 1.1705202312138729e-05, |
|
"loss": 0.0494, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 1.1632947976878614e-05, |
|
"loss": 0.0215, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 1.1560693641618498e-05, |
|
"loss": 0.1754, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 1.1488439306358383e-05, |
|
"loss": 0.0341, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 1.1416184971098267e-05, |
|
"loss": 0.0317, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 1.1343930635838152e-05, |
|
"loss": 0.0346, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 1.1271676300578036e-05, |
|
"loss": 0.0194, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 1.1199421965317921e-05, |
|
"loss": 0.2197, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 1.1127167630057805e-05, |
|
"loss": 0.0927, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 1.1054913294797688e-05, |
|
"loss": 0.0565, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 1.0982658959537573e-05, |
|
"loss": 0.0223, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 1.0910404624277457e-05, |
|
"loss": 0.0334, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 1.0838150289017342e-05, |
|
"loss": 0.0231, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 1.0765895953757226e-05, |
|
"loss": 0.0267, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 1.0693641618497111e-05, |
|
"loss": 0.0388, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 1.0621387283236995e-05, |
|
"loss": 0.0477, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 1.0549132947976879e-05, |
|
"loss": 0.0282, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 1.0476878612716764e-05, |
|
"loss": 0.1819, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 1.0404624277456647e-05, |
|
"loss": 0.2483, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 1.0332369942196533e-05, |
|
"loss": 0.0476, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 1.0260115606936416e-05, |
|
"loss": 0.0494, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 1.01878612716763e-05, |
|
"loss": 0.0325, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 1.0115606936416185e-05, |
|
"loss": 0.0222, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 1.0043352601156069e-05, |
|
"loss": 0.0642, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 9.971098265895954e-06, |
|
"loss": 0.0204, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 9.898843930635838e-06, |
|
"loss": 0.0435, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 9.826589595375723e-06, |
|
"loss": 0.0951, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 9.754335260115607e-06, |
|
"loss": 0.0222, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 9.68208092485549e-06, |
|
"loss": 0.095, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 9.609826589595376e-06, |
|
"loss": 0.018, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 9.53757225433526e-06, |
|
"loss": 0.03, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 9.465317919075145e-06, |
|
"loss": 0.0434, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 9.393063583815028e-06, |
|
"loss": 0.0175, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 9.320809248554914e-06, |
|
"loss": 0.0804, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 9.248554913294797e-06, |
|
"loss": 0.0348, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 9.176300578034683e-06, |
|
"loss": 0.1162, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 9.104046242774566e-06, |
|
"loss": 0.1036, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 9.031791907514451e-06, |
|
"loss": 0.0302, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 8.959537572254335e-06, |
|
"loss": 0.0199, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 8.88728323699422e-06, |
|
"loss": 0.0276, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 8.815028901734104e-06, |
|
"loss": 0.0303, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 8.74277456647399e-06, |
|
"loss": 0.0139, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 8.670520231213873e-06, |
|
"loss": 0.0318, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 8.598265895953758e-06, |
|
"loss": 0.0252, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 8.526011560693642e-06, |
|
"loss": 0.0413, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 8.453757225433527e-06, |
|
"loss": 0.1241, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 8.38150289017341e-06, |
|
"loss": 0.0232, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 8.309248554913294e-06, |
|
"loss": 0.0975, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 8.23699421965318e-06, |
|
"loss": 0.0437, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 8.164739884393063e-06, |
|
"loss": 0.0493, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 8.092485549132949e-06, |
|
"loss": 0.0529, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 8.020231213872832e-06, |
|
"loss": 0.0148, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 7.947976878612718e-06, |
|
"loss": 0.0516, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 7.875722543352601e-06, |
|
"loss": 0.0614, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 7.803468208092486e-06, |
|
"loss": 0.0579, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 7.73121387283237e-06, |
|
"loss": 0.1614, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 7.658959537572255e-06, |
|
"loss": 0.1004, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 7.586705202312139e-06, |
|
"loss": 0.0403, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 7.514450867052024e-06, |
|
"loss": 0.1814, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 7.442196531791908e-06, |
|
"loss": 0.0324, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 7.369942196531792e-06, |
|
"loss": 0.0259, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 7.297687861271677e-06, |
|
"loss": 0.0599, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 7.225433526011561e-06, |
|
"loss": 0.0181, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 7.153179190751445e-06, |
|
"loss": 0.0489, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 7.08092485549133e-06, |
|
"loss": 0.0284, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 7.008670520231214e-06, |
|
"loss": 0.0197, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 6.936416184971098e-06, |
|
"loss": 0.1008, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 6.864161849710983e-06, |
|
"loss": 0.0171, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 6.791907514450866e-06, |
|
"loss": 0.0316, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 6.719653179190752e-06, |
|
"loss": 0.046, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 6.647398843930635e-06, |
|
"loss": 0.0383, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 6.575144508670521e-06, |
|
"loss": 0.2247, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 6.502890173410404e-06, |
|
"loss": 0.0194, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 6.4306358381502896e-06, |
|
"loss": 0.0895, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 6.358381502890173e-06, |
|
"loss": 0.0614, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 6.2861271676300585e-06, |
|
"loss": 0.0826, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 6.213872832369942e-06, |
|
"loss": 0.0322, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 6.1416184971098266e-06, |
|
"loss": 0.1702, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 6.069364161849711e-06, |
|
"loss": 0.069, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 5.9971098265895955e-06, |
|
"loss": 0.0471, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 5.92485549132948e-06, |
|
"loss": 0.0178, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 5.852601156069364e-06, |
|
"loss": 0.0098, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 5.780346820809249e-06, |
|
"loss": 0.052, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 5.708092485549133e-06, |
|
"loss": 0.0398, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 5.635838150289018e-06, |
|
"loss": 0.0509, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 5.563583815028902e-06, |
|
"loss": 0.0262, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 5.491329479768787e-06, |
|
"loss": 0.1782, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 5.419075144508671e-06, |
|
"loss": 0.0326, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 5.346820809248556e-06, |
|
"loss": 0.0357, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 5.274566473988439e-06, |
|
"loss": 0.0235, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 5.202312138728324e-06, |
|
"loss": 0.0777, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 5.130057803468208e-06, |
|
"loss": 0.0171, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 5.057803468208093e-06, |
|
"loss": 0.0195, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 4.985549132947977e-06, |
|
"loss": 0.0251, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 4.913294797687862e-06, |
|
"loss": 0.0369, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 4.841040462427745e-06, |
|
"loss": 0.0529, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 4.76878612716763e-06, |
|
"loss": 0.0206, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 4.696531791907514e-06, |
|
"loss": 0.0382, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 4.624277456647399e-06, |
|
"loss": 0.0623, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 4.552023121387283e-06, |
|
"loss": 0.3542, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 4.4797687861271675e-06, |
|
"loss": 0.0281, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 4.407514450867052e-06, |
|
"loss": 0.1573, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 4.3352601156069365e-06, |
|
"loss": 0.0541, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 4.263005780346821e-06, |
|
"loss": 0.0393, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 4.190751445086705e-06, |
|
"loss": 0.0213, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 4.11849710982659e-06, |
|
"loss": 0.0591, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 4.046242774566474e-06, |
|
"loss": 0.0338, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 3.973988439306359e-06, |
|
"loss": 0.0187, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 3.901734104046243e-06, |
|
"loss": 0.0185, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 3.829479768786128e-06, |
|
"loss": 0.1882, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 3.757225433526012e-06, |
|
"loss": 0.0365, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 3.684971098265896e-06, |
|
"loss": 0.064, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 3.6127167630057807e-06, |
|
"loss": 0.1229, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 3.540462427745665e-06, |
|
"loss": 0.3818, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 3.468208092485549e-06, |
|
"loss": 0.0607, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 3.395953757225433e-06, |
|
"loss": 0.0252, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 3.3236994219653177e-06, |
|
"loss": 0.027, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 3.251445086705202e-06, |
|
"loss": 0.0225, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 3.1791907514450866e-06, |
|
"loss": 0.0499, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 3.106936416184971e-06, |
|
"loss": 0.1255, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 3.0346820809248555e-06, |
|
"loss": 0.0671, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 2.96242774566474e-06, |
|
"loss": 0.0345, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 2.8901734104046244e-06, |
|
"loss": 0.0274, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 2.817919075144509e-06, |
|
"loss": 0.026, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 2.7456647398843934e-06, |
|
"loss": 0.177, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 2.673410404624278e-06, |
|
"loss": 0.0391, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 2.601156069364162e-06, |
|
"loss": 0.1362, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 2.5289017341040463e-06, |
|
"loss": 0.0405, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 2.456647398843931e-06, |
|
"loss": 0.0358, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 2.384393063583815e-06, |
|
"loss": 0.0383, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 2.3121387283236993e-06, |
|
"loss": 0.0351, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 2.2398843930635838e-06, |
|
"loss": 0.1339, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 2.1676300578034682e-06, |
|
"loss": 0.0235, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 2.0953757225433527e-06, |
|
"loss": 0.0123, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 2.023121387283237e-06, |
|
"loss": 0.0562, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 1.9508670520231216e-06, |
|
"loss": 0.0138, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 1.878612716763006e-06, |
|
"loss": 0.026, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 1.8063583815028903e-06, |
|
"loss": 0.1153, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 1.7341040462427746e-06, |
|
"loss": 0.0261, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 1.6618497109826588e-06, |
|
"loss": 0.1375, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 1.5895953757225433e-06, |
|
"loss": 0.0372, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 1.5173410404624278e-06, |
|
"loss": 0.3872, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 1.4450867052023122e-06, |
|
"loss": 0.376, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 1.3728323699421967e-06, |
|
"loss": 0.0347, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 1.300578034682081e-06, |
|
"loss": 0.0246, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 1.2283236994219654e-06, |
|
"loss": 0.0294, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 1.1560693641618497e-06, |
|
"loss": 0.0346, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 1.0838150289017341e-06, |
|
"loss": 0.04, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 1.0115606936416186e-06, |
|
"loss": 0.0903, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 9.39306358381503e-07, |
|
"loss": 0.1242, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 8.670520231213873e-07, |
|
"loss": 0.0394, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 7.947976878612716e-07, |
|
"loss": 0.0211, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 7.225433526011561e-07, |
|
"loss": 0.037, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 6.502890173410405e-07, |
|
"loss": 0.0667, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 5.780346820809248e-07, |
|
"loss": 0.0327, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 5.057803468208093e-07, |
|
"loss": 0.0365, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 4.3352601156069365e-07, |
|
"loss": 0.0388, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 3.6127167630057806e-07, |
|
"loss": 0.0814, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 2.890173410404624e-07, |
|
"loss": 0.0214, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 2.1676300578034682e-07, |
|
"loss": 0.0443, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 1.445086705202312e-07, |
|
"loss": 0.0837, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 7.22543352601156e-08, |
|
"loss": 0.0186, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.0674, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 1730, |
|
"total_flos": 3.3456234411117773e+21, |
|
"train_loss": 1.1614258385807104, |
|
"train_runtime": 785.8758, |
|
"train_samples_per_second": 282.411, |
|
"train_steps_per_second": 2.201 |
|
} |
|
], |
|
"max_steps": 1730, |
|
"num_train_epochs": 10, |
|
"total_flos": 3.3456234411117773e+21, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|