|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"global_step": 1542, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.2653721682847896e-07, |
|
"loss": 5.9961, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.530744336569579e-07, |
|
"loss": 6.0273, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.796116504854367e-07, |
|
"loss": 5.9609, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.061488673139158e-07, |
|
"loss": 6.0117, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.1326860841423946e-06, |
|
"loss": 6.0156, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.3592233009708735e-06, |
|
"loss": 6.0234, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.5857605177993528e-06, |
|
"loss": 5.9766, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.8122977346278317e-06, |
|
"loss": 5.9766, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.0388349514563107e-06, |
|
"loss": 5.9531, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.265372168284789e-06, |
|
"loss": 5.9805, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.4919093851132685e-06, |
|
"loss": 5.9648, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.718446601941747e-06, |
|
"loss": 5.9883, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.9449838187702263e-06, |
|
"loss": 5.9297, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.1715210355987056e-06, |
|
"loss": 5.9609, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.398058252427184e-06, |
|
"loss": 5.9141, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.6245954692556633e-06, |
|
"loss": 5.9297, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.851132686084142e-06, |
|
"loss": 5.9414, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.0776699029126215e-06, |
|
"loss": 5.8984, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.3042071197411e-06, |
|
"loss": 5.8867, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.530744336569578e-06, |
|
"loss": 5.8359, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.757281553398058e-06, |
|
"loss": 5.8125, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.983818770226537e-06, |
|
"loss": 5.7969, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 5.210355987055016e-06, |
|
"loss": 5.8203, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.436893203883494e-06, |
|
"loss": 5.8125, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.663430420711973e-06, |
|
"loss": 5.7812, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.8899676375404525e-06, |
|
"loss": 5.7422, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 6.116504854368932e-06, |
|
"loss": 5.6953, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 6.343042071197411e-06, |
|
"loss": 5.6719, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.569579288025889e-06, |
|
"loss": 5.6602, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.796116504854368e-06, |
|
"loss": 5.5977, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.022653721682847e-06, |
|
"loss": 5.5195, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.249190938511327e-06, |
|
"loss": 5.4961, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.475728155339805e-06, |
|
"loss": 5.4258, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.702265372168284e-06, |
|
"loss": 5.3281, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.928802588996764e-06, |
|
"loss": 5.3008, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.155339805825243e-06, |
|
"loss": 5.2266, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.38187702265372e-06, |
|
"loss": 5.0859, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.6084142394822e-06, |
|
"loss": 4.9844, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.83495145631068e-06, |
|
"loss": 5.0039, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.061488673139157e-06, |
|
"loss": 4.7812, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.288025889967638e-06, |
|
"loss": 4.7578, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.514563106796115e-06, |
|
"loss": 4.7695, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.741100323624595e-06, |
|
"loss": 4.8906, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.967637540453074e-06, |
|
"loss": 4.7578, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.0194174757281552e-05, |
|
"loss": 4.582, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.0420711974110033e-05, |
|
"loss": 4.3047, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.064724919093851e-05, |
|
"loss": 4.582, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.0873786407766988e-05, |
|
"loss": 4.3242, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1100323624595469e-05, |
|
"loss": 4.0664, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1326860841423946e-05, |
|
"loss": 4.1875, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1553398058252427e-05, |
|
"loss": 3.9863, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1779935275080905e-05, |
|
"loss": 4.0781, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.2006472491909383e-05, |
|
"loss": 3.9512, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.2233009708737864e-05, |
|
"loss": 3.9141, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.2459546925566341e-05, |
|
"loss": 3.6641, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.2686084142394822e-05, |
|
"loss": 3.1035, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.29126213592233e-05, |
|
"loss": 3.7227, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.3139158576051777e-05, |
|
"loss": 3.2754, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.3365695792880258e-05, |
|
"loss": 3.6465, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.3592233009708736e-05, |
|
"loss": 3.1211, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.3818770226537215e-05, |
|
"loss": 3.4707, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.4045307443365695e-05, |
|
"loss": 3.1699, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.4271844660194172e-05, |
|
"loss": 3.7422, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.4498381877022653e-05, |
|
"loss": 2.7637, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.472491909385113e-05, |
|
"loss": 3.2949, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.495145631067961e-05, |
|
"loss": 2.543, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.517799352750809e-05, |
|
"loss": 2.0547, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.540453074433657e-05, |
|
"loss": 2.2539, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.5631067961165046e-05, |
|
"loss": 2.7383, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.5857605177993527e-05, |
|
"loss": 2.2031, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.6084142394822005e-05, |
|
"loss": 2.873, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.6310679611650486e-05, |
|
"loss": 2.2285, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.6537216828478964e-05, |
|
"loss": 2.5234, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.676375404530744e-05, |
|
"loss": 1.9932, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.6990291262135922e-05, |
|
"loss": 1.793, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.72168284789644e-05, |
|
"loss": 1.9678, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.744336569579288e-05, |
|
"loss": 2.1387, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.766990291262136e-05, |
|
"loss": 2.0762, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.789644012944984e-05, |
|
"loss": 1.7725, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8122977346278314e-05, |
|
"loss": 1.7256, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8349514563106795e-05, |
|
"loss": 1.9189, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8576051779935276e-05, |
|
"loss": 2.3809, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.880258899676375e-05, |
|
"loss": 2.0996, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.902912621359223e-05, |
|
"loss": 1.6475, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9255663430420712e-05, |
|
"loss": 2.9414, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.948220064724919e-05, |
|
"loss": 2.2305, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9708737864077667e-05, |
|
"loss": 2.3281, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9935275080906148e-05, |
|
"loss": 2.2109, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.016181229773463e-05, |
|
"loss": 1.7871, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.0388349514563103e-05, |
|
"loss": 1.6367, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.0614886731391584e-05, |
|
"loss": 1.9697, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.0841423948220065e-05, |
|
"loss": 1.7139, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.106796116504854e-05, |
|
"loss": 2.2852, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.129449838187702e-05, |
|
"loss": 1.6562, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.15210355987055e-05, |
|
"loss": 1.9287, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.1747572815533976e-05, |
|
"loss": 1.126, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.1974110032362457e-05, |
|
"loss": 1.7559, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.2200647249190938e-05, |
|
"loss": 1.9482, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.242718446601942e-05, |
|
"loss": 1.5293, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.2653721682847893e-05, |
|
"loss": 2.0527, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.2880258899676374e-05, |
|
"loss": 1.6289, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.3106796116504855e-05, |
|
"loss": 1.6133, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.333333333333333e-05, |
|
"loss": 1.7148, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.355987055016181e-05, |
|
"loss": 1.4717, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.378640776699029e-05, |
|
"loss": 1.8027, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.4012944983818765e-05, |
|
"loss": 2.1504, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.4239482200647246e-05, |
|
"loss": 1.665, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.4466019417475727e-05, |
|
"loss": 1.4902, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.46925566343042e-05, |
|
"loss": 1.6582, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.4919093851132682e-05, |
|
"loss": 1.6836, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.5145631067961163e-05, |
|
"loss": 1.25, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.5372168284789644e-05, |
|
"loss": 1.5781, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.559870550161812e-05, |
|
"loss": 1.4697, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.58252427184466e-05, |
|
"loss": 1.6738, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.605177993527508e-05, |
|
"loss": 1.8496, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.6278317152103555e-05, |
|
"loss": 1.0762, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.6504854368932036e-05, |
|
"loss": 1.8564, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.6731391585760517e-05, |
|
"loss": 1.3311, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.6957928802588994e-05, |
|
"loss": 1.7188, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.7184466019417472e-05, |
|
"loss": 1.6914, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.7411003236245953e-05, |
|
"loss": 1.3633, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.763754045307443e-05, |
|
"loss": 1.2354, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.7864077669902908e-05, |
|
"loss": 1.3633, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.809061488673139e-05, |
|
"loss": 1.9648, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.831715210355987e-05, |
|
"loss": 1.8584, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.8543689320388344e-05, |
|
"loss": 2.3164, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.8770226537216825e-05, |
|
"loss": 1.4102, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.8996763754045306e-05, |
|
"loss": 1.6836, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.9223300970873784e-05, |
|
"loss": 2.4863, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.944983818770226e-05, |
|
"loss": 1.1318, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.9676375404530743e-05, |
|
"loss": 1.748, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 2.990291262135922e-05, |
|
"loss": 1.0938, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.0129449838187698e-05, |
|
"loss": 1.2949, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.035598705501618e-05, |
|
"loss": 0.9761, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.058252427184465e-05, |
|
"loss": 1.5547, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.080906148867314e-05, |
|
"loss": 1.0957, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.1035598705501615e-05, |
|
"loss": 1.0596, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.126213592233009e-05, |
|
"loss": 1.627, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.148867313915857e-05, |
|
"loss": 1.3721, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.1715210355987055e-05, |
|
"loss": 1.8223, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.194174757281553e-05, |
|
"loss": 1.2832, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.216828478964401e-05, |
|
"loss": 1.8086, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.239482200647249e-05, |
|
"loss": 1.6064, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.262135922330097e-05, |
|
"loss": 1.2197, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.284789644012944e-05, |
|
"loss": 1.3887, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.307443365695793e-05, |
|
"loss": 1.2725, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.3300970873786405e-05, |
|
"loss": 0.9053, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.352750809061488e-05, |
|
"loss": 1.4629, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.375404530744336e-05, |
|
"loss": 1.2461, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.3980582524271844e-05, |
|
"loss": 1.4775, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.420711974110032e-05, |
|
"loss": 1.4043, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.44336569579288e-05, |
|
"loss": 1.2822, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.466019417475728e-05, |
|
"loss": 1.0049, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.488673139158576e-05, |
|
"loss": 1.3574, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.511326860841424e-05, |
|
"loss": 0.7646, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.533980582524272e-05, |
|
"loss": 1.3389, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.5566343042071194e-05, |
|
"loss": 1.0557, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.579288025889968e-05, |
|
"loss": 0.9287, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.601941747572815e-05, |
|
"loss": 1.0508, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.624595469255663e-05, |
|
"loss": 1.3984, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.647249190938511e-05, |
|
"loss": 1.3623, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.669902912621359e-05, |
|
"loss": 1.3555, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.692556634304207e-05, |
|
"loss": 1.5928, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.715210355987055e-05, |
|
"loss": 1.3867, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.737864077669902e-05, |
|
"loss": 0.9873, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.76051779935275e-05, |
|
"loss": 1.1855, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.7831715210355984e-05, |
|
"loss": 1.3008, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.805825242718446e-05, |
|
"loss": 0.9136, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.8284789644012946e-05, |
|
"loss": 1.3359, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.8511326860841424e-05, |
|
"loss": 1.208, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.87378640776699e-05, |
|
"loss": 1.5762, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.896440129449838e-05, |
|
"loss": 1.8926, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.9190938511326856e-05, |
|
"loss": 1.4688, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.9417475728155334e-05, |
|
"loss": 0.9941, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.964401294498382e-05, |
|
"loss": 1.6846, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.9870550161812296e-05, |
|
"loss": 1.5684, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.0097087378640774e-05, |
|
"loss": 1.208, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.032362459546926e-05, |
|
"loss": 1.5352, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.055016181229773e-05, |
|
"loss": 1.0684, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.0776699029126206e-05, |
|
"loss": 1.0049, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.100323624595469e-05, |
|
"loss": 1.4014, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.122977346278317e-05, |
|
"loss": 1.1924, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.1456310679611646e-05, |
|
"loss": 1.4521, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.168284789644013e-05, |
|
"loss": 0.7993, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.19093851132686e-05, |
|
"loss": 1.0498, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.213592233009708e-05, |
|
"loss": 0.9438, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.236245954692556e-05, |
|
"loss": 1.625, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.258899676375404e-05, |
|
"loss": 1.2715, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.281553398058252e-05, |
|
"loss": 0.9678, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.3042071197411e-05, |
|
"loss": 1.1182, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.326860841423948e-05, |
|
"loss": 1.2285, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.349514563106795e-05, |
|
"loss": 0.5293, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.3721682847896436e-05, |
|
"loss": 1.1416, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.394822006472491e-05, |
|
"loss": 1.1992, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.41747572815534e-05, |
|
"loss": 1.543, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.4401294498381875e-05, |
|
"loss": 1.6172, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.462783171521035e-05, |
|
"loss": 1.3574, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.485436893203884e-05, |
|
"loss": 1.5391, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.508090614886731e-05, |
|
"loss": 1.4824, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.5307443365695786e-05, |
|
"loss": 0.9688, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.553398058252427e-05, |
|
"loss": 1.125, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.576051779935275e-05, |
|
"loss": 1.1611, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.5987055016181225e-05, |
|
"loss": 1.2461, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.621359223300971e-05, |
|
"loss": 1.0439, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.644012944983818e-05, |
|
"loss": 0.8789, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.666666666666666e-05, |
|
"loss": 1.8975, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.689320388349514e-05, |
|
"loss": 1.3994, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.711974110032362e-05, |
|
"loss": 1.1035, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.73462783171521e-05, |
|
"loss": 1.1377, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.757281553398058e-05, |
|
"loss": 1.8164, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.779935275080906e-05, |
|
"loss": 0.9883, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.802588996763753e-05, |
|
"loss": 1.7783, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.8252427184466015e-05, |
|
"loss": 1.0312, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.847896440129449e-05, |
|
"loss": 0.605, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.870550161812297e-05, |
|
"loss": 1.0938, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.8932038834951454e-05, |
|
"loss": 1.2627, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.915857605177993e-05, |
|
"loss": 1.2236, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.93851132686084e-05, |
|
"loss": 1.2236, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.961165048543689e-05, |
|
"loss": 1.1504, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.9838187702265365e-05, |
|
"loss": 1.667, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.006472491909385e-05, |
|
"loss": 1.3867, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.029126213592233e-05, |
|
"loss": 0.6914, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.0517799352750804e-05, |
|
"loss": 0.7217, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.074433656957929e-05, |
|
"loss": 0.9858, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.0970873786407766e-05, |
|
"loss": 1.0264, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.119741100323624e-05, |
|
"loss": 1.4688, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.142394822006472e-05, |
|
"loss": 1.1348, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.16504854368932e-05, |
|
"loss": 0.8359, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.187702265372168e-05, |
|
"loss": 1.4746, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.210355987055016e-05, |
|
"loss": 1.4648, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.233009708737864e-05, |
|
"loss": 0.9995, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.255663430420711e-05, |
|
"loss": 1.2109, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.2783171521035594e-05, |
|
"loss": 1.0557, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.300970873786407e-05, |
|
"loss": 0.896, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.323624595469255e-05, |
|
"loss": 0.5439, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.3462783171521034e-05, |
|
"loss": 1.4521, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.368932038834951e-05, |
|
"loss": 1.2764, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.391585760517799e-05, |
|
"loss": 1.1455, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.4142394822006467e-05, |
|
"loss": 1.6074, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.4368932038834944e-05, |
|
"loss": 1.1514, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.459546925566342e-05, |
|
"loss": 1.6689, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.4822006472491906e-05, |
|
"loss": 1.2422, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.5048543689320384e-05, |
|
"loss": 1.3496, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.527508090614886e-05, |
|
"loss": 0.7944, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.5501618122977346e-05, |
|
"loss": 1.1084, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.5728155339805817e-05, |
|
"loss": 1.0918, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.59546925566343e-05, |
|
"loss": 1.4404, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.618122977346278e-05, |
|
"loss": 0.9751, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.6407766990291256e-05, |
|
"loss": 1.1484, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.663430420711974e-05, |
|
"loss": 0.5786, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.686084142394822e-05, |
|
"loss": 1.1953, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.708737864077669e-05, |
|
"loss": 1.1045, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.731391585760517e-05, |
|
"loss": 0.9697, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.754045307443365e-05, |
|
"loss": 1.0615, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.776699029126213e-05, |
|
"loss": 1.126, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.799352750809061e-05, |
|
"loss": 0.4761, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.822006472491909e-05, |
|
"loss": 1.0898, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.844660194174757e-05, |
|
"loss": 0.8623, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.8673139158576046e-05, |
|
"loss": 1.2393, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.889967637540452e-05, |
|
"loss": 0.7666, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.9126213592233e-05, |
|
"loss": 0.5508, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.9352750809061485e-05, |
|
"loss": 0.8848, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.957928802588996e-05, |
|
"loss": 0.7822, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.980582524271844e-05, |
|
"loss": 1.6299, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.0032362459546925e-05, |
|
"loss": 0.7979, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.0258899676375396e-05, |
|
"loss": 1.1875, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.0485436893203873e-05, |
|
"loss": 1.085, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.071197411003236e-05, |
|
"loss": 1.5332, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.0938511326860835e-05, |
|
"loss": 1.5684, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.11650485436893e-05, |
|
"loss": 0.999, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.139158576051779e-05, |
|
"loss": 1.1016, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.161812297734627e-05, |
|
"loss": 0.9336, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.184466019417476e-05, |
|
"loss": 0.6851, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.207119741100323e-05, |
|
"loss": 0.5234, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.229773462783171e-05, |
|
"loss": 2.0, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.252427184466019e-05, |
|
"loss": 0.978, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.275080906148867e-05, |
|
"loss": 1.3438, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.297734627831714e-05, |
|
"loss": 1.2803, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.320388349514563e-05, |
|
"loss": 0.9878, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.343042071197411e-05, |
|
"loss": 0.6392, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.365695792880258e-05, |
|
"loss": 0.8438, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.388349514563106e-05, |
|
"loss": 0.8643, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.411003236245954e-05, |
|
"loss": 1.2822, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.433656957928802e-05, |
|
"loss": 0.8174, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.45631067961165e-05, |
|
"loss": 1.1475, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.478964401294498e-05, |
|
"loss": 0.9644, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.501618122977346e-05, |
|
"loss": 0.5596, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.524271844660194e-05, |
|
"loss": 0.873, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.546925566343041e-05, |
|
"loss": 1.1631, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.569579288025889e-05, |
|
"loss": 1.5566, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.592233009708737e-05, |
|
"loss": 0.8647, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.614886731391585e-05, |
|
"loss": 1.626, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.637540453074433e-05, |
|
"loss": 0.9136, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.660194174757281e-05, |
|
"loss": 1.1504, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.68284789644013e-05, |
|
"loss": 1.0781, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.705501618122976e-05, |
|
"loss": 0.668, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.728155339805825e-05, |
|
"loss": 1.207, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.750809061488672e-05, |
|
"loss": 1.0244, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.77346278317152e-05, |
|
"loss": 0.8579, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.796116504854369e-05, |
|
"loss": 1.2969, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.818770226537216e-05, |
|
"loss": 1.0801, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.841423948220064e-05, |
|
"loss": 0.9541, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.864077669902911e-05, |
|
"loss": 1.0059, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.88673139158576e-05, |
|
"loss": 0.9102, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.909385113268608e-05, |
|
"loss": 0.7891, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 6.932038834951455e-05, |
|
"loss": 0.9858, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 6.954692556634304e-05, |
|
"loss": 1.2471, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 6.977346278317152e-05, |
|
"loss": 0.9438, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7e-05, |
|
"loss": 1.0449, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 6.994322789943227e-05, |
|
"loss": 0.7666, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.988645579886455e-05, |
|
"loss": 1.9082, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.982968369829683e-05, |
|
"loss": 0.8555, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.97729115977291e-05, |
|
"loss": 1.2793, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.971613949716139e-05, |
|
"loss": 0.6646, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.965936739659366e-05, |
|
"loss": 0.8003, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.960259529602595e-05, |
|
"loss": 1.1543, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.954582319545822e-05, |
|
"loss": 0.9565, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.948905109489051e-05, |
|
"loss": 1.5156, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.943227899432278e-05, |
|
"loss": 1.6465, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.937550689375507e-05, |
|
"loss": 1.3076, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.931873479318734e-05, |
|
"loss": 0.8125, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.926196269261962e-05, |
|
"loss": 0.7104, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.92051905920519e-05, |
|
"loss": 1.3789, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.914841849148418e-05, |
|
"loss": 0.9189, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.909164639091645e-05, |
|
"loss": 0.8179, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.903487429034874e-05, |
|
"loss": 1.8174, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.897810218978101e-05, |
|
"loss": 0.769, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.89213300892133e-05, |
|
"loss": 1.6465, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.886455798864557e-05, |
|
"loss": 0.8848, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.880778588807786e-05, |
|
"loss": 1.2383, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.875101378751013e-05, |
|
"loss": 1.209, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 6.869424168694241e-05, |
|
"loss": 0.9526, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 6.86374695863747e-05, |
|
"loss": 0.6997, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 6.858069748580697e-05, |
|
"loss": 1.3457, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 6.852392538523924e-05, |
|
"loss": 0.8306, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 6.846715328467153e-05, |
|
"loss": 1.21, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 6.84103811841038e-05, |
|
"loss": 1.6484, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 6.835360908353608e-05, |
|
"loss": 1.0049, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 6.829683698296836e-05, |
|
"loss": 0.9609, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 6.824006488240064e-05, |
|
"loss": 1.5117, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 6.818329278183292e-05, |
|
"loss": 1.1641, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 6.81265206812652e-05, |
|
"loss": 1.0146, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 6.806974858069749e-05, |
|
"loss": 0.5044, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 6.801297648012976e-05, |
|
"loss": 0.5928, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 6.795620437956205e-05, |
|
"loss": 0.7925, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 6.789943227899432e-05, |
|
"loss": 0.8438, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.784266017842659e-05, |
|
"loss": 0.853, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.778588807785888e-05, |
|
"loss": 0.5464, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.772911597729115e-05, |
|
"loss": 1.1338, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.767234387672343e-05, |
|
"loss": 1.0312, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.761557177615571e-05, |
|
"loss": 0.9209, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.755879967558799e-05, |
|
"loss": 0.8164, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 6.750202757502026e-05, |
|
"loss": 1.0479, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 6.744525547445255e-05, |
|
"loss": 1.4258, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 6.738848337388482e-05, |
|
"loss": 0.6206, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 6.733171127331711e-05, |
|
"loss": 0.7578, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 6.727493917274938e-05, |
|
"loss": 0.9341, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 6.721816707218167e-05, |
|
"loss": 1.1143, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 6.716139497161394e-05, |
|
"loss": 1.0234, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 6.710462287104623e-05, |
|
"loss": 1.3389, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 6.70478507704785e-05, |
|
"loss": 1.752, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 6.699107866991078e-05, |
|
"loss": 1.4131, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 6.693430656934307e-05, |
|
"loss": 0.7134, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 6.687753446877534e-05, |
|
"loss": 1.1045, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 6.682076236820761e-05, |
|
"loss": 0.6382, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 6.67639902676399e-05, |
|
"loss": 0.6548, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 6.670721816707217e-05, |
|
"loss": 0.9341, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 6.665044606650446e-05, |
|
"loss": 0.9302, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 6.659367396593673e-05, |
|
"loss": 1.0996, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 6.653690186536902e-05, |
|
"loss": 1.1855, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 6.64801297648013e-05, |
|
"loss": 0.7583, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 6.642335766423357e-05, |
|
"loss": 0.7979, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 6.636658556366586e-05, |
|
"loss": 1.2578, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 6.630981346309813e-05, |
|
"loss": 0.7188, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 6.62530413625304e-05, |
|
"loss": 1.3262, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 6.619626926196269e-05, |
|
"loss": 0.728, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 6.613949716139496e-05, |
|
"loss": 0.957, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.608272506082724e-05, |
|
"loss": 1.0469, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.602595296025952e-05, |
|
"loss": 0.6592, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.59691808596918e-05, |
|
"loss": 0.999, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.591240875912408e-05, |
|
"loss": 0.9458, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.585563665855636e-05, |
|
"loss": 0.8193, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.579886455798865e-05, |
|
"loss": 1.4395, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.574209245742092e-05, |
|
"loss": 1.1357, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.56853203568532e-05, |
|
"loss": 0.6948, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.562854825628548e-05, |
|
"loss": 1.4521, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.557177615571775e-05, |
|
"loss": 2.0938, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.551500405515004e-05, |
|
"loss": 0.8203, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.545823195458231e-05, |
|
"loss": 0.918, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.540145985401459e-05, |
|
"loss": 0.666, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.534468775344687e-05, |
|
"loss": 0.8672, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.528791565287915e-05, |
|
"loss": 0.6846, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.523114355231142e-05, |
|
"loss": 0.7441, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.517437145174371e-05, |
|
"loss": 0.6484, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.511759935117598e-05, |
|
"loss": 1.6113, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.506082725060827e-05, |
|
"loss": 0.9854, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.500405515004054e-05, |
|
"loss": 0.9419, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.494728304947283e-05, |
|
"loss": 0.6738, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 6.48905109489051e-05, |
|
"loss": 0.813, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 6.483373884833739e-05, |
|
"loss": 1.2549, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 6.477696674776966e-05, |
|
"loss": 0.5796, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 6.472019464720194e-05, |
|
"loss": 0.6401, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 6.466342254663421e-05, |
|
"loss": 0.6421, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 6.46066504460665e-05, |
|
"loss": 0.8018, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 6.454987834549877e-05, |
|
"loss": 1.9502, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 6.449310624493106e-05, |
|
"loss": 1.3848, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 6.443633414436333e-05, |
|
"loss": 0.8154, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 6.437956204379562e-05, |
|
"loss": 0.8955, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 6.43227899432279e-05, |
|
"loss": 0.9185, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 6.426601784266018e-05, |
|
"loss": 0.7637, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 6.420924574209245e-05, |
|
"loss": 1.5586, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 6.415247364152473e-05, |
|
"loss": 1.1768, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 6.409570154095702e-05, |
|
"loss": 1.0439, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 6.403892944038929e-05, |
|
"loss": 0.7505, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 6.398215733982156e-05, |
|
"loss": 0.5596, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 6.392538523925385e-05, |
|
"loss": 1.1162, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 6.386861313868612e-05, |
|
"loss": 1.2373, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 6.38118410381184e-05, |
|
"loss": 1.1953, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 6.375506893755068e-05, |
|
"loss": 0.9351, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 6.369829683698296e-05, |
|
"loss": 0.7739, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 6.364152473641524e-05, |
|
"loss": 1.2188, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 6.358475263584752e-05, |
|
"loss": 0.5005, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 6.35279805352798e-05, |
|
"loss": 0.8481, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 6.347120843471208e-05, |
|
"loss": 1.125, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.341443633414437e-05, |
|
"loss": 0.5742, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.335766423357664e-05, |
|
"loss": 0.8838, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.330089213300891e-05, |
|
"loss": 1.1221, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.32441200324412e-05, |
|
"loss": 1.1895, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.318734793187347e-05, |
|
"loss": 1.0889, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.313057583130575e-05, |
|
"loss": 0.5264, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.307380373073803e-05, |
|
"loss": 1.124, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.301703163017031e-05, |
|
"loss": 1.2236, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.296025952960258e-05, |
|
"loss": 1.1484, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.290348742903487e-05, |
|
"loss": 1.4531, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.284671532846714e-05, |
|
"loss": 0.7983, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.278994322789943e-05, |
|
"loss": 1.2295, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.27331711273317e-05, |
|
"loss": 0.9453, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.267639902676399e-05, |
|
"loss": 1.1094, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.261962692619626e-05, |
|
"loss": 0.7842, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.256285482562854e-05, |
|
"loss": 0.8726, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.250608272506082e-05, |
|
"loss": 0.8071, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.24493106244931e-05, |
|
"loss": 1.0039, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.239253852392537e-05, |
|
"loss": 0.8965, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.233576642335766e-05, |
|
"loss": 1.5293, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.227899432278993e-05, |
|
"loss": 1.6602, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.222222222222222e-05, |
|
"loss": 1.1553, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.21654501216545e-05, |
|
"loss": 0.9912, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.210867802108678e-05, |
|
"loss": 0.6924, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.205190592051905e-05, |
|
"loss": 1.1436, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.199513381995134e-05, |
|
"loss": 0.9307, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.193836171938361e-05, |
|
"loss": 1.2578, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.188158961881589e-05, |
|
"loss": 1.0254, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.182481751824818e-05, |
|
"loss": 1.1309, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.176804541768045e-05, |
|
"loss": 1.0244, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.171127331711272e-05, |
|
"loss": 0.6079, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.165450121654501e-05, |
|
"loss": 0.5811, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.159772911597728e-05, |
|
"loss": 0.7163, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.154095701540956e-05, |
|
"loss": 0.6978, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.148418491484184e-05, |
|
"loss": 0.7197, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.142741281427412e-05, |
|
"loss": 0.9082, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.13706407137064e-05, |
|
"loss": 0.5596, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.131386861313868e-05, |
|
"loss": 0.5796, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.125709651257097e-05, |
|
"loss": 1.249, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.120032441200324e-05, |
|
"loss": 0.6138, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.114355231143553e-05, |
|
"loss": 0.7173, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.10867802108678e-05, |
|
"loss": 1.2246, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.1030008110300073e-05, |
|
"loss": 1.167, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.097323600973236e-05, |
|
"loss": 0.604, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.0916463909164634e-05, |
|
"loss": 0.5674, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.085969180859691e-05, |
|
"loss": 0.6787, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.0802919708029195e-05, |
|
"loss": 0.7441, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.074614760746147e-05, |
|
"loss": 1.1162, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.068937550689375e-05, |
|
"loss": 0.7705, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.063260340632603e-05, |
|
"loss": 0.8652, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.057583130575831e-05, |
|
"loss": 1.2236, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.051905920519058e-05, |
|
"loss": 1.5527, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.046228710462287e-05, |
|
"loss": 1.1719, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.0405515004055144e-05, |
|
"loss": 0.9263, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.0348742903487424e-05, |
|
"loss": 1.04, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.02919708029197e-05, |
|
"loss": 0.6138, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.0235198702351985e-05, |
|
"loss": 1.252, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.017842660178426e-05, |
|
"loss": 0.9878, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.012165450121654e-05, |
|
"loss": 1.1973, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.006488240064882e-05, |
|
"loss": 0.9111, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.00081103000811e-05, |
|
"loss": 1.0986, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.995133819951337e-05, |
|
"loss": 0.835, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.989456609894566e-05, |
|
"loss": 0.7715, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.9837793998377934e-05, |
|
"loss": 0.5693, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.978102189781021e-05, |
|
"loss": 0.6743, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.9724249797242494e-05, |
|
"loss": 0.7642, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.966747769667477e-05, |
|
"loss": 0.8359, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.961070559610705e-05, |
|
"loss": 0.8711, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.955393349553933e-05, |
|
"loss": 0.6592, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.949716139497161e-05, |
|
"loss": 1.0557, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.944038929440388e-05, |
|
"loss": 1.5254, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.938361719383617e-05, |
|
"loss": 0.4392, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.9326845093268443e-05, |
|
"loss": 0.916, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.9270072992700724e-05, |
|
"loss": 0.9312, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.9213300892133004e-05, |
|
"loss": 0.8643, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.9156528791565285e-05, |
|
"loss": 0.8589, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.909975669099756e-05, |
|
"loss": 1.0107, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.9042984590429845e-05, |
|
"loss": 0.5156, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.898621248986212e-05, |
|
"loss": 1.5537, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.89294403892944e-05, |
|
"loss": 1.2002, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.887266828872668e-05, |
|
"loss": 1.2891, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.881589618815896e-05, |
|
"loss": 1.1338, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.8759124087591233e-05, |
|
"loss": 0.9946, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.870235198702352e-05, |
|
"loss": 0.8799, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.8645579886455794e-05, |
|
"loss": 0.9248, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.858880778588807e-05, |
|
"loss": 0.8345, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.8532035685320355e-05, |
|
"loss": 0.7388, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.847526358475263e-05, |
|
"loss": 0.7773, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.841849148418491e-05, |
|
"loss": 1.0996, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.8361719383617196e-05, |
|
"loss": 0.4233, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.830494728304947e-05, |
|
"loss": 0.6987, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.824817518248174e-05, |
|
"loss": 0.7031, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 5.8191403081914023e-05, |
|
"loss": 1.0371, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 5.8134630981346304e-05, |
|
"loss": 0.4688, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 5.8077858880778584e-05, |
|
"loss": 0.9785, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 5.802108678021086e-05, |
|
"loss": 1.0127, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 5.7964314679643145e-05, |
|
"loss": 0.4585, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 5.790754257907542e-05, |
|
"loss": 0.7402, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 5.78507704785077e-05, |
|
"loss": 0.8706, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 5.779399837793998e-05, |
|
"loss": 1.0312, |
|
"step": 524 |
|
}, |
|
{
"epoch": 1.02,
"learning_rate": 5.773722627737226e-05,
"loss": 0.7119,
"step": 525
},
{
"epoch": 1.02,
"learning_rate": 5.768045417680453e-05,
"loss": 0.6558,
"step": 526
},
{
"epoch": 1.03,
"learning_rate": 5.762368207623682e-05,
"loss": 0.4546,
"step": 527
},
{
"epoch": 1.03,
"learning_rate": 5.7566909975669094e-05,
"loss": 0.7456,
"step": 528
},
{
"epoch": 1.03,
"learning_rate": 5.7510137875101374e-05,
"loss": 1.2949,
"step": 529
},
{
"epoch": 1.03,
"learning_rate": 5.7453365774533655e-05,
"loss": 0.7246,
"step": 530
},
{
"epoch": 1.03,
"learning_rate": 5.7396593673965935e-05,
"loss": 0.7959,
"step": 531
},
{
"epoch": 1.04,
"learning_rate": 5.733982157339821e-05,
"loss": 1.5547,
"step": 532
},
{
"epoch": 1.04,
"learning_rate": 5.7283049472830496e-05,
"loss": 1.3662,
"step": 533
},
{
"epoch": 1.04,
"learning_rate": 5.722627737226277e-05,
"loss": 0.7383,
"step": 534
},
{
"epoch": 1.04,
"learning_rate": 5.716950527169504e-05,
"loss": 0.5659,
"step": 535
},
{
"epoch": 1.04,
"learning_rate": 5.711273317112733e-05,
"loss": 0.813,
"step": 536
},
{
"epoch": 1.04,
"learning_rate": 5.7055961070559603e-05,
"loss": 0.5952,
"step": 537
},
{
"epoch": 1.05,
"learning_rate": 5.6999188969991884e-05,
"loss": 0.7021,
"step": 538
},
{
"epoch": 1.05,
"learning_rate": 5.6942416869424164e-05,
"loss": 1.4424,
"step": 539
},
{
"epoch": 1.05,
"learning_rate": 5.6885644768856445e-05,
"loss": 0.6577,
"step": 540
},
{
"epoch": 1.05,
"learning_rate": 5.682887266828872e-05,
"loss": 0.6777,
"step": 541
},
{
"epoch": 1.05,
"learning_rate": 5.6772100567721005e-05,
"loss": 0.3254,
"step": 542
},
{
"epoch": 1.06,
"learning_rate": 5.671532846715328e-05,
"loss": 0.7036,
"step": 543
},
{
"epoch": 1.06,
"learning_rate": 5.665855636658556e-05,
"loss": 0.6216,
"step": 544
},
{
"epoch": 1.06,
"learning_rate": 5.660178426601784e-05,
"loss": 0.3704,
"step": 545
},
{
"epoch": 1.06,
"learning_rate": 5.654501216545012e-05,
"loss": 1.3623,
"step": 546
},
{
"epoch": 1.06,
"learning_rate": 5.6488240064882394e-05,
"loss": 0.5874,
"step": 547
},
{
"epoch": 1.07,
"learning_rate": 5.643146796431468e-05,
"loss": 0.8247,
"step": 548
},
{
"epoch": 1.07,
"learning_rate": 5.6374695863746954e-05,
"loss": 0.7695,
"step": 549
},
{
"epoch": 1.07,
"learning_rate": 5.6317923763179235e-05,
"loss": 0.7334,
"step": 550
},
{
"epoch": 1.07,
"learning_rate": 5.6261151662611515e-05,
"loss": 0.3455,
"step": 551
},
{
"epoch": 1.07,
"learning_rate": 5.6204379562043795e-05,
"loss": 0.7368,
"step": 552
},
{
"epoch": 1.08,
"learning_rate": 5.614760746147607e-05,
"loss": 0.6914,
"step": 553
},
{
"epoch": 1.08,
"learning_rate": 5.609083536090834e-05,
"loss": 0.6787,
"step": 554
},
{
"epoch": 1.08,
"learning_rate": 5.603406326034063e-05,
"loss": 0.7505,
"step": 555
},
{
"epoch": 1.08,
"learning_rate": 5.59772911597729e-05,
"loss": 0.583,
"step": 556
},
{
"epoch": 1.08,
"learning_rate": 5.5920519059205184e-05,
"loss": 0.666,
"step": 557
},
{
"epoch": 1.09,
"learning_rate": 5.5863746958637464e-05,
"loss": 0.5659,
"step": 558
},
{
"epoch": 1.09,
"learning_rate": 5.5806974858069744e-05,
"loss": 1.3018,
"step": 559
},
{
"epoch": 1.09,
"learning_rate": 5.575020275750202e-05,
"loss": 1.0088,
"step": 560
},
{
"epoch": 1.09,
"learning_rate": 5.5693430656934305e-05,
"loss": 0.2649,
"step": 561
},
{
"epoch": 1.09,
"learning_rate": 5.563665855636658e-05,
"loss": 0.7944,
"step": 562
},
{
"epoch": 1.1,
"learning_rate": 5.557988645579886e-05,
"loss": 0.5659,
"step": 563
},
{
"epoch": 1.1,
"learning_rate": 5.552311435523114e-05,
"loss": 0.6167,
"step": 564
},
{
"epoch": 1.1,
"learning_rate": 5.546634225466342e-05,
"loss": 0.5488,
"step": 565
},
{
"epoch": 1.1,
"learning_rate": 5.540957015409569e-05,
"loss": 0.2141,
"step": 566
},
{
"epoch": 1.1,
"learning_rate": 5.535279805352798e-05,
"loss": 1.0156,
"step": 567
},
{
"epoch": 1.11,
"learning_rate": 5.5296025952960254e-05,
"loss": 0.314,
"step": 568
},
{
"epoch": 1.11,
"learning_rate": 5.5239253852392534e-05,
"loss": 1.252,
"step": 569
},
{
"epoch": 1.11,
"learning_rate": 5.5182481751824815e-05,
"loss": 1.1738,
"step": 570
},
{
"epoch": 1.11,
"learning_rate": 5.5125709651257095e-05,
"loss": 0.303,
"step": 571
},
{
"epoch": 1.11,
"learning_rate": 5.506893755068937e-05,
"loss": 0.8926,
"step": 572
},
{
"epoch": 1.11,
"learning_rate": 5.5012165450121656e-05,
"loss": 0.8857,
"step": 573
},
{
"epoch": 1.12,
"learning_rate": 5.495539334955393e-05,
"loss": 0.5151,
"step": 574
},
{
"epoch": 1.12,
"learning_rate": 5.48986212489862e-05,
"loss": 0.647,
"step": 575
},
{
"epoch": 1.12,
"learning_rate": 5.484184914841849e-05,
"loss": 0.4602,
"step": 576
},
{
"epoch": 1.12,
"learning_rate": 5.4785077047850764e-05,
"loss": 0.6899,
"step": 577
},
{
"epoch": 1.12,
"learning_rate": 5.4728304947283044e-05,
"loss": 0.7246,
"step": 578
},
{
"epoch": 1.13,
"learning_rate": 5.4671532846715324e-05,
"loss": 0.5537,
"step": 579
},
{
"epoch": 1.13,
"learning_rate": 5.4614760746147605e-05,
"loss": 0.4607,
"step": 580
},
{
"epoch": 1.13,
"learning_rate": 5.455798864557988e-05,
"loss": 0.8325,
"step": 581
},
{
"epoch": 1.13,
"learning_rate": 5.4501216545012165e-05,
"loss": 0.6743,
"step": 582
},
{
"epoch": 1.13,
"learning_rate": 5.444444444444444e-05,
"loss": 0.9932,
"step": 583
},
{
"epoch": 1.14,
"learning_rate": 5.438767234387672e-05,
"loss": 0.6094,
"step": 584
},
{
"epoch": 1.14,
"learning_rate": 5.4330900243309e-05,
"loss": 0.8311,
"step": 585
},
{
"epoch": 1.14,
"learning_rate": 5.427412814274128e-05,
"loss": 0.2861,
"step": 586
},
{
"epoch": 1.14,
"learning_rate": 5.4217356042173554e-05,
"loss": 0.4551,
"step": 587
},
{
"epoch": 1.14,
"learning_rate": 5.416058394160584e-05,
"loss": 0.7676,
"step": 588
},
{
"epoch": 1.15,
"learning_rate": 5.4103811841038114e-05,
"loss": 0.5674,
"step": 589
},
{
"epoch": 1.15,
"learning_rate": 5.4047039740470395e-05,
"loss": 0.9771,
"step": 590
},
{
"epoch": 1.15,
"learning_rate": 5.399026763990267e-05,
"loss": 1.2178,
"step": 591
},
{
"epoch": 1.15,
"learning_rate": 5.3933495539334955e-05,
"loss": 0.4553,
"step": 592
},
{
"epoch": 1.15,
"learning_rate": 5.387672343876723e-05,
"loss": 0.4836,
"step": 593
},
{
"epoch": 1.16,
"learning_rate": 5.38199513381995e-05,
"loss": 1.502,
"step": 594
},
{
"epoch": 1.16,
"learning_rate": 5.376317923763179e-05,
"loss": 0.5693,
"step": 595
},
{
"epoch": 1.16,
"learning_rate": 5.370640713706406e-05,
"loss": 0.9121,
"step": 596
},
{
"epoch": 1.16,
"learning_rate": 5.3649635036496344e-05,
"loss": 1.2461,
"step": 597
},
{
"epoch": 1.16,
"learning_rate": 5.3592862935928624e-05,
"loss": 0.3044,
"step": 598
},
{
"epoch": 1.17,
"learning_rate": 5.3536090835360904e-05,
"loss": 0.7896,
"step": 599
},
{
"epoch": 1.17,
"learning_rate": 5.347931873479318e-05,
"loss": 0.9272,
"step": 600
},
{
"epoch": 1.17,
"learning_rate": 5.3422546634225465e-05,
"loss": 0.8188,
"step": 601
},
{
"epoch": 1.17,
"learning_rate": 5.336577453365774e-05,
"loss": 0.8926,
"step": 602
},
{
"epoch": 1.17,
"learning_rate": 5.330900243309002e-05,
"loss": 1.1768,
"step": 603
},
{
"epoch": 1.18,
"learning_rate": 5.32522303325223e-05,
"loss": 0.7588,
"step": 604
},
{
"epoch": 1.18,
"learning_rate": 5.319545823195458e-05,
"loss": 0.9399,
"step": 605
},
{
"epoch": 1.18,
"learning_rate": 5.313868613138685e-05,
"loss": 0.6055,
"step": 606
},
{
"epoch": 1.18,
"learning_rate": 5.308191403081914e-05,
"loss": 0.8447,
"step": 607
},
{
"epoch": 1.18,
"learning_rate": 5.3025141930251414e-05,
"loss": 0.5488,
"step": 608
},
{
"epoch": 1.18,
"learning_rate": 5.2968369829683694e-05,
"loss": 0.7842,
"step": 609
},
{
"epoch": 1.19,
"learning_rate": 5.2911597729115975e-05,
"loss": 0.7739,
"step": 610
},
{
"epoch": 1.19,
"learning_rate": 5.2854825628548255e-05,
"loss": 1.0391,
"step": 611
},
{
"epoch": 1.19,
"learning_rate": 5.279805352798053e-05,
"loss": 0.812,
"step": 612
},
{
"epoch": 1.19,
"learning_rate": 5.2741281427412816e-05,
"loss": 0.9531,
"step": 613
},
{
"epoch": 1.19,
"learning_rate": 5.268450932684509e-05,
"loss": 1.0264,
"step": 614
},
{
"epoch": 1.2,
"learning_rate": 5.262773722627736e-05,
"loss": 0.7812,
"step": 615
},
{
"epoch": 1.2,
"learning_rate": 5.257096512570965e-05,
"loss": 1.5,
"step": 616
},
{
"epoch": 1.2,
"learning_rate": 5.2514193025141924e-05,
"loss": 0.4814,
"step": 617
},
{
"epoch": 1.2,
"learning_rate": 5.2457420924574204e-05,
"loss": 0.4253,
"step": 618
},
{
"epoch": 1.2,
"learning_rate": 5.2400648824006484e-05,
"loss": 0.7495,
"step": 619
},
{
"epoch": 1.21,
"learning_rate": 5.2343876723438765e-05,
"loss": 0.4854,
"step": 620
},
{
"epoch": 1.21,
"learning_rate": 5.228710462287104e-05,
"loss": 0.6201,
"step": 621
},
{
"epoch": 1.21,
"learning_rate": 5.2230332522303325e-05,
"loss": 0.4961,
"step": 622
},
{
"epoch": 1.21,
"learning_rate": 5.21735604217356e-05,
"loss": 0.4758,
"step": 623
},
{
"epoch": 1.21,
"learning_rate": 5.211678832116788e-05,
"loss": 0.5234,
"step": 624
},
{
"epoch": 1.22,
"learning_rate": 5.206001622060016e-05,
"loss": 0.8047,
"step": 625
},
{
"epoch": 1.22,
"learning_rate": 5.200324412003244e-05,
"loss": 0.3557,
"step": 626
},
{
"epoch": 1.22,
"learning_rate": 5.1946472019464714e-05,
"loss": 0.6372,
"step": 627
},
{
"epoch": 1.22,
"learning_rate": 5.1889699918896994e-05,
"loss": 0.7012,
"step": 628
},
{
"epoch": 1.22,
"learning_rate": 5.1832927818329274e-05,
"loss": 0.6177,
"step": 629
},
{
"epoch": 1.23,
"learning_rate": 5.1776155717761555e-05,
"loss": 0.8203,
"step": 630
},
{
"epoch": 1.23,
"learning_rate": 5.171938361719383e-05,
"loss": 0.2954,
"step": 631
},
{
"epoch": 1.23,
"learning_rate": 5.1662611516626115e-05,
"loss": 0.4924,
"step": 632
},
{
"epoch": 1.23,
"learning_rate": 5.160583941605839e-05,
"loss": 0.3064,
"step": 633
},
{
"epoch": 1.23,
"learning_rate": 5.154906731549066e-05,
"loss": 0.4688,
"step": 634
},
{
"epoch": 1.24,
"learning_rate": 5.149229521492295e-05,
"loss": 1.1973,
"step": 635
},
{
"epoch": 1.24,
"learning_rate": 5.143552311435522e-05,
"loss": 0.3574,
"step": 636
},
{
"epoch": 1.24,
"learning_rate": 5.1378751013787504e-05,
"loss": 0.8818,
"step": 637
},
{
"epoch": 1.24,
"learning_rate": 5.1321978913219784e-05,
"loss": 0.5815,
"step": 638
},
{
"epoch": 1.24,
"learning_rate": 5.1265206812652064e-05,
"loss": 0.9336,
"step": 639
},
{
"epoch": 1.25,
"learning_rate": 5.120843471208434e-05,
"loss": 0.7153,
"step": 640
},
{
"epoch": 1.25,
"learning_rate": 5.1151662611516625e-05,
"loss": 0.8955,
"step": 641
},
{
"epoch": 1.25,
"learning_rate": 5.10948905109489e-05,
"loss": 0.7441,
"step": 642
},
{
"epoch": 1.25,
"learning_rate": 5.103811841038118e-05,
"loss": 0.6123,
"step": 643
},
{
"epoch": 1.25,
"learning_rate": 5.098134630981346e-05,
"loss": 0.4082,
"step": 644
},
{
"epoch": 1.25,
"learning_rate": 5.092457420924574e-05,
"loss": 0.7158,
"step": 645
},
{
"epoch": 1.26,
"learning_rate": 5.086780210867801e-05,
"loss": 0.7539,
"step": 646
},
{
"epoch": 1.26,
"learning_rate": 5.08110300081103e-05,
"loss": 0.8965,
"step": 647
},
{
"epoch": 1.26,
"learning_rate": 5.0754257907542574e-05,
"loss": 0.522,
"step": 648
},
{
"epoch": 1.26,
"learning_rate": 5.0697485806974854e-05,
"loss": 0.4617,
"step": 649
},
{
"epoch": 1.26,
"learning_rate": 5.0640713706407135e-05,
"loss": 0.4121,
"step": 650
},
{
"epoch": 1.27,
"learning_rate": 5.0583941605839415e-05,
"loss": 0.4231,
"step": 651
},
{
"epoch": 1.27,
"learning_rate": 5.052716950527169e-05,
"loss": 0.7451,
"step": 652
},
{
"epoch": 1.27,
"learning_rate": 5.0470397404703976e-05,
"loss": 1.0732,
"step": 653
},
{
"epoch": 1.27,
"learning_rate": 5.041362530413625e-05,
"loss": 0.9907,
"step": 654
},
{
"epoch": 1.27,
"learning_rate": 5.035685320356852e-05,
"loss": 0.4873,
"step": 655
},
{
"epoch": 1.28,
"learning_rate": 5.030008110300081e-05,
"loss": 0.5576,
"step": 656
},
{
"epoch": 1.28,
"learning_rate": 5.0243309002433084e-05,
"loss": 0.6743,
"step": 657
},
{
"epoch": 1.28,
"learning_rate": 5.0186536901865364e-05,
"loss": 2.2871,
"step": 658
},
{
"epoch": 1.28,
"learning_rate": 5.0129764801297644e-05,
"loss": 0.3877,
"step": 659
},
{
"epoch": 1.28,
"learning_rate": 5.0072992700729925e-05,
"loss": 1.2285,
"step": 660
},
{
"epoch": 1.29,
"learning_rate": 5.00162206001622e-05,
"loss": 0.54,
"step": 661
},
{
"epoch": 1.29,
"learning_rate": 4.9959448499594485e-05,
"loss": 0.7734,
"step": 662
},
{
"epoch": 1.29,
"learning_rate": 4.990267639902676e-05,
"loss": 0.3262,
"step": 663
},
{
"epoch": 1.29,
"learning_rate": 4.984590429845904e-05,
"loss": 0.8691,
"step": 664
},
{
"epoch": 1.29,
"learning_rate": 4.978913219789131e-05,
"loss": 0.9824,
"step": 665
},
{
"epoch": 1.3,
"learning_rate": 4.97323600973236e-05,
"loss": 0.5977,
"step": 666
},
{
"epoch": 1.3,
"learning_rate": 4.9675587996755874e-05,
"loss": 0.3306,
"step": 667
},
{
"epoch": 1.3,
"learning_rate": 4.9618815896188154e-05,
"loss": 0.6807,
"step": 668
},
{
"epoch": 1.3,
"learning_rate": 4.9562043795620434e-05,
"loss": 0.522,
"step": 669
},
{
"epoch": 1.3,
"learning_rate": 4.9505271695052715e-05,
"loss": 0.469,
"step": 670
},
{
"epoch": 1.31,
"learning_rate": 4.944849959448499e-05,
"loss": 0.9087,
"step": 671
},
{
"epoch": 1.31,
"learning_rate": 4.9391727493917275e-05,
"loss": 0.8984,
"step": 672
},
{
"epoch": 1.31,
"learning_rate": 4.933495539334955e-05,
"loss": 0.8662,
"step": 673
},
{
"epoch": 1.31,
"learning_rate": 4.927818329278182e-05,
"loss": 1.2021,
"step": 674
},
{
"epoch": 1.31,
"learning_rate": 4.922141119221411e-05,
"loss": 1.0156,
"step": 675
},
{
"epoch": 1.32,
"learning_rate": 4.916463909164638e-05,
"loss": 1.0498,
"step": 676
},
{
"epoch": 1.32,
"learning_rate": 4.9107866991078664e-05,
"loss": 0.9355,
"step": 677
},
{
"epoch": 1.32,
"learning_rate": 4.9051094890510944e-05,
"loss": 1.4971,
"step": 678
},
{
"epoch": 1.32,
"learning_rate": 4.8994322789943224e-05,
"loss": 0.542,
"step": 679
},
{
"epoch": 1.32,
"learning_rate": 4.89375506893755e-05,
"loss": 0.7451,
"step": 680
},
{
"epoch": 1.32,
"learning_rate": 4.8880778588807785e-05,
"loss": 0.7549,
"step": 681
},
{
"epoch": 1.33,
"learning_rate": 4.882400648824006e-05,
"loss": 1.1846,
"step": 682
},
{
"epoch": 1.33,
"learning_rate": 4.876723438767234e-05,
"loss": 1.3223,
"step": 683
},
{
"epoch": 1.33,
"learning_rate": 4.871046228710462e-05,
"loss": 0.9224,
"step": 684
},
{
"epoch": 1.33,
"learning_rate": 4.86536901865369e-05,
"loss": 0.9736,
"step": 685
},
{
"epoch": 1.33,
"learning_rate": 4.859691808596917e-05,
"loss": 0.9375,
"step": 686
},
{
"epoch": 1.34,
"learning_rate": 4.854014598540146e-05,
"loss": 0.7939,
"step": 687
},
{
"epoch": 1.34,
"learning_rate": 4.8483373884833734e-05,
"loss": 0.425,
"step": 688
},
{
"epoch": 1.34,
"learning_rate": 4.8426601784266014e-05,
"loss": 0.9941,
"step": 689
},
{
"epoch": 1.34,
"learning_rate": 4.8369829683698295e-05,
"loss": 0.3867,
"step": 690
},
{
"epoch": 1.34,
"learning_rate": 4.8313057583130575e-05,
"loss": 0.8789,
"step": 691
},
{
"epoch": 1.35,
"learning_rate": 4.825628548256285e-05,
"loss": 0.856,
"step": 692
},
{
"epoch": 1.35,
"learning_rate": 4.8199513381995136e-05,
"loss": 0.6328,
"step": 693
},
{
"epoch": 1.35,
"learning_rate": 4.814274128142741e-05,
"loss": 0.3904,
"step": 694
},
{
"epoch": 1.35,
"learning_rate": 4.808596918085968e-05,
"loss": 0.9131,
"step": 695
},
{
"epoch": 1.35,
"learning_rate": 4.802919708029197e-05,
"loss": 1.7617,
"step": 696
},
{
"epoch": 1.36,
"learning_rate": 4.7972424979724244e-05,
"loss": 0.7188,
"step": 697
},
{
"epoch": 1.36,
"learning_rate": 4.7915652879156524e-05,
"loss": 0.7603,
"step": 698
},
{
"epoch": 1.36,
"learning_rate": 4.7858880778588804e-05,
"loss": 0.4224,
"step": 699
},
{
"epoch": 1.36,
"learning_rate": 4.7802108678021085e-05,
"loss": 0.4788,
"step": 700
},
{
"epoch": 1.36,
"learning_rate": 4.774533657745336e-05,
"loss": 0.3206,
"step": 701
},
{
"epoch": 1.37,
"learning_rate": 4.768856447688564e-05,
"loss": 0.8936,
"step": 702
},
{
"epoch": 1.37,
"learning_rate": 4.763179237631792e-05,
"loss": 1.0488,
"step": 703
},
{
"epoch": 1.37,
"learning_rate": 4.75750202757502e-05,
"loss": 0.4692,
"step": 704
},
{
"epoch": 1.37,
"learning_rate": 4.751824817518247e-05,
"loss": 1.1621,
"step": 705
},
{
"epoch": 1.37,
"learning_rate": 4.746147607461476e-05,
"loss": 0.6011,
"step": 706
},
{
"epoch": 1.38,
"learning_rate": 4.7404703974047034e-05,
"loss": 0.5776,
"step": 707
},
{
"epoch": 1.38,
"learning_rate": 4.7347931873479314e-05,
"loss": 0.7192,
"step": 708
},
{
"epoch": 1.38,
"learning_rate": 4.7291159772911594e-05,
"loss": 0.6357,
"step": 709
},
{
"epoch": 1.38,
"learning_rate": 4.7234387672343875e-05,
"loss": 0.9116,
"step": 710
},
{
"epoch": 1.38,
"learning_rate": 4.717761557177615e-05,
"loss": 0.8164,
"step": 711
},
{
"epoch": 1.39,
"learning_rate": 4.7120843471208436e-05,
"loss": 0.5967,
"step": 712
},
{
"epoch": 1.39,
"learning_rate": 4.706407137064071e-05,
"loss": 0.978,
"step": 713
},
{
"epoch": 1.39,
"learning_rate": 4.700729927007298e-05,
"loss": 0.6914,
"step": 714
},
{
"epoch": 1.39,
"learning_rate": 4.695052716950527e-05,
"loss": 0.5815,
"step": 715
},
{
"epoch": 1.39,
"learning_rate": 4.6893755068937543e-05,
"loss": 0.9937,
"step": 716
},
{
"epoch": 1.39,
"learning_rate": 4.6836982968369824e-05,
"loss": 0.6064,
"step": 717
},
{
"epoch": 1.4,
"learning_rate": 4.6780210867802104e-05,
"loss": 0.7002,
"step": 718
},
{
"epoch": 1.4,
"learning_rate": 4.6723438767234384e-05,
"loss": 0.4333,
"step": 719
},
{
"epoch": 1.4,
"learning_rate": 4.666666666666666e-05,
"loss": 1.2129,
"step": 720
},
{
"epoch": 1.4,
"learning_rate": 4.6609894566098945e-05,
"loss": 0.6694,
"step": 721
},
{
"epoch": 1.4,
"learning_rate": 4.655312246553122e-05,
"loss": 0.6294,
"step": 722
},
{
"epoch": 1.41,
"learning_rate": 4.64963503649635e-05,
"loss": 0.6826,
"step": 723
},
{
"epoch": 1.41,
"learning_rate": 4.643957826439578e-05,
"loss": 1.0996,
"step": 724
},
{
"epoch": 1.41,
"learning_rate": 4.638280616382806e-05,
"loss": 0.5732,
"step": 725
},
{
"epoch": 1.41,
"learning_rate": 4.6326034063260333e-05,
"loss": 0.6401,
"step": 726
},
{
"epoch": 1.41,
"learning_rate": 4.626926196269262e-05,
"loss": 0.8794,
"step": 727
},
{
"epoch": 1.42,
"learning_rate": 4.6212489862124894e-05,
"loss": 0.4543,
"step": 728
},
{
"epoch": 1.42,
"learning_rate": 4.6155717761557174e-05,
"loss": 0.8379,
"step": 729
},
{
"epoch": 1.42,
"learning_rate": 4.6098945660989455e-05,
"loss": 0.5986,
"step": 730
},
{
"epoch": 1.42,
"learning_rate": 4.6042173560421735e-05,
"loss": 0.6763,
"step": 731
},
{
"epoch": 1.42,
"learning_rate": 4.598540145985401e-05,
"loss": 0.5933,
"step": 732
},
{
"epoch": 1.43,
"learning_rate": 4.5928629359286296e-05,
"loss": 1.1729,
"step": 733
},
{
"epoch": 1.43,
"learning_rate": 4.587185725871857e-05,
"loss": 0.4932,
"step": 734
},
{
"epoch": 1.43,
"learning_rate": 4.581508515815084e-05,
"loss": 0.4761,
"step": 735
},
{
"epoch": 1.43,
"learning_rate": 4.575831305758313e-05,
"loss": 0.6289,
"step": 736
},
{
"epoch": 1.43,
"learning_rate": 4.5701540957015404e-05,
"loss": 1.4434,
"step": 737
},
{
"epoch": 1.44,
"learning_rate": 4.5644768856447684e-05,
"loss": 0.6553,
"step": 738
},
{
"epoch": 1.44,
"learning_rate": 4.558799675587996e-05,
"loss": 0.7256,
"step": 739
},
{
"epoch": 1.44,
"learning_rate": 4.5531224655312245e-05,
"loss": 0.8916,
"step": 740
},
{
"epoch": 1.44,
"learning_rate": 4.547445255474452e-05,
"loss": 0.4094,
"step": 741
},
{
"epoch": 1.44,
"learning_rate": 4.54176804541768e-05,
"loss": 0.6704,
"step": 742
},
{
"epoch": 1.45,
"learning_rate": 4.536090835360908e-05,
"loss": 0.9302,
"step": 743
},
{
"epoch": 1.45,
"learning_rate": 4.530413625304136e-05,
"loss": 0.6665,
"step": 744
},
{
"epoch": 1.45,
"learning_rate": 4.524736415247363e-05,
"loss": 0.4407,
"step": 745
},
{
"epoch": 1.45,
"learning_rate": 4.519059205190592e-05,
"loss": 0.624,
"step": 746
},
{
"epoch": 1.45,
"learning_rate": 4.5133819951338194e-05,
"loss": 0.2291,
"step": 747
},
{
"epoch": 1.46,
"learning_rate": 4.5077047850770474e-05,
"loss": 0.9922,
"step": 748
},
{
"epoch": 1.46,
"learning_rate": 4.5020275750202755e-05,
"loss": 0.9346,
"step": 749
},
{
"epoch": 1.46,
"learning_rate": 4.4963503649635035e-05,
"loss": 0.9111,
"step": 750
},
{
"epoch": 1.46,
"learning_rate": 4.490673154906731e-05,
"loss": 0.7959,
"step": 751
},
{
"epoch": 1.46,
"learning_rate": 4.4849959448499596e-05,
"loss": 1.416,
"step": 752
},
{
"epoch": 1.46,
"learning_rate": 4.479318734793187e-05,
"loss": 0.7896,
"step": 753
},
{
"epoch": 1.47,
"learning_rate": 4.473641524736414e-05,
"loss": 0.3201,
"step": 754
},
{
"epoch": 1.47,
"learning_rate": 4.467964314679643e-05,
"loss": 1.1387,
"step": 755
},
{
"epoch": 1.47,
"learning_rate": 4.4622871046228703e-05,
"loss": 1.4805,
"step": 756
},
{
"epoch": 1.47,
"learning_rate": 4.4566098945660984e-05,
"loss": 0.5156,
"step": 757
},
{
"epoch": 1.47,
"learning_rate": 4.4509326845093264e-05,
"loss": 0.6445,
"step": 758
},
{
"epoch": 1.48,
"learning_rate": 4.4452554744525545e-05,
"loss": 0.4756,
"step": 759
},
{
"epoch": 1.48,
"learning_rate": 4.439578264395782e-05,
"loss": 0.4639,
"step": 760
},
{
"epoch": 1.48,
"learning_rate": 4.4339010543390105e-05,
"loss": 0.6973,
"step": 761
},
{
"epoch": 1.48,
"learning_rate": 4.428223844282238e-05,
"loss": 0.6152,
"step": 762
},
{
"epoch": 1.48,
"learning_rate": 4.422546634225466e-05,
"loss": 1.7656,
"step": 763
},
{
"epoch": 1.49,
"learning_rate": 4.416869424168694e-05,
"loss": 0.9326,
"step": 764
},
{
"epoch": 1.49,
"learning_rate": 4.411192214111922e-05,
"loss": 0.3223,
"step": 765
},
{
"epoch": 1.49,
"learning_rate": 4.4055150040551493e-05,
"loss": 1.0098,
"step": 766
},
{
"epoch": 1.49,
"learning_rate": 4.399837793998378e-05,
"loss": 0.4707,
"step": 767
},
{
"epoch": 1.49,
"learning_rate": 4.3941605839416054e-05,
"loss": 0.5889,
"step": 768
},
{
"epoch": 1.5,
"learning_rate": 4.3884833738848335e-05,
"loss": 1.167,
"step": 769
},
{
"epoch": 1.5,
"learning_rate": 4.3828061638280615e-05,
"loss": 0.9785,
"step": 770
},
{
"epoch": 1.5,
"learning_rate": 4.3771289537712895e-05,
"loss": 0.5278,
"step": 771
},
{
"epoch": 1.5,
"learning_rate": 4.371451743714517e-05,
"loss": 0.6733,
"step": 772
},
{
"epoch": 1.5,
"learning_rate": 4.3657745336577456e-05,
"loss": 0.77,
"step": 773
},
{
"epoch": 1.51,
"learning_rate": 4.360097323600973e-05,
"loss": 0.6948,
"step": 774
},
{
"epoch": 1.51,
"learning_rate": 4.3544201135442e-05,
"loss": 0.5112,
"step": 775
},
{
"epoch": 1.51,
"learning_rate": 4.348742903487429e-05,
"loss": 0.6782,
"step": 776
},
{
"epoch": 1.51,
"learning_rate": 4.3430656934306564e-05,
"loss": 0.8408,
"step": 777
},
{
"epoch": 1.51,
"learning_rate": 4.3373884833738844e-05,
"loss": 0.8418,
"step": 778
},
{
"epoch": 1.52,
"learning_rate": 4.331711273317112e-05,
"loss": 1.1699,
"step": 779
},
{
"epoch": 1.52,
"learning_rate": 4.3260340632603405e-05,
"loss": 0.2642,
"step": 780
},
{
"epoch": 1.52,
"learning_rate": 4.320356853203568e-05,
"loss": 0.9331,
"step": 781
},
{
"epoch": 1.52,
"learning_rate": 4.314679643146796e-05,
"loss": 0.4326,
"step": 782
},
{
"epoch": 1.52,
"learning_rate": 4.309002433090024e-05,
"loss": 0.5601,
"step": 783
},
{
"epoch": 1.53,
"learning_rate": 4.303325223033252e-05,
"loss": 1.208,
"step": 784
},
{
"epoch": 1.53,
"learning_rate": 4.297648012976479e-05,
"loss": 0.7935,
"step": 785
},
{
"epoch": 1.53,
"learning_rate": 4.291970802919708e-05,
"loss": 0.8384,
"step": 786
},
{
"epoch": 1.53,
"learning_rate": 4.2862935928629354e-05,
"loss": 1.0117,
"step": 787
},
{
"epoch": 1.53,
"learning_rate": 4.2806163828061634e-05,
"loss": 0.9062,
"step": 788
},
{
"epoch": 1.54,
"learning_rate": 4.2749391727493915e-05,
"loss": 0.9189,
"step": 789
},
{
"epoch": 1.54,
"learning_rate": 4.2692619626926195e-05,
"loss": 0.9731,
"step": 790
},
{
"epoch": 1.54,
"learning_rate": 4.263584752635847e-05,
"loss": 0.4832,
"step": 791
},
{
"epoch": 1.54,
"learning_rate": 4.2579075425790756e-05,
"loss": 0.6538,
"step": 792
},
{
"epoch": 1.54,
"learning_rate": 4.252230332522303e-05,
"loss": 0.791,
"step": 793
},
{
"epoch": 1.54,
"learning_rate": 4.24655312246553e-05,
"loss": 0.9517,
"step": 794
},
{
"epoch": 1.55,
"learning_rate": 4.240875912408759e-05,
"loss": 1.2822,
"step": 795
},
{
"epoch": 1.55,
"learning_rate": 4.2351987023519864e-05,
"loss": 1.3242,
"step": 796
},
{
"epoch": 1.55,
"learning_rate": 4.2295214922952144e-05,
"loss": 0.5776,
"step": 797
},
{
"epoch": 1.55,
"learning_rate": 4.2238442822384424e-05,
"loss": 0.8545,
"step": 798
},
{
"epoch": 1.55,
"learning_rate": 4.2181670721816705e-05,
"loss": 0.7012,
"step": 799
},
{
"epoch": 1.56,
"learning_rate": 4.212489862124898e-05,
"loss": 1.1816,
"step": 800
},
{
"epoch": 1.56,
"learning_rate": 4.2068126520681265e-05,
"loss": 0.6606,
"step": 801
},
{
"epoch": 1.56,
"learning_rate": 4.201135442011354e-05,
"loss": 0.1469,
"step": 802
},
{
"epoch": 1.56,
"learning_rate": 4.195458231954582e-05,
"loss": 0.6958,
"step": 803
},
{
"epoch": 1.56,
"learning_rate": 4.18978102189781e-05,
"loss": 0.9199,
"step": 804
},
{
"epoch": 1.57,
"learning_rate": 4.184103811841038e-05,
"loss": 0.5605,
"step": 805
},
{
"epoch": 1.57,
"learning_rate": 4.1784266017842654e-05,
"loss": 0.8633,
"step": 806
},
{
"epoch": 1.57,
"learning_rate": 4.172749391727494e-05,
"loss": 0.7412,
"step": 807
},
{
"epoch": 1.57,
"learning_rate": 4.1670721816707214e-05,
"loss": 1.3379,
"step": 808
},
{
"epoch": 1.57,
"learning_rate": 4.1613949716139495e-05,
"loss": 0.3428,
"step": 809
},
{
"epoch": 1.58,
"learning_rate": 4.1557177615571775e-05,
"loss": 0.8501,
"step": 810
},
{
"epoch": 1.58,
"learning_rate": 4.1500405515004055e-05,
"loss": 1.1455,
"step": 811
},
{
"epoch": 1.58,
"learning_rate": 4.144363341443633e-05,
"loss": 0.5195,
"step": 812
},
{
"epoch": 1.58,
"learning_rate": 4.1386861313868616e-05,
"loss": 0.5435,
"step": 813
},
{
"epoch": 1.58,
"learning_rate": 4.133008921330089e-05,
"loss": 0.9927,
"step": 814
},
{
"epoch": 1.59,
"learning_rate": 4.127331711273316e-05,
"loss": 0.8477,
"step": 815
},
{
"epoch": 1.59,
"learning_rate": 4.1216545012165444e-05,
"loss": 1.041,
"step": 816
},
{
"epoch": 1.59,
"learning_rate": 4.1159772911597724e-05,
"loss": 1.0352,
"step": 817
},
{
"epoch": 1.59,
"learning_rate": 4.1103000811030004e-05,
"loss": 0.8618,
"step": 818
},
{
"epoch": 1.59,
"learning_rate": 4.104622871046228e-05,
"loss": 0.5752,
"step": 819
},
{
"epoch": 1.6,
"learning_rate": 4.0989456609894565e-05,
"loss": 0.2349,
"step": 820
},
{
"epoch": 1.6,
"learning_rate": 4.093268450932684e-05,
"loss": 0.7603,
"step": 821
},
{
"epoch": 1.6,
"learning_rate": 4.087591240875912e-05,
"loss": 1.0488,
"step": 822
},
{
"epoch": 1.6,
"learning_rate": 4.08191403081914e-05,
"loss": 0.3303,
"step": 823
},
{
"epoch": 1.6,
"learning_rate": 4.076236820762368e-05,
"loss": 0.467,
"step": 824
},
{
"epoch": 1.61,
"learning_rate": 4.070559610705595e-05,
"loss": 0.5454,
"step": 825
},
{
"epoch": 1.61,
"learning_rate": 4.064882400648824e-05,
"loss": 1.1592,
"step": 826
},
{
"epoch": 1.61,
"learning_rate": 4.0592051905920514e-05,
"loss": 0.8486,
"step": 827
},
{
"epoch": 1.61,
"learning_rate": 4.0535279805352794e-05,
"loss": 1.3613,
"step": 828
},
{
"epoch": 1.61,
"learning_rate": 4.0478507704785075e-05,
"loss": 0.7183,
"step": 829
},
{
"epoch": 1.61,
"learning_rate": 4.0421735604217355e-05,
"loss": 0.7847,
"step": 830
},
{
"epoch": 1.62,
"learning_rate": 4.036496350364963e-05,
"loss": 0.6357,
"step": 831
},
{
"epoch": 1.62,
"learning_rate": 4.0308191403081916e-05,
"loss": 0.5742,
"step": 832
},
{
"epoch": 1.62,
"learning_rate": 4.025141930251419e-05,
"loss": 0.7642,
"step": 833
},
{
"epoch": 1.62,
"learning_rate": 4.019464720194646e-05,
"loss": 0.6035,
"step": 834
},
{
"epoch": 1.62,
"learning_rate": 4.013787510137875e-05,
"loss": 0.9321,
"step": 835
},
{
"epoch": 1.63,
"learning_rate": 4.0081103000811024e-05,
"loss": 1.1025,
"step": 836
},
{
"epoch": 1.63,
"learning_rate": 4.0024330900243304e-05,
"loss": 0.7021,
"step": 837
},
{
"epoch": 1.63,
"learning_rate": 3.9967558799675584e-05,
"loss": 0.4368,
"step": 838
},
{
"epoch": 1.63,
"learning_rate": 3.9910786699107865e-05,
"loss": 0.7178,
"step": 839
},
{
"epoch": 1.63,
"learning_rate": 3.985401459854014e-05,
"loss": 0.7056,
"step": 840
},
{
"epoch": 1.64,
"learning_rate": 3.9797242497972425e-05,
"loss": 0.7983,
"step": 841
},
{
"epoch": 1.64,
"learning_rate": 3.97404703974047e-05,
"loss": 0.2686,
"step": 842
},
{
"epoch": 1.64,
"learning_rate": 3.968369829683698e-05,
"loss": 1.1807,
"step": 843
},
{
"epoch": 1.64,
"learning_rate": 3.962692619626926e-05,
"loss": 0.3396,
"step": 844
},
{
"epoch": 1.64,
"learning_rate": 3.957015409570154e-05,
"loss": 0.8848,
"step": 845
},
{
"epoch": 1.65,
"learning_rate": 3.9513381995133814e-05,
"loss": 0.5806,
"step": 846
},
{
"epoch": 1.65,
"learning_rate": 3.94566098945661e-05,
"loss": 0.9692,
"step": 847
},
{
"epoch": 1.65,
"learning_rate": 3.9399837793998374e-05,
"loss": 1.0947,
"step": 848
},
{
"epoch": 1.65,
"learning_rate": 3.9343065693430655e-05,
"loss": 1.0781,
"step": 849
},
{
"epoch": 1.65,
"learning_rate": 3.9286293592862935e-05,
"loss": 0.5312,
"step": 850
},
{
"epoch": 1.66,
"learning_rate": 3.9229521492295215e-05,
"loss": 0.855,
"step": 851
},
{
"epoch": 1.66,
"learning_rate": 3.917274939172749e-05,
"loss": 0.428,
"step": 852
},
{
"epoch": 1.66,
"learning_rate": 3.911597729115976e-05,
"loss": 0.8184,
"step": 853
},
{
"epoch": 1.66,
"learning_rate": 3.905920519059205e-05,
"loss": 0.8101,
"step": 854
},
{
"epoch": 1.66,
"learning_rate": 3.900243309002432e-05,
"loss": 0.8364,
"step": 855
},
{
"epoch": 1.67,
"learning_rate": 3.8945660989456604e-05,
"loss": 0.8145,
"step": 856
},
{
"epoch": 1.67,
"learning_rate": 3.8888888888888884e-05,
"loss": 0.3484,
"step": 857
},
{
"epoch": 1.67,
"learning_rate": 3.8832116788321164e-05,
"loss": 0.8086,
"step": 858
},
{
"epoch": 1.67,
"learning_rate": 3.877534468775344e-05,
"loss": 0.7988,
"step": 859
},
{
"epoch": 1.67,
"learning_rate": 3.8718572587185725e-05,
"loss": 1.1143,
"step": 860
},
{
"epoch": 1.68,
"learning_rate": 3.8661800486618e-05,
"loss": 0.9492,
"step": 861
},
{
"epoch": 1.68,
"learning_rate": 3.860502838605028e-05,
"loss": 1.209,
"step": 862
},
{
"epoch": 1.68,
"learning_rate": 3.854825628548256e-05,
"loss": 0.8208,
"step": 863
},
{
"epoch": 1.68,
"learning_rate": 3.849148418491484e-05,
"loss": 0.4158,
"step": 864
},
{
"epoch": 1.68,
"learning_rate": 3.843471208434711e-05,
"loss": 0.4329,
"step": 865
},
{
"epoch": 1.68,
"learning_rate": 3.83779399837794e-05,
"loss": 0.585,
"step": 866
},
{
"epoch": 1.69,
"learning_rate": 3.8321167883211674e-05,
"loss": 0.5088,
"step": 867
},
{
"epoch": 1.69,
"learning_rate": 3.8264395782643954e-05,
"loss": 0.5562,
"step": 868
},
{
"epoch": 1.69,
"learning_rate": 3.8207623682076235e-05,
"loss": 1.0244,
"step": 869
},
{
"epoch": 1.69,
"learning_rate": 3.8150851581508515e-05,
"loss": 0.6074,
"step": 870
},
{
"epoch": 1.69,
"learning_rate": 3.809407948094079e-05,
"loss": 0.6514,
"step": 871
},
{
"epoch": 1.7,
"learning_rate": 3.8037307380373076e-05,
"loss": 1.0977,
"step": 872
},
{
"epoch": 1.7,
"learning_rate": 3.798053527980535e-05,
"loss": 1.0625,
"step": 873
},
{
"epoch": 1.7,
"learning_rate": 3.792376317923762e-05,
"loss": 0.8379,
"step": 874
},
{
"epoch": 1.7,
"learning_rate": 3.786699107866991e-05,
"loss": 0.8101,
"step": 875
},
{
"epoch": 1.7,
"learning_rate": 3.7810218978102184e-05,
"loss": 0.75,
"step": 876
},
{
"epoch": 1.71,
"learning_rate": 3.7753446877534464e-05,
"loss": 0.8555,
"step": 877
},
{
"epoch": 1.71,
"learning_rate": 3.7696674776966744e-05,
"loss": 0.75,
"step": 878
},
{
"epoch": 1.71,
"learning_rate": 3.7639902676399025e-05,
"loss": 0.8521,
"step": 879
},
{
"epoch": 1.71,
"learning_rate": 3.75831305758313e-05,
"loss": 1.1045,
"step": 880
},
{
"epoch": 1.71,
"learning_rate": 3.7526358475263585e-05,
"loss": 1.3887,
"step": 881
},
{
"epoch": 1.72,
"learning_rate": 3.746958637469586e-05,
"loss": 0.2522,
"step": 882
},
{
"epoch": 1.72,
"learning_rate": 3.741281427412814e-05,
"loss": 1.3838,
"step": 883
},
{
"epoch": 1.72,
"learning_rate": 3.735604217356042e-05,
"loss": 0.9751,
"step": 884
},
{
"epoch": 1.72,
"learning_rate": 3.72992700729927e-05,
"loss": 0.4792,
"step": 885
},
{
"epoch": 1.72,
"learning_rate": 3.7242497972424974e-05,
"loss": 0.4321,
"step": 886
},
{
"epoch": 1.73,
"learning_rate": 3.718572587185726e-05,
"loss": 0.9722,
"step": 887
},
{
"epoch": 1.73,
"learning_rate": 3.7128953771289534e-05,
"loss": 0.562,
"step": 888
},
{
"epoch": 1.73,
"learning_rate": 3.7072181670721815e-05,
"loss": 0.4463,
"step": 889
},
{
"epoch": 1.73,
"learning_rate": 3.701540957015409e-05,
"loss": 1.5498,
"step": 890
},
{
"epoch": 1.73,
"learning_rate": 3.6958637469586375e-05,
"loss": 0.8501,
"step": 891
},
{
"epoch": 1.74,
"learning_rate": 3.690186536901865e-05,
"loss": 0.8066,
"step": 892
},
{
"epoch": 1.74,
"learning_rate": 3.684509326845092e-05,
"loss": 0.9126,
"step": 893
},
{
"epoch": 1.74,
"learning_rate": 3.678832116788321e-05,
"loss": 0.8511,
"step": 894
},
{
"epoch": 1.74,
"learning_rate": 3.673154906731548e-05,
"loss": 0.4231,
"step": 895
},
{
"epoch": 1.74,
"learning_rate": 3.6674776966747764e-05,
"loss": 0.4185,
"step": 896
},
{
"epoch": 1.75,
"learning_rate": 3.6618004866180044e-05,
"loss": 0.8369,
"step": 897
},
{
"epoch": 1.75,
"learning_rate": 3.6561232765612324e-05,
"loss": 0.5913,
"step": 898
},
{
"epoch": 1.75,
"learning_rate": 3.65044606650446e-05,
"loss": 0.9609,
"step": 899
},
{
"epoch": 1.75,
"learning_rate": 3.6447688564476885e-05,
"loss": 0.4048,
"step": 900
},
{
"epoch": 1.75,
"learning_rate": 3.639091646390916e-05,
"loss": 0.5698,
"step": 901
},
{
"epoch": 1.75,
"learning_rate": 3.633414436334144e-05,
"loss": 1.0156,
"step": 902
},
{
"epoch": 1.76,
"learning_rate": 3.627737226277372e-05,
"loss": 0.6201,
"step": 903
},
{
"epoch": 1.76,
"learning_rate": 3.6220600162206e-05,
"loss": 1.6387,
"step": 904
},
{
"epoch": 1.76,
"learning_rate": 3.616382806163827e-05,
"loss": 0.9468,
"step": 905
},
{
"epoch": 1.76,
"learning_rate": 3.610705596107056e-05,
"loss": 0.8638,
"step": 906
},
{
"epoch": 1.76,
"learning_rate": 3.6050283860502834e-05,
"loss": 0.4985,
"step": 907
},
{
"epoch": 1.77,
"learning_rate": 3.5993511759935114e-05,
"loss": 1.2568,
"step": 908
},
{
"epoch": 1.77,
"learning_rate": 3.5936739659367395e-05,
"loss": 0.8823,
"step": 909
},
{
"epoch": 1.77,
"learning_rate": 3.5879967558799675e-05,
"loss": 0.6323,
"step": 910
},
{
"epoch": 1.77,
"learning_rate": 3.582319545823195e-05,
"loss": 0.3994,
"step": 911
},
{
"epoch": 1.77,
"learning_rate": 3.5766423357664236e-05,
"loss": 0.6123,
"step": 912
},
{
"epoch": 1.78,
"learning_rate": 3.570965125709651e-05,
"loss": 0.3184,
"step": 913
},
{
"epoch": 1.78,
"learning_rate": 3.565287915652878e-05,
"loss": 0.5298,
"step": 914
},
{
"epoch": 1.78,
"learning_rate": 3.559610705596107e-05,
"loss": 0.6157,
"step": 915
},
{
"epoch": 1.78,
"learning_rate": 3.5539334955393344e-05,
"loss": 0.5684,
"step": 916
},
{
"epoch": 1.78,
"learning_rate": 3.5482562854825624e-05,
"loss": 0.7129,
"step": 917
},
{
"epoch": 1.79,
"learning_rate": 3.5425790754257904e-05,
"loss": 0.8398,
"step": 918
},
{
"epoch": 1.79,
"learning_rate": 3.5369018653690185e-05,
"loss": 0.4697,
"step": 919
},
{
"epoch": 1.79,
"learning_rate": 3.531224655312246e-05,
"loss": 1.2695,
"step": 920
},
{
"epoch": 1.79,
"learning_rate": 3.5255474452554745e-05,
"loss": 0.4897,
"step": 921
},
{
"epoch": 1.79,
"learning_rate": 3.519870235198702e-05,
"loss": 0.6719,
"step": 922
},
{
"epoch": 1.8,
"learning_rate": 3.51419302514193e-05,
"loss": 0.5254,
"step": 923
},
{
"epoch": 1.8,
"learning_rate": 3.508515815085158e-05,
"loss": 0.4199,
"step": 924
},
{
"epoch": 1.8,
"learning_rate": 3.502838605028386e-05,
"loss": 0.5068,
"step": 925
},
{
"epoch": 1.8,
"learning_rate": 3.4971613949716134e-05,
"loss": 0.4521,
"step": 926
},
{
"epoch": 1.8,
"learning_rate": 3.4914841849148414e-05,
"loss": 0.6313,
"step": 927
},
{
"epoch": 1.81,
"learning_rate": 3.4858069748580694e-05,
"loss": 1.1426,
"step": 928
},
{
"epoch": 1.81,
"learning_rate": 3.4801297648012975e-05,
"loss": 1.0918,
"step": 929
},
{
"epoch": 1.81,
"learning_rate": 3.4744525547445255e-05,
"loss": 0.623,
"step": 930
},
{
"epoch": 1.81,
"learning_rate": 3.4687753446877536e-05,
"loss": 0.7119,
"step": 931
},
{
"epoch": 1.81,
"learning_rate": 3.463098134630981e-05,
"loss": 0.7236,
"step": 932
},
{
"epoch": 1.82,
"learning_rate": 3.457420924574209e-05,
"loss": 0.7139,
"step": 933
},
{
"epoch": 1.82,
"learning_rate": 3.451743714517437e-05,
"loss": 1.2646,
"step": 934
},
{
"epoch": 1.82,
"learning_rate": 3.446066504460665e-05,
"loss": 0.7563,
"step": 935
},
{
"epoch": 1.82,
"learning_rate": 3.440389294403893e-05,
"loss": 0.5947,
"step": 936
},
{
"epoch": 1.82,
"learning_rate": 3.4347120843471204e-05,
"loss": 1.3467,
"step": 937
},
{
"epoch": 1.82,
"learning_rate": 3.4290348742903484e-05,
"loss": 0.6465,
"step": 938
},
{
"epoch": 1.83,
"learning_rate": 3.4233576642335765e-05,
"loss": 0.5835,
"step": 939
},
{
"epoch": 1.83,
"learning_rate": 3.417680454176804e-05,
"loss": 1.332,
"step": 940
},
{
"epoch": 1.83,
"learning_rate": 3.412003244120032e-05,
"loss": 0.9106,
"step": 941
},
{
"epoch": 1.83,
"learning_rate": 3.40632603406326e-05,
"loss": 1.0898,
"step": 942
},
{
"epoch": 1.83,
"learning_rate": 3.400648824006488e-05,
"loss": 0.7256,
"step": 943
},
{
"epoch": 1.84,
"learning_rate": 3.394971613949716e-05,
"loss": 0.6187,
"step": 944
},
{
"epoch": 1.84,
"learning_rate": 3.389294403892944e-05,
"loss": 0.7061,
"step": 945
},
{
"epoch": 1.84,
"learning_rate": 3.3836171938361714e-05,
"loss": 0.9189,
"step": 946
},
{
"epoch": 1.84,
"learning_rate": 3.3779399837793994e-05,
"loss": 0.8999,
"step": 947
},
{
"epoch": 1.84,
"learning_rate": 3.3722627737226274e-05,
"loss": 0.7769,
"step": 948
},
{
"epoch": 1.85,
"learning_rate": 3.3665855636658555e-05,
"loss": 0.627,
"step": 949
},
{
"epoch": 1.85,
"learning_rate": 3.3609083536090835e-05,
"loss": 0.7388,
"step": 950
},
{
"epoch": 1.85,
"learning_rate": 3.3552311435523116e-05,
"loss": 0.3435,
"step": 951
},
{
"epoch": 1.85,
"learning_rate": 3.349553933495539e-05,
"loss": 0.9785,
"step": 952
},
{
"epoch": 1.85,
"learning_rate": 3.343876723438767e-05,
"loss": 0.6763,
"step": 953
},
{
"epoch": 1.86,
"learning_rate": 3.338199513381995e-05,
"loss": 0.5811,
"step": 954
},
{
"epoch": 1.86,
"learning_rate": 3.332522303325223e-05,
"loss": 1.1953,
"step": 955
},
{
"epoch": 1.86,
"learning_rate": 3.326845093268451e-05,
"loss": 0.7393,
"step": 956
},
{
"epoch": 1.86,
"learning_rate": 3.3211678832116784e-05,
"loss": 0.7554,
"step": 957
},
{
"epoch": 1.86,
"learning_rate": 3.3154906731549064e-05,
"loss": 0.5854,
"step": 958
},
{
"epoch": 1.87,
"learning_rate": 3.3098134630981345e-05,
"loss": 0.1134,
"step": 959
},
{
"epoch": 1.87,
"learning_rate": 3.304136253041362e-05,
"loss": 1.1055,
"step": 960
},
{
"epoch": 1.87,
"learning_rate": 3.29845904298459e-05,
"loss": 0.7324,
"step": 961
},
{
"epoch": 1.87,
"learning_rate": 3.292781832927818e-05,
"loss": 0.6943,
"step": 962
},
{
"epoch": 1.87,
"learning_rate": 3.287104622871046e-05,
"loss": 0.4536,
"step": 963
},
{
"epoch": 1.88,
"learning_rate": 3.281427412814274e-05,
"loss": 0.8086,
"step": 964
},
{
"epoch": 1.88,
"learning_rate": 3.275750202757502e-05,
"loss": 0.5688,
"step": 965
},
{
"epoch": 1.88,
"learning_rate": 3.2700729927007294e-05,
"loss": 0.7705,
"step": 966
},
{
"epoch": 1.88,
"learning_rate": 3.2643957826439574e-05,
"loss": 0.5151,
"step": 967
},
{
"epoch": 1.88,
"learning_rate": 3.2587185725871854e-05,
"loss": 0.897,
"step": 968
},
{
"epoch": 1.89,
"learning_rate": 3.2530413625304135e-05,
"loss": 1.124,
"step": 969
},
{
"epoch": 1.89,
"learning_rate": 3.2473641524736415e-05,
"loss": 0.5747,
"step": 970
},
{
"epoch": 1.89,
"learning_rate": 3.2416869424168696e-05,
"loss": 0.5112,
"step": 971
},
{
"epoch": 1.89,
"learning_rate": 3.236009732360097e-05,
"loss": 0.3132,
"step": 972
},
{
"epoch": 1.89,
"learning_rate": 3.230332522303325e-05,
"loss": 0.875,
"step": 973
},
{
"epoch": 1.89,
"learning_rate": 3.224655312246553e-05,
"loss": 0.5933,
"step": 974
},
{
"epoch": 1.9,
"learning_rate": 3.218978102189781e-05,
"loss": 0.7305,
"step": 975
},
{
"epoch": 1.9,
"learning_rate": 3.213300892133009e-05,
"loss": 0.9233,
"step": 976
},
{
"epoch": 1.9,
"learning_rate": 3.2076236820762364e-05,
"loss": 1.7334,
"step": 977
},
{
"epoch": 1.9,
"learning_rate": 3.2019464720194645e-05,
"loss": 0.9136,
"step": 978
},
{
"epoch": 1.9,
"learning_rate": 3.1962692619626925e-05,
"loss": 0.7993,
"step": 979
},
{
"epoch": 1.91,
"learning_rate": 3.19059205190592e-05,
"loss": 1.1094,
"step": 980
},
{
"epoch": 1.91,
"learning_rate": 3.184914841849148e-05,
"loss": 0.321,
"step": 981
},
{
"epoch": 1.91,
"learning_rate": 3.179237631792376e-05,
"loss": 1.1719,
"step": 982
},
{
"epoch": 1.91,
"learning_rate": 3.173560421735604e-05,
"loss": 0.6455,
"step": 983
},
{
"epoch": 1.91,
"learning_rate": 3.167883211678832e-05,
"loss": 0.4949,
"step": 984
},
{
"epoch": 1.92,
"learning_rate": 3.16220600162206e-05,
"loss": 0.856,
"step": 985
},
{
"epoch": 1.92,
"learning_rate": 3.1565287915652874e-05,
"loss": 0.5679,
"step": 986
},
{
"epoch": 1.92,
"learning_rate": 3.1508515815085154e-05,
"loss": 0.7729,
"step": 987
},
{
"epoch": 1.92,
"learning_rate": 3.1451743714517435e-05,
"loss": 0.668,
"step": 988
},
{
"epoch": 1.92,
"learning_rate": 3.1394971613949715e-05,
"loss": 1.25,
"step": 989
},
{
"epoch": 1.93,
"learning_rate": 3.1338199513381995e-05,
"loss": 0.7476,
"step": 990
},
{
"epoch": 1.93,
"learning_rate": 3.128142741281427e-05,
"loss": 0.7188,
"step": 991
},
{
"epoch": 1.93,
"learning_rate": 3.122465531224655e-05,
"loss": 0.4893,
"step": 992
},
{
"epoch": 1.93,
"learning_rate": 3.116788321167883e-05,
"loss": 0.7192,
"step": 993
},
{
"epoch": 1.93,
"learning_rate": 3.111111111111111e-05,
"loss": 0.6304,
"step": 994
},
{
"epoch": 1.94,
"learning_rate": 3.105433901054339e-05,
"loss": 0.4111,
"step": 995
},
{
"epoch": 1.94,
"learning_rate": 3.099756690997567e-05,
"loss": 0.9019,
"step": 996
},
{
"epoch": 1.94,
"learning_rate": 3.0940794809407944e-05,
"loss": 0.6289,
"step": 997
},
{
"epoch": 1.94,
"learning_rate": 3.0884022708840225e-05,
"loss": 0.5425,
"step": 998
},
{
"epoch": 1.94,
"learning_rate": 3.0827250608272505e-05,
"loss": 0.3123,
"step": 999
},
{
"epoch": 1.95,
"learning_rate": 3.077047850770478e-05,
"loss": 0.3379,
"step": 1000
},
{
"epoch": 1.95,
"learning_rate": 3.071370640713706e-05,
"loss": 0.959,
"step": 1001
},
{
"epoch": 1.95,
"learning_rate": 3.065693430656934e-05,
"loss": 0.541,
"step": 1002
},
{
"epoch": 1.95,
"learning_rate": 3.060016220600162e-05,
"loss": 0.7529,
"step": 1003
},
{
"epoch": 1.95,
"learning_rate": 3.05433901054339e-05,
"loss": 0.2869,
"step": 1004
},
{
"epoch": 1.96,
"learning_rate": 3.048661800486618e-05,
"loss": 1.0674,
"step": 1005
},
{
"epoch": 1.96,
"learning_rate": 3.0429845904298454e-05,
"loss": 0.6401,
"step": 1006
},
{
"epoch": 1.96,
"learning_rate": 3.0373073803730734e-05,
"loss": 0.6665,
"step": 1007
},
{
"epoch": 1.96,
"learning_rate": 3.0316301703163015e-05,
"loss": 1.0078,
"step": 1008
},
{
"epoch": 1.96,
"learning_rate": 3.025952960259529e-05,
"loss": 1.085,
"step": 1009
},
{
"epoch": 1.96,
"learning_rate": 3.0202757502027572e-05,
"loss": 0.626,
"step": 1010
},
{
"epoch": 1.97,
"learning_rate": 3.014598540145985e-05,
"loss": 0.9785,
"step": 1011
},
{
"epoch": 1.97,
"learning_rate": 3.008921330089213e-05,
"loss": 0.895,
"step": 1012
},
{
"epoch": 1.97,
"learning_rate": 3.003244120032441e-05,
"loss": 1.0342,
"step": 1013
},
{
"epoch": 1.97,
"learning_rate": 2.9975669099756687e-05,
"loss": 0.8584,
"step": 1014
},
{
"epoch": 1.97,
"learning_rate": 2.9918896999188967e-05,
"loss": 0.7554,
"step": 1015
},
{
"epoch": 1.98,
"learning_rate": 2.9862124898621247e-05,
"loss": 0.9302,
"step": 1016
},
{
"epoch": 1.98,
"learning_rate": 2.9805352798053524e-05,
"loss": 0.5811,
"step": 1017
},
{
"epoch": 1.98,
"learning_rate": 2.9748580697485805e-05,
"loss": 1.125,
"step": 1018
},
{
"epoch": 1.98,
"learning_rate": 2.9691808596918085e-05,
"loss": 1.1416,
"step": 1019
},
{
"epoch": 1.98,
"learning_rate": 2.9635036496350362e-05,
"loss": 1.1533,
"step": 1020
},
{
"epoch": 1.99,
"learning_rate": 2.9578264395782642e-05,
"loss": 0.9238,
"step": 1021
},
{
"epoch": 1.99,
"learning_rate": 2.9521492295214923e-05,
"loss": 1.2266,
"step": 1022
},
{
"epoch": 1.99,
"learning_rate": 2.94647201946472e-05,
"loss": 0.272,
"step": 1023
},
{
"epoch": 1.99,
"learning_rate": 2.940794809407948e-05,
"loss": 0.8979,
"step": 1024
},
{
"epoch": 1.99,
"learning_rate": 2.935117599351176e-05,
"loss": 0.835,
"step": 1025
},
{
"epoch": 2.0,
"learning_rate": 2.9294403892944034e-05,
"loss": 0.4558,
"step": 1026
},
{
"epoch": 2.0,
"learning_rate": 2.9237631792376314e-05,
"loss": 0.708,
"step": 1027
},
{
"epoch": 2.0,
"learning_rate": 2.9180859691808598e-05,
"loss": 0.709,
"step": 1028
},
{
"epoch": 2.0,
"learning_rate": 2.912408759124087e-05,
"loss": 0.4038,
"step": 1029
},
{
"epoch": 2.0,
"learning_rate": 2.9067315490673152e-05,
"loss": 0.6201,
"step": 1030
},
{
"epoch": 2.01,
"learning_rate": 2.901054339010543e-05,
"loss": 0.4866,
"step": 1031
},
{
"epoch": 2.01,
"learning_rate": 2.895377128953771e-05,
"loss": 0.1422,
"step": 1032
},
{
"epoch": 2.01,
"learning_rate": 2.889699918896999e-05,
"loss": 0.4102,
"step": 1033
},
{
"epoch": 2.01,
"learning_rate": 2.8840227088402267e-05,
"loss": 0.3677,
"step": 1034
},
{
"epoch": 2.01,
"learning_rate": 2.8783454987834547e-05,
"loss": 0.3108,
"step": 1035
},
{
"epoch": 2.02,
"learning_rate": 2.8726682887266827e-05,
"loss": 0.8662,
"step": 1036
},
{
"epoch": 2.02,
"learning_rate": 2.8669910786699104e-05,
"loss": 0.9453,
"step": 1037
},
{
"epoch": 2.02,
"learning_rate": 2.8613138686131385e-05,
"loss": 0.8242,
"step": 1038
},
{
"epoch": 2.02,
"learning_rate": 2.8556366585563665e-05,
"loss": 0.5547,
"step": 1039
},
{
"epoch": 2.02,
"learning_rate": 2.8499594484995942e-05,
"loss": 0.2257,
"step": 1040
},
{
"epoch": 2.03,
"learning_rate": 2.8442822384428222e-05,
"loss": 0.6704,
"step": 1041
},
{
"epoch": 2.03,
"learning_rate": 2.8386050283860503e-05,
"loss": 0.5303,
"step": 1042
},
{
"epoch": 2.03,
"learning_rate": 2.832927818329278e-05,
"loss": 0.7231,
"step": 1043
},
{
"epoch": 2.03,
"learning_rate": 2.827250608272506e-05,
"loss": 0.2352,
"step": 1044
},
{
"epoch": 2.03,
"learning_rate": 2.821573398215734e-05,
"loss": 0.4023,
"step": 1045
},
{
"epoch": 2.04,
"learning_rate": 2.8158961881589617e-05,
|
"loss": 0.8174, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.8102189781021898e-05, |
|
"loss": 0.5205, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.804541768045417e-05, |
|
"loss": 0.5718, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.798864557988645e-05, |
|
"loss": 1.1748, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.7931873479318732e-05, |
|
"loss": 0.479, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.787510137875101e-05, |
|
"loss": 0.7202, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.781832927818329e-05, |
|
"loss": 0.647, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.776155717761557e-05, |
|
"loss": 0.5723, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.7704785077047847e-05, |
|
"loss": 0.5718, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.7648012976480127e-05, |
|
"loss": 0.5396, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.7591240875912407e-05, |
|
"loss": 0.2593, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.7534468775344684e-05, |
|
"loss": 0.5273, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.7477696674776965e-05, |
|
"loss": 0.439, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.7420924574209245e-05, |
|
"loss": 0.3242, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.7364152473641522e-05, |
|
"loss": 0.9819, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.7307380373073802e-05, |
|
"loss": 0.4307, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.7250608272506083e-05, |
|
"loss": 0.5859, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.719383617193836e-05, |
|
"loss": 0.5366, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.713706407137064e-05, |
|
"loss": 0.8901, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.708029197080292e-05, |
|
"loss": 1.3379, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.7023519870235197e-05, |
|
"loss": 0.687, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.6966747769667478e-05, |
|
"loss": 0.4709, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.690997566909975e-05, |
|
"loss": 0.4983, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.685320356853203e-05, |
|
"loss": 0.2065, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.6796431467964312e-05, |
|
"loss": 0.5854, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.673965936739659e-05, |
|
"loss": 0.4934, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.668288726682887e-05, |
|
"loss": 0.4431, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.662611516626115e-05, |
|
"loss": 0.4934, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.6569343065693427e-05, |
|
"loss": 1.1514, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.6512570965125707e-05, |
|
"loss": 1.3037, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.6455798864557987e-05, |
|
"loss": 0.5269, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.6399026763990264e-05, |
|
"loss": 0.2634, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.6342254663422545e-05, |
|
"loss": 0.3381, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.6285482562854825e-05, |
|
"loss": 0.1937, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.6228710462287102e-05, |
|
"loss": 0.2969, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.6171938361719382e-05, |
|
"loss": 0.6128, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.6115166261151663e-05, |
|
"loss": 0.71, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.605839416058394e-05, |
|
"loss": 0.4531, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.600162206001622e-05, |
|
"loss": 0.4827, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.5944849959448497e-05, |
|
"loss": 0.4341, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.5888077858880777e-05, |
|
"loss": 0.5361, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.5831305758313058e-05, |
|
"loss": 0.3362, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.577453365774533e-05, |
|
"loss": 0.3809, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.571776155717761e-05, |
|
"loss": 0.147, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.5660989456609892e-05, |
|
"loss": 1.0244, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.560421735604217e-05, |
|
"loss": 1.2217, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.554744525547445e-05, |
|
"loss": 0.3718, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.549067315490673e-05, |
|
"loss": 0.5864, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.5433901054339007e-05, |
|
"loss": 0.5439, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.5377128953771287e-05, |
|
"loss": 0.3328, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.5320356853203567e-05, |
|
"loss": 0.3213, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.5263584752635844e-05, |
|
"loss": 0.3284, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.5206812652068125e-05, |
|
"loss": 0.5542, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.5150040551500405e-05, |
|
"loss": 1.1582, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.5093268450932682e-05, |
|
"loss": 0.4353, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.5036496350364962e-05, |
|
"loss": 0.4209, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.4979724249797243e-05, |
|
"loss": 0.9199, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.492295214922952e-05, |
|
"loss": 0.5474, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.48661800486618e-05, |
|
"loss": 0.9546, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.4809407948094077e-05, |
|
"loss": 0.488, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.4752635847526357e-05, |
|
"loss": 0.4436, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.4695863746958638e-05, |
|
"loss": 0.5791, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.463909164639091e-05, |
|
"loss": 0.6582, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.458231954582319e-05, |
|
"loss": 0.8828, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.4525547445255472e-05, |
|
"loss": 0.2367, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.446877534468775e-05, |
|
"loss": 1.0918, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.441200324412003e-05, |
|
"loss": 0.4905, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.435523114355231e-05, |
|
"loss": 0.7236, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.4298459042984587e-05, |
|
"loss": 0.3337, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.4241686942416867e-05, |
|
"loss": 0.4087, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.4184914841849147e-05, |
|
"loss": 0.281, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.4128142741281424e-05, |
|
"loss": 1.0273, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.4071370640713705e-05, |
|
"loss": 0.7656, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.4014598540145985e-05, |
|
"loss": 1.0479, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.3957826439578262e-05, |
|
"loss": 0.6377, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.3901054339010542e-05, |
|
"loss": 0.363, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.384428223844282e-05, |
|
"loss": 0.4946, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.37875101378751e-05, |
|
"loss": 1.1221, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.373073803730738e-05, |
|
"loss": 0.5117, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.3673965936739657e-05, |
|
"loss": 0.5752, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.3617193836171937e-05, |
|
"loss": 0.3254, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.3560421735604218e-05, |
|
"loss": 0.4553, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.350364963503649e-05, |
|
"loss": 0.385, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.3446877534468772e-05, |
|
"loss": 0.3816, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.3390105433901052e-05, |
|
"loss": 0.5527, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.333333333333333e-05, |
|
"loss": 0.9141, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.327656123276561e-05, |
|
"loss": 0.5244, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.321978913219789e-05, |
|
"loss": 0.6611, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.3163017031630167e-05, |
|
"loss": 0.205, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.3106244931062447e-05, |
|
"loss": 0.8145, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.3049472830494727e-05, |
|
"loss": 0.2952, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.2992700729927004e-05, |
|
"loss": 0.7324, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.2935928629359285e-05, |
|
"loss": 0.915, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.2879156528791565e-05, |
|
"loss": 0.6216, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.2822384428223842e-05, |
|
"loss": 0.5337, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.2765612327656122e-05, |
|
"loss": 0.4937, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.27088402270884e-05, |
|
"loss": 0.5557, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.265206812652068e-05, |
|
"loss": 0.4761, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.259529602595296e-05, |
|
"loss": 0.4863, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.2538523925385237e-05, |
|
"loss": 0.3481, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.2481751824817517e-05, |
|
"loss": 0.5713, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.2424979724249798e-05, |
|
"loss": 0.3926, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.236820762368207e-05, |
|
"loss": 0.4314, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.2311435523114352e-05, |
|
"loss": 0.5977, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.2254663422546632e-05, |
|
"loss": 0.4341, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.219789132197891e-05, |
|
"loss": 0.8169, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.214111922141119e-05, |
|
"loss": 0.876, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.208434712084347e-05, |
|
"loss": 0.6338, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.2027575020275747e-05, |
|
"loss": 0.4614, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.1970802919708027e-05, |
|
"loss": 0.54, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.1914030819140307e-05, |
|
"loss": 0.2418, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.1857258718572584e-05, |
|
"loss": 0.8965, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.1800486618004865e-05, |
|
"loss": 0.174, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.1743714517437145e-05, |
|
"loss": 0.5371, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.1686942416869422e-05, |
|
"loss": 0.7256, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.1630170316301702e-05, |
|
"loss": 0.6338, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.157339821573398e-05, |
|
"loss": 0.8237, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.151662611516626e-05, |
|
"loss": 0.7646, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.145985401459854e-05, |
|
"loss": 0.2607, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.1403081914030817e-05, |
|
"loss": 0.6035, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.1346309813463097e-05, |
|
"loss": 0.3855, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.1289537712895378e-05, |
|
"loss": 0.4417, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.123276561232765e-05, |
|
"loss": 0.8057, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.1175993511759932e-05, |
|
"loss": 0.4263, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.1119221411192212e-05, |
|
"loss": 0.7349, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.106244931062449e-05, |
|
"loss": 0.6836, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.100567721005677e-05, |
|
"loss": 0.3931, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.094890510948905e-05, |
|
"loss": 0.6533, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.0892133008921327e-05, |
|
"loss": 1.002, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.0835360908353607e-05, |
|
"loss": 0.5088, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.0778588807785887e-05, |
|
"loss": 0.4814, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.0721816707218164e-05, |
|
"loss": 1.1133, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.0665044606650445e-05, |
|
"loss": 0.6992, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.0608272506082722e-05, |
|
"loss": 0.4233, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.0551500405515002e-05, |
|
"loss": 0.9634, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.0494728304947282e-05, |
|
"loss": 0.8037, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.043795620437956e-05, |
|
"loss": 0.2686, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.038118410381184e-05, |
|
"loss": 0.625, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.032441200324412e-05, |
|
"loss": 0.8828, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.0267639902676397e-05, |
|
"loss": 0.3313, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.0210867802108678e-05, |
|
"loss": 0.6484, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.0154095701540958e-05, |
|
"loss": 0.8306, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.009732360097323e-05, |
|
"loss": 0.8872, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.0040551500405512e-05, |
|
"loss": 0.5132, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.9983779399837792e-05, |
|
"loss": 0.2336, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.992700729927007e-05, |
|
"loss": 0.5596, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.987023519870235e-05, |
|
"loss": 0.5449, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.981346309813463e-05, |
|
"loss": 0.5312, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.9756690997566907e-05, |
|
"loss": 0.2983, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.9699918896999187e-05, |
|
"loss": 0.2976, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.9643146796431468e-05, |
|
"loss": 0.3208, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.9586374695863744e-05, |
|
"loss": 0.4143, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.9529602595296025e-05, |
|
"loss": 0.875, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.9472830494728302e-05, |
|
"loss": 0.6592, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.9416058394160582e-05, |
|
"loss": 1.1064, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.9359286293592863e-05, |
|
"loss": 0.6777, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.930251419302514e-05, |
|
"loss": 0.4482, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.924574209245742e-05, |
|
"loss": 0.8452, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.91889699918897e-05, |
|
"loss": 0.2957, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.9132197891321977e-05, |
|
"loss": 0.6172, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.9075425790754258e-05, |
|
"loss": 0.5103, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.9018653690186538e-05, |
|
"loss": 0.4517, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.896188158961881e-05, |
|
"loss": 0.3567, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.8905109489051092e-05, |
|
"loss": 0.6704, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.8848337388483372e-05, |
|
"loss": 0.6201, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.879156528791565e-05, |
|
"loss": 0.4514, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.873479318734793e-05, |
|
"loss": 0.8965, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.867802108678021e-05, |
|
"loss": 0.4563, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.8621248986212487e-05, |
|
"loss": 0.4766, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.8564476885644767e-05, |
|
"loss": 0.1726, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.8507704785077044e-05, |
|
"loss": 0.45, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.8450932684509325e-05, |
|
"loss": 0.8271, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.8394160583941605e-05, |
|
"loss": 0.6323, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.8337388483373882e-05, |
|
"loss": 0.8447, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.8280616382806162e-05, |
|
"loss": 0.2271, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.8223844282238443e-05, |
|
"loss": 0.7334, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.816707218167072e-05, |
|
"loss": 0.5259, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.8110300081103e-05, |
|
"loss": 0.8145, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.805352798053528e-05, |
|
"loss": 0.998, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.7996755879967557e-05, |
|
"loss": 0.563, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.7939983779399838e-05, |
|
"loss": 0.7578, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.7883211678832118e-05, |
|
"loss": 0.9736, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.782643957826439e-05, |
|
"loss": 0.5923, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.7769667477696672e-05, |
|
"loss": 0.7061, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.7712895377128952e-05, |
|
"loss": 0.5552, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.765612327656123e-05, |
|
"loss": 0.5576, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.759935117599351e-05, |
|
"loss": 0.748, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.754257907542579e-05, |
|
"loss": 0.3291, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.7485806974858067e-05, |
|
"loss": 0.9028, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.7429034874290347e-05, |
|
"loss": 0.5415, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.7372262773722628e-05, |
|
"loss": 0.4973, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.7315490673154905e-05, |
|
"loss": 0.4338, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.7258718572587185e-05, |
|
"loss": 0.4854, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.7201946472019465e-05, |
|
"loss": 0.6221, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.7145174371451742e-05, |
|
"loss": 0.8428, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.708840227088402e-05, |
|
"loss": 0.8164, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.70316301703163e-05, |
|
"loss": 0.4241, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.697485806974858e-05, |
|
"loss": 0.4199, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.6918085969180857e-05, |
|
"loss": 0.4744, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.6861313868613137e-05, |
|
"loss": 0.1241, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.6804541768045418e-05, |
|
"loss": 0.2839, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.6747769667477695e-05, |
|
"loss": 0.6265, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.6690997566909975e-05, |
|
"loss": 0.2062, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.6634225466342255e-05, |
|
"loss": 0.5576, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.6577453365774532e-05, |
|
"loss": 0.3423, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.652068126520681e-05, |
|
"loss": 0.9883, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.646390916463909e-05, |
|
"loss": 0.6357, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.640713706407137e-05, |
|
"loss": 0.6514, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.6350364963503647e-05, |
|
"loss": 0.292, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.6293592862935927e-05, |
|
"loss": 0.4519, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.6236820762368208e-05, |
|
"loss": 0.5962, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6180048661800485e-05, |
|
"loss": 0.3174, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6123276561232765e-05, |
|
"loss": 0.5894, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6066504460665045e-05, |
|
"loss": 0.6484, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6009732360097322e-05, |
|
"loss": 0.3057, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.59529602595296e-05, |
|
"loss": 0.4146, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.589618815896188e-05, |
|
"loss": 0.4551, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.583941605839416e-05, |
|
"loss": 0.1633, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.5782643957826437e-05, |
|
"loss": 0.4495, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.5725871857258717e-05, |
|
"loss": 0.2476, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.5669099756690998e-05, |
|
"loss": 0.6172, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.5612327656123275e-05, |
|
"loss": 0.4644, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5555555555555555e-05, |
|
"loss": 0.4917, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5498783454987835e-05, |
|
"loss": 1.1738, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5442011354420112e-05, |
|
"loss": 0.2059, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.538523925385239e-05, |
|
"loss": 0.4333, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.532846715328467e-05, |
|
"loss": 0.2751, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.527169505271695e-05, |
|
"loss": 0.6938, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5214922952149227e-05, |
|
"loss": 0.8027, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5158150851581507e-05, |
|
"loss": 0.5044, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5101378751013786e-05, |
|
"loss": 0.3484, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5044606650446065e-05, |
|
"loss": 0.5415, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4987834549878343e-05, |
|
"loss": 0.4456, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4931062449310624e-05, |
|
"loss": 0.2499, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4874290348742902e-05, |
|
"loss": 0.8345, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4817518248175181e-05, |
|
"loss": 0.5835, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4760746147607461e-05, |
|
"loss": 0.4609, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.470397404703974e-05, |
|
"loss": 0.7793, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4647201946472017e-05, |
|
"loss": 1.123, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4590429845904299e-05, |
|
"loss": 0.7969, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4533657745336576e-05, |
|
"loss": 0.4326, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4476885644768855e-05, |
|
"loss": 0.7173, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.4420113544201133e-05, |
|
"loss": 0.8926, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.4363341443633414e-05, |
|
"loss": 0.4236, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.4306569343065692e-05, |
|
"loss": 0.5669, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.4249797242497971e-05, |
|
"loss": 0.4136, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.4193025141930251e-05, |
|
"loss": 0.8345, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.413625304136253e-05, |
|
"loss": 0.6509, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.4079480940794809e-05, |
|
"loss": 0.3733, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.4022708840227086e-05, |
|
"loss": 0.3032, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3965936739659366e-05, |
|
"loss": 0.689, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3909164639091645e-05, |
|
"loss": 0.4873, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.3852392538523923e-05, |
|
"loss": 0.2715, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.3795620437956204e-05, |
|
"loss": 0.4514, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.3738848337388482e-05, |
|
"loss": 0.5317, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.3682076236820761e-05, |
|
"loss": 0.4238, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.3625304136253041e-05, |
|
"loss": 0.2284, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.356853203568532e-05, |
|
"loss": 0.8516, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3511759935117599e-05, |
|
"loss": 0.5547, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3454987834549876e-05, |
|
"loss": 1.2266, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3398215733982156e-05, |
|
"loss": 0.2224, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3341443633414435e-05, |
|
"loss": 0.6514, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3284671532846713e-05, |
|
"loss": 0.2062, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3227899432278994e-05, |
|
"loss": 0.4329, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3171127331711272e-05, |
|
"loss": 0.7588, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3114355231143551e-05, |
|
"loss": 0.2451, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3057583130575831e-05, |
|
"loss": 0.689, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.300081103000811e-05, |
|
"loss": 0.4387, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2944038929440389e-05, |
|
"loss": 0.2125, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2887266828872666e-05, |
|
"loss": 0.5562, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2830494728304946e-05, |
|
"loss": 0.5137, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2773722627737225e-05, |
|
"loss": 0.958, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2716950527169503e-05, |
|
"loss": 0.8799, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2660178426601784e-05, |
|
"loss": 0.7983, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2603406326034062e-05, |
|
"loss": 0.4785, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2546634225466341e-05, |
|
"loss": 0.2351, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2489862124898621e-05, |
|
"loss": 0.4695, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.24330900243309e-05, |
|
"loss": 0.604, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.2376317923763179e-05, |
|
"loss": 0.6479, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.2319545823195456e-05, |
|
"loss": 1.166, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.2262773722627736e-05, |
|
"loss": 0.4951, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.2206001622060015e-05, |
|
"loss": 0.3142, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.2149229521492293e-05, |
|
"loss": 0.2808, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.2092457420924574e-05, |
|
"loss": 0.4324, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.2035685320356852e-05, |
|
"loss": 1.1162, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.1978913219789131e-05, |
|
"loss": 0.9199, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.192214111922141e-05, |
|
"loss": 0.4338, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.186536901865369e-05, |
|
"loss": 0.7217, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.1808596918085969e-05, |
|
"loss": 0.1216, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.1751824817518246e-05, |
|
"loss": 0.5801, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.1695052716950526e-05, |
|
"loss": 0.5137, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.1638280616382805e-05, |
|
"loss": 0.5264, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.1581508515815083e-05, |
|
"loss": 0.5063, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.1524736415247364e-05, |
|
"loss": 0.6367, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.1467964314679642e-05, |
|
"loss": 0.7559, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.1411192214111921e-05, |
|
"loss": 0.6006, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.13544201135442e-05, |
|
"loss": 0.5874, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.129764801297648e-05, |
|
"loss": 0.5439, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.1240875912408759e-05, |
|
"loss": 0.4546, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.1184103811841036e-05, |
|
"loss": 0.4595, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.1127331711273316e-05, |
|
"loss": 0.457, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.1070559610705595e-05, |
|
"loss": 0.4153, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.1013787510137873e-05, |
|
"loss": 0.606, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.0957015409570154e-05, |
|
"loss": 0.2664, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.0900243309002432e-05, |
|
"loss": 0.3906, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.0843471208434711e-05, |
|
"loss": 0.2925, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.078669910786699e-05, |
|
"loss": 0.8638, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.072992700729927e-05, |
|
"loss": 0.4258, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.0673154906731549e-05, |
|
"loss": 0.9253, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.0616382806163826e-05, |
|
"loss": 0.5903, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.0559610705596106e-05, |
|
"loss": 0.3267, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.0502838605028385e-05, |
|
"loss": 0.6777, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.0446066504460663e-05, |
|
"loss": 0.5342, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.0389294403892944e-05, |
|
"loss": 0.7773, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.0332522303325222e-05, |
|
"loss": 0.3628, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.0275750202757501e-05, |
|
"loss": 0.4507, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.021897810218978e-05, |
|
"loss": 0.5913, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.016220600162206e-05, |
|
"loss": 0.1931, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.0105433901054339e-05, |
|
"loss": 0.5566, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.0048661800486616e-05, |
|
"loss": 0.5449, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.991889699918896e-06, |
|
"loss": 0.2639, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.935117599351175e-06, |
|
"loss": 0.4834, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.878345498783453e-06, |
|
"loss": 0.7046, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.821573398215734e-06, |
|
"loss": 0.7881, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.764801297648012e-06, |
|
"loss": 0.6787, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.708029197080291e-06, |
|
"loss": 0.3464, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.65125709651257e-06, |
|
"loss": 0.5957, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.59448499594485e-06, |
|
"loss": 0.5264, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.537712895377129e-06, |
|
"loss": 0.1837, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.480940794809406e-06, |
|
"loss": 0.4675, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.424168694241686e-06, |
|
"loss": 1.3447, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.367396593673965e-06, |
|
"loss": 0.7939, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.310624493106243e-06, |
|
"loss": 0.4553, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.253852392538522e-06, |
|
"loss": 0.5186, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.197080291970802e-06, |
|
"loss": 0.6504, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.140308191403081e-06, |
|
"loss": 0.771, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.08353609083536e-06, |
|
"loss": 0.9619, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.02676399026764e-06, |
|
"loss": 0.7549, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 8.969991889699919e-06, |
|
"loss": 0.6831, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 8.913219789132196e-06, |
|
"loss": 0.6719, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 8.856447688564476e-06, |
|
"loss": 0.2435, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 8.799675587996755e-06, |
|
"loss": 0.394, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 8.742903487429033e-06, |
|
"loss": 0.5737, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 8.686131386861314e-06, |
|
"loss": 0.2098, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 8.629359286293592e-06, |
|
"loss": 0.8467, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 8.572587185725871e-06, |
|
"loss": 0.6816, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 8.51581508515815e-06, |
|
"loss": 0.2177, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 8.459042984590428e-06, |
|
"loss": 0.6748, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 8.402270884022709e-06, |
|
"loss": 0.4956, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 8.345498783454987e-06, |
|
"loss": 0.3037, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 8.288726682887266e-06, |
|
"loss": 0.8867, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 8.231954582319545e-06, |
|
"loss": 0.8599, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 8.175182481751823e-06, |
|
"loss": 0.7173, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 8.118410381184104e-06, |
|
"loss": 0.6348, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 8.061638280616382e-06, |
|
"loss": 0.416, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 8.004866180048661e-06, |
|
"loss": 0.3101, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 7.94809407948094e-06, |
|
"loss": 0.6797, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 7.891321978913218e-06, |
|
"loss": 0.6694, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 7.834549878345499e-06, |
|
"loss": 0.6025, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 7.777777777777777e-06, |
|
"loss": 1.0098, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 7.721005677210056e-06, |
|
"loss": 0.2991, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 7.664233576642335e-06, |
|
"loss": 0.3538, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 7.6074614760746135e-06, |
|
"loss": 0.2712, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 7.550689375506893e-06, |
|
"loss": 0.4722, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 7.493917274939172e-06, |
|
"loss": 0.7329, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.437145174371451e-06, |
|
"loss": 0.9937, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.380373073803731e-06, |
|
"loss": 0.345, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.3236009732360085e-06, |
|
"loss": 0.4797, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.266828872668288e-06, |
|
"loss": 0.8823, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.210056772100567e-06, |
|
"loss": 0.834, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.153284671532846e-06, |
|
"loss": 0.501, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.096512570965126e-06, |
|
"loss": 0.8916, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.039740470397404e-06, |
|
"loss": 0.5918, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.982968369829683e-06, |
|
"loss": 0.2996, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.926196269261962e-06, |
|
"loss": 0.4622, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.869424168694241e-06, |
|
"loss": 0.4446, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.812652068126521e-06, |
|
"loss": 0.251, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.755879967558799e-06, |
|
"loss": 0.8315, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.699107866991078e-06, |
|
"loss": 0.5166, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.642335766423357e-06, |
|
"loss": 0.6924, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.585563665855636e-06, |
|
"loss": 0.3486, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.528791565287916e-06, |
|
"loss": 0.3926, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.472019464720194e-06, |
|
"loss": 0.9673, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.415247364152473e-06, |
|
"loss": 0.8521, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.358475263584752e-06, |
|
"loss": 0.6758, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.301703163017031e-06, |
|
"loss": 0.6196, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.244931062449311e-06, |
|
"loss": 0.489, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.188158961881589e-06, |
|
"loss": 0.6602, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.131386861313868e-06, |
|
"loss": 0.2769, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.074614760746147e-06, |
|
"loss": 0.2961, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.017842660178426e-06, |
|
"loss": 0.6484, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.961070559610705e-06, |
|
"loss": 1.2812, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.904298459042984e-06, |
|
"loss": 1.0889, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.847526358475263e-06, |
|
"loss": 0.3257, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.790754257907542e-06, |
|
"loss": 0.6099, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.733982157339821e-06, |
|
"loss": 0.6045, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.6772100567721e-06, |
|
"loss": 0.7598, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.620437956204379e-06, |
|
"loss": 0.4126, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.563665855636658e-06, |
|
"loss": 0.2166, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.506893755068937e-06, |
|
"loss": 0.3916, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.450121654501216e-06, |
|
"loss": 0.5898, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.393349553933495e-06, |
|
"loss": 0.7783, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.336577453365774e-06, |
|
"loss": 0.5337, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.279805352798053e-06, |
|
"loss": 0.3035, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.223033252230332e-06, |
|
"loss": 0.5034, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.166261151662611e-06, |
|
"loss": 0.3003, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.10948905109489e-06, |
|
"loss": 0.8774, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.052716950527169e-06, |
|
"loss": 0.3027, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.995944849959448e-06, |
|
"loss": 0.7339, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.939172749391727e-06, |
|
"loss": 0.6821, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.882400648824006e-06, |
|
"loss": 0.4023, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.825628548256285e-06, |
|
"loss": 0.386, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.768856447688564e-06, |
|
"loss": 0.6006, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.712084347120843e-06, |
|
"loss": 0.5742, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.655312246553122e-06, |
|
"loss": 0.5044, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.598540145985401e-06, |
|
"loss": 0.353, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.54176804541768e-06, |
|
"loss": 0.7031, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.484995944849959e-06, |
|
"loss": 0.4141, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.428223844282238e-06, |
|
"loss": 0.6055, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.371451743714517e-06, |
|
"loss": 0.2998, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.314679643146796e-06, |
|
"loss": 0.3992, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.257907542579075e-06, |
|
"loss": 0.3438, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.201135442011354e-06, |
|
"loss": 0.3262, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.144363341443633e-06, |
|
"loss": 0.4243, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.087591240875912e-06, |
|
"loss": 0.1632, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.030819140308191e-06, |
|
"loss": 0.2795, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.97404703974047e-06, |
|
"loss": 0.4995, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.917274939172749e-06, |
|
"loss": 0.3352, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.860502838605028e-06, |
|
"loss": 0.3313, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.8037307380373067e-06, |
|
"loss": 0.4207, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.746958637469586e-06, |
|
"loss": 0.2896, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.6901865369018653e-06, |
|
"loss": 0.5596, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.633414436334144e-06, |
|
"loss": 0.3398, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.576642335766423e-06, |
|
"loss": 0.2605, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.519870235198702e-06, |
|
"loss": 0.2009, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.463098134630981e-06, |
|
"loss": 1.0205, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.4063260340632603e-06, |
|
"loss": 0.9443, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.349553933495539e-06, |
|
"loss": 0.7095, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.292781832927818e-06, |
|
"loss": 0.3589, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.236009732360097e-06, |
|
"loss": 0.582, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.179237631792376e-06, |
|
"loss": 0.304, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.1224655312246553e-06, |
|
"loss": 0.4224, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.065693430656934e-06, |
|
"loss": 0.498, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.008921330089213e-06, |
|
"loss": 0.4141, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.952149229521492e-06, |
|
"loss": 0.5723, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.895377128953771e-06, |
|
"loss": 0.5054, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.83860502838605e-06, |
|
"loss": 0.6367, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.781832927818329e-06, |
|
"loss": 0.2021, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.725060827250608e-06, |
|
"loss": 0.6387, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.668288726682887e-06, |
|
"loss": 0.6621, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.611516626115166e-06, |
|
"loss": 0.9473, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.554744525547445e-06, |
|
"loss": 0.7485, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.497972424979724e-06, |
|
"loss": 0.5908, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.441200324412003e-06, |
|
"loss": 0.1987, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.384428223844282e-06, |
|
"loss": 0.7959, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.327656123276561e-06, |
|
"loss": 0.8477, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.27088402270884e-06, |
|
"loss": 0.4907, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.214111922141119e-06, |
|
"loss": 0.2498, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.157339821573398e-06, |
|
"loss": 0.6602, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.100567721005677e-06, |
|
"loss": 0.5947, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.043795620437956e-06, |
|
"loss": 0.5444, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.987023519870235e-06, |
|
"loss": 0.4663, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.930251419302514e-06, |
|
"loss": 0.4692, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.873479318734793e-06, |
|
"loss": 0.3188, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.816707218167072e-06, |
|
"loss": 0.438, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.759935117599351e-06, |
|
"loss": 0.7119, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.7031630170316302e-06, |
|
"loss": 0.5, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.646390916463909e-06, |
|
"loss": 0.687, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.589618815896188e-06, |
|
"loss": 0.2094, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.532846715328467e-06, |
|
"loss": 0.71, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.476074614760746e-06, |
|
"loss": 0.6973, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.419302514193025e-06, |
|
"loss": 0.3813, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.362530413625304e-06, |
|
"loss": 0.54, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.305758313057583e-06, |
|
"loss": 0.3499, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.248986212489862e-06, |
|
"loss": 0.4475, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.192214111922141e-06, |
|
"loss": 0.6743, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.13544201135442e-06, |
|
"loss": 0.3364, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.078669910786699e-06, |
|
"loss": 0.6958, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.021897810218978e-06, |
|
"loss": 0.5132, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 9.65125709651257e-07, |
|
"loss": 0.2908, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 9.08353609083536e-07, |
|
"loss": 0.353, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 8.515815085158151e-07, |
|
"loss": 0.49, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.94809407948094e-07, |
|
"loss": 0.4644, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.38037307380373e-07, |
|
"loss": 0.647, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 6.81265206812652e-07, |
|
"loss": 0.4165, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 6.24493106244931e-07, |
|
"loss": 0.3989, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.6772100567721e-07, |
|
"loss": 0.3667, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.10948905109489e-07, |
|
"loss": 0.448, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.54176804541768e-07, |
|
"loss": 0.6055, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.97404703974047e-07, |
|
"loss": 0.2896, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.40632603406326e-07, |
|
"loss": 0.7144, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.83860502838605e-07, |
|
"loss": 0.2627, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.27088402270884e-07, |
|
"loss": 0.7861, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.70316301703163e-07, |
|
"loss": 0.3096, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.13544201135442e-07, |
|
"loss": 0.375, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.6772100567721e-08, |
|
"loss": 0.3643, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.0693, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1542, |
|
"total_flos": 4.951076031122375e+18, |
|
"train_loss": 0.9981908340553056, |
|
"train_runtime": 284.2177, |
|
"train_samples_per_second": 1391.43, |
|
"train_steps_per_second": 5.425 |
|
} |
|
], |
|
"max_steps": 1542, |
|
"num_train_epochs": 3, |
|
"total_flos": 4.951076031122375e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|