|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9963099630996313, |
|
"eval_steps": 500, |
|
"global_step": 609, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.3157894736842106e-06, |
|
"loss": 1.3628, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.631578947368421e-06, |
|
"loss": 1.3486, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9473684210526315e-06, |
|
"loss": 1.4299, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.263157894736842e-06, |
|
"loss": 1.3228, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.578947368421053e-06, |
|
"loss": 1.3087, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.894736842105263e-06, |
|
"loss": 1.1821, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.210526315789474e-06, |
|
"loss": 1.1411, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.0526315789473684e-05, |
|
"loss": 1.1109, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.1842105263157895e-05, |
|
"loss": 1.113, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.3157894736842106e-05, |
|
"loss": 1.0936, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.4473684210526317e-05, |
|
"loss": 1.1008, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.5789473684210526e-05, |
|
"loss": 1.1469, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.7105263157894737e-05, |
|
"loss": 1.0595, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.8421052631578947e-05, |
|
"loss": 1.1116, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9736842105263158e-05, |
|
"loss": 1.0477, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.105263157894737e-05, |
|
"loss": 1.1064, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.236842105263158e-05, |
|
"loss": 1.1002, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.368421052631579e-05, |
|
"loss": 1.0617, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.5e-05, |
|
"loss": 1.1376, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.499982279551347e-05, |
|
"loss": 1.1213, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.4999291187078104e-05, |
|
"loss": 1.0721, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.4998405189766444e-05, |
|
"loss": 1.0531, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.4997164828698926e-05, |
|
"loss": 1.0651, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.4995570139043158e-05, |
|
"loss": 1.0514, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.4993621166012923e-05, |
|
"loss": 1.1169, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.4991317964866905e-05, |
|
"loss": 1.1252, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.4988660600907115e-05, |
|
"loss": 1.0334, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.4985649149477043e-05, |
|
"loss": 1.0856, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.4982283695959525e-05, |
|
"loss": 1.1015, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.4978564335774313e-05, |
|
"loss": 1.1473, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.4974491174375374e-05, |
|
"loss": 1.0341, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.497006432724791e-05, |
|
"loss": 1.0717, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.4965283919905065e-05, |
|
"loss": 1.0933, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.4960150087884376e-05, |
|
"loss": 1.1277, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.4954662976743944e-05, |
|
"loss": 1.056, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.494882274205827e-05, |
|
"loss": 1.0766, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.4942629549413894e-05, |
|
"loss": 1.1247, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.4936083574404646e-05, |
|
"loss": 1.1544, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.4929185002626714e-05, |
|
"loss": 1.139, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.492193402967336e-05, |
|
"loss": 1.0876, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.4914330861129376e-05, |
|
"loss": 1.1107, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.4906375712565245e-05, |
|
"loss": 1.1297, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.489806880953106e-05, |
|
"loss": 1.1153, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.4889410387550093e-05, |
|
"loss": 1.1108, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.488040069211214e-05, |
|
"loss": 1.1006, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.487103997866655e-05, |
|
"loss": 1.1015, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.4861328512615e-05, |
|
"loss": 1.1215, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.485126656930394e-05, |
|
"loss": 1.1191, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.4840854434016808e-05, |
|
"loss": 1.1129, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.4830092401965942e-05, |
|
"loss": 1.1249, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.4818980778284202e-05, |
|
"loss": 1.119, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 2.4807519878016313e-05, |
|
"loss": 1.0443, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 2.4795710026109954e-05, |
|
"loss": 1.078, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.4783551557406515e-05, |
|
"loss": 1.0966, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.4771044816631635e-05, |
|
"loss": 1.1596, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 2.4758190158385402e-05, |
|
"loss": 1.087, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 2.4744987947132318e-05, |
|
"loss": 1.121, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.473143855719095e-05, |
|
"loss": 1.1222, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.4717542372723333e-05, |
|
"loss": 1.1292, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.4703299787724065e-05, |
|
"loss": 1.1314, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.468871120600913e-05, |
|
"loss": 1.1266, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.4673777041204478e-05, |
|
"loss": 1.1486, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.465849771673427e-05, |
|
"loss": 1.0973, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.4642873665808873e-05, |
|
"loss": 1.145, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.4626905331412608e-05, |
|
"loss": 1.1104, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.4610593166291143e-05, |
|
"loss": 1.0877, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.4593937632938695e-05, |
|
"loss": 1.0961, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.457693920358491e-05, |
|
"loss": 1.0783, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.455959836018145e-05, |
|
"loss": 1.0869, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.454191559438836e-05, |
|
"loss": 1.1227, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.4523891407560107e-05, |
|
"loss": 1.0735, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.4505526310731376e-05, |
|
"loss": 1.1002, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.448682082460257e-05, |
|
"loss": 1.1313, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.446777547952507e-05, |
|
"loss": 1.1208, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.4448390815486165e-05, |
|
"loss": 1.1212, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.4428667382093762e-05, |
|
"loss": 1.1048, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.440860573856081e-05, |
|
"loss": 1.1089, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.438820645368942e-05, |
|
"loss": 1.1081, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.4367470105854766e-05, |
|
"loss": 1.0935, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.4346397282988666e-05, |
|
"loss": 1.0858, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.432498858256292e-05, |
|
"loss": 1.0944, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.4303244611572372e-05, |
|
"loss": 1.0921, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4281165986517696e-05, |
|
"loss": 1.1146, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.425875333338792e-05, |
|
"loss": 1.0932, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4236007287642664e-05, |
|
"loss": 1.0857, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.421292849419416e-05, |
|
"loss": 1.0615, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.4189517607388918e-05, |
|
"loss": 1.0923, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.416577529098921e-05, |
|
"loss": 1.1382, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.4141702218154232e-05, |
|
"loss": 1.1798, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.4117299071421023e-05, |
|
"loss": 1.1104, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.4092566542685123e-05, |
|
"loss": 1.1011, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.4067505333180938e-05, |
|
"loss": 1.0925, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.404211615346187e-05, |
|
"loss": 1.1313, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.401639972338016e-05, |
|
"loss": 1.0656, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.399035677206651e-05, |
|
"loss": 1.1277, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.396398803790936e-05, |
|
"loss": 1.1044, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.393729426853399e-05, |
|
"loss": 1.0957, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.3910276220781314e-05, |
|
"loss": 1.0594, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.3882934660686418e-05, |
|
"loss": 1.1278, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.3855270363456833e-05, |
|
"loss": 1.0847, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.382728411345058e-05, |
|
"loss": 1.1694, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.3798976704153906e-05, |
|
"loss": 1.1414, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.3770348938158805e-05, |
|
"loss": 1.0949, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.3741401627140242e-05, |
|
"loss": 1.1151, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.371213559183316e-05, |
|
"loss": 1.0679, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3682551662009203e-05, |
|
"loss": 1.1304, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.3652650676453188e-05, |
|
"loss": 1.1176, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.362243348293931e-05, |
|
"loss": 1.0294, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.3591900938207147e-05, |
|
"loss": 1.1691, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.3561053907937315e-05, |
|
"loss": 1.0793, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.352989326672696e-05, |
|
"loss": 1.0782, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.349841989806495e-05, |
|
"loss": 1.0712, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.346663469430683e-05, |
|
"loss": 1.1301, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.3434538556649515e-05, |
|
"loss": 1.1725, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 2.3402132395105737e-05, |
|
"loss": 1.1134, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 2.336941712847825e-05, |
|
"loss": 1.1455, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.3336393684333778e-05, |
|
"loss": 1.1175, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.3303062998976712e-05, |
|
"loss": 1.1114, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 2.3269426017422576e-05, |
|
"loss": 1.1064, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 2.3235483693371214e-05, |
|
"loss": 1.0249, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 2.3201236989179766e-05, |
|
"loss": 1.0759, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 2.3166686875835365e-05, |
|
"loss": 1.0937, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 2.313183433292764e-05, |
|
"loss": 1.1164, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 2.3096680348620886e-05, |
|
"loss": 1.1097, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 2.3061225919626124e-05, |
|
"loss": 1.1287, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 2.3025472051172755e-05, |
|
"loss": 1.1137, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 2.2989419756980134e-05, |
|
"loss": 1.1086, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 2.295307005922879e-05, |
|
"loss": 1.0967, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 2.2916423988531437e-05, |
|
"loss": 1.1077, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.2879482583903783e-05, |
|
"loss": 1.1341, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.2842246892735053e-05, |
|
"loss": 1.106, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.2804717970758286e-05, |
|
"loss": 1.1166, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.2766896882020422e-05, |
|
"loss": 1.1153, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.2728784698852114e-05, |
|
"loss": 1.0829, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.2690382501837344e-05, |
|
"loss": 1.1223, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.2651691379782764e-05, |
|
"loss": 1.0744, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.2612712429686845e-05, |
|
"loss": 1.0786, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.2573446756708754e-05, |
|
"loss": 1.1594, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.2533895474137047e-05, |
|
"loss": 1.0879, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.2494059703358073e-05, |
|
"loss": 1.1591, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.24539405738242e-05, |
|
"loss": 1.1975, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.2413539223021794e-05, |
|
"loss": 1.0487, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.237285679643895e-05, |
|
"loss": 1.1051, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.2331894447533035e-05, |
|
"loss": 1.1266, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.229065333769796e-05, |
|
"loss": 1.0445, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2249134636231285e-05, |
|
"loss": 1.0915, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2207339520301033e-05, |
|
"loss": 1.1328, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.216526917491234e-05, |
|
"loss": 1.1179, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.2122924792873827e-05, |
|
"loss": 1.0199, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.2080307574763824e-05, |
|
"loss": 1.1108, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.2037418728896282e-05, |
|
"loss": 1.1442, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.1994259471286545e-05, |
|
"loss": 1.1125, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.1950831025616873e-05, |
|
"loss": 1.1047, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.190713462320173e-05, |
|
"loss": 1.1058, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.186317150295289e-05, |
|
"loss": 1.027, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.1818942911344283e-05, |
|
"loss": 1.1007, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.17744501023767e-05, |
|
"loss": 1.0835, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.1729694337542196e-05, |
|
"loss": 1.1269, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.168467688578834e-05, |
|
"loss": 1.1548, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.163939902348225e-05, |
|
"loss": 1.0896, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1593862034374365e-05, |
|
"loss": 1.1175, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.15480672095621e-05, |
|
"loss": 1.0988, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.15020158474532e-05, |
|
"loss": 1.1124, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.145570925372894e-05, |
|
"loss": 1.0722, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.1409148741307107e-05, |
|
"loss": 1.0978, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.1362335630304768e-05, |
|
"loss": 1.0294, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.131527124800086e-05, |
|
"loss": 1.0917, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.1267956928798525e-05, |
|
"loss": 1.0315, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.1220394014187312e-05, |
|
"loss": 1.0825, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.1172583852705115e-05, |
|
"loss": 1.1014, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.1124527799899953e-05, |
|
"loss": 1.0582, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1076227218291535e-05, |
|
"loss": 1.0917, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1027683477332623e-05, |
|
"loss": 1.0564, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0978897953370204e-05, |
|
"loss": 1.0999, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0929872029606472e-05, |
|
"loss": 1.1015, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 2.088060709605962e-05, |
|
"loss": 1.0807, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 2.08311045495244e-05, |
|
"loss": 1.0348, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 2.078136579353255e-05, |
|
"loss": 1.0644, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 2.0731392238312985e-05, |
|
"loss": 1.0882, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 2.0681185300751814e-05, |
|
"loss": 1.109, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 2.0630746404352168e-05, |
|
"loss": 1.1298, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.058007697919386e-05, |
|
"loss": 1.0834, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.0529178461892785e-05, |
|
"loss": 1.0839, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.0478052295560253e-05, |
|
"loss": 1.1053, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.0426699929762035e-05, |
|
"loss": 1.0968, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.037512282047726e-05, |
|
"loss": 1.0399, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.0323322430057166e-05, |
|
"loss": 1.1565, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.0271300227183598e-05, |
|
"loss": 1.103, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.02190576868274e-05, |
|
"loss": 1.0688, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.016659629020658e-05, |
|
"loss": 1.0667, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0113917524744308e-05, |
|
"loss": 1.1085, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0061022884026762e-05, |
|
"loss": 1.085, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.0007913867760763e-05, |
|
"loss": 1.0716, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.995459198173127e-05, |
|
"loss": 1.1282, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9901058737758667e-05, |
|
"loss": 1.1367, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9847315653655915e-05, |
|
"loss": 1.08, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9793364253185518e-05, |
|
"loss": 1.1025, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9739206066016307e-05, |
|
"loss": 1.0882, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9684842627680088e-05, |
|
"loss": 1.1219, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.963027547952808e-05, |
|
"loss": 1.0971, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.9575506168687247e-05, |
|
"loss": 1.0991, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.9520536248016403e-05, |
|
"loss": 1.0028, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.946536727606219e-05, |
|
"loss": 1.0726, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.941000081701492e-05, |
|
"loss": 0.8409, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.9354438440664167e-05, |
|
"loss": 0.7279, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.929868172235433e-05, |
|
"loss": 0.6879, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.9242732242939893e-05, |
|
"loss": 0.6526, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.9186591588740667e-05, |
|
"loss": 0.6945, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.913026135149678e-05, |
|
"loss": 0.6871, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.9073743128323558e-05, |
|
"loss": 0.6716, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.9017038521666232e-05, |
|
"loss": 0.7348, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.8960149139254513e-05, |
|
"loss": 0.6692, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.8903076594057013e-05, |
|
"loss": 0.6319, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.88458225042355e-05, |
|
"loss": 0.6803, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.8788388493099033e-05, |
|
"loss": 0.6941, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.8730776189057918e-05, |
|
"loss": 0.702, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.8672987225577552e-05, |
|
"loss": 0.7123, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.8615023241132127e-05, |
|
"loss": 0.6531, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.855688587915813e-05, |
|
"loss": 0.6997, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.8498576788007775e-05, |
|
"loss": 0.706, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.8440097620902284e-05, |
|
"loss": 0.6839, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.838145003588497e-05, |
|
"loss": 0.6981, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.832263569577427e-05, |
|
"loss": 0.6168, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.8263656268116576e-05, |
|
"loss": 0.6331, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.8204513425138952e-05, |
|
"loss": 0.6864, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.8145208843701742e-05, |
|
"loss": 0.7184, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.808574420525101e-05, |
|
"loss": 0.6272, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.8026121195770874e-05, |
|
"loss": 0.61, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.7966341505735695e-05, |
|
"loss": 0.6583, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.7906406830062156e-05, |
|
"loss": 0.6503, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.7846318868061212e-05, |
|
"loss": 0.6473, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.7786079323389893e-05, |
|
"loss": 0.658, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.7725689904003006e-05, |
|
"loss": 0.6683, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.766515232210473e-05, |
|
"loss": 0.6576, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.7604468294100034e-05, |
|
"loss": 0.7206, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.754363954054605e-05, |
|
"loss": 0.6805, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.7482667786103256e-05, |
|
"loss": 0.6856, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.7421554759486614e-05, |
|
"loss": 0.6346, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.736030219341651e-05, |
|
"loss": 0.6842, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.7298911824569676e-05, |
|
"loss": 0.6696, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.7237385393529916e-05, |
|
"loss": 0.6832, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.7175724644738773e-05, |
|
"loss": 0.6993, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.7113931326446054e-05, |
|
"loss": 0.7059, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.705200719066028e-05, |
|
"loss": 0.7253, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.6989953993098998e-05, |
|
"loss": 0.6968, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.6927773493139014e-05, |
|
"loss": 0.6491, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.68654674537665e-05, |
|
"loss": 0.6645, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.6803037641527002e-05, |
|
"loss": 0.6346, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.674048582647538e-05, |
|
"loss": 0.7157, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.6677813782125592e-05, |
|
"loss": 0.7311, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.6615023285400432e-05, |
|
"loss": 0.6516, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.6552116116581133e-05, |
|
"loss": 0.6617, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.648909405925691e-05, |
|
"loss": 0.6757, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6425958900274362e-05, |
|
"loss": 0.6776, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6362712429686846e-05, |
|
"loss": 0.6525, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.629935644070369e-05, |
|
"loss": 0.6695, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6235892729639378e-05, |
|
"loss": 0.6625, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.617232309586261e-05, |
|
"loss": 0.6569, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.6108649341745262e-05, |
|
"loss": 0.7276, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.604487327261133e-05, |
|
"loss": 0.7187, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.59809966966857e-05, |
|
"loss": 0.693, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.591702142504291e-05, |
|
"loss": 0.6845, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5852949271555787e-05, |
|
"loss": 0.6641, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.5788782052844015e-05, |
|
"loss": 0.6816, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.572452158822265e-05, |
|
"loss": 0.6962, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.5660169699650503e-05, |
|
"loss": 0.6748, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.5595728211678525e-05, |
|
"loss": 0.7169, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.553119895139803e-05, |
|
"loss": 0.6708, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.546658374838894e-05, |
|
"loss": 0.6961, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.5401884434667864e-05, |
|
"loss": 0.6814, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.5337102844636188e-05, |
|
"loss": 0.6701, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.5272240815028067e-05, |
|
"loss": 0.678, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.520730018485832e-05, |
|
"loss": 0.7226, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.5142282795370305e-05, |
|
"loss": 0.712, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.5077190489983728e-05, |
|
"loss": 0.7174, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.5012025114242355e-05, |
|
"loss": 0.697, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.49467885157617e-05, |
|
"loss": 0.6919, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.4881482544176627e-05, |
|
"loss": 0.7013, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.4816109051088931e-05, |
|
"loss": 0.672, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.4750669890014807e-05, |
|
"loss": 0.6643, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.468516691633233e-05, |
|
"loss": 0.7121, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.4619601987228832e-05, |
|
"loss": 0.7121, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.4553976961648244e-05, |
|
"loss": 0.7016, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.4488293700238401e-05, |
|
"loss": 0.6935, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.4422554065298286e-05, |
|
"loss": 0.6644, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.435675992072521e-05, |
|
"loss": 0.6634, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.4290913131961997e-05, |
|
"loss": 0.6848, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.4225015565944072e-05, |
|
"loss": 0.7342, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.4159069091046526e-05, |
|
"loss": 0.7043, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.4093075577031157e-05, |
|
"loss": 0.6844, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.4027036894993442e-05, |
|
"loss": 0.7029, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.396095491730951e-05, |
|
"loss": 0.6458, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.3894831517583017e-05, |
|
"loss": 0.6892, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.3828668570592069e-05, |
|
"loss": 0.6533, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.3762467952236027e-05, |
|
"loss": 0.6618, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.369623153948235e-05, |
|
"loss": 0.7309, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.3629961210313353e-05, |
|
"loss": 0.6675, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.3563658843672985e-05, |
|
"loss": 0.7065, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.3497326319413539e-05, |
|
"loss": 0.6495, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.3430965518242362e-05, |
|
"loss": 0.6695, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.336457832166852e-05, |
|
"loss": 0.7005, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.3298166611949467e-05, |
|
"loss": 0.7049, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.3231732272037672e-05, |
|
"loss": 0.6695, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.3165277185527234e-05, |
|
"loss": 0.6984, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.3098803236600462e-05, |
|
"loss": 0.6445, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.3032312309974482e-05, |
|
"loss": 0.674, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.2965806290847768e-05, |
|
"loss": 0.6703, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.2899287064846725e-05, |
|
"loss": 0.6647, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.2832756517972185e-05, |
|
"loss": 0.6637, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.2766216536545984e-05, |
|
"loss": 0.6944, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.2699669007157435e-05, |
|
"loss": 0.6246, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.2633115816609867e-05, |
|
"loss": 0.6816, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.2566558851867106e-05, |
|
"loss": 0.628, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.6668, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.2433441148132897e-05, |
|
"loss": 0.6779, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.236688418339014e-05, |
|
"loss": 0.6325, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.2300330992842566e-05, |
|
"loss": 0.6418, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.223378346345402e-05, |
|
"loss": 0.6853, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.2167243482027816e-05, |
|
"loss": 0.6517, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.2100712935153283e-05, |
|
"loss": 0.6686, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.2034193709152231e-05, |
|
"loss": 0.6647, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.196768769002552e-05, |
|
"loss": 0.7095, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.190119676339954e-05, |
|
"loss": 0.65, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.1834722814472771e-05, |
|
"loss": 0.6796, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.1768267727962328e-05, |
|
"loss": 0.6685, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.1701833388050536e-05, |
|
"loss": 0.6913, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.1635421678331485e-05, |
|
"loss": 0.7266, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.1569034481757644e-05, |
|
"loss": 0.6682, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.150267368058646e-05, |
|
"loss": 0.6643, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.1436341156327016e-05, |
|
"loss": 0.6984, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.137003878968665e-05, |
|
"loss": 0.6792, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.1303768460517657e-05, |
|
"loss": 0.7129, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.1237532047763974e-05, |
|
"loss": 0.6972, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.1171331429407934e-05, |
|
"loss": 0.6692, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.1105168482416984e-05, |
|
"loss": 0.6335, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.1039045082690496e-05, |
|
"loss": 0.6787, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.0972963105006556e-05, |
|
"loss": 0.6626, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.0906924422968846e-05, |
|
"loss": 0.67, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.0840930908953477e-05, |
|
"loss": 0.6307, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.0774984434055932e-05, |
|
"loss": 0.6691, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.0709086868038005e-05, |
|
"loss": 0.6772, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.064324007927479e-05, |
|
"loss": 0.6846, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.0577445934701717e-05, |
|
"loss": 0.6568, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.0511706299761602e-05, |
|
"loss": 0.7119, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.044602303835176e-05, |
|
"loss": 0.6945, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.0380398012771169e-05, |
|
"loss": 0.6954, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0314833083667672e-05, |
|
"loss": 0.6703, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0249330109985198e-05, |
|
"loss": 0.6854, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.0183890948911074e-05, |
|
"loss": 0.6929, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.0118517455823374e-05, |
|
"loss": 0.7106, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.0053211484238303e-05, |
|
"loss": 0.7181, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.987974885757647e-06, |
|
"loss": 0.691, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 9.922809510016277e-06, |
|
"loss": 0.6584, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 9.857717204629696e-06, |
|
"loss": 0.6575, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 9.792699815141685e-06, |
|
"loss": 0.6865, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 9.727759184971936e-06, |
|
"loss": 0.6692, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 9.662897155363814e-06, |
|
"loss": 0.7082, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 9.598115565332142e-06, |
|
"loss": 0.6894, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 9.533416251611064e-06, |
|
"loss": 0.7292, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 9.468801048601972e-06, |
|
"loss": 0.6795, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 9.404271788321482e-06, |
|
"loss": 0.662, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 9.339830300349498e-06, |
|
"loss": 0.6917, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 9.275478411777354e-06, |
|
"loss": 0.6759, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 9.211217947155986e-06, |
|
"loss": 0.6535, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 9.147050728444215e-06, |
|
"loss": 0.6741, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 9.08297857495709e-06, |
|
"loss": 0.6837, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 9.0190033033143e-06, |
|
"loss": 0.6454, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.955126727388672e-06, |
|
"loss": 0.6695, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.89135065825474e-06, |
|
"loss": 0.727, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.827676904137397e-06, |
|
"loss": 0.6626, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 8.76410727036062e-06, |
|
"loss": 0.6436, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 8.700643559296313e-06, |
|
"loss": 0.6852, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.637287570313159e-06, |
|
"loss": 0.6295, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.574041099725644e-06, |
|
"loss": 0.679, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 8.510905940743093e-06, |
|
"loss": 0.6501, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 8.447883883418866e-06, |
|
"loss": 0.6976, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 8.38497671459957e-06, |
|
"loss": 0.7009, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 8.32218621787441e-06, |
|
"loss": 0.6754, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 8.25951417352462e-06, |
|
"loss": 0.6816, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 8.196962358473e-06, |
|
"loss": 0.6554, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 8.134532546233505e-06, |
|
"loss": 0.6532, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 8.072226506860988e-06, |
|
"loss": 0.7011, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 8.010046006901001e-06, |
|
"loss": 0.7002, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.947992809339723e-06, |
|
"loss": 0.643, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.88606867355395e-06, |
|
"loss": 0.7304, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.824275355261233e-06, |
|
"loss": 0.673, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.762614606470084e-06, |
|
"loss": 0.6368, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.701088175430327e-06, |
|
"loss": 0.6589, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.639697806583493e-06, |
|
"loss": 0.685, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.578445240513393e-06, |
|
"loss": 0.6659, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.517332213896743e-06, |
|
"loss": 0.6914, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.456360459453952e-06, |
|
"loss": 0.6852, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.395531705899969e-06, |
|
"loss": 0.6308, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 7.334847677895276e-06, |
|
"loss": 0.6535, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 7.274310095997e-06, |
|
"loss": 0.6662, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.213920676610112e-06, |
|
"loss": 0.6827, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.153681131938791e-06, |
|
"loss": 0.6781, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.093593169937849e-06, |
|
"loss": 0.6506, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.033658494264309e-06, |
|
"loss": 0.7036, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.973878804229131e-06, |
|
"loss": 0.6834, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.914255794748989e-06, |
|
"loss": 0.6629, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.8547911562982595e-06, |
|
"loss": 0.7212, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.7954865748610515e-06, |
|
"loss": 0.6611, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.736343731883425e-06, |
|
"loss": 0.7121, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.677364304225731e-06, |
|
"loss": 0.6365, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.618549964115033e-06, |
|
"loss": 0.6527, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.559902379097721e-06, |
|
"loss": 0.5056, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 6.501423211992224e-06, |
|
"loss": 0.4116, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 6.443114120841874e-06, |
|
"loss": 0.3913, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 6.384976758867875e-06, |
|
"loss": 0.4092, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 6.32701277442245e-06, |
|
"loss": 0.402, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 6.269223810942086e-06, |
|
"loss": 0.3886, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 6.211611506900973e-06, |
|
"loss": 0.3336, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 6.1541774957645045e-06, |
|
"loss": 0.3918, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 6.096923405942989e-06, |
|
"loss": 0.3541, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 6.03985086074549e-06, |
|
"loss": 0.3317, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.982961478333768e-06, |
|
"loss": 0.3406, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.926256871676443e-06, |
|
"loss": 0.324, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.86973864850322e-06, |
|
"loss": 0.3686, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.813408411259331e-06, |
|
"loss": 0.3323, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.757267757060109e-06, |
|
"loss": 0.3494, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.701318277645675e-06, |
|
"loss": 0.3532, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.645561559335832e-06, |
|
"loss": 0.3596, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.589999182985087e-06, |
|
"loss": 0.3783, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.534632723937809e-06, |
|
"loss": 0.3515, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.479463751983603e-06, |
|
"loss": 0.3261, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.424493831312757e-06, |
|
"loss": 0.368, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 5.369724520471925e-06, |
|
"loss": 0.3684, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 5.315157372319915e-06, |
|
"loss": 0.3526, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 5.2607939339836945e-06, |
|
"loss": 0.3388, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 5.2066357468144824e-06, |
|
"loss": 0.3406, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 5.152684346344087e-06, |
|
"loss": 0.33, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 5.0989412622413345e-06, |
|
"loss": 0.3424, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 5.0454080182687314e-06, |
|
"loss": 0.373, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.992086132239238e-06, |
|
"loss": 0.3332, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.938977115973242e-06, |
|
"loss": 0.3413, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.886082475255694e-06, |
|
"loss": 0.355, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.833403709793424e-06, |
|
"loss": 0.3331, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.780942313172602e-06, |
|
"loss": 0.343, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.728699772816407e-06, |
|
"loss": 0.3988, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.6766775699428365e-06, |
|
"loss": 0.3527, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.624877179522742e-06, |
|
"loss": 0.3563, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.573300070237973e-06, |
|
"loss": 0.376, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.5219477044397525e-06, |
|
"loss": 0.3256, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.470821538107219e-06, |
|
"loss": 0.3517, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.419923020806148e-06, |
|
"loss": 0.4053, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.36925359564783e-06, |
|
"loss": 0.3272, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.318814699248188e-06, |
|
"loss": 0.3371, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.268607761687019e-06, |
|
"loss": 0.314, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.218634206467449e-06, |
|
"loss": 0.347, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 4.1688954504756e-06, |
|
"loss": 0.3671, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 4.119392903940384e-06, |
|
"loss": 0.3479, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.07012797039353e-06, |
|
"loss": 0.3732, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.021102046629799e-06, |
|
"loss": 0.3116, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.972316522667381e-06, |
|
"loss": 0.3443, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.923772781708468e-06, |
|
"loss": 0.3662, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.875472200100051e-06, |
|
"loss": 0.2995, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.827416147294888e-06, |
|
"loss": 0.361, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.7796059858126927e-06, |
|
"loss": 0.3441, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.7320430712014796e-06, |
|
"loss": 0.3462, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.6847287519991434e-06, |
|
"loss": 0.3885, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.637664369695233e-06, |
|
"loss": 0.3637, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.590851258692894e-06, |
|
"loss": 0.3185, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.5442907462710605e-06, |
|
"loss": 0.3414, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.4979841525468016e-06, |
|
"loss": 0.3493, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.451932790437898e-06, |
|
"loss": 0.3546, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.4061379656256345e-06, |
|
"loss": 0.3458, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.3606009765177555e-06, |
|
"loss": 0.3618, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.3153231142116617e-06, |
|
"loss": 0.3519, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.270305662457805e-06, |
|
"loss": 0.3614, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.225549897623302e-06, |
|
"loss": 0.3665, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.181057088655719e-06, |
|
"loss": 0.3785, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.136828497047116e-06, |
|
"loss": 0.3377, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.0928653767982695e-06, |
|
"loss": 0.3767, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 3.0491689743831277e-06, |
|
"loss": 0.3523, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 3.005740528713458e-06, |
|
"loss": 0.3635, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.9625812711037206e-06, |
|
"loss": 0.341, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.91969242523618e-06, |
|
"loss": 0.3467, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.87707520712617e-06, |
|
"loss": 0.3493, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.834730825087663e-06, |
|
"loss": 0.3379, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.7926604796989676e-06, |
|
"loss": 0.3951, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.7508653637687176e-06, |
|
"loss": 0.3427, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.709346662302041e-06, |
|
"loss": 0.3813, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.6681055524669694e-06, |
|
"loss": 0.4021, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.6271432035610513e-06, |
|
"loss": 0.3559, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.5864607769782084e-06, |
|
"loss": 0.3115, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.5460594261757996e-06, |
|
"loss": 0.3605, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.5059402966419303e-06, |
|
"loss": 0.326, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.466104525862957e-06, |
|
"loss": 0.364, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.426553243291249e-06, |
|
"loss": 0.3477, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.3872875703131583e-06, |
|
"loss": 0.352, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.3483086202172377e-06, |
|
"loss": 0.3749, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.309617498162657e-06, |
|
"loss": 0.3521, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.271215301147887e-06, |
|
"loss": 0.3528, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.2331031179795792e-06, |
|
"loss": 0.3462, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.1952820292417143e-06, |
|
"loss": 0.3517, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.1577531072649495e-06, |
|
"loss": 0.3434, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.120517416096217e-06, |
|
"loss": 0.3679, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.083576011468562e-06, |
|
"loss": 0.3568, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.0469299407712112e-06, |
|
"loss": 0.3727, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.010580243019865e-06, |
|
"loss": 0.3949, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.9745279488272484e-06, |
|
"loss": 0.3595, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.938774080373884e-06, |
|
"loss": 0.3371, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.9033196513791138e-06, |
|
"loss": 0.342, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.8681656670723663e-06, |
|
"loss": 0.3505, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.833313124164636e-06, |
|
"loss": 0.3533, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.7987630108202355e-06, |
|
"loss": 0.3303, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.764516306628787e-06, |
|
"loss": 0.3815, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.7305739825774228e-06, |
|
"loss": 0.3618, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.6969370010232882e-06, |
|
"loss": 0.3296, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.663606315666226e-06, |
|
"loss": 0.3413, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.6305828715217506e-06, |
|
"loss": 0.36, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.597867604894264e-06, |
|
"loss": 0.3716, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.5654614433504841e-06, |
|
"loss": 0.346, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.5333653056931684e-06, |
|
"loss": 0.3628, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.5015801019350476e-06, |
|
"loss": 0.3417, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.4701067332730426e-06, |
|
"loss": 0.3508, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.4389460920626887e-06, |
|
"loss": 0.3418, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.4080990617928571e-06, |
|
"loss": 0.3471, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.3775665170606896e-06, |
|
"loss": 0.371, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.3473493235468177e-06, |
|
"loss": 0.348, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.3174483379907996e-06, |
|
"loss": 0.3172, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2878644081668403e-06, |
|
"loss": 0.3701, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.2585983728597608e-06, |
|
"loss": 0.3788, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.2296510618411957e-06, |
|
"loss": 0.4176, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.201023295846093e-06, |
|
"loss": 0.3323, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.1727158865494206e-06, |
|
"loss": 0.3603, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.1447296365431668e-06, |
|
"loss": 0.3394, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.1170653393135847e-06, |
|
"loss": 0.3344, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0897237792186862e-06, |
|
"loss": 0.3331, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0627057314660114e-06, |
|
"loss": 0.3656, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.0360119620906426e-06, |
|
"loss": 0.3086, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.0096432279334905e-06, |
|
"loss": 0.3505, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.836002766198379e-07, |
|
"loss": 0.3473, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.57883846538135e-07, |
|
"loss": 0.3996, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.324946668190657e-07, |
|
"loss": 0.3151, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.074334573148783e-07, |
|
"loss": 0.3581, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.827009285789778e-07, |
|
"loss": 0.3843, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.582977818457696e-07, |
|
"loss": 0.3462, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 8.342247090107919e-07, |
|
"loss": 0.3123, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 8.104823926110811e-07, |
|
"loss": 0.3567, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.870715058058406e-07, |
|
"loss": 0.3301, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.639927123573351e-07, |
|
"loss": 0.3228, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.412466666120846e-07, |
|
"loss": 0.3474, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.188340134823043e-07, |
|
"loss": 0.3202, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.967553884276279e-07, |
|
"loss": 0.3468, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.750114174370797e-07, |
|
"loss": 0.3849, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.536027170113354e-07, |
|
"loss": 0.3187, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.32529894145234e-07, |
|
"loss": 0.3558, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.117935463105809e-07, |
|
"loss": 0.2956, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.913942614391931e-07, |
|
"loss": 0.3309, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.713326179062409e-07, |
|
"loss": 0.339, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.516091845138377e-07, |
|
"loss": 0.3394, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.322245204749319e-07, |
|
"loss": 0.3898, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.131791753974296e-07, |
|
"loss": 0.3797, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.944736892686278e-07, |
|
"loss": 0.3644, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.761085924398959e-07, |
|
"loss": 0.3346, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.580844056116418e-07, |
|
"loss": 0.3028, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.4040163981855095e-07, |
|
"loss": 0.4031, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.2306079641509094e-07, |
|
"loss": 0.3211, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.0606236706130367e-07, |
|
"loss": 0.3552, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.89406833708858e-07, |
|
"loss": 0.3539, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.73094668587394e-07, |
|
"loss": 0.3764, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.5712633419112686e-07, |
|
"loss": 0.3776, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.4150228326573356e-07, |
|
"loss": 0.3943, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.262229587955232e-07, |
|
"loss": 0.3839, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.112887939908715e-07, |
|
"loss": 0.3238, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.9670021227593965e-07, |
|
"loss": 0.3462, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.824576272766666e-07, |
|
"loss": 0.3338, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.6856144280904967e-07, |
|
"loss": 0.4009, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.550120528676841e-07, |
|
"loss": 0.3511, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.4180984161460004e-07, |
|
"loss": 0.3157, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.2895518336836923e-07, |
|
"loss": 0.3281, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.164484425934879e-07, |
|
"loss": 0.3573, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.0428997389004972e-07, |
|
"loss": 0.3414, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.9248012198368748e-07, |
|
"loss": 0.386, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.8101922171580022e-07, |
|
"loss": 0.3197, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.699075980340581e-07, |
|
"loss": 0.3153, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.5914556598319307e-07, |
|
"loss": 0.3227, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.4873343069606305e-07, |
|
"loss": 0.3259, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.3867148738500313e-07, |
|
"loss": 0.3968, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.289600213334505e-07, |
|
"loss": 0.3531, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.195993078878646e-07, |
|
"loss": 0.3476, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1058961244991144e-07, |
|
"loss": 0.3373, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.0193119046894295e-07, |
|
"loss": 0.3372, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.362428743475565e-08, |
|
"loss": 0.3463, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.566913887062683e-08, |
|
"loss": 0.3544, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.806597032663925e-08, |
|
"loss": 0.3629, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.081499737328634e-08, |
|
"loss": 0.3486, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.391642559535616e-08, |
|
"loss": 0.3315, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.7370450586109737e-08, |
|
"loss": 0.3358, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.117725794172995e-08, |
|
"loss": 0.3144, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.533702325605904e-08, |
|
"loss": 0.3609, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.9849912115623446e-08, |
|
"loss": 0.3044, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.471608009493754e-08, |
|
"loss": 0.3391, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9935672752090525e-08, |
|
"loss": 0.3522, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.5508825624626088e-08, |
|
"loss": 0.3487, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.1435664225688547e-08, |
|
"loss": 0.3291, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.7716304040475697e-08, |
|
"loss": 0.3867, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.4350850522956705e-08, |
|
"loss": 0.3466, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.1339399092885616e-08, |
|
"loss": 0.2979, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.682035133096566e-09, |
|
"loss": 0.3577, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.378833987077948e-09, |
|
"loss": 0.3616, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.429860956842169e-09, |
|
"loss": 0.3405, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.8351713010743573e-09, |
|
"loss": 0.3453, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.5948102335572313e-09, |
|
"loss": 0.3446, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.088129218985052e-10, |
|
"loss": 0.3575, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.772044865316902e-10, |
|
"loss": 0.3648, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.3142, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 609, |
|
"total_flos": 58075646853120.0, |
|
"train_loss": 0.7130698823478617, |
|
"train_runtime": 3036.251, |
|
"train_samples_per_second": 51.379, |
|
"train_steps_per_second": 0.201 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 609, |
|
"num_train_epochs": 3, |
|
"save_steps": -609, |
|
"total_flos": 58075646853120.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|