{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1705,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 3.846153846153847e-07, "loss": 2.7739, "step": 1},
    {"epoch": 0.0, "learning_rate": 7.692307692307694e-07, "loss": 2.6648, "step": 2},
    {"epoch": 0.0, "learning_rate": 1.153846153846154e-06, "loss": 2.8447, "step": 3},
    {"epoch": 0.0, "learning_rate": 1.5384615384615387e-06, "loss": 2.6984, "step": 4},
    {"epoch": 0.0, "learning_rate": 1.9230769230769234e-06, "loss": 2.7267, "step": 5},
    {"epoch": 0.0, "learning_rate": 2.307692307692308e-06, "loss": 2.6802, "step": 6},
    {"epoch": 0.0, "learning_rate": 2.6923076923076923e-06, "loss": 2.5858, "step": 7},
    {"epoch": 0.0, "learning_rate": 3.0769230769230774e-06, "loss": 2.436, "step": 8},
    {"epoch": 0.01, "learning_rate": 3.4615384615384617e-06, "loss": 2.3204, "step": 9},
    {"epoch": 0.01, "learning_rate": 3.846153846153847e-06, "loss": 2.3288, "step": 10},
    {"epoch": 0.01, "learning_rate": 4.230769230769231e-06, "loss": 2.1069, "step": 11},
    {"epoch": 0.01, "learning_rate": 4.615384615384616e-06, "loss": 1.9975, "step": 12},
    {"epoch": 0.01, "learning_rate": 5e-06, "loss": 2.0434, "step": 13},
    {"epoch": 0.01, "learning_rate": 5.384615384615385e-06, "loss": 1.9555, "step": 14},
    {"epoch": 0.01, "learning_rate": 5.769230769230769e-06, "loss": 1.8655, "step": 15},
    {"epoch": 0.01, "learning_rate": 6.153846153846155e-06, "loss": 1.8597, "step": 16},
    {"epoch": 0.01, "learning_rate": 6.538461538461539e-06, "loss": 1.809, "step": 17},
    {"epoch": 0.01, "learning_rate": 6.923076923076923e-06, "loss": 1.7783, "step": 18},
    {"epoch": 0.01, "learning_rate": 7.307692307692308e-06, "loss": 1.7153, "step": 19},
    {"epoch": 0.01, "learning_rate": 7.692307692307694e-06, "loss": 1.7193, "step": 20},
    {"epoch": 0.01, "learning_rate": 8.076923076923077e-06, "loss": 1.6741, "step": 21},
    {"epoch": 0.01, "learning_rate": 8.461538461538462e-06, "loss": 1.7576, "step": 22},
    {"epoch": 0.01, "learning_rate": 8.846153846153847e-06, "loss": 1.728, "step": 23},
    {"epoch": 0.01, "learning_rate": 9.230769230769232e-06, "loss": 1.5899, "step": 24},
    {"epoch": 0.01, "learning_rate": 9.615384615384616e-06, "loss": 1.6476, "step": 25},
    {"epoch": 0.02, "learning_rate": 1e-05, "loss": 1.5837, "step": 26},
    {"epoch": 0.02, "learning_rate": 1.0384615384615386e-05, "loss": 1.615, "step": 27},
    {"epoch": 0.02, "learning_rate": 1.076923076923077e-05, "loss": 1.6188, "step": 28},
    {"epoch": 0.02, "learning_rate": 1.1153846153846154e-05, "loss": 1.5616, "step": 29},
    {"epoch": 0.02, "learning_rate": 1.1538461538461538e-05, "loss": 1.6321, "step": 30},
    {"epoch": 0.02, "learning_rate": 1.1923076923076925e-05, "loss": 1.5472, "step": 31},
    {"epoch": 0.02, "learning_rate": 1.230769230769231e-05, "loss": 1.4874, "step": 32},
    {"epoch": 0.02, "learning_rate": 1.2692307692307693e-05, "loss": 1.5321, "step": 33},
    {"epoch": 0.02, "learning_rate": 1.3076923076923078e-05, "loss": 1.5677, "step": 34},
    {"epoch": 0.02, "learning_rate": 1.3461538461538463e-05, "loss": 1.5789, "step": 35},
    {"epoch": 0.02, "learning_rate": 1.3846153846153847e-05, "loss": 1.558, "step": 36},
    {"epoch": 0.02, "learning_rate": 1.4230769230769232e-05, "loss": 1.4183, "step": 37},
    {"epoch": 0.02, "learning_rate": 1.4615384615384615e-05, "loss": 1.5641, "step": 38},
    {"epoch": 0.02, "learning_rate": 1.5000000000000002e-05, "loss": 1.5382, "step": 39},
    {"epoch": 0.02, "learning_rate": 1.5384615384615387e-05, "loss": 1.5014, "step": 40},
    {"epoch": 0.02, "learning_rate": 1.576923076923077e-05, "loss": 1.4522, "step": 41},
    {"epoch": 0.02, "learning_rate": 1.6153846153846154e-05, "loss": 1.4632, "step": 42},
    {"epoch": 0.03, "learning_rate": 1.653846153846154e-05, "loss": 1.4419, "step": 43},
    {"epoch": 0.03, "learning_rate": 1.6923076923076924e-05, "loss": 1.4787, "step": 44},
    {"epoch": 0.03, "learning_rate": 1.730769230769231e-05, "loss": 1.471, "step": 45},
    {"epoch": 0.03, "learning_rate": 1.7692307692307694e-05, "loss": 1.4674, "step": 46},
    {"epoch": 0.03, "learning_rate": 1.807692307692308e-05, "loss": 1.421, "step": 47},
    {"epoch": 0.03, "learning_rate": 1.8461538461538465e-05, "loss": 1.4135, "step": 48},
    {"epoch": 0.03, "learning_rate": 1.8846153846153846e-05, "loss": 1.5387, "step": 49},
    {"epoch": 0.03, "learning_rate": 1.923076923076923e-05, "loss": 1.4109, "step": 50},
    {"epoch": 0.03, "learning_rate": 1.9615384615384617e-05, "loss": 1.4647, "step": 51},
    {"epoch": 0.03, "learning_rate": 2e-05, "loss": 1.5125, "step": 52},
    {"epoch": 0.03, "learning_rate": 1.9999981939743593e-05, "loss": 1.3975, "step": 53},
    {"epoch": 0.03, "learning_rate": 1.9999927759039606e-05, "loss": 1.3975, "step": 54},
    {"epoch": 0.03, "learning_rate": 1.9999837458083738e-05, "loss": 1.4772, "step": 55},
    {"epoch": 0.03, "learning_rate": 1.9999711037202162e-05, "loss": 1.4527, "step": 56},
    {"epoch": 0.03, "learning_rate": 1.9999548496851524e-05, "loss": 1.4963, "step": 57},
    {"epoch": 0.03, "learning_rate": 1.999934983761892e-05, "loss": 1.4704, "step": 58},
    {"epoch": 0.03, "learning_rate": 1.999911506022192e-05, "loss": 1.4795, "step": 59},
    {"epoch": 0.04, "learning_rate": 1.999884416550855e-05, "loss": 1.455, "step": 60},
    {"epoch": 0.04, "learning_rate": 1.9998537154457298e-05, "loss": 1.4383, "step": 61},
    {"epoch": 0.04, "learning_rate": 1.9998194028177105e-05, "loss": 1.2969, "step": 62},
    {"epoch": 0.04, "learning_rate": 1.9997814787907356e-05, "loss": 1.4535, "step": 63},
    {"epoch": 0.04, "learning_rate": 1.9997399435017893e-05, "loss": 1.4099, "step": 64},
    {"epoch": 0.04, "learning_rate": 1.9996947971008986e-05, "loss": 1.4214, "step": 65},
    {"epoch": 0.04, "learning_rate": 1.9996460397511347e-05, "loss": 1.3191, "step": 66},
    {"epoch": 0.04, "learning_rate": 1.999593671628612e-05, "loss": 1.39, "step": 67},
    {"epoch": 0.04, "learning_rate": 1.9995376929224863e-05, "loss": 1.4771, "step": 68},
    {"epoch": 0.04, "learning_rate": 1.999478103834956e-05, "loss": 1.3907, "step": 69},
    {"epoch": 0.04, "learning_rate": 1.99941490458126e-05, "loss": 1.4197, "step": 70},
    {"epoch": 0.04, "learning_rate": 1.999348095389677e-05, "loss": 1.4526, "step": 71},
    {"epoch": 0.04, "learning_rate": 1.9992776765015252e-05, "loss": 1.3396, "step": 72},
    {"epoch": 0.04, "learning_rate": 1.9992036481711618e-05, "loss": 1.4858, "step": 73},
    {"epoch": 0.04, "learning_rate": 1.9991260106659798e-05, "loss": 1.3566, "step": 74},
    {"epoch": 0.04, "learning_rate": 1.9990447642664107e-05, "loss": 1.3758, "step": 75},
    {"epoch": 0.04, "learning_rate": 1.9989599092659207e-05, "loss": 1.4627, "step": 76},
    {"epoch": 0.05, "learning_rate": 1.99887144597101e-05, "loss": 1.444, "step": 77},
    {"epoch": 0.05, "learning_rate": 1.9987793747012125e-05, "loss": 1.4592, "step": 78},
    {"epoch": 0.05, "learning_rate": 1.9986836957890947e-05, "loss": 1.4336, "step": 79},
    {"epoch": 0.05, "learning_rate": 1.9985844095802538e-05, "loss": 1.3724, "step": 80},
    {"epoch": 0.05, "learning_rate": 1.9984815164333163e-05, "loss": 1.4621, "step": 81},
    {"epoch": 0.05, "learning_rate": 1.9983750167199377e-05, "loss": 1.3457, "step": 82},
    {"epoch": 0.05, "learning_rate": 1.998264910824801e-05, "loss": 1.3869, "step": 83},
    {"epoch": 0.05, "learning_rate": 1.998151199145613e-05, "loss": 1.4126, "step": 84},
    {"epoch": 0.05, "learning_rate": 1.9980338820931074e-05, "loss": 1.3754, "step": 85},
    {"epoch": 0.05, "learning_rate": 1.9979129600910386e-05, "loss": 1.4018, "step": 86},
    {"epoch": 0.05, "learning_rate": 1.9977884335761834e-05, "loss": 1.3275, "step": 87},
    {"epoch": 0.05, "learning_rate": 1.9976603029983383e-05, "loss": 1.3606, "step": 88},
    {"epoch": 0.05, "learning_rate": 1.9975285688203167e-05, "loss": 1.3201, "step": 89},
    {"epoch": 0.05, "learning_rate": 1.9973932315179502e-05, "loss": 1.3587, "step": 90},
    {"epoch": 0.05, "learning_rate": 1.997254291580083e-05, "loss": 1.3955, "step": 91},
    {"epoch": 0.05, "learning_rate": 1.997111749508574e-05, "loss": 1.5024, "step": 92},
    {"epoch": 0.05, "learning_rate": 1.9969656058182926e-05, "loss": 1.4342, "step": 93},
    {"epoch": 0.06, "learning_rate": 1.9968158610371164e-05, "loss": 1.4254, "step": 94},
    {"epoch": 0.06, "learning_rate": 1.9966625157059324e-05, "loss": 1.3801, "step": 95},
    {"epoch": 0.06, "learning_rate": 1.9965055703786306e-05, "loss": 1.4432, "step": 96},
    {"epoch": 0.06, "learning_rate": 1.9963450256221066e-05, "loss": 1.3711, "step": 97},
    {"epoch": 0.06, "learning_rate": 1.996180882016256e-05, "loss": 1.4053, "step": 98},
    {"epoch": 0.06, "learning_rate": 1.9960131401539737e-05, "loss": 1.3744, "step": 99},
    {"epoch": 0.06, "learning_rate": 1.9958418006411522e-05, "loss": 1.4216, "step": 100},
    {"epoch": 0.06, "learning_rate": 1.995666864096678e-05, "loss": 1.4584, "step": 101},
    {"epoch": 0.06, "learning_rate": 1.9954883311524316e-05, "loss": 1.4102, "step": 102},
    {"epoch": 0.06, "learning_rate": 1.995306202453283e-05, "loss": 1.2626, "step": 103},
    {"epoch": 0.06, "learning_rate": 1.9951204786570896e-05, "loss": 1.3363, "step": 104},
    {"epoch": 0.06, "learning_rate": 1.9949311604346965e-05, "loss": 1.3958, "step": 105},
    {"epoch": 0.06, "learning_rate": 1.99473824846993e-05, "loss": 1.3832, "step": 106},
    {"epoch": 0.06, "learning_rate": 1.9945417434595983e-05, "loss": 1.3707, "step": 107},
    {"epoch": 0.06, "learning_rate": 1.9943416461134877e-05, "loss": 1.4689, "step": 108},
    {"epoch": 0.06, "learning_rate": 1.9941379571543597e-05, "loss": 1.3616, "step": 109},
    {"epoch": 0.06, "learning_rate": 1.9939306773179498e-05, "loss": 1.3882, "step": 110},
    {"epoch": 0.07, "learning_rate": 1.9937198073529626e-05, "loss": 1.3026, "step": 111},
    {"epoch": 0.07, "learning_rate": 1.993505348021072e-05, "loss": 1.3696, "step": 112},
    {"epoch": 0.07, "learning_rate": 1.993287300096916e-05, "loss": 1.2533, "step": 113},
    {"epoch": 0.07, "learning_rate": 1.993065664368094e-05, "loss": 1.3722, "step": 114},
    {"epoch": 0.07, "learning_rate": 1.992840441635167e-05, "loss": 1.3511, "step": 115},
    {"epoch": 0.07, "learning_rate": 1.99261163271165e-05, "loss": 1.4428, "step": 116},
    {"epoch": 0.07, "learning_rate": 1.9923792384240134e-05, "loss": 1.3871, "step": 117},
    {"epoch": 0.07, "learning_rate": 1.9921432596116763e-05, "loss": 1.3628, "step": 118},
    {"epoch": 0.07, "learning_rate": 1.991903697127007e-05, "loss": 1.3381, "step": 119},
    {"epoch": 0.07, "learning_rate": 1.991660551835317e-05, "loss": 1.4411, "step": 120},
    {"epoch": 0.07, "learning_rate": 1.99141382461486e-05, "loss": 1.3643, "step": 121},
    {"epoch": 0.07, "learning_rate": 1.9911635163568276e-05, "loss": 1.3865, "step": 122},
    {"epoch": 0.07, "learning_rate": 1.9909096279653455e-05, "loss": 1.3367, "step": 123},
    {"epoch": 0.07, "learning_rate": 1.9906521603574713e-05, "loss": 1.3274, "step": 124},
    {"epoch": 0.07, "learning_rate": 1.9903911144631924e-05, "loss": 1.3183, "step": 125},
    {"epoch": 0.07, "learning_rate": 1.9901264912254186e-05, "loss": 1.3829, "step": 126},
    {"epoch": 0.07, "learning_rate": 1.9898582915999834e-05, "loss": 1.3697, "step": 127},
    {"epoch": 0.08, "learning_rate": 1.9895865165556375e-05, "loss": 1.3734, "step": 128},
    {"epoch": 0.08, "learning_rate": 1.9893111670740462e-05, "loss": 1.4211, "step": 129},
    {"epoch": 0.08, "learning_rate": 1.9890322441497857e-05, "loss": 1.4691, "step": 130},
    {"epoch": 0.08, "learning_rate": 1.9887497487903406e-05, "loss": 1.3725, "step": 131},
    {"epoch": 0.08, "learning_rate": 1.988463682016098e-05, "loss": 1.3903, "step": 132},
    {"epoch": 0.08, "learning_rate": 1.988174044860346e-05, "loss": 1.3517, "step": 133},
    {"epoch": 0.08, "learning_rate": 1.987880838369269e-05, "loss": 1.36, "step": 134},
    {"epoch": 0.08, "learning_rate": 1.9875840636019435e-05, "loss": 1.3872, "step": 135},
    {"epoch": 0.08, "learning_rate": 1.9872837216303353e-05, "loss": 1.3582, "step": 136},
    {"epoch": 0.08, "learning_rate": 1.9869798135392953e-05, "loss": 1.3862, "step": 137},
    {"epoch": 0.08, "learning_rate": 1.9866723404265546e-05, "loss": 1.3446, "step": 138},
    {"epoch": 0.08, "learning_rate": 1.9863613034027224e-05, "loss": 1.4092, "step": 139},
    {"epoch": 0.08, "learning_rate": 1.98604670359128e-05, "loss": 1.4489, "step": 140},
    {"epoch": 0.08, "learning_rate": 1.9857285421285785e-05, "loss": 1.2771, "step": 141},
    {"epoch": 0.08, "learning_rate": 1.9854068201638327e-05, "loss": 1.3381, "step": 142},
    {"epoch": 0.08, "learning_rate": 1.9850815388591195e-05, "loss": 1.3302, "step": 143},
    {"epoch": 0.08, "learning_rate": 1.9847526993893712e-05, "loss": 1.2568, "step": 144},
    {"epoch": 0.09, "learning_rate": 1.9844203029423733e-05, "loss": 1.4585, "step": 145},
    {"epoch": 0.09, "learning_rate": 1.984084350718758e-05, "loss": 1.4134, "step": 146},
    {"epoch": 0.09, "learning_rate": 1.9837448439320027e-05, "loss": 1.3563, "step": 147},
    {"epoch": 0.09, "learning_rate": 1.9834017838084234e-05, "loss": 1.3011, "step": 148},
    {"epoch": 0.09, "learning_rate": 1.9830551715871703e-05, "loss": 1.396, "step": 149},
    {"epoch": 0.09, "learning_rate": 1.982705008520225e-05, "loss": 1.2511, "step": 150},
    {"epoch": 0.09, "learning_rate": 1.9823512958723942e-05, "loss": 1.4065, "step": 151},
    {"epoch": 0.09, "learning_rate": 1.981994034921306e-05, "loss": 1.3925, "step": 152},
    {"epoch": 0.09, "learning_rate": 1.9816332269574056e-05, "loss": 1.3561, "step": 153},
    {"epoch": 0.09, "learning_rate": 1.9812688732839497e-05, "loss": 1.3132, "step": 154},
    {"epoch": 0.09, "learning_rate": 1.9809009752170022e-05, "loss": 1.3534, "step": 155},
    {"epoch": 0.09, "learning_rate": 1.9805295340854303e-05, "loss": 1.4454, "step": 156},
    {"epoch": 0.09, "learning_rate": 1.9801545512308982e-05, "loss": 1.3429, "step": 157},
    {"epoch": 0.09, "learning_rate": 1.979776028007863e-05, "loss": 1.4072, "step": 158},
    {"epoch": 0.09, "learning_rate": 1.97939396578357e-05, "loss": 1.28, "step": 159},
    {"epoch": 0.09, "learning_rate": 1.979008365938048e-05, "loss": 1.359, "step": 160},
    {"epoch": 0.09, "learning_rate": 1.978619229864103e-05, "loss": 1.2664, "step": 161},
    {"epoch": 0.1, "learning_rate": 1.9782265589673146e-05, "loss": 1.3265, "step": 162},
    {"epoch": 0.1, "learning_rate": 1.9778303546660304e-05, "loss": 1.3663, "step": 163},
    {"epoch": 0.1, "learning_rate": 1.9774306183913602e-05, "loss": 1.3191, "step": 164},
    {"epoch": 0.1, "learning_rate": 1.9770273515871724e-05, "loss": 1.4161, "step": 165},
    {"epoch": 0.1, "learning_rate": 1.976620555710087e-05, "loss": 1.3269, "step": 166},
    {"epoch": 0.1, "learning_rate": 1.9762102322294716e-05, "loss": 1.4626, "step": 167},
    {"epoch": 0.1, "learning_rate": 1.9757963826274357e-05, "loss": 1.3634, "step": 168},
    {"epoch": 0.1, "learning_rate": 1.9753790083988256e-05, "loss": 1.3309, "step": 169},
    {"epoch": 0.1, "learning_rate": 1.974958111051218e-05, "loss": 1.276, "step": 170},
    {"epoch": 0.1, "learning_rate": 1.974533692104916e-05, "loss": 1.3402, "step": 171},
    {"epoch": 0.1, "learning_rate": 1.9741057530929425e-05, "loss": 1.3347, "step": 172},
    {"epoch": 0.1, "learning_rate": 1.973674295561035e-05, "loss": 1.3164, "step": 173},
    {"epoch": 0.1, "learning_rate": 1.9732393210676405e-05, "loss": 1.4294, "step": 174},
    {"epoch": 0.1, "learning_rate": 1.972800831183909e-05, "loss": 1.2978, "step": 175},
    {"epoch": 0.1, "learning_rate": 1.9723588274936882e-05, "loss": 1.3527, "step": 176},
    {"epoch": 0.1, "learning_rate": 1.9719133115935188e-05, "loss": 1.3472, "step": 177},
    {"epoch": 0.1, "learning_rate": 1.9714642850926264e-05, "loss": 1.2505, "step": 178},
    {"epoch": 0.1, "learning_rate": 1.971011749612918e-05, "loss": 1.3672, "step": 179},
    {"epoch": 0.11, "learning_rate": 1.970555706788975e-05, "loss": 1.3688, "step": 180},
    {"epoch": 0.11, "learning_rate": 1.9700961582680476e-05, "loss": 1.3013, "step": 181},
    {"epoch": 0.11, "learning_rate": 1.9696331057100484e-05, "loss": 1.3348, "step": 182},
    {"epoch": 0.11, "learning_rate": 1.9691665507875467e-05, "loss": 1.3899, "step": 183},
    {"epoch": 0.11, "learning_rate": 1.9686964951857636e-05, "loss": 1.3406, "step": 184},
    {"epoch": 0.11, "learning_rate": 1.9682229406025635e-05, "loss": 1.2191, "step": 185},
    {"epoch": 0.11, "learning_rate": 1.96774588874845e-05, "loss": 1.3919, "step": 186},
    {"epoch": 0.11, "learning_rate": 1.9672653413465584e-05, "loss": 1.4293, "step": 187},
    {"epoch": 0.11, "learning_rate": 1.9667813001326512e-05, "loss": 1.3165, "step": 188},
    {"epoch": 0.11, "learning_rate": 1.9662937668551097e-05, "loss": 1.3989, "step": 189},
    {"epoch": 0.11, "learning_rate": 1.9658027432749293e-05, "loss": 1.2933, "step": 190},
    {"epoch": 0.11, "learning_rate": 1.9653082311657124e-05, "loss": 1.2817, "step": 191},
    {"epoch": 0.11, "learning_rate": 1.964810232313662e-05, "loss": 1.3737, "step": 192},
    {"epoch": 0.11, "learning_rate": 1.9643087485175752e-05, "loss": 1.2853, "step": 193},
    {"epoch": 0.11, "learning_rate": 1.9638037815888378e-05, "loss": 1.3151, "step": 194},
    {"epoch": 0.11, "learning_rate": 1.963295333351416e-05, "loss": 1.3505, "step": 195},
    {"epoch": 0.11, "learning_rate": 1.96278340564185e-05, "loss": 1.3389, "step": 196},
    {"epoch": 0.12, "learning_rate": 1.9622680003092503e-05, "loss": 1.2762, "step": 197},
    {"epoch": 0.12, "learning_rate": 1.961749119215287e-05, "loss": 1.2946, "step": 198},
    {"epoch": 0.12, "learning_rate": 1.961226764234185e-05, "loss": 1.3227, "step": 199},
    {"epoch": 0.12, "learning_rate": 1.9607009372527172e-05, "loss": 1.3411, "step": 200},
    {"epoch": 0.12, "learning_rate": 1.9601716401701976e-05, "loss": 1.3325, "step": 201},
    {"epoch": 0.12, "learning_rate": 1.959638874898475e-05, "loss": 1.4039, "step": 202},
    {"epoch": 0.12, "learning_rate": 1.9591026433619246e-05, "loss": 1.3013, "step": 203},
    {"epoch": 0.12, "learning_rate": 1.9585629474974413e-05, "loss": 1.3372, "step": 204},
    {"epoch": 0.12, "learning_rate": 1.9580197892544354e-05, "loss": 1.3755, "step": 205},
    {"epoch": 0.12, "learning_rate": 1.957473170594822e-05, "loss": 1.374, "step": 206},
    {"epoch": 0.12, "learning_rate": 1.9569230934930157e-05, "loss": 1.329, "step": 207},
    {"epoch": 0.12, "learning_rate": 1.9563695599359233e-05, "loss": 1.3057, "step": 208},
    {"epoch": 0.12, "learning_rate": 1.9558125719229357e-05, "loss": 1.3024, "step": 209},
    {"epoch": 0.12, "learning_rate": 1.955252131465923e-05, "loss": 1.2636, "step": 210},
    {"epoch": 0.12, "learning_rate": 1.9546882405892247e-05, "loss": 1.2914, "step": 211},
    {"epoch": 0.12, "learning_rate": 1.9541209013296435e-05, "loss": 1.3777, "step": 212},
    {"epoch": 0.12, "learning_rate": 1.9535501157364375e-05, "loss": 1.333, "step": 213},
    {"epoch": 0.13, "learning_rate": 1.952975885871314e-05, "loss": 1.4056, "step": 214},
    {"epoch": 0.13, "learning_rate": 1.952398213808421e-05, "loss": 1.3496, "step": 215},
    {"epoch": 0.13, "learning_rate": 1.951817101634339e-05, "loss": 1.3138, "step": 216},
    {"epoch": 0.13, "learning_rate": 1.951232551448075e-05, "loss": 1.2944, "step": 217},
    {"epoch": 0.13, "learning_rate": 1.9506445653610548e-05, "loss": 1.3374, "step": 218},
    {"epoch": 0.13, "learning_rate": 1.9500531454971136e-05, "loss": 1.3748, "step": 219},
    {"epoch": 0.13, "learning_rate": 1.9494582939924908e-05, "loss": 1.3424, "step": 220},
    {"epoch": 0.13, "learning_rate": 1.94886001299582e-05, "loss": 1.313, "step": 221},
    {"epoch": 0.13, "learning_rate": 1.9482583046681238e-05, "loss": 1.3227, "step": 222},
    {"epoch": 0.13, "learning_rate": 1.9476531711828027e-05, "loss": 1.2002, "step": 223},
    {"epoch": 0.13, "learning_rate": 1.94704461472563e-05, "loss": 1.3451, "step": 224},
    {"epoch": 0.13, "learning_rate": 1.9464326374947432e-05, "loss": 1.3672, "step": 225},
    {"epoch": 0.13, "learning_rate": 1.9458172417006347e-05, "loss": 1.3881, "step": 226},
    {"epoch": 0.13, "learning_rate": 1.9451984295661467e-05, "loss": 1.3065, "step": 227},
    {"epoch": 0.13, "learning_rate": 1.9445762033264595e-05, "loss": 1.36, "step": 228},
    {"epoch": 0.13, "learning_rate": 1.9439505652290867e-05, "loss": 1.3986, "step": 229},
    {"epoch": 0.13, "learning_rate": 1.943321517533865e-05, "loss": 1.4003, "step": 230},
    {"epoch": 0.14, "learning_rate": 1.9426890625129466e-05, "loss": 1.2915, "step": 231},
    {"epoch": 0.14, "learning_rate": 1.942053202450792e-05, "loss": 1.3677, "step": 232},
    {"epoch": 0.14, "learning_rate": 1.9414139396441602e-05, "loss": 1.3934, "step": 233},
    {"epoch": 0.14, "learning_rate": 1.9407712764021015e-05, "loss": 1.2412, "step": 234},
    {"epoch": 0.14, "learning_rate": 1.9401252150459478e-05, "loss": 1.2942, "step": 235},
    {"epoch": 0.14, "learning_rate": 1.9394757579093063e-05, "loss": 1.3166, "step": 236},
    {"epoch": 0.14, "learning_rate": 1.9388229073380495e-05, "loss": 1.313, "step": 237},
    {"epoch": 0.14, "learning_rate": 1.9381666656903068e-05, "loss": 1.2928, "step": 238},
    {"epoch": 0.14, "learning_rate": 1.9375070353364573e-05, "loss": 1.317, "step": 239},
    {"epoch": 0.14, "learning_rate": 1.9368440186591192e-05, "loss": 1.349, "step": 240},
    {"epoch": 0.14, "learning_rate": 1.9361776180531428e-05, "loss": 1.3727, "step": 241},
    {"epoch": 0.14, "learning_rate": 1.935507835925601e-05, "loss": 1.3032, "step": 242},
    {"epoch": 0.14, "learning_rate": 1.934834674695782e-05, "loss": 1.3728, "step": 243},
    {"epoch": 0.14, "learning_rate": 1.934158136795178e-05, "loss": 1.2144, "step": 244},
    {"epoch": 0.14, "learning_rate": 1.9334782246674786e-05, "loss": 1.2674, "step": 245},
    {"epoch": 0.14, "learning_rate": 1.9327949407685617e-05, "loss": 1.3519, "step": 246},
    {"epoch": 0.14, "learning_rate": 1.9321082875664834e-05, "loss": 1.3787, "step": 247},
    {"epoch": 0.15, "learning_rate": 1.9314182675414704e-05, "loss": 1.3495, "step": 248},
    {"epoch": 0.15, "learning_rate": 1.9307248831859106e-05, "loss": 1.2048, "step": 249},
    {"epoch": 0.15, "learning_rate": 1.9300281370043433e-05, "loss": 1.3263, "step": 250},
    {"epoch": 0.15, "learning_rate": 1.9293280315134524e-05, "loss": 1.2988, "step": 251},
    {"epoch": 0.15, "learning_rate": 1.928624569242054e-05, "loss": 1.2418, "step": 252},
    {"epoch": 0.15, "learning_rate": 1.92791775273109e-05, "loss": 1.3163, "step": 253},
    {"epoch": 0.15, "learning_rate": 1.9272075845336177e-05, "loss": 1.2373, "step": 254},
    {"epoch": 0.15, "learning_rate": 1.9264940672148018e-05, "loss": 1.2492, "step": 255},
    {"epoch": 0.15, "learning_rate": 1.9257772033519032e-05, "loss": 1.2972, "step": 256},
    {"epoch": 0.15, "learning_rate": 1.9250569955342705e-05, "loss": 1.2825, "step": 257},
    {"epoch": 0.15, "learning_rate": 1.9243334463633315e-05, "loss": 1.3018, "step": 258},
    {"epoch": 0.15, "learning_rate": 1.923606558452583e-05, "loss": 1.4347, "step": 259},
    {"epoch": 0.15, "learning_rate": 1.9228763344275813e-05, "loss": 1.301, "step": 260},
    {"epoch": 0.15, "learning_rate": 1.9221427769259333e-05, "loss": 1.2397, "step": 261},
    {"epoch": 0.15, "learning_rate": 1.921405888597286e-05, "loss": 1.3608, "step": 262},
    {"epoch": 0.15, "learning_rate": 1.9206656721033178e-05, "loss": 1.359, "step": 263},
    {"epoch": 0.15, "learning_rate": 1.9199221301177287e-05, "loss": 1.323, "step": 264},
    {"epoch": 0.16, "learning_rate": 1.919175265326231e-05, "loss": 1.3361, "step": 265},
    {"epoch": 0.16, "learning_rate": 1.9184250804265375e-05, "loss": 1.3126, "step": 266},
    {"epoch": 0.16, "learning_rate": 1.9176715781283556e-05, "loss": 1.2873, "step": 267},
    {"epoch": 0.16, "learning_rate": 1.9169147611533737e-05, "loss": 1.3034, "step": 268},
    {"epoch": 0.16, "learning_rate": 1.916154632235254e-05, "loss": 1.3788, "step": 269},
    {"epoch": 0.16, "learning_rate": 1.91539119411962e-05, "loss": 1.2423, "step": 270},
    {"epoch": 0.16, "learning_rate": 1.914624449564051e-05, "loss": 1.3597, "step": 271},
    {"epoch": 0.16, "learning_rate": 1.913854401338066e-05, "loss": 1.2726, "step": 272},
    {"epoch": 0.16, "learning_rate": 1.91308105222312e-05, "loss": 1.3245, "step": 273},
    {"epoch": 0.16, "learning_rate": 1.9123044050125892e-05, "loss": 1.3731, "step": 274},
    {"epoch": 0.16, "learning_rate": 1.9115244625117627e-05, "loss": 1.2859, "step": 275},
    {"epoch": 0.16, "learning_rate": 1.9107412275378335e-05, "loss": 1.306, "step": 276},
    {"epoch": 0.16, "learning_rate": 1.909954702919886e-05, "loss": 1.3568, "step": 277},
    {"epoch": 0.16, "learning_rate": 1.9091648914988873e-05, "loss": 1.2589, "step": 278},
    {"epoch": 0.16, "learning_rate": 1.9083717961276775e-05, "loss": 1.2778, "step": 279},
    {"epoch": 0.16, "learning_rate": 1.9075754196709574e-05, "loss": 1.2671, "step": 280},
    {"epoch": 0.16, "learning_rate": 1.9067757650052792e-05, "loss": 1.284, "step": 281},
    {"epoch": 0.17, "learning_rate": 1.905972835019037e-05, "loss": 1.3116, "step": 282},
    {"epoch": 0.17, "learning_rate": 1.905166632612455e-05, "loss": 1.3019, "step": 283},
    {"epoch": 0.17, "learning_rate": 1.9043571606975776e-05, "loss": 1.278, "step": 284},
    {"epoch": 0.17, "learning_rate": 1.903544422198259e-05, "loss": 1.2977, "step": 285},
    {"epoch": 0.17, "learning_rate": 1.902728420050152e-05, "loss": 1.2591, "step": 286},
    {"epoch": 0.17, "learning_rate": 1.9019091572006988e-05, "loss": 1.395, "step": 287},
    {"epoch": 0.17, "learning_rate": 1.9010866366091182e-05, "loss": 1.3454, "step": 288},
    {"epoch": 0.17, "learning_rate": 1.900260861246397e-05, "loss": 1.3167, "step": 289},
    {"epoch": 0.17, "learning_rate": 1.899431834095278e-05, "loss": 1.3569, "step": 290},
    {"epoch": 0.17, "learning_rate": 1.89859955815025e-05, "loss": 1.3338, "step": 291},
    {"epoch": 0.17, "learning_rate": 1.8977640364175367e-05, "loss": 1.3153, "step": 292},
    {"epoch": 0.17, "learning_rate": 1.8969252719150847e-05, "loss": 1.3261, "step": 293},
    {"epoch": 0.17, "learning_rate": 1.896083267672555e-05, "loss": 1.2942, "step": 294},
    {"epoch": 0.17, "learning_rate": 1.89523802673131e-05, "loss": 1.2212, "step": 295},
    {"epoch": 0.17, "learning_rate": 1.894389552144403e-05, "loss": 1.3297, "step": 296},
    {"epoch": 0.17, "learning_rate": 1.893537846976568e-05, "loss": 1.2216, "step": 297},
    {"epoch": 0.17, "learning_rate": 1.8926829143042075e-05, "loss": 1.3309, "step": 298},
    {"epoch": 0.18, "learning_rate": 1.8918247572153822e-05, "loss": 1.312, "step": 299},
    {"epoch": 0.18, "learning_rate": 1.8909633788098004e-05, "loss": 1.3446, "step": 300},
    {"epoch": 0.18, "learning_rate": 1.8900987821988038e-05, "loss": 1.3735, "step": 301},
    {"epoch": 0.18, "learning_rate": 1.88923097050536e-05, "loss": 1.3151, "step": 302},
    {"epoch": 0.18, "learning_rate": 1.8883599468640498e-05, "loss": 1.3641, "step": 303},
    {"epoch": 0.18, "learning_rate": 1.887485714421055e-05, "loss": 1.3251, "step": 304},
    {"epoch": 0.18, "learning_rate": 1.886608276334148e-05, "loss": 1.3444, "step": 305},
    {"epoch": 0.18, "learning_rate": 1.8857276357726802e-05, "loss": 1.3449, "step": 306},
    {"epoch": 0.18, "learning_rate": 1.8848437959175703e-05, "loss": 1.3467, "step": 307},
    {"epoch": 0.18, "learning_rate": 1.8839567599612934e-05, "loss": 1.2495, "step": 308},
    {"epoch": 0.18, "learning_rate": 1.8830665311078694e-05, "loss": 1.3242, "step": 309},
    {"epoch": 0.18, "learning_rate": 1.8821731125728493e-05, "loss": 1.2344, "step": 310},
    {"epoch": 0.18, "learning_rate": 1.8812765075833076e-05, "loss": 1.2997, "step": 311},
    {"epoch": 0.18, "learning_rate": 1.8803767193778273e-05, "loss": 1.2249, "step": 312},
    {"epoch": 0.18, "learning_rate": 1.879473751206489e-05, "loss": 1.2523, "step": 313},
    {"epoch": 0.18, "learning_rate": 1.878567606330861e-05, "loss": 1.3366, "step": 314},
    {"epoch": 0.18, "learning_rate": 1.8776582880239847e-05, "loss": 1.3649, "step": 315},
    {"epoch": 0.19, "learning_rate": 1.876745799570364e-05, "loss": 1.3518, "step": 316},
    {"epoch": 0.19, "learning_rate": 1.8758301442659546e-05, "loss": 1.1984, "step": 317},
    {"epoch": 0.19, "learning_rate": 1.8749113254181498e-05, "loss": 1.397, "step": 318},
    {"epoch": 0.19, "learning_rate": 1.873989346345771e-05, "loss": 1.2047, "step": 319},
    {"epoch": 0.19, "learning_rate": 1.8730642103790537e-05, "loss": 1.3858, "step": 320},
    {"epoch": 0.19, "learning_rate": 1.872135920859636e-05, "loss": 1.2031, "step": 321},
    {"epoch": 0.19, "learning_rate": 1.871204481140548e-05, "loss": 1.4161, "step": 322},
    {"epoch": 0.19, "learning_rate": 1.870269894586197e-05, "loss": 1.3427, "step": 323},
    {"epoch": 0.19, "learning_rate": 1.8693321645723584e-05, "loss": 1.2259, "step": 324},
    {"epoch": 0.19, "learning_rate": 1.8683912944861606e-05, "loss": 1.3471, "step": 325},
    {"epoch": 0.19, "learning_rate": 1.867447287726074e-05, "loss": 1.2364, "step": 326},
    {"epoch": 0.19, "learning_rate": 1.8665001477019006e-05, "loss": 1.289, "step": 327},
    {"epoch": 0.19, "learning_rate": 1.865549877834758e-05, "loss": 1.2322, "step": 328},
    {"epoch": 0.19, "learning_rate": 1.86459648155707e-05, "loss": 1.262, "step": 329},
    {"epoch": 0.19, "learning_rate": 1.8636399623125524e-05, "loss": 1.347, "step": 330},
    {"epoch": 0.19, "learning_rate": 1.8626803235562025e-05, "loss": 1.2308, "step": 331},
    {"epoch": 0.19, "learning_rate": 1.8617175687542844e-05, "loss": 1.3258, "step": 332},
    {"epoch": 0.2, "learning_rate": 1.8607517013843172e-05, "loss": 1.1856, "step": 333},
    {"epoch": 0.2, "learning_rate": 1.859782724935064e-05, "loss": 1.3437, "step": 334},
    {"epoch": 0.2, "learning_rate": 1.8588106429065175e-05, "loss": 1.2403, "step": 335},
    {"epoch": 0.2, "learning_rate": 1.8578354588098877e-05, "loss": 1.2893, "step": 336},
    {"epoch": 0.2, "learning_rate": 1.8568571761675893e-05, "loss": 1.2987, "step": 337},
    {"epoch": 0.2, "learning_rate": 1.8558757985132296e-05, "loss": 1.2255, "step": 338},
    {"epoch": 0.2, "learning_rate": 1.8548913293915954e-05, "loss": 1.3965, "step": 339},
    {"epoch": 0.2, "learning_rate": 1.8539037723586387e-05, "loss": 1.2476, "step": 340},
    {"epoch": 0.2, "learning_rate": 1.8529131309814667e-05, "loss": 1.2654, "step": 341},
    {"epoch": 0.2, "learning_rate": 1.851919408838327e-05, "loss": 1.2373, "step": 342},
    {"epoch": 0.2, "learning_rate": 1.850922609518595e-05, "loss": 1.2854, "step": 343},
    {"epoch": 0.2, "learning_rate": 1.8499227366227605e-05, "loss": 1.3365, "step": 344},
    {"epoch": 0.2, "learning_rate": 1.8489197937624162e-05, "loss": 1.2573, "step": 345},
    {"epoch": 0.2, "learning_rate": 1.8479137845602426e-05, "loss": 1.2512, "step": 346},
    {"epoch": 0.2, "learning_rate": 1.8469047126499967e-05, "loss": 1.3111, "step": 347},
    {"epoch": 0.2, "learning_rate": 1.8458925816764983e-05, "loss": 1.2592, "step": 348},
    {"epoch": 0.2, "learning_rate": 1.8448773952956164e-05, "loss": 1.3152, "step": 349},
    {"epoch": 0.21, "learning_rate": 1.8438591571742556e-05, "loss": 1.3005, "step": 350},
    {"epoch": 0.21, "learning_rate": 1.8428378709903447e-05, "loss": 1.1925, "step": 351},
    {"epoch": 0.21, "learning_rate": 1.8418135404328218e-05, "loss": 1.3367, "step": 352},
    {"epoch": 0.21, "learning_rate": 1.8407861692016215e-05, "loss": 1.1738, "step": 353},
    {"epoch": 0.21, "learning_rate": 1.839755761007661e-05, "loss": 1.3063, "step": 354},
    {"epoch": 0.21, "learning_rate": 1.8387223195728276e-05, "loss": 1.279, "step": 355},
    {"epoch": 0.21, "learning_rate": 1.837685848629965e-05, "loss": 1.2847, "step": 356},
    {"epoch": 0.21, "learning_rate": 1.836646351922859e-05, "loss": 1.3267, "step": 357},
    {"epoch": 0.21, "learning_rate": 1.8356038332062258e-05, "loss": 1.3021, "step": 358},
    {"epoch": 0.21, "learning_rate": 1.8345582962456955e-05, "loss": 1.2832, "step": 359},
    {"epoch": 0.21, "learning_rate": 1.833509744817802e-05, "loss": 1.2527, "step": 360},
    {"epoch": 0.21, "learning_rate": 1.8324581827099665e-05, "loss": 1.2557, "step": 361},
    {"epoch": 0.21, "learning_rate": 1.8314036137204853e-05, "loss": 1.3615, "step": 362},
    {"epoch": 0.21, "learning_rate": 1.8303460416585155e-05, "loss": 1.3034, "step": 363},
    {"epoch": 0.21, "learning_rate": 1.829285470344062e-05, "loss": 1.2433, "step": 364},
    {"epoch": 0.21, "learning_rate": 1.828221903607962e-05, "loss": 1.3128, "step": 365},
    {"epoch": 0.21, "learning_rate": 1.827155345291874e-05, "loss": 1.3725, "step": 366},
    {"epoch": 0.22, "learning_rate": 1.826085799248261e-05, "loss": 1.2327, "step": 367},
    {"epoch": 0.22, "learning_rate": 1.825013269340378e-05, "loss": 1.3472, "step": 368},
    {"epoch": 0.22, "learning_rate": 1.823937759442258e-05, "loss": 1.3852, "step": 369},
    {"epoch": 0.22, "learning_rate": 1.8228592734386983e-05, "loss": 1.3841, "step": 370},
    {"epoch": 0.22, "learning_rate": 1.821777815225245e-05, "loss": 1.3298, "step": 371},
    {"epoch": 0.22, "learning_rate": 1.8206933887081817e-05, "loss": 1.2682, "step": 372},
    {"epoch": 0.22, "learning_rate": 1.8196059978045117e-05, "loss": 1.256, "step": 373},
    {"epoch": 0.22, "learning_rate": 1.8185156464419467e-05, "loss": 1.2935, "step": 374},
    {"epoch": 0.22, "learning_rate": 1.817422338558892e-05, "loss": 1.239, "step": 375},
    {"epoch": 0.22, "learning_rate": 1.8163260781044317e-05, "loss": 1.3453, "step": 376},
    {"epoch": 0.22, "learning_rate": 1.8152268690383146e-05, "loss": 1.288, "step": 377},
    {"epoch": 0.22, "learning_rate": 1.814124715330941e-05, "loss": 1.2275, "step": 378},
    {"epoch": 0.22, "learning_rate": 1.8130196209633455e-05, "loss": 1.3109, "step": 379},
    {"epoch": 0.22, "learning_rate": 1.8119115899271863e-05, "loss": 1.4089, "step": 380},
    {"epoch": 0.22, "learning_rate": 1.8108006262247282e-05, "loss": 1.3837, "step": 381},
    {"epoch": 0.22, "learning_rate": 1.809686733868829e-05, "loss": 1.2909, "step": 382},
    {"epoch": 0.22, "learning_rate": 1.808569916882925e-05, "loss": 1.2858, "step": 383},
    {"epoch": 0.23, "learning_rate": 1.8074501793010166e-05, "loss": 1.4052, "step": 384},
    {"epoch": 0.23, "learning_rate": 1.806327525167653e-05, "loss": 1.2526, "step": 385},
    {"epoch": 0.23, "learning_rate": 1.805201958537919e-05, "loss": 1.2896, "step": 386},
    {"epoch": 0.23, "learning_rate": 1.8040734834774186e-05, "loss": 1.3058, "step": 387},
    {"epoch": 0.23, "learning_rate": 1.8029421040622614e-05, "loss": 1.27, "step": 388},
    {"epoch": 0.23, "learning_rate": 1.8018078243790484e-05, "loss": 1.3524, "step": 389},
    {"epoch": 0.23, "learning_rate": 1.8006706485248558e-05, "loss": 1.3095, "step": 390},
    {"epoch": 0.23, "learning_rate": 1.799530580607221e-05, "loss": 1.2465, "step": 391},
    {"epoch": 0.23, "learning_rate": 1.7983876247441282e-05, "loss": 1.2959, "step": 392},
    {"epoch": 0.23, "learning_rate": 1.7972417850639916e-05, "loss": 1.3024, "step": 393},
    {"epoch": 0.23, "learning_rate": 1.796093065705644e-05, "loss": 1.3681, "step": 394},
    {"epoch": 0.23, "learning_rate": 1.794941470818318e-05, "loss": 1.4034, "step": 395},
    {"epoch": 0.23, "learning_rate": 1.7937870045616334e-05, "loss": 1.3187, "step": 396},
    {"epoch": 0.23, "learning_rate": 1.7926296711055818e-05, "loss": 1.2311, "step": 397},
    {"epoch": 0.23, "learning_rate": 1.7914694746305107e-05, "loss": 1.3033, "step": 398},
    {"epoch": 0.23, "learning_rate": 1.79030641932711e-05, "loss": 1.2861, "step": 399},
    {"epoch": 0.23, "learning_rate": 1.789140509396394e-05, "loss": 1.335, "step": 400},
    {"epoch": 0.24, "learning_rate": 1.7879717490496896e-05, "loss": 1.2853, "step": 401},
    {"epoch": 0.24, "learning_rate": 1.7868001425086193e-05, "loss": 1.261, "step": 402},
    {"epoch": 0.24, "learning_rate": 1.785625694005086e-05, "loss": 1.3158, "step": 403},
    {"epoch": 0.24, "learning_rate": 1.7844484077812575e-05, "loss": 1.2731, "step": 404},
    {"epoch": 0.24, "learning_rate": 1.7832682880895526e-05, "loss": 1.2974, "step": 405},
    {"epoch": 0.24, "learning_rate": 1.782085339192624e-05, "loss": 1.2916, "step": 406},
    {"epoch": 0.24, "learning_rate": 1.7808995653633433e-05, "loss": 1.2562, "step": 407},
    {"epoch": 0.24, "learning_rate": 1.779710970884787e-05, "loss": 1.2349, "step": 408},
    {"epoch": 0.24, "learning_rate": 1.778519560050219e-05, "loss": 1.3608, "step": 409},
    {"epoch": 0.24, "learning_rate": 1.7773253371630762e-05, "loss": 1.3262, "step": 410},
    {"epoch": 0.24, "learning_rate": 1.7761283065369533e-05, "loss": 1.2279, "step": 411},
    {"epoch": 0.24, "learning_rate": 1.774928472495586e-05, "loss": 1.3138, "step": 412},
    {"epoch": 0.24, "learning_rate": 1.7737258393728363e-05, "loss": 1.273, "step": 413},
    {"epoch": 0.24, "learning_rate": 1.7725204115126772e-05, "loss": 1.2071, "step": 414},
    {"epoch": 0.24, "learning_rate": 1.7713121932691755e-05, "loss": 1.2668, "step": 415},
    {"epoch": 0.24, "learning_rate": 1.7701011890064778e-05, "loss": 1.1902, "step": 416},
    {"epoch": 0.24, "learning_rate": 1.768887403098793e-05, "loss": 1.3153, "step": 417},
    {"epoch": 0.25, "learning_rate": 1.7676708399303784e-05, "loss": 1.2884, "step": 418},
    {"epoch": 0.25, "learning_rate": 1.766451503895523e-05, "loss": 1.2719, "step": 419},
    {"epoch": 0.25, "learning_rate": 1.7652293993985304e-05, "loss": 1.2502, "step": 420},
    {"epoch": 0.25, "learning_rate": 1.764004530853705e-05, "loss": 1.2796, "step": 421},
    {"epoch": 0.25, "learning_rate": 1.762776902685335e-05, "loss": 1.2544, "step": 422},
    {"epoch": 0.25, "learning_rate": 1.761546519327676e-05, "loss": 1.2752, "step": 423},
    {"epoch": 0.25, "learning_rate": 1.7603133852249357e-05, "loss": 1.2869, "step": 424},
    {"epoch": 0.25, "learning_rate": 1.7590775048312577e-05, "loss": 1.309, "step": 425},
    {"epoch": 0.25, "learning_rate": 1.757838882610706e-05, "loss": 1.2327, "step": 426},
    {"epoch": 0.25, "learning_rate": 1.7565975230372473e-05, "loss": 1.2557, "step": 427},
    {"epoch": 0.25, "learning_rate": 1.7553534305947352e-05, "loss": 1.2468, "step": 428},
    {"epoch": 0.25, "learning_rate": 1.7541066097768965e-05, "loss": 1.2354, "step": 429},
    {"epoch": 0.25, "learning_rate": 1.7528570650873115e-05, "loss": 1.2976, "step": 430},
    {"epoch": 0.25, "learning_rate": 1.7516048010393993e-05, "loss": 1.304, "step": 431},
    {"epoch": 0.25, "learning_rate": 1.7503498221564026e-05, "loss": 1.2967, "step": 432},
    {"epoch": 0.25, "learning_rate": 1.749092132971369e-05, "loss": 1.1766, "step": 433},
    {"epoch": 0.25, "learning_rate": 1.7478317380271367e-05, "loss": 1.2731, "step": 434},
    {"epoch": 0.26, "learning_rate": 1.746568641876316e-05, "loss": 1.2995, "step": 435},
    {"epoch": 0.26, "learning_rate": 1.7453028490812764e-05, "loss": 1.3078, "step": 436},
    {"epoch": 0.26, "learning_rate": 1.744034364214125e-05, "loss": 1.2704, "step": 437},
    {"epoch": 0.26, "learning_rate": 1.742763191856695e-05, "loss": 1.3429, "step": 438},
    {"epoch": 0.26, "learning_rate": 1.7414893366005263e-05, "loss": 1.2426, "step": 439},
    {"epoch": 0.26, "learning_rate": 1.7402128030468487e-05, "loss": 1.2139, "step": 440},
    {"epoch": 0.26, "learning_rate": 1.738933595806567e-05, "loss": 1.2751, "step": 441},
    {"epoch": 0.26, "learning_rate": 1.737651719500244e-05, "loss": 1.2601, "step": 442},
    {"epoch": 0.26, "learning_rate": 1.7363671787580818e-05, "loss": 1.2236, "step": 443},
    {"epoch": 0.26, "learning_rate": 1.735079978219908e-05, "loss": 1.2135, "step": 444},
    {"epoch": 0.26, "learning_rate": 1.733790122535157e-05, "loss": 1.2746, "step": 445},
    {"epoch": 0.26, "learning_rate": 1.7324976163628533e-05, "loss": 1.1513, "step": 446},
    {"epoch": 0.26, "learning_rate": 1.7312024643715958e-05, "loss": 1.2476, "step": 447},
    {"epoch": 0.26, "learning_rate": 1.7299046712395394e-05, "loss": 1.2501, "step": 448},
    {"epoch": 0.26, "learning_rate": 1.72860424165438e-05, "loss": 1.2874, "step": 449},
    {"epoch": 0.26, "learning_rate": 1.7273011803133356e-05, "loss": 1.2876, "step": 450},
    {"epoch": 0.26, "learning_rate": 1.725995491923131e-05, "loss": 1.2822, "step": 451},
    {"epoch": 0.27, "learning_rate": 1.7246871811999793e-05, "loss": 1.2649, "step": 452},
    {"epoch": 0.27, "learning_rate": 1.723376252869566e-05, "loss": 1.2315, "step": 453},
    {"epoch": 0.27, "learning_rate": 1.7220627116670314e-05, "loss": 1.2017, "step": 454},
    {"epoch": 0.27, "learning_rate": 1.7207465623369535e-05, "loss": 1.2219, "step": 455},
    {"epoch": 0.27, "learning_rate": 1.719427809633332e-05, "loss": 1.319, "step": 456},
    {"epoch": 0.27, "learning_rate": 1.718106458319568e-05, "loss": 1.3039, "step": 457},
    {"epoch": 0.27, "learning_rate": 1.7167825131684516e-05, "loss": 1.351, "step": 458},
    {"epoch": 0.27, "learning_rate": 1.7154559789621394e-05, "loss": 1.3211, "step": 459},
    {"epoch": 0.27, "learning_rate": 1.7141268604921414e-05, "loss": 1.2843, "step": 460},
    {"epoch": 0.27, "learning_rate": 1.712795162559302e-05, "loss": 1.2754, "step": 461},
    {"epoch": 0.27, "learning_rate": 1.711460889973782e-05, "loss": 1.2443, "step": 462},
    {"epoch": 0.27, "learning_rate": 1.7101240475550427e-05, "loss": 1.3022, "step": 463},
    {"epoch": 0.27, "learning_rate": 1.7087846401318273e-05, "loss": 1.2614, "step": 464},
    {"epoch": 0.27, "learning_rate": 1.7074426725421444e-05, "loss": 1.2902, "step": 465},
    {"epoch": 0.27, "learning_rate": 1.706098149633249e-05, "loss": 1.2225, "step": 466},
    {"epoch": 0.27, "learning_rate": 1.7047510762616275e-05, "loss": 1.2784, "step": 467},
    {"epoch": 0.27, "learning_rate": 1.7034014572929777e-05, "loss": 1.3031, "step": 468},
    {"epoch": 0.28, "learning_rate": 1.702049297602193e-05, "loss": 1.2359, "step": 469},
    {"epoch": 0.28, "learning_rate": 1.7006946020733426e-05, "loss": 1.261, "step": 470},
    {"epoch": 0.28, "learning_rate": 1.6993373755996572e-05, "loss": 1.3043, "step": 471},
    {"epoch": 0.28, "learning_rate": 1.6979776230835076e-05, "loss": 1.2774, "step": 472},
    {"epoch": 0.28, "learning_rate": 1.69661534943639e-05, "loss": 1.3224, "step": 473},
    {"epoch": 0.28, "learning_rate": 1.695250559578907e-05, "loss": 1.3853, "step": 474},
    {"epoch": 0.28, "learning_rate": 1.6938832584407494e-05, "loss": 1.3034, "step": 475},
    {"epoch": 0.28, "learning_rate": 1.6925134509606786e-05, "loss": 1.1885, "step": 476},
    {"epoch": 0.28, "learning_rate": 1.6911411420865097e-05, "loss": 1.309, "step": 477},
    {"epoch": 0.28, "learning_rate": 1.689766336775093e-05, "loss": 1.3596, "step": 478},
    {"epoch": 0.28, "learning_rate": 1.6883890399922952e-05, "loss": 1.2906, "step": 479},
    {"epoch": 0.28, "learning_rate": 1.6870092567129837e-05, "loss": 1.2529, "step": 480},
    {"epoch": 0.28, "learning_rate": 1.6856269919210056e-05, "loss": 1.2574, "step": 481},
    {"epoch": 0.28, "learning_rate": 1.6842422506091727e-05, "loss": 1.2816, "step": 482},
    {"epoch": 0.28, "learning_rate": 1.6828550377792415e-05, "loss": 1.2797, "step": 483},
    {"epoch": 0.28, "learning_rate": 1.6814653584418966e-05, "loss": 1.2126, "step": 484},
    {"epoch": 0.28, "learning_rate": 1.6800732176167296e-05, "loss": 1.1748, "step": 485},
    {"epoch": 0.29, "learning_rate": 1.6786786203322255e-05, "loss": 1.2303, "step": 486},
    {"epoch": 0.29, "learning_rate": 1.6772815716257414e-05, "loss": 1.3059, "step": 487},
    {"epoch": 0.29, "learning_rate": 1.675882076543488e-05, "loss": 1.235, "step": 488},
    {"epoch": 0.29, "learning_rate": 1.6744801401405138e-05, "loss": 1.1984, "step": 489},
    {"epoch": 0.29, "learning_rate": 1.6730757674806858e-05, "loss": 1.1858, "step": 490},
    {"epoch": 0.29, "learning_rate": 1.6716689636366687e-05, "loss": 1.23, "step": 491},
    {"epoch": 0.29, "learning_rate": 1.6702597336899105e-05, "loss": 1.2098, "step": 492},
    {"epoch": 0.29, "learning_rate": 1.6688480827306224e-05, "loss": 1.2429, "step": 493},
    {"epoch": 0.29, "learning_rate": 1.66743401585776e-05, "loss": 1.2814, "step": 494},
    {"epoch": 0.29, "learning_rate": 1.666017538179005e-05, "loss": 1.1957, "step": 495},
    {"epoch": 0.29, "learning_rate": 1.664598654810748e-05, "loss": 1.2829, "step": 496},
    {"epoch": 0.29, "learning_rate": 1.663177370878068e-05, "loss": 1.301, "step": 497},
    {"epoch": 0.29, "learning_rate": 1.6617536915147156e-05, "loss": 1.2611, "step": 498},
    {"epoch": 0.29, "learning_rate": 1.6603276218630932e-05, "loss": 1.3957, "step": 499},
    {"epoch": 0.29, "learning_rate": 1.6588991670742384e-05, "loss": 1.2579, "step": 500},
    {"epoch": 0.29, "learning_rate": 1.657468332307803e-05, "loss": 1.2808, "step": 501},
    {"epoch": 0.29, "learning_rate": 1.656035122732035e-05, "loss": 1.33, "step": 502},
    {"epoch": 0.3, "learning_rate": 1.654599543523761e-05, "loss": 1.2839, "step": 503},
    {"epoch": 0.3, "learning_rate": 1.653161599868367e-05, "loss": 1.2799, "step": 504},
    {"epoch": 0.3, "learning_rate": 1.6517212969597793e-05, "loss": 1.2972, "step": 505},
    {"epoch": 0.3, "learning_rate": 1.6502786400004454e-05, "loss": 1.3233, "step": 506},
    {"epoch": 0.3, "learning_rate": 1.6488336342013166e-05, "loss": 1.2354, "step": 507},
    {"epoch": 0.3, "learning_rate": 1.647386284781828e-05, "loss": 1.2814, "step": 508},
    {"epoch": 0.3, "learning_rate": 1.6459365969698796e-05, "loss": 1.259, "step": 509},
    {"epoch": 0.3, "learning_rate": 1.644484576001818e-05, "loss": 1.305, "step": 510},
    {"epoch": 0.3, "learning_rate": 1.6430302271224178e-05, "loss": 1.268, "step": 511},
    {"epoch": 0.3, "learning_rate": 1.641573555584862e-05, "loss": 1.313, "step": 512},
    {"epoch": 0.3, "learning_rate": 1.6401145666507225e-05, "loss": 1.2821, "step": 513},
    {"epoch": 0.3, "learning_rate": 1.6386532655899418e-05, "loss": 1.3113, "step": 514},
    {"epoch": 0.3, "learning_rate": 1.6371896576808147e-05, "loss": 1.2607, "step": 515},
    {"epoch": 0.3, "learning_rate": 1.6357237482099682e-05, "loss": 1.2905, "step": 516},
    {"epoch": 0.3, "learning_rate": 1.634255542472342e-05, "loss": 1.2955, "step": 517},
    {"epoch": 0.3, "learning_rate": 1.632785045771171e-05, "loss": 1.2592, "step": 518},
    {"epoch": 0.3, "learning_rate": 1.631312263417964e-05, "loss": 1.2158, "step": 519},
    {"epoch": 0.3, "learning_rate": 1.6298372007324873e-05, "loss": 1.2978, "step": 520},
    {"epoch": 0.31, "learning_rate": 1.628359863042742e-05, "loss": 1.3465, "step": 521},
    {"epoch": 0.31, "learning_rate": 1.6268802556849486e-05, "loss": 1.2226, "step": 522},
    {"epoch": 0.31, "learning_rate": 1.6253983840035243e-05, "loss": 1.1759, "step": 523},
    {"epoch": 0.31, "learning_rate": 1.623914253351065e-05, "loss": 1.3524, "step": 524},
    {"epoch": 0.31, "learning_rate": 1.6224278690883277e-05, "loss": 1.2554, "step": 525},
    {"epoch": 0.31, "learning_rate": 1.620939236584208e-05, "loss": 1.3068, "step": 526},
    {"epoch": 0.31, "learning_rate": 1.6194483612157232e-05, "loss": 1.1915, "step": 527},
    {"epoch": 0.31, "learning_rate": 1.617955248367991e-05, "loss": 1.2537, "step": 528},
    {"epoch": 0.31, "learning_rate": 1.6164599034342122e-05, "loss": 1.3189, "step": 529},
    {"epoch": 0.31, "learning_rate": 1.6149623318156493e-05, "loss": 1.2137, "step": 530},
    {"epoch": 0.31, "learning_rate": 1.6134625389216076e-05, "loss": 1.2993, "step": 531},
    {"epoch": 0.31, "learning_rate": 1.6119605301694156e-05, "loss": 1.3054, "step": 532},
    {"epoch": 0.31, "learning_rate": 1.6104563109844063e-05, "loss": 1.3151, "step": 533},
    {"epoch": 0.31, "learning_rate": 1.608949886799897e-05, "loss": 1.3125, "step": 534},
    {"epoch": 0.31, "learning_rate": 1.6074412630571685e-05, "loss": 1.2791, "step": 535},
    {"epoch": 0.31, "learning_rate": 1.6059304452054475e-05, "loss": 1.2114, "step": 536},
    {"epoch": 0.31, "learning_rate": 1.604417438701885e-05, "loss": 1.3071, "step": 537},
    {
      "epoch": 0.32,
      "learning_rate": 1.6029022490115383e-05,
|
"loss": 1.2292, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.6013848816073507e-05, |
|
"loss": 1.2529, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5998653419701306e-05, |
|
"loss": 1.2233, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5983436355885333e-05, |
|
"loss": 1.2568, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5968197679590406e-05, |
|
"loss": 1.2929, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5952937445859396e-05, |
|
"loss": 1.2198, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.593765570981306e-05, |
|
"loss": 1.2318, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5922352526649803e-05, |
|
"loss": 1.1666, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.590702795164551e-05, |
|
"loss": 1.236, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.589168204015334e-05, |
|
"loss": 1.3213, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5876314847603506e-05, |
|
"loss": 1.2291, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5860926429503093e-05, |
|
"loss": 1.2891, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.584551684143586e-05, |
|
"loss": 1.1453, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5830086139062025e-05, |
|
"loss": 1.2638, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5814634378118085e-05, |
|
"loss": 1.2881, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.579916161441658e-05, |
|
"loss": 1.2684, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5783667903845943e-05, |
|
"loss": 1.2407, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.576815330237024e-05, |
|
"loss": 1.2362, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5752617866029005e-05, |
|
"loss": 1.3086, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5737061650937033e-05, |
|
"loss": 1.3395, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5721484713284173e-05, |
|
"loss": 1.3664, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5705887109335124e-05, |
|
"loss": 1.2502, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.569026889542923e-05, |
|
"loss": 1.3197, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5674630127980278e-05, |
|
"loss": 1.2253, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5658970863476298e-05, |
|
"loss": 1.2318, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.564329115847936e-05, |
|
"loss": 1.2364, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.562759106962536e-05, |
|
"loss": 1.3453, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5611870653623826e-05, |
|
"loss": 1.1937, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5596129967257704e-05, |
|
"loss": 1.2392, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.558036906738316e-05, |
|
"loss": 1.1775, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5564588010929375e-05, |
|
"loss": 1.2464, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.554878685489834e-05, |
|
"loss": 1.1942, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5532965656364627e-05, |
|
"loss": 1.2793, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.551712447247523e-05, |
|
"loss": 1.1996, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5501263360449303e-05, |
|
"loss": 1.2649, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.548538237757801e-05, |
|
"loss": 1.2232, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5469481581224274e-05, |
|
"loss": 1.238, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5453561028822573e-05, |
|
"loss": 1.2528, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5437620777878776e-05, |
|
"loss": 1.2723, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5421660885969875e-05, |
|
"loss": 1.0972, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5405681410743825e-05, |
|
"loss": 1.2458, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5389682409919308e-05, |
|
"loss": 1.2331, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5373663941285536e-05, |
|
"loss": 1.2441, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5357626062702035e-05, |
|
"loss": 1.3371, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5341568832098447e-05, |
|
"loss": 1.2398, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5325492307474314e-05, |
|
"loss": 1.2584, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.530939654689887e-05, |
|
"loss": 1.2923, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.529328160851082e-05, |
|
"loss": 1.2247, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5277147550518156e-05, |
|
"loss": 1.2918, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5260994431197916e-05, |
|
"loss": 1.289, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5244822308896001e-05, |
|
"loss": 1.216, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.522863124202694e-05, |
|
"loss": 1.1868, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5212421289073703e-05, |
|
"loss": 1.2235, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5196192508587469e-05, |
|
"loss": 1.2868, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5179944959187423e-05, |
|
"loss": 1.2517, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5163678699560549e-05, |
|
"loss": 1.2021, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5147393788461411e-05, |
|
"loss": 1.1528, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5131090284711942e-05, |
|
"loss": 1.2786, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5114768247201235e-05, |
|
"loss": 1.2786, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5098427734885324e-05, |
|
"loss": 1.1867, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.508206880678698e-05, |
|
"loss": 1.2849, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.506569152199549e-05, |
|
"loss": 1.2415, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5049295939666445e-05, |
|
"loss": 1.2351, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.503288211902153e-05, |
|
"loss": 1.3409, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5016450119348309e-05, |
|
"loss": 1.2582, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 1.2702, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.4983531820395287e-05, |
|
"loss": 1.2479, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.4967045640018074e-05, |
|
"loss": 1.1806, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4950541518417292e-05, |
|
"loss": 1.1697, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4934019515206675e-05, |
|
"loss": 1.298, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4917479690064545e-05, |
|
"loss": 1.3408, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4900922102733596e-05, |
|
"loss": 1.2368, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4884346813020685e-05, |
|
"loss": 1.2021, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4867753880796607e-05, |
|
"loss": 1.2987, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4851143365995886e-05, |
|
"loss": 1.2617, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4834515328616555e-05, |
|
"loss": 1.2645, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4817869828719933e-05, |
|
"loss": 1.2312, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4801206926430425e-05, |
|
"loss": 1.266, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4784526681935282e-05, |
|
"loss": 1.2838, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4767829155484407e-05, |
|
"loss": 1.3501, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4751114407390122e-05, |
|
"loss": 1.2235, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4734382498026946e-05, |
|
"loss": 1.1788, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4717633487831407e-05, |
|
"loss": 1.2717, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.470086743730178e-05, |
|
"loss": 1.2021, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4684084406997903e-05, |
|
"loss": 1.3221, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.466728445754094e-05, |
|
"loss": 1.2328, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4650467649613168e-05, |
|
"loss": 1.2738, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4633634043957767e-05, |
|
"loss": 1.2546, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4616783701378573e-05, |
|
"loss": 1.226, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4599916682739898e-05, |
|
"loss": 1.2956, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4583033048966273e-05, |
|
"loss": 1.2365, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.456613286104225e-05, |
|
"loss": 1.2986, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4549216180012175e-05, |
|
"loss": 1.1944, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4532283066979967e-05, |
|
"loss": 1.3352, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4515333583108896e-05, |
|
"loss": 1.2509, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4498367789621369e-05, |
|
"loss": 1.2776, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4481385747798705e-05, |
|
"loss": 1.2736, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4464387518980905e-05, |
|
"loss": 1.2825, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.444737316456645e-05, |
|
"loss": 1.2081, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4430342746012049e-05, |
|
"loss": 1.2581, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.441329632483246e-05, |
|
"loss": 1.2339, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4396233962600223e-05, |
|
"loss": 1.2929, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4379155720945464e-05, |
|
"loss": 1.3022, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4362061661555675e-05, |
|
"loss": 1.2035, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.434495184617547e-05, |
|
"loss": 1.1836, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4327826336606383e-05, |
|
"loss": 1.2184, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4310685194706626e-05, |
|
"loss": 1.2189, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.429352848239089e-05, |
|
"loss": 1.2939, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4276356261630096e-05, |
|
"loss": 1.3086, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4259168594451185e-05, |
|
"loss": 1.2081, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4241965542936894e-05, |
|
"loss": 1.1539, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4224747169225527e-05, |
|
"loss": 1.3016, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4207513535510736e-05, |
|
"loss": 1.2983, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4190264704041284e-05, |
|
"loss": 1.2656, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4173000737120839e-05, |
|
"loss": 1.2733, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.415572169710773e-05, |
|
"loss": 1.2133, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4138427646414746e-05, |
|
"loss": 1.3235, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4121118647508872e-05, |
|
"loss": 1.3324, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4103794762911106e-05, |
|
"loss": 1.2721, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4086456055196208e-05, |
|
"loss": 1.2735, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.406910258699248e-05, |
|
"loss": 1.2134, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4051734420981537e-05, |
|
"loss": 1.2503, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4034351619898088e-05, |
|
"loss": 1.3149, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4016954246529697e-05, |
|
"loss": 1.1784, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3999542363716571e-05, |
|
"loss": 1.2769, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3982116034351325e-05, |
|
"loss": 1.2285, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3964675321378756e-05, |
|
"loss": 1.266, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3947220287795611e-05, |
|
"loss": 1.2178, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3929750996650363e-05, |
|
"loss": 1.1795, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3912267511042994e-05, |
|
"loss": 1.272, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3894769894124748e-05, |
|
"loss": 1.2893, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3877258209097913e-05, |
|
"loss": 1.2912, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3859732519215596e-05, |
|
"loss": 1.2324, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3842192887781487e-05, |
|
"loss": 1.3233, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3824639378149634e-05, |
|
"loss": 1.2629, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.380707205372421e-05, |
|
"loss": 1.2688, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3789490977959297e-05, |
|
"loss": 1.2448, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3771896214358642e-05, |
|
"loss": 1.203, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3754287826475428e-05, |
|
"loss": 1.2845, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3736665877912063e-05, |
|
"loss": 1.2954, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3719030432319925e-05, |
|
"loss": 1.1856, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3701381553399147e-05, |
|
"loss": 1.1456, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3683719304898382e-05, |
|
"loss": 1.2641, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3666043750614584e-05, |
|
"loss": 1.2101, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3648354954392758e-05, |
|
"loss": 1.2427, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.363065298012574e-05, |
|
"loss": 1.162, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3612937891753974e-05, |
|
"loss": 1.15, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3595209753265266e-05, |
|
"loss": 1.1676, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3577468628694566e-05, |
|
"loss": 1.2021, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3559714582123713e-05, |
|
"loss": 1.2588, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3541947677681246e-05, |
|
"loss": 1.2986, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3524167979542129e-05, |
|
"loss": 1.2277, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3506375551927546e-05, |
|
"loss": 1.1577, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3488570459104656e-05, |
|
"loss": 1.3042, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3470752765386368e-05, |
|
"loss": 1.2314, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3452922535131108e-05, |
|
"loss": 1.2318, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3435079832742577e-05, |
|
"loss": 1.2922, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3417224722669534e-05, |
|
"loss": 1.2156, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.339935726940555e-05, |
|
"loss": 1.2158, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.338147753748879e-05, |
|
"loss": 1.1798, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3363585591501751e-05, |
|
"loss": 1.2187, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3345681496071068e-05, |
|
"loss": 1.2265, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3327765315867253e-05, |
|
"loss": 1.2686, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.330983711560446e-05, |
|
"loss": 1.2982, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3291896960040271e-05, |
|
"loss": 1.2733, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3273944913975452e-05, |
|
"loss": 1.2086, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.325598104225371e-05, |
|
"loss": 1.2497, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3238005409761468e-05, |
|
"loss": 1.3052, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3220018081427637e-05, |
|
"loss": 1.2156, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3202019122223373e-05, |
|
"loss": 1.1905, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.318400859716183e-05, |
|
"loss": 1.2648, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3165986571297953e-05, |
|
"loss": 1.2634, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3147953109728224e-05, |
|
"loss": 1.2507, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3129908277590433e-05, |
|
"loss": 1.2471, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3111852140063435e-05, |
|
"loss": 1.2626, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3093784762366923e-05, |
|
"loss": 1.1367, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3075706209761201e-05, |
|
"loss": 1.1657, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.305761654754692e-05, |
|
"loss": 1.1931, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3039515841064867e-05, |
|
"loss": 1.3001, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3021404155695728e-05, |
|
"loss": 1.3272, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3003281556859837e-05, |
|
"loss": 1.2923, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2985148110016947e-05, |
|
"loss": 1.2214, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2967003880666001e-05, |
|
"loss": 1.2456, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2948848934344885e-05, |
|
"loss": 1.1679, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2930683336630199e-05, |
|
"loss": 1.3211, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2912507153137009e-05, |
|
"loss": 1.1969, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2894320449518624e-05, |
|
"loss": 1.3251, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2876123291466351e-05, |
|
"loss": 1.3732, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2857915744709255e-05, |
|
"loss": 1.1665, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.283969787501393e-05, |
|
"loss": 1.3549, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.282146974818426e-05, |
|
"loss": 1.2192, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.280323143006117e-05, |
|
"loss": 1.1423, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2784982986522399e-05, |
|
"loss": 1.2578, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2766724483482257e-05, |
|
"loss": 1.2487, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2748455986891406e-05, |
|
"loss": 1.2433, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2730177562736583e-05, |
|
"loss": 1.3278, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2711889277040392e-05, |
|
"loss": 1.2566, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2693591195861065e-05, |
|
"loss": 1.3571, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2675283385292212e-05, |
|
"loss": 1.2008, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2656965911462573e-05, |
|
"loss": 1.2369, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2638638840535808e-05, |
|
"loss": 1.1752, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2620302238710238e-05, |
|
"loss": 1.2576, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.260195617221861e-05, |
|
"loss": 1.2434, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2583600707327854e-05, |
|
"loss": 1.258, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2565235910338852e-05, |
|
"loss": 1.2171, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2546861847586194e-05, |
|
"loss": 1.199, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2528478585437935e-05, |
|
"loss": 1.2697, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2510086190295358e-05, |
|
"loss": 1.2465, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2491684728592744e-05, |
|
"loss": 1.231, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2473274266797111e-05, |
|
"loss": 1.2877, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2454854871407993e-05, |
|
"loss": 1.2193, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2436426608957191e-05, |
|
"loss": 1.2549, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2417989546008533e-05, |
|
"loss": 1.2219, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.239954374915764e-05, |
|
"loss": 1.2328, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2381089285031666e-05, |
|
"loss": 1.2262, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2362626220289094e-05, |
|
"loss": 1.2424, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.234415462161945e-05, |
|
"loss": 1.2034, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2325674555743106e-05, |
|
"loss": 1.2005, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2307186089410999e-05, |
|
"loss": 1.2084, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2288689289404424e-05, |
|
"loss": 1.2174, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2270184222534767e-05, |
|
"loss": 1.2185, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2251670955643277e-05, |
|
"loss": 1.1254, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2233149555600827e-05, |
|
"loss": 1.189, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2214620089307662e-05, |
|
"loss": 1.1525, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2196082623693169e-05, |
|
"loss": 1.2937, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2177537225715615e-05, |
|
"loss": 1.2662, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2158983962361938e-05, |
|
"loss": 1.2588, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2140422900647473e-05, |
|
"loss": 1.233, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2121854107615726e-05, |
|
"loss": 1.2085, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2103277650338128e-05, |
|
"loss": 1.1888, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.20846935959138e-05, |
|
"loss": 1.2574, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2066102011469295e-05, |
|
"loss": 1.2228, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2047502964158373e-05, |
|
"loss": 1.32, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2028896521161746e-05, |
|
"loss": 1.2702, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2010282749686839e-05, |
|
"loss": 1.2662, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.199166171696755e-05, |
|
"loss": 1.1495, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1973033490264e-05, |
|
"loss": 1.1424, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1954398136862308e-05, |
|
"loss": 1.218, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1935755724074318e-05, |
|
"loss": 1.2007, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1917106319237386e-05, |
|
"loss": 1.338, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1898449989714116e-05, |
|
"loss": 1.2662, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1879786802892128e-05, |
|
"loss": 1.1688, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1861116826183807e-05, |
|
"loss": 1.2257, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.184244012702607e-05, |
|
"loss": 1.2185, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.182375677288011e-05, |
|
"loss": 1.2784, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1805066831231165e-05, |
|
"loss": 1.2824, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1786370369588257e-05, |
|
"loss": 1.1608, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1767667455483964e-05, |
|
"loss": 1.0812, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1748958156474176e-05, |
|
"loss": 1.1231, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1730242540137835e-05, |
|
"loss": 1.2081, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1711520674076711e-05, |
|
"loss": 1.2568, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.169279262591514e-05, |
|
"loss": 1.1896, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1674058463299798e-05, |
|
"loss": 1.2384, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1655318253899436e-05, |
|
"loss": 1.2358, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1636572065404657e-05, |
|
"loss": 1.2163, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.161781996552765e-05, |
|
"loss": 1.1862, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1599062022001964e-05, |
|
"loss": 1.2108, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1580298302582249e-05, |
|
"loss": 1.1687, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1561528875044026e-05, |
|
"loss": 1.2398, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1542753807183429e-05, |
|
"loss": 1.2229, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1523973166816967e-05, |
|
"loss": 1.1675, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.150518702178127e-05, |
|
"loss": 1.1555, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1486395439932869e-05, |
|
"loss": 1.2433, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1467598489147911e-05, |
|
"loss": 1.2203, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1448796237321949e-05, |
|
"loss": 1.1818, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1429988752369681e-05, |
|
"loss": 1.2378, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1411176102224706e-05, |
|
"loss": 1.1441, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1392358354839286e-05, |
|
"loss": 1.2706, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1373535578184083e-05, |
|
"loss": 1.2184, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1354707840247936e-05, |
|
"loss": 1.1776, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1335875209037598e-05, |
|
"loss": 1.169, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.13170377525775e-05, |
|
"loss": 1.1499, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1298195538909498e-05, |
|
"loss": 1.2536, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1279348636092634e-05, |
|
"loss": 1.1911, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1260497112202895e-05, |
|
"loss": 1.2602, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1241641035332944e-05, |
|
"loss": 1.2997, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1222780473591902e-05, |
|
"loss": 1.1709, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.120391549510508e-05, |
|
"loss": 1.2243, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1185046168013752e-05, |
|
"loss": 1.177, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1166172560474894e-05, |
|
"loss": 1.274, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1147294740660943e-05, |
|
"loss": 1.2195, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1128412776759556e-05, |
|
"loss": 1.2317, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.110952673697335e-05, |
|
"loss": 1.2189, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1090636689519673e-05, |
|
"loss": 1.3025, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1071742702630346e-05, |
|
"loss": 1.2653, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1052844844551416e-05, |
|
"loss": 1.0334, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1033943183542914e-05, |
|
"loss": 1.2577, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.101503778787861e-05, |
|
"loss": 1.283, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.0996128725845764e-05, |
|
"loss": 1.2929, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.097721606574488e-05, |
|
"loss": 1.1483, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0958299875889451e-05, |
|
"loss": 1.202, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0939380224605727e-05, |
|
"loss": 1.1862, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.092045718023246e-05, |
|
"loss": 1.2189, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0901530811120655e-05, |
|
"loss": 1.2134, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0882601185633326e-05, |
|
"loss": 1.2334, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0863668372145255e-05, |
|
"loss": 1.3247, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0844732439042736e-05, |
|
"loss": 1.1771, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0825793454723325e-05, |
|
"loss": 1.1352, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.080685148759561e-05, |
|
"loss": 1.2442, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0787906606078942e-05, |
|
"loss": 1.1536, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.076895887860321e-05, |
|
"loss": 1.2705, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0750008373608571e-05, |
|
"loss": 1.2136, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.073105515954523e-05, |
|
"loss": 1.2627, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0712099304873161e-05, |
|
"loss": 1.3146, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0693140878061886e-05, |
|
"loss": 1.2138, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0674179947590213e-05, |
|
"loss": 1.272, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0655216581945997e-05, |
|
"loss": 1.294, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0636250849625886e-05, |
|
"loss": 1.2523, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0617282819135077e-05, |
|
"loss": 1.2058, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0598312558987072e-05, |
|
"loss": 1.2699, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0579340137703422e-05, |
|
"loss": 1.2246, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0560365623813485e-05, |
|
"loss": 1.3123, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0541389085854177e-05, |
|
"loss": 1.2159, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0522410592369727e-05, |
|
"loss": 1.2068, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.050343021191143e-05, |
|
"loss": 1.2428, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.048444801303739e-05, |
|
"loss": 1.1799, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.046546406431228e-05, |
|
"loss": 1.172, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0446478434307103e-05, |
|
"loss": 1.2055, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0427491191598926e-05, |
|
"loss": 1.2507, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0408502404770641e-05, |
|
"loss": 1.2187, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0389512142410723e-05, |
|
"loss": 1.2031, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.037052047311297e-05, |
|
"loss": 1.1799, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.035152746547627e-05, |
|
"loss": 1.0826, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0332533188104332e-05, |
|
"loss": 1.2806, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.031353770960547e-05, |
|
"loss": 1.248, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0294541098592323e-05, |
|
"loss": 1.2178, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0275543423681622e-05, |
|
"loss": 1.268, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0256544753493945e-05, |
|
"loss": 1.1488, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0237545156653461e-05, |
|
"loss": 1.2169, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0218544701787692e-05, |
|
"loss": 1.1846, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0199543457527247e-05, |
|
"loss": 1.2055, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0180541492505605e-05, |
|
"loss": 1.2382, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0161538875358833e-05, |
|
"loss": 1.2319, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.014253567472536e-05, |
|
"loss": 1.2129, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0123531959245721e-05, |
|
"loss": 1.2282, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0104527797562308e-05, |
|
"loss": 1.202, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0085523258319131e-05, |
|
"loss": 1.1793, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.006651841016156e-05, |
|
"loss": 1.0981, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0047513321736077e-05, |
|
"loss": 1.2196, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0028508061690044e-05, |
|
"loss": 1.2868, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0009502698671427e-05, |
|
"loss": 1.2412, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.990497301328577e-06, |
|
"loss": 1.1552, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.971491938309963e-06, |
|
"loss": 1.2188, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.952486678263925e-06, |
|
"loss": 1.2852, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.933481589838442e-06, |
|
"loss": 1.2148, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.914476741680872e-06, |
|
"loss": 1.1657, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.895472202437695e-06, |
|
"loss": 1.2864, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.876468040754282e-06, |
|
"loss": 1.2121, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.857464325274643e-06, |
|
"loss": 1.2146, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.83846112464117e-06, |
|
"loss": 1.2107, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.819458507494395e-06, |
|
"loss": 1.2103, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.800456542472754e-06, |
|
"loss": 1.3071, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.781455298212313e-06, |
|
"loss": 1.0941, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.762454843346544e-06, |
|
"loss": 1.2122, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.743455246506059e-06, |
|
"loss": 1.2052, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.724456576318383e-06, |
|
"loss": 1.3103, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.70545890140768e-06, |
|
"loss": 1.0848, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.686462290394533e-06, |
|
"loss": 1.2722, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.667466811895668e-06, |
|
"loss": 1.2338, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.648472534523735e-06, |
|
"loss": 1.2152, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.629479526887032e-06, |
|
"loss": 1.1857, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.61048785758928e-06, |
|
"loss": 1.2306, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.59149759522936e-06, |
|
"loss": 1.1871, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.572508808401078e-06, |
|
"loss": 1.1799, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.553521565692902e-06, |
|
"loss": 1.202, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.534535935687723e-06, |
|
"loss": 1.2059, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.515551986962614e-06, |
|
"loss": 1.2164, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.496569788088575e-06, |
|
"loss": 1.2132, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.477589407630275e-06, |
|
"loss": 1.2383, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.458610914145826e-06, |
|
"loss": 1.212, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.439634376186519e-06, |
|
"loss": 1.3587, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.42065986229658e-06, |
|
"loss": 1.2435, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.401687441012928e-06, |
|
"loss": 1.2469, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.382717180864925e-06, |
|
"loss": 1.1605, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.363749150374115e-06, |
|
"loss": 1.1545, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.344783418054005e-06, |
|
"loss": 1.1999, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.32582005240979e-06, |
|
"loss": 1.2314, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.306859121938116e-06, |
|
"loss": 1.2782, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.287900695126844e-06, |
|
"loss": 1.1624, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.268944840454773e-06, |
|
"loss": 1.1685, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.249991626391429e-06, |
|
"loss": 1.2337, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.231041121396795e-06, |
|
"loss": 1.1369, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.212093393921061e-06, |
|
"loss": 1.1623, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.193148512404394e-06, |
|
"loss": 1.1005, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.174206545276678e-06, |
|
"loss": 1.1652, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.155267560957268e-06, |
|
"loss": 1.1361, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.136331627854745e-06, |
|
"loss": 1.2243, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.117398814366677e-06, |
|
"loss": 1.26, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.098469188879348e-06, |
|
"loss": 1.2799, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.079542819767544e-06, |
|
"loss": 1.1975, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.060619775394276e-06, |
|
"loss": 1.143, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.041700124110552e-06, |
|
"loss": 1.2919, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.022783934255126e-06, |
|
"loss": 1.1888, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.003871274154237e-06, |
|
"loss": 1.2185, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.984962212121391e-06, |
|
"loss": 1.2615, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.96605681645709e-06, |
|
"loss": 1.2452, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.947155155448588e-06, |
|
"loss": 1.2532, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.928257297369657e-06, |
|
"loss": 1.1422, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.909363310480329e-06, |
|
"loss": 1.2151, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.890473263026653e-06, |
|
"loss": 1.2735, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.87158722324045e-06, |
|
"loss": 1.1515, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.85270525933906e-06, |
|
"loss": 1.1761, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.833827439525109e-06, |
|
"loss": 1.1464, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.814953831986253e-06, |
|
"loss": 1.1732, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.796084504894923e-06, |
|
"loss": 1.1793, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.777219526408103e-06, |
|
"loss": 1.1665, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.758358964667061e-06, |
|
"loss": 1.2439, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.739502887797108e-06, |
|
"loss": 1.1755, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.720651363907364e-06, |
|
"loss": 1.3034, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.701804461090506e-06, |
|
"loss": 1.262, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.682962247422502e-06, |
|
"loss": 1.1896, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.664124790962407e-06, |
|
"loss": 1.2612, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.645292159752067e-06, |
|
"loss": 1.2589, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.626464421815919e-06, |
|
"loss": 1.2455, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.607641645160721e-06, |
|
"loss": 1.2321, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.588823897775296e-06, |
|
"loss": 1.1979, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.57001124763032e-06, |
|
"loss": 1.2727, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.551203762678056e-06, |
|
"loss": 1.1765, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.532401510852092e-06, |
|
"loss": 1.1837, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.513604560067135e-06, |
|
"loss": 1.2359, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.494812978218732e-06, |
|
"loss": 1.1295, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.476026833183036e-06, |
|
"loss": 1.2414, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.457246192816571e-06, |
|
"loss": 1.155, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.438471124955979e-06, |
|
"loss": 1.1973, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.419701697417753e-06, |
|
"loss": 1.2768, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.400937977998043e-06, |
|
"loss": 1.1379, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.382180034472353e-06, |
|
"loss": 1.2184, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.363427934595347e-06, |
|
"loss": 1.2427, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.344681746100569e-06, |
|
"loss": 1.2858, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.325941536700206e-06, |
|
"loss": 1.1733, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.30720737408486e-06, |
|
"loss": 1.2643, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.288479325923294e-06, |
|
"loss": 1.2195, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.269757459862169e-06, |
|
"loss": 1.1873, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.251041843525828e-06, |
|
"loss": 1.1861, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.232332544516038e-06, |
|
"loss": 1.2642, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.213629630411747e-06, |
|
"loss": 1.1918, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.194933168768842e-06, |
|
"loss": 1.1844, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.176243227119892e-06, |
|
"loss": 1.1905, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.157559872973932e-06, |
|
"loss": 1.1061, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.138883173816197e-06, |
|
"loss": 1.2447, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.120213197107877e-06, |
|
"loss": 1.1898, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.101550010285887e-06, |
|
"loss": 1.2071, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.082893680762619e-06, |
|
"loss": 1.2617, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.064244275925684e-06, |
|
"loss": 1.2059, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.045601863137694e-06, |
|
"loss": 1.3117, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.026966509736001e-06, |
|
"loss": 1.2692, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.008338283032452e-06, |
|
"loss": 1.2375, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.989717250313165e-06, |
|
"loss": 1.2351, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.971103478838256e-06, |
|
"loss": 1.2244, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.952497035841629e-06, |
|
"loss": 1.1868, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.93389798853071e-06, |
|
"loss": 1.3002, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.915306404086202e-06, |
|
"loss": 1.1335, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.896722349661872e-06, |
|
"loss": 1.2068, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.878145892384279e-06, |
|
"loss": 1.0868, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.859577099352528e-06, |
|
"loss": 1.1541, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.841016037638064e-06, |
|
"loss": 1.2877, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.822462774284389e-06, |
|
"loss": 1.1779, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.803917376306836e-06, |
|
"loss": 1.2062, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.785379910692343e-06, |
|
"loss": 1.2446, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.766850444399176e-06, |
|
"loss": 1.255, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.748329044356724e-06, |
|
"loss": 1.1769, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.72981577746524e-06, |
|
"loss": 1.2841, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.711310710595578e-06, |
|
"loss": 1.2994, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.692813910589001e-06, |
|
"loss": 1.1735, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.674325444256899e-06, |
|
"loss": 1.2689, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.655845378380551e-06, |
|
"loss": 1.2424, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.637373779710908e-06, |
|
"loss": 1.1596, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.618910714968336e-06, |
|
"loss": 1.1553, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.600456250842364e-06, |
|
"loss": 1.1199, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.582010453991468e-06, |
|
"loss": 1.2219, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.563573391042813e-06, |
|
"loss": 1.228, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.545145128592009e-06, |
|
"loss": 1.1553, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.526725733202894e-06, |
|
"loss": 1.1738, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.508315271407261e-06, |
|
"loss": 1.2284, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.489913809704643e-06, |
|
"loss": 1.215, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.471521414562071e-06, |
|
"loss": 1.1927, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.453138152413808e-06, |
|
"loss": 1.2281, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.434764089661151e-06, |
|
"loss": 1.2783, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.4163992926721505e-06, |
|
"loss": 1.1647, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.398043827781394e-06, |
|
"loss": 1.1822, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.379697761289763e-06, |
|
"loss": 1.2409, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.361361159464195e-06, |
|
"loss": 1.0965, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.34303408853743e-06, |
|
"loss": 1.1345, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.324716614707794e-06, |
|
"loss": 1.2526, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.306408804138937e-06, |
|
"loss": 1.2304, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.288110722959608e-06, |
|
"loss": 1.2212, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.269822437263423e-06, |
|
"loss": 1.1559, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.2515440131085975e-06, |
|
"loss": 1.2019, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.233275516517742e-06, |
|
"loss": 1.2414, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.215017013477607e-06, |
|
"loss": 1.1949, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.1967685699388345e-06, |
|
"loss": 1.2062, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.1785302518157415e-06, |
|
"loss": 1.1153, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.160302124986073e-06, |
|
"loss": 1.0821, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.1420842552907465e-06, |
|
"loss": 1.1893, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.123876708533655e-06, |
|
"loss": 1.1631, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.105679550481379e-06, |
|
"loss": 1.3004, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.087492846862992e-06, |
|
"loss": 1.1615, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.0693166633698055e-06, |
|
"loss": 1.1586, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.0511510656551175e-06, |
|
"loss": 1.2299, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.0329961193340014e-06, |
|
"loss": 1.2743, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.014851889983058e-06, |
|
"loss": 1.1953, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.9967184431401665e-06, |
|
"loss": 1.1268, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.978595844304272e-06, |
|
"loss": 1.2644, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.9604841589351346e-06, |
|
"loss": 1.1159, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.942383452453083e-06, |
|
"loss": 1.2025, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.924293790238803e-06, |
|
"loss": 1.2091, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.906215237633079e-06, |
|
"loss": 1.2076, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.888147859936569e-06, |
|
"loss": 1.1621, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.870091722409573e-06, |
|
"loss": 1.2223, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.852046890271778e-06, |
|
"loss": 1.2369, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.834013428702049e-06, |
|
"loss": 1.2712, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.815991402838176e-06, |
|
"loss": 1.1514, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.797980877776631e-06, |
|
"loss": 1.1875, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.779981918572364e-06, |
|
"loss": 1.1754, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.761994590238536e-06, |
|
"loss": 1.1287, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.744018957746295e-06, |
|
"loss": 1.1479, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.72605508602455e-06, |
|
"loss": 1.2319, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.7081030399597306e-06, |
|
"loss": 1.0982, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.690162884395541e-06, |
|
"loss": 1.1652, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.6722346841327515e-06, |
|
"loss": 1.1272, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.654318503928934e-06, |
|
"loss": 1.2393, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.636414408498249e-06, |
|
"loss": 1.2529, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.6185224625112155e-06, |
|
"loss": 1.1798, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.600642730594451e-06, |
|
"loss": 1.2632, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.582775277330467e-06, |
|
"loss": 1.1552, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.564920167257427e-06, |
|
"loss": 1.1897, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.5470774648688965e-06, |
|
"loss": 1.1429, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.529247234613634e-06, |
|
"loss": 1.2584, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.511429540895349e-06, |
|
"loss": 1.2007, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.4936244480724575e-06, |
|
"loss": 1.2732, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.475832020457875e-06, |
|
"loss": 1.2674, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.458052322318758e-06, |
|
"loss": 1.167, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.440285417876288e-06, |
|
"loss": 1.2319, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.42253137130544e-06, |
|
"loss": 1.2682, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.4047902467347355e-06, |
|
"loss": 1.2427, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.387062108246026e-06, |
|
"loss": 1.2025, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.369347019874264e-06, |
|
"loss": 1.1095, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.3516450456072465e-06, |
|
"loss": 1.1348, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.333956249385418e-06, |
|
"loss": 1.1869, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.316280695101621e-06, |
|
"loss": 1.0736, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.298618446600856e-06, |
|
"loss": 1.2294, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.28096956768008e-06, |
|
"loss": 1.1407, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.26333412208794e-06, |
|
"loss": 1.1574, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.245712173524572e-06, |
|
"loss": 1.2368, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.2281037856413625e-06, |
|
"loss": 1.232, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.210509022040705e-06, |
|
"loss": 1.1226, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.1929279462757905e-06, |
|
"loss": 1.1826, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.175360621850371e-06, |
|
"loss": 1.1734, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.157807112218517e-06, |
|
"loss": 1.1807, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.140267480784405e-06, |
|
"loss": 1.158, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.122741790902089e-06, |
|
"loss": 1.2017, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.105230105875254e-06, |
|
"loss": 1.164, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.08773248895701e-06, |
|
"loss": 1.1577, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.070249003349639e-06, |
|
"loss": 1.2334, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.052779712204391e-06, |
|
"loss": 1.2638, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.035324678621248e-06, |
|
"loss": 1.1633, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.017883965648676e-06, |
|
"loss": 1.2882, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.0004576362834295e-06, |
|
"loss": 1.1283, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.983045753470308e-06, |
|
"loss": 1.2374, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.965648380101916e-06, |
|
"loss": 1.1786, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.948265579018464e-06, |
|
"loss": 1.2134, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.930897413007523e-06, |
|
"loss": 1.1776, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.913543944803795e-06, |
|
"loss": 1.2078, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.896205237088894e-06, |
|
"loss": 1.1323, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.8788813524911324e-06, |
|
"loss": 1.1163, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.861572353585259e-06, |
|
"loss": 1.253, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.8442783028922725e-06, |
|
"loss": 1.2622, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.826999262879164e-06, |
|
"loss": 1.1634, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.809735295958718e-06, |
|
"loss": 1.1506, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.7924864644892675e-06, |
|
"loss": 1.1662, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.775252830774475e-06, |
|
"loss": 1.216, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.758034457063105e-06, |
|
"loss": 1.2356, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.740831405548819e-06, |
|
"loss": 1.2559, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.723643738369909e-06, |
|
"loss": 1.1466, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.706471517609111e-06, |
|
"loss": 1.1816, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.689314805293378e-06, |
|
"loss": 1.1695, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.672173663393618e-06, |
|
"loss": 1.2402, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.655048153824534e-06, |
|
"loss": 1.2497, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.637938338444325e-06, |
|
"loss": 1.1549, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.620844279054536e-06, |
|
"loss": 1.0983, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.60376603739978e-06, |
|
"loss": 1.1765, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.586703675167544e-06, |
|
"loss": 1.2167, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.569657253987952e-06, |
|
"loss": 1.1942, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.552626835433556e-06, |
|
"loss": 1.1483, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.535612481019098e-06, |
|
"loss": 1.2054, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.518614252201295e-06, |
|
"loss": 1.1733, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.5016322103786345e-06, |
|
"loss": 1.1337, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.484666416891109e-06, |
|
"loss": 1.2469, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.46771693302004e-06, |
|
"loss": 1.1667, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.45078381998783e-06, |
|
"loss": 1.1221, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.433867138957751e-06, |
|
"loss": 1.1597, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.416966951033731e-06, |
|
"loss": 1.252, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.400083317260103e-06, |
|
"loss": 1.2236, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.383216298621427e-06, |
|
"loss": 1.2796, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.366365956042236e-06, |
|
"loss": 1.1638, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.349532350386832e-06, |
|
"loss": 1.1897, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.3327155424590635e-06, |
|
"loss": 1.2024, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.3159155930021e-06, |
|
"loss": 1.2395, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.299132562698222e-06, |
|
"loss": 1.1615, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.282366512168595e-06, |
|
"loss": 1.1417, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.265617501973055e-06, |
|
"loss": 1.1685, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.248885592609885e-06, |
|
"loss": 1.1574, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.2321708445155985e-06, |
|
"loss": 1.1735, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.215473318064722e-06, |
|
"loss": 1.1609, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.198793073569577e-06, |
|
"loss": 1.2096, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.182130171280071e-06, |
|
"loss": 1.1439, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.165484671383445e-06, |
|
"loss": 1.2016, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.148856634004113e-06, |
|
"loss": 1.2194, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.1322461192033945e-06, |
|
"loss": 1.1392, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.115653186979318e-06, |
|
"loss": 1.2411, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.099077897266407e-06, |
|
"loss": 1.1637, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.082520309935459e-06, |
|
"loss": 1.1294, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.065980484793328e-06, |
|
"loss": 1.2772, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.04945848158271e-06, |
|
"loss": 1.1911, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.032954359981927e-06, |
|
"loss": 1.0489, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.016468179604712e-06, |
|
"loss": 1.2083, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.000000000000003e-06, |
|
"loss": 1.2155, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.9835498806516965e-06, |
|
"loss": 1.228, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.96711788097847e-06, |
|
"loss": 1.1028, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.95070406033356e-06, |
|
"loss": 1.1875, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.934308478004511e-06, |
|
"loss": 1.1926, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.91793119321302e-06, |
|
"loss": 1.2118, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.901572265114677e-06, |
|
"loss": 1.1967, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.885231752798767e-06, |
|
"loss": 1.2201, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.86890971528806e-06, |
|
"loss": 1.24, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.852606211538591e-06, |
|
"loss": 1.2671, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.836321300439454e-06, |
|
"loss": 1.219, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.82005504081258e-06, |
|
"loss": 1.2516, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.8038074914125345e-06, |
|
"loss": 1.1835, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.787578710926296e-06, |
|
"loss": 1.184, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.771368757973062e-06, |
|
"loss": 1.1205, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.755177691104004e-06, |
|
"loss": 1.1824, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.739005568802083e-06, |
|
"loss": 1.1991, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.722852449481848e-06, |
|
"loss": 1.2295, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.706718391489179e-06, |
|
"loss": 1.1957, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.690603453101134e-06, |
|
"loss": 1.2337, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.674507692525685e-06, |
|
"loss": 1.1564, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.658431167901553e-06, |
|
"loss": 1.1294, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.642373937297968e-06, |
|
"loss": 1.2489, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.626336058714468e-06, |
|
"loss": 1.2655, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.610317590080694e-06, |
|
"loss": 1.2224, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.594318589256177e-06, |
|
"loss": 1.1202, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.578339114030127e-06, |
|
"loss": 1.1434, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.562379222121224e-06, |
|
"loss": 1.1676, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.546438971177429e-06, |
|
"loss": 1.1515, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.530518418775734e-06, |
|
"loss": 1.1223, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.514617622421993e-06, |
|
"loss": 1.2414, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.4987366395506996e-06, |
|
"loss": 1.1827, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.482875527524774e-06, |
|
"loss": 1.1472, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.467034343635377e-06, |
|
"loss": 1.2123, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.451213145101664e-06, |
|
"loss": 1.1437, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.435411989070624e-06, |
|
"loss": 1.0953, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.419630932616841e-06, |
|
"loss": 1.2336, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.403870032742299e-06, |
|
"loss": 1.2267, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.388129346376177e-06, |
|
"loss": 1.2801, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.372408930374642e-06, |
|
"loss": 1.1538, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.356708841520643e-06, |
|
"loss": 1.2236, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.341029136523701e-06, |
|
"loss": 1.2342, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.325369872019727e-06, |
|
"loss": 1.1724, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.309731104570775e-06, |
|
"loss": 1.2231, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2941128906648786e-06, |
|
"loss": 1.0791, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.27851528671583e-06, |
|
"loss": 1.2699, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.262938349062967e-06, |
|
"loss": 1.2006, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.247382133971002e-06, |
|
"loss": 1.0275, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.231846697629763e-06, |
|
"loss": 1.2236, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.2163320961540585e-06, |
|
"loss": 1.2633, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.200838385583417e-06, |
|
"loss": 1.2254, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1853656218819175e-06, |
|
"loss": 1.1661, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.169913860937975e-06, |
|
"loss": 1.1264, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.154483158564143e-06, |
|
"loss": 1.0827, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.13907357049691e-06, |
|
"loss": 1.2385, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.123685152396498e-06, |
|
"loss": 1.2062, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1083179598466625e-06, |
|
"loss": 1.1504, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.092972048354491e-06, |
|
"loss": 1.1717, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.077647473350201e-06, |
|
"loss": 1.0936, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.062344290186947e-06, |
|
"loss": 1.0833, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.047062554140605e-06, |
|
"loss": 1.1854, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0318023204096015e-06, |
|
"loss": 1.1432, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0165636441146676e-06, |
|
"loss": 1.2474, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.001346580298694e-06, |
|
"loss": 1.0607, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.986151183926494e-06, |
|
"loss": 1.2328, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.970977509884617e-06, |
|
"loss": 1.1857, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.955825612981156e-06, |
|
"loss": 1.1569, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.940695547945528e-06, |
|
"loss": 1.2401, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.925587369428316e-06, |
|
"loss": 1.137, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.910501132001031e-06, |
|
"loss": 1.2371, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.895436890155936e-06, |
|
"loss": 1.2359, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.880394698305846e-06, |
|
"loss": 1.1864, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8653746107839285e-06, |
|
"loss": 1.0934, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.850376681843511e-06, |
|
"loss": 1.1536, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8354009656578775e-06, |
|
"loss": 1.1341, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.820447516320093e-06, |
|
"loss": 1.2344, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8055163878427703e-06, |
|
"loss": 1.1653, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7906076341579235e-06, |
|
"loss": 1.1705, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7757213091167243e-06, |
|
"loss": 1.2219, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7608574664893504e-06, |
|
"loss": 1.1946, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.746016159964764e-06, |
|
"loss": 1.1176, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.731197443150515e-06, |
|
"loss": 1.2197, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7164013695725798e-06, |
|
"loss": 1.2031, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.70162799267513e-06, |
|
"loss": 1.2194, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6868773658203617e-06, |
|
"loss": 1.1545, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6721495422882948e-06, |
|
"loss": 1.1085, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.657444575276583e-06, |
|
"loss": 1.2488, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6427625179003223e-06, |
|
"loss": 1.3018, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6281034231918554e-06, |
|
"loss": 1.1583, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6134673441005864e-06, |
|
"loss": 1.236, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5988543334927773e-06, |
|
"loss": 1.1291, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.584264444151384e-06, |
|
"loss": 1.2305, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5696977287758205e-06, |
|
"loss": 1.1356, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5551542399818204e-06, |
|
"loss": 1.2003, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5406340303012075e-06, |
|
"loss": 1.135, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5261371521817247e-06, |
|
"loss": 1.2017, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.511663657986837e-06, |
|
"loss": 1.1771, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.497213599995549e-06, |
|
"loss": 1.2021, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.4827870304022116e-06, |
|
"loss": 1.158, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.468384001316333e-06, |
|
"loss": 1.0989, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.454004564762393e-06, |
|
"loss": 1.1485, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.4396487726796545e-06, |
|
"loss": 1.1208, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.4253166769219746e-06, |
|
"loss": 1.1925, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.411008329257618e-06, |
|
"loss": 1.2145, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.396723781369067e-06, |
|
"loss": 1.1237, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3824630848528505e-06, |
|
"loss": 1.1729, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.368226291219323e-06, |
|
"loss": 1.0858, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3540134518925226e-06, |
|
"loss": 1.1509, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.33982461820995e-06, |
|
"loss": 1.1558, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3256598414224027e-06, |
|
"loss": 1.1901, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.311519172693778e-06, |
|
"loss": 1.2092, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.297402663100897e-06, |
|
"loss": 1.2764, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2833103636333176e-06, |
|
"loss": 1.1672, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.269242325193147e-06, |
|
"loss": 1.1626, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.255198598594862e-06, |
|
"loss": 1.2014, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.241179234565123e-06, |
|
"loss": 1.1559, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2271842837425917e-06, |
|
"loss": 1.1703, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.2132137966777476e-06, |
|
"loss": 1.1252, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.199267823832705e-06, |
|
"loss": 1.2093, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.185346415581041e-06, |
|
"loss": 1.2315, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.171449622207584e-06, |
|
"loss": 1.1437, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.157577493908274e-06, |
|
"loss": 1.1403, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1437300807899462e-06, |
|
"loss": 1.1996, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.129907432870166e-06, |
|
"loss": 1.1522, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.116109600077052e-06, |
|
"loss": 1.2124, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1023366322490733e-06, |
|
"loss": 1.1728, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0885885791349045e-06, |
|
"loss": 1.1787, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0748654903932164e-06, |
|
"loss": 1.153, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0611674155925085e-06, |
|
"loss": 1.2085, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.047494404210931e-06, |
|
"loss": 1.1711, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0338465056361e-06, |
|
"loss": 1.2137, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0202237691649263e-06, |
|
"loss": 1.0411, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.00662624400343e-06, |
|
"loss": 1.1736, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.9930539792665767e-06, |
|
"loss": 1.1268, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9795070239780734e-06, |
|
"loss": 1.1023, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.965985427070226e-06, |
|
"loss": 1.2637, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9524892373837253e-06, |
|
"loss": 1.2217, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.939018503667511e-06, |
|
"loss": 1.1542, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9255732745785627e-06, |
|
"loss": 1.1, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9121535986817284e-06, |
|
"loss": 1.1416, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8987595244495736e-06, |
|
"loss": 1.1453, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8853911002621803e-06, |
|
"loss": 1.182, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8720483744069817e-06, |
|
"loss": 1.2427, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8587313950785876e-06, |
|
"loss": 1.1616, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8454402103786096e-06, |
|
"loss": 1.1386, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8321748683154893e-06, |
|
"loss": 1.1982, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8189354168043183e-06, |
|
"loss": 1.1384, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.805721903666685e-06, |
|
"loss": 1.2461, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.7925343766304637e-06, |
|
"loss": 1.117, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.7793728833296906e-06, |
|
"loss": 1.1167, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.7662374713043414e-06, |
|
"loss": 1.1186, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.753128188000208e-06, |
|
"loss": 1.1347, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.740045080768694e-06, |
|
"loss": 1.1603, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7269881968666446e-06, |
|
"loss": 1.1694, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.713957583456203e-06, |
|
"loss": 1.1141, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7009532876046084e-06, |
|
"loss": 1.1761, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6879753562840473e-06, |
|
"loss": 1.148, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.675023836371471e-06, |
|
"loss": 1.194, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.662098774648434e-06, |
|
"loss": 1.1904, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6492002178009224e-06, |
|
"loss": 1.1511, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6363282124191847e-06, |
|
"loss": 1.1651, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6234828049975645e-06, |
|
"loss": 1.1942, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.61066404193433e-06, |
|
"loss": 1.1863, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.597871969531518e-06, |
|
"loss": 1.1301, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5851066339947397e-06, |
|
"loss": 1.1927, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.572368081433049e-06, |
|
"loss": 1.2208, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.559656357858753e-06, |
|
"loss": 1.2806, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.546971509187238e-06, |
|
"loss": 1.0795, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.534313581236839e-06, |
|
"loss": 1.2085, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.521682619728637e-06, |
|
"loss": 1.1635, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.5090786702863126e-06, |
|
"loss": 1.1319, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.496501778435977e-06, |
|
"loss": 1.1842, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4839519896060094e-06, |
|
"loss": 1.042, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4714293491268904e-06, |
|
"loss": 1.1816, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4589339022310386e-06, |
|
"loss": 1.1486, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4464656940526508e-06, |
|
"loss": 1.1069, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4340247696275297e-06, |
|
"loss": 1.1562, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4216111738929426e-06, |
|
"loss": 1.1931, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.409224951687421e-06, |
|
"loss": 1.0961, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.396866147750644e-06, |
|
"loss": 1.1836, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3845348067232455e-06, |
|
"loss": 1.1885, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.372230973146652e-06, |
|
"loss": 1.0612, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3599546914629534e-06, |
|
"loss": 1.1892, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3477060060146973e-06, |
|
"loss": 1.2406, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3354849610447728e-06, |
|
"loss": 1.204, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.323291600696217e-06, |
|
"loss": 1.1617, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.3111259690120726e-06, |
|
"loss": 1.0931, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.298988109935227e-06, |
|
"loss": 1.1107, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.286878067308248e-06, |
|
"loss": 1.2222, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.274795884873231e-06, |
|
"loss": 1.2072, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2627416062716366e-06, |
|
"loss": 1.0869, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.250715275044144e-06, |
|
"loss": 1.1507, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2387169346304683e-06, |
|
"loss": 1.2378, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2267466283692386e-06, |
|
"loss": 1.1628, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.214804399497815e-06, |
|
"loss": 1.1527, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2028902911521324e-06, |
|
"loss": 1.1627, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.191004346366571e-06, |
|
"loss": 1.1561, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.179146608073763e-06, |
|
"loss": 1.2045, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1673171191044764e-06, |
|
"loss": 1.1289, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1555159221874267e-06, |
|
"loss": 1.1844, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.143743059949144e-06, |
|
"loss": 1.1313, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1319985749138107e-06, |
|
"loss": 1.1146, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.1202825095031077e-06, |
|
"loss": 1.2057, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.1085949060360654e-06, |
|
"loss": 1.0959, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0969358067289035e-06, |
|
"loss": 1.1209, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.085305253694895e-06, |
|
"loss": 1.161, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.073703288944183e-06, |
|
"loss": 1.2207, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0621299543836692e-06, |
|
"loss": 1.1743, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0505852918168244e-06, |
|
"loss": 1.2421, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0390693429435626e-06, |
|
"loss": 1.1522, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.027582149360088e-06, |
|
"loss": 1.084, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.016123752558722e-06, |
|
"loss": 1.177, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.004694193927791e-06, |
|
"loss": 1.1559, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9932935147514445e-06, |
|
"loss": 1.195, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9819217562095184e-06, |
|
"loss": 1.113, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.970578959377388e-06, |
|
"loss": 1.0715, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.95926516522582e-06, |
|
"loss": 1.1071, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9479804146208147e-06, |
|
"loss": 1.1358, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9367247483234743e-06, |
|
"loss": 1.155, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9254982069898386e-06, |
|
"loss": 1.112, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.914300831170751e-06, |
|
"loss": 1.1614, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.903132661311714e-06, |
|
"loss": 1.2276, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.891993737752722e-06, |
|
"loss": 1.1827, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8808841007281375e-06, |
|
"loss": 1.1267, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8698037903665479e-06, |
|
"loss": 1.1975, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8587528466905925e-06, |
|
"loss": 1.2221, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8477313096168525e-06, |
|
"loss": 1.0311, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8367392189556843e-06, |
|
"loss": 1.1872, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8257766144110823e-06, |
|
"loss": 1.2141, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.814843535580536e-06, |
|
"loss": 1.1558, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8039400219548876e-06, |
|
"loss": 1.18, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.793066112918186e-06, |
|
"loss": 1.1717, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7822218477475496e-06, |
|
"loss": 1.1987, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7714072656130199e-06, |
|
"loss": 1.1674, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7606224055774202e-06, |
|
"loss": 1.1691, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7498673065962235e-06, |
|
"loss": 1.0886, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7391420075173915e-06, |
|
"loss": 0.9991, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7284465470812605e-06, |
|
"loss": 1.209, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7177809639203834e-06, |
|
"loss": 1.1752, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7071452965593839e-06, |
|
"loss": 1.2009, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6965395834148468e-06, |
|
"loss": 1.2222, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.685963862795149e-06, |
|
"loss": 1.1312, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.675418172900336e-06, |
|
"loss": 1.0991, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6649025518219809e-06, |
|
"loss": 1.1625, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6544170375430458e-06, |
|
"loss": 1.1371, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6439616679377445e-06, |
|
"loss": 1.2432, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6335364807714104e-06, |
|
"loss": 1.1782, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6231415137003536e-06, |
|
"loss": 1.1645, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6127768042717252e-06, |
|
"loss": 1.1696, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6024423899233953e-06, |
|
"loss": 1.0632, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5921383079837882e-06, |
|
"loss": 1.0695, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5818645956717827e-06, |
|
"loss": 1.152, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5716212900965565e-06, |
|
"loss": 1.1759, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.561408428257446e-06, |
|
"loss": 1.1607, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5512260470438422e-06, |
|
"loss": 1.1307, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5410741832350173e-06, |
|
"loss": 1.1464, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5309528735000335e-06, |
|
"loss": 1.2201, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5208621543975766e-06, |
|
"loss": 1.1855, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5108020623758413e-06, |
|
"loss": 1.2273, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.500772633772396e-06, |
|
"loss": 1.241, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4907739048140524e-06, |
|
"loss": 1.1832, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4808059116167306e-06, |
|
"loss": 1.1978, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.470868690185332e-06, |
|
"loss": 1.1018, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4609622764136155e-06, |
|
"loss": 1.1125, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4510867060840483e-06, |
|
"loss": 1.1852, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4412420148677052e-06, |
|
"loss": 1.1264, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4314282383241097e-06, |
|
"loss": 1.169, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4216454119011236e-06, |
|
"loss": 1.1919, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.411893570934827e-06, |
|
"loss": 1.1465, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.40217275064936e-06, |
|
"loss": 1.1088, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.392482986156829e-06, |
|
"loss": 1.1623, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3828243124571594e-06, |
|
"loss": 1.1215, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3731967644379761e-06, |
|
"loss": 1.1838, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3636003768744754e-06, |
|
"loss": 1.1409, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.354035184429302e-06, |
|
"loss": 1.1206, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3445012216524223e-06, |
|
"loss": 1.1297, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.334998522980997e-06, |
|
"loss": 1.2138, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.325527122739262e-06, |
|
"loss": 1.1751, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3160870551383975e-06, |
|
"loss": 1.149, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3066783542764194e-06, |
|
"loss": 1.1542, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2973010541380315e-06, |
|
"loss": 1.1354, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.287955188594522e-06, |
|
"loss": 1.0665, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2786407914036425e-06, |
|
"loss": 1.1461, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2693578962094666e-06, |
|
"loss": 1.2177, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2601065365422915e-06, |
|
"loss": 1.1602, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2508867458185037e-06, |
|
"loss": 1.092, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2416985573404571e-06, |
|
"loss": 1.1138, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.232542004296362e-06, |
|
"loss": 1.1082, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2234171197601563e-06, |
|
"loss": 1.1529, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2143239366913906e-06, |
|
"loss": 1.2283, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2052624879351105e-06, |
|
"loss": 1.1514, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.196232806221731e-06, |
|
"loss": 1.1593, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1872349241669245e-06, |
|
"loss": 1.0892, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1782688742715098e-06, |
|
"loss": 1.1828, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1693346889213108e-06, |
|
"loss": 1.2005, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1604324003870637e-06, |
|
"loss": 1.1543, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1515620408242989e-06, |
|
"loss": 1.1349, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1427236422731991e-06, |
|
"loss": 1.1508, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1339172366585216e-06, |
|
"loss": 1.1577, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1251428557894516e-06, |
|
"loss": 1.2336, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1164005313595038e-06, |
|
"loss": 1.1519, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.107690294946402e-06, |
|
"loss": 1.1119, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.0990121780119668e-06, |
|
"loss": 1.1607, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.0903662119020008e-06, |
|
"loss": 1.0861, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0817524278461777e-06, |
|
"loss": 1.2109, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.073170856957928e-06, |
|
"loss": 1.1334, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.064621530234322e-06, |
|
"loss": 1.1235, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0561044785559737e-06, |
|
"loss": 1.1321, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0476197326869043e-06, |
|
"loss": 1.1748, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0391673232744504e-06, |
|
"loss": 1.2237, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0307472808491547e-06, |
|
"loss": 1.1894, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0223596358246346e-06, |
|
"loss": 1.2481, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0140044184975017e-06, |
|
"loss": 1.188, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0056816590472206e-06, |
|
"loss": 1.2523, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.973913875360331e-07, |
|
"loss": 1.1979, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.89133633908821e-07, |
|
"loss": 1.1918, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.809084279930159e-07, |
|
"loss": 1.159, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.727157994984815e-07, |
|
"loss": 1.1036, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.64555778017413e-07, |
|
"loss": 1.1067, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.564283930242258e-07, |
|
"loss": 1.2142, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.4833367387545e-07, |
|
"loss": 1.2832, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.402716498096331e-07, |
|
"loss": 1.1395, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.32242349947211e-07, |
|
"loss": 1.1718, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.242458032904311e-07, |
|
"loss": 1.1315, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.162820387232285e-07, |
|
"loss": 1.1, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.083510850111277e-07, |
|
"loss": 1.2252, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.004529708011455e-07, |
|
"loss": 1.2342, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.925877246216686e-07, |
|
"loss": 1.1885, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.847553748823745e-07, |
|
"loss": 1.2166, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.769559498741109e-07, |
|
"loss": 1.131, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.691894777688004e-07, |
|
"loss": 1.1538, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.614559866193395e-07, |
|
"loss": 1.1186, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.537555043594936e-07, |
|
"loss": 1.0496, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.460880588038001e-07, |
|
"loss": 1.1004, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.384536776474639e-07, |
|
"loss": 1.1879, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.308523884662656e-07, |
|
"loss": 1.2293, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.232842187164469e-07, |
|
"loss": 1.1442, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.157491957346275e-07, |
|
"loss": 1.1702, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.08247346737695e-07, |
|
"loss": 1.2673, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.007786988227139e-07, |
|
"loss": 1.1755, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.93343278966825e-07, |
|
"loss": 1.0932, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.859411140271422e-07, |
|
"loss": 1.2368, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.785722307406685e-07, |
|
"loss": 1.2311, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.71236655724188e-07, |
|
"loss": 1.1807, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.639344154741713e-07, |
|
"loss": 1.2103, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.566655363666864e-07, |
|
"loss": 1.1467, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.494300446572978e-07, |
|
"loss": 1.1643, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.422279664809706e-07, |
|
"loss": 1.2261, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.350593278519824e-07, |
|
"loss": 1.0772, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.279241546638239e-07, |
|
"loss": 1.218, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.208224726891044e-07, |
|
"loss": 1.1608, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.137543075794651e-07, |
|
"loss": 1.1196, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.0671968486548e-07, |
|
"loss": 1.2686, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.997186299565661e-07, |
|
"loss": 1.1473, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.927511681408971e-07, |
|
"loss": 1.1463, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.858173245852962e-07, |
|
"loss": 1.1507, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.789171243351678e-07, |
|
"loss": 1.1216, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.72050592314385e-07, |
|
"loss": 1.1535, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.652177533252146e-07, |
|
"loss": 1.1821, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.584186320482222e-07, |
|
"loss": 1.0726, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.516532530421826e-07, |
|
"loss": 1.1247, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.449216407439906e-07, |
|
"loss": 1.1563, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.382238194685752e-07, |
|
"loss": 1.129, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.315598134088119e-07, |
|
"loss": 1.138, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.249296466354305e-07, |
|
"loss": 1.1219, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.18333343096933e-07, |
|
"loss": 1.1842, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.117709266195083e-07, |
|
"loss": 1.0654, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.052424209069385e-07, |
|
"loss": 1.1573, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.987478495405252e-07, |
|
"loss": 1.1874, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.922872359789878e-07, |
|
"loss": 1.1057, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.858606035583992e-07, |
|
"loss": 1.2166, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.794679754920796e-07, |
|
"loss": 1.1399, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.731093748705341e-07, |
|
"loss": 1.1572, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.667848246613528e-07, |
|
"loss": 1.218, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.604943477091351e-07, |
|
"loss": 1.2097, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.54237966735407e-07, |
|
"loss": 1.2243, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.480157043385359e-07, |
|
"loss": 1.1342, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.418275829936537e-07, |
|
"loss": 1.1797, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.356736250525718e-07, |
|
"loss": 1.2628, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.295538527437027e-07, |
|
"loss": 1.184, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.234682881719766e-07, |
|
"loss": 1.1458, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.174169533187634e-07, |
|
"loss": 1.2195, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.113998700418e-07, |
|
"loss": 1.1694, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.05417060075094e-07, |
|
"loss": 1.1742, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.994685450288672e-07, |
|
"loss": 1.2063, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.935543463894554e-07, |
|
"loss": 1.2757, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.876744855192516e-07, |
|
"loss": 1.1961, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.818289836566126e-07, |
|
"loss": 1.1616, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.7601786191579293e-07, |
|
"loss": 1.2033, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.7024114128686017e-07, |
|
"loss": 1.1063, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.6449884263562564e-07, |
|
"loss": 1.1181, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.587909867035678e-07, |
|
"loss": 1.1713, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.531175941077537e-07, |
|
"loss": 1.0918, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.4747868534077197e-07, |
|
"loss": 1.1389, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.4187428077064443e-07, |
|
"loss": 1.1615, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.363044006407724e-07, |
|
"loss": 1.0964, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.3076906506984595e-07, |
|
"loss": 1.1582, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.252682940517805e-07, |
|
"loss": 1.1466, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.198021074556469e-07, |
|
"loss": 1.19, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.1437052502558693e-07, |
|
"loss": 1.1323, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.0897356638075815e-07, |
|
"loss": 1.2179, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.0361125101525124e-07, |
|
"loss": 1.093, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.9828359829802263e-07, |
|
"loss": 1.1905, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.92990627472829e-07, |
|
"loss": 1.1468, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8773235765815155e-07, |
|
"loss": 1.2172, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8250880784713064e-07, |
|
"loss": 1.1851, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.773199969074959e-07, |
|
"loss": 1.1777, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.721659435814995e-07, |
|
"loss": 1.1227, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.670466664858452e-07, |
|
"loss": 1.1959, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.61962184111625e-07, |
|
"loss": 1.1362, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.5691251482424914e-07, |
|
"loss": 1.1523, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.5189767686338196e-07, |
|
"loss": 1.1422, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.469176883428782e-07, |
|
"loss": 1.1937, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.419725672507068e-07, |
|
"loss": 1.1965, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.3706233144890296e-07, |
|
"loss": 1.1909, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.3218699867348937e-07, |
|
"loss": 1.2193, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.273465865344172e-07, |
|
"loss": 1.182, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.2254111251550424e-07, |
|
"loss": 1.1516, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.1777059397436693e-07, |
|
"loss": 1.1567, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.1303504814236494e-07, |
|
"loss": 1.1832, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0833449212453237e-07, |
|
"loss": 1.16, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0366894289951875e-07, |
|
"loss": 1.2157, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.99038417319526e-07, |
|
"loss": 1.0917, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.944429321102504e-07, |
|
"loss": 1.1507, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8988250387082083e-07, |
|
"loss": 1.1437, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.853571490737372e-07, |
|
"loss": 1.1051, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8086688406481546e-07, |
|
"loss": 1.123, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.764117250631182e-07, |
|
"loss": 1.2153, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.7199168816091416e-07, |
|
"loss": 1.2143, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.6760678932359787e-07, |
|
"loss": 1.2127, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.6325704438965206e-07, |
|
"loss": 1.205, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.589424690705777e-07, |
|
"loss": 1.1716, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.5466307895084194e-07, |
|
"loss": 1.1912, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.5041888948782124e-07, |
|
"loss": 1.0927, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.4620991601174596e-07, |
|
"loss": 1.1741, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.420361737256438e-07, |
|
"loss": 1.2194, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.378976777052866e-07, |
|
"loss": 1.149, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.3379444289913344e-07, |
|
"loss": 1.1055, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.2972648412827982e-07, |
|
"loss": 1.1654, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.256938160863986e-07, |
|
"loss": 1.2002, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.2169645333969815e-07, |
|
"loss": 1.1869, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.1773441032685417e-07, |
|
"loss": 1.2332, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.1380770135897122e-07, |
|
"loss": 1.1522, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0991634061952038e-07, |
|
"loss": 1.1533, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.060603421643004e-07, |
|
"loss": 1.1986, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0223971992137325e-07, |
|
"loss": 1.1269, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9845448769102105e-07, |
|
"loss": 1.244, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9470465914569915e-07, |
|
"loss": 1.2463, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9099024782997965e-07, |
|
"loss": 1.1368, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8731126716050707e-07, |
|
"loss": 1.1259, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.836677304259471e-07, |
|
"loss": 1.1975, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8005965078694233e-07, |
|
"loss": 1.2085, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.764870412760611e-07, |
|
"loss": 1.151, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7294991479775202e-07, |
|
"loss": 1.2416, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.694482841282974e-07, |
|
"loss": 1.0676, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6598216191576643e-07, |
|
"loss": 1.2049, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6255156067997325e-07, |
|
"loss": 1.1505, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.5915649281242007e-07, |
|
"loss": 1.2177, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5579697057627074e-07, |
|
"loss": 1.0982, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5247300610628957e-07, |
|
"loss": 1.1824, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.4918461140880691e-07, |
|
"loss": 1.019, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.459317983616737e-07, |
|
"loss": 1.166, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.42714578714217e-07, |
|
"loss": 1.155, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3953296408720007e-07, |
|
"loss": 1.1519, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3638696597277678e-07, |
|
"loss": 1.1228, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3327659573445395e-07, |
|
"loss": 1.1829, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3020186460704908e-07, |
|
"loss": 1.1594, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2716278369664825e-07, |
|
"loss": 1.1898, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2415936398056715e-07, |
|
"loss": 1.2258, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2119161630731234e-07, |
|
"loss": 1.1862, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1825955139654121e-07, |
|
"loss": 1.2319, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.153631798390209e-07, |
|
"loss": 1.1628, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1250251209659502e-07, |
|
"loss": 1.1546, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0967755850214257e-07, |
|
"loss": 1.168, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0688832925954019e-07, |
|
"loss": 1.2396, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.041348344436277e-07, |
|
"loss": 1.1595, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0141708400016714e-07, |
|
"loss": 1.116, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.873508774581597e-08, |
|
"loss": 1.1527, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.608885536808055e-08, |
|
"loss": 1.1513, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.347839642528721e-08, |
|
"loss": 1.1576, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.090372034654793e-08, |
|
"loss": 1.0495, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.836483643172577e-08, |
|
"loss": 1.1289, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.58617538513995e-08, |
|
"loss": 1.0873, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.339448164682906e-08, |
|
"loss": 1.1691, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.096302872993234e-08, |
|
"loss": 1.1659, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.856740388323847e-08, |
|
"loss": 1.18, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.620761575987013e-08, |
|
"loss": 1.2707, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.388367288350018e-08, |
|
"loss": 1.2355, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.159558364833175e-08, |
|
"loss": 1.2193, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.934335631905931e-08, |
|
"loss": 1.1395, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.71269990308443e-08, |
|
"loss": 1.2028, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.494651978928179e-08, |
|
"loss": 1.1534, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.280192647037498e-08, |
|
"loss": 1.1932, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.069322682050516e-08, |
|
"loss": 1.1857, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.862042845640403e-08, |
|
"loss": 1.1041, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.65835388651248e-08, |
|
"loss": 1.1861, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.458256540401774e-08, |
|
"loss": 1.1347, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.261751530070136e-08, |
|
"loss": 1.1571, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.0688395653036846e-08, |
|
"loss": 1.1796, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.879521342910476e-08, |
|
"loss": 1.1338, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.693797546717505e-08, |
|
"loss": 1.1761, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.5116688475685954e-08, |
|
"loss": 1.2456, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.333135903322072e-08, |
|
"loss": 1.2409, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.1581993588482025e-08, |
|
"loss": 1.131, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.986859846026425e-08, |
|
"loss": 1.2769, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.819117983744125e-08, |
|
"loss": 1.1935, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.6549743778934166e-08, |
|
"loss": 1.1127, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.494429621369366e-08, |
|
"loss": 1.1966, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3374842940678834e-08, |
|
"loss": 1.1531, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.18413896288372e-08, |
|
"loss": 1.1041, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.034394181707812e-08, |
|
"loss": 1.1673, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.888250491426159e-08, |
|
"loss": 1.1569, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.74570841991717e-08, |
|
"loss": 1.1125, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.606768482050215e-08, |
|
"loss": 1.1862, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.4714311796834035e-08, |
|
"loss": 1.1537, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.3396970016619225e-08, |
|
"loss": 1.2256, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.211566423816591e-08, |
|
"loss": 1.1421, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.08703990896153e-08, |
|
"loss": 1.1489, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9661179068928283e-08, |
|
"loss": 1.2294, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.8488008543871007e-08, |
|
"loss": 1.0836, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7350891751993782e-08, |
|
"loss": 1.2013, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.624983280062331e-08, |
|
"loss": 1.3023, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.518483566683826e-08, |
|
"loss": 1.1781, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.415590419746371e-08, |
|
"loss": 1.2011, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3163042109053393e-08, |
|
"loss": 1.2066, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2206252987876366e-08, |
|
"loss": 1.1358, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.1285540289903696e-08, |
|
"loss": 1.0472, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.040090734079624e-08, |
|
"loss": 1.1751, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.552357335893547e-09, |
|
"loss": 1.1809, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.739893340202754e-09, |
|
"loss": 1.1425, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.963518288385264e-09, |
|
"loss": 1.2003, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.223234984747862e-09, |
|
"loss": 1.1713, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.5190461032305085e-09, |
|
"loss": 1.1553, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.850954187399671e-09, |
|
"loss": 1.1377, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.218961650439447e-09, |
|
"loss": 1.1135, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.6230707751371284e-09, |
|
"loss": 1.1009, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.063283713883203e-09, |
|
"loss": 1.2446, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.539602488654703e-09, |
|
"loss": 1.1807, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.0520289910174194e-09, |
|
"loss": 1.2118, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.600564982110365e-09, |
|
"loss": 1.1417, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.185212092645772e-09, |
|
"loss": 1.2131, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.8059718228979894e-09, |
|
"loss": 1.182, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.462845542704594e-09, |
|
"loss": 1.0384, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.1558344914530673e-09, |
|
"loss": 1.1252, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 8.849397780841262e-10, |
|
"loss": 1.2233, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.50162381083952e-10, |
|
"loss": 1.1729, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.5150314847863806e-10, |
|
"loss": 1.2093, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.8896279783863224e-10, |
|
"loss": 1.0945, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.6254191626430272e-10, |
|
"loss": 1.2, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.224096039704088e-11, |
|
"loss": 1.1717, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.8060256408158892e-11, |
|
"loss": 1.251, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.1397, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1705, |
|
"total_flos": 307764793589760.0, |
|
"train_loss": 1.249325556419462, |
|
"train_runtime": 24639.388, |
|
"train_samples_per_second": 16.599, |
|
"train_steps_per_second": 0.069 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1705, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50000, |
|
"total_flos": 307764793589760.0, |
|
"train_batch_size": 24, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|