{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9995796553173603, |
|
"global_step": 1189, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 4.878, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 5.0676, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6e-07, |
|
"loss": 5.0744, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.2e-06, |
|
"loss": 4.8887, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.8e-06, |
|
"loss": 4.6155, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.4e-06, |
|
"loss": 4.9689, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.9999999999999997e-06, |
|
"loss": 4.7161, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6e-06, |
|
"loss": 4.6837, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6e-06, |
|
"loss": 4.8697, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2e-06, |
|
"loss": 4.7117, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.8e-06, |
|
"loss": 4.7449, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.399999999999999e-06, |
|
"loss": 4.5915, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.999999999999999e-06, |
|
"loss": 4.8094, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.599999999999999e-06, |
|
"loss": 4.5704, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.2e-06, |
|
"loss": 4.4314, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.799999999999998e-06, |
|
"loss": 4.6685, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.4e-06, |
|
"loss": 4.584, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.999999999999999e-06, |
|
"loss": 4.6994, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.6e-06, |
|
"loss": 4.44, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.02e-05, |
|
"loss": 4.6258, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0799999999999998e-05, |
|
"loss": 4.5593, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.14e-05, |
|
"loss": 4.4087, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.1999999999999999e-05, |
|
"loss": 4.5185, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.26e-05, |
|
"loss": 4.5389, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.3199999999999997e-05, |
|
"loss": 4.3627, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.3799999999999998e-05, |
|
"loss": 4.5219, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.44e-05, |
|
"loss": 4.3728, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4999999999999999e-05, |
|
"loss": 4.1913, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5599999999999996e-05, |
|
"loss": 4.2714, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.6199999999999997e-05, |
|
"loss": 4.3536, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.68e-05, |
|
"loss": 4.4772, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.74e-05, |
|
"loss": 4.2838, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.7999999999999997e-05, |
|
"loss": 4.3969, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8599999999999998e-05, |
|
"loss": 4.1069, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.92e-05, |
|
"loss": 4.3024, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.98e-05, |
|
"loss": 4.6005, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.04e-05, |
|
"loss": 4.4158, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.1e-05, |
|
"loss": 4.2882, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.1599999999999996e-05, |
|
"loss": 4.7542, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.2199999999999998e-05, |
|
"loss": 4.4229, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.28e-05, |
|
"loss": 4.2078, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.34e-05, |
|
"loss": 4.2834, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.3999999999999997e-05, |
|
"loss": 4.2819, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.4599999999999998e-05, |
|
"loss": 4.309, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.52e-05, |
|
"loss": 4.5016, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.5799999999999997e-05, |
|
"loss": 4.4434, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.6399999999999995e-05, |
|
"loss": 4.6236, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.6999999999999996e-05, |
|
"loss": 4.8033, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.7599999999999997e-05, |
|
"loss": 4.972, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.7599999999999997e-05, |
|
"loss": 4.7127, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 4.2836, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.88e-05, |
|
"loss": 4.2811, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.94e-05, |
|
"loss": 4.1493, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.9999999999999997e-05, |
|
"loss": 4.2219, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.06e-05, |
|
"loss": 4.3674, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.119999999999999e-05, |
|
"loss": 4.1651, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.1799999999999994e-05, |
|
"loss": 4.4474, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.2399999999999995e-05, |
|
"loss": 4.3298, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.2999999999999996e-05, |
|
"loss": 4.2132, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.36e-05, |
|
"loss": 4.3708, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.42e-05, |
|
"loss": 4.1803, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.48e-05, |
|
"loss": 4.3545, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.539999999999999e-05, |
|
"loss": 4.2327, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.5999999999999994e-05, |
|
"loss": 4.1038, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.6599999999999995e-05, |
|
"loss": 4.1128, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.7199999999999996e-05, |
|
"loss": 4.2349, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.78e-05, |
|
"loss": 4.3056, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.84e-05, |
|
"loss": 4.2165, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.9e-05, |
|
"loss": 4.2028, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.96e-05, |
|
"loss": 4.2546, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.02e-05, |
|
"loss": 4.4175, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.08e-05, |
|
"loss": 4.3118, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.14e-05, |
|
"loss": 4.2295, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.2e-05, |
|
"loss": 4.2973, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.259999999999999e-05, |
|
"loss": 4.1686, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.319999999999999e-05, |
|
"loss": 4.5328, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.3799999999999994e-05, |
|
"loss": 4.3378, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.4399999999999995e-05, |
|
"loss": 4.1686, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.4999999999999996e-05, |
|
"loss": 4.1852, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.56e-05, |
|
"loss": 4.3177, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.62e-05, |
|
"loss": 4.3517, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.68e-05, |
|
"loss": 4.2729, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.7399999999999993e-05, |
|
"loss": 4.2086, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.7999999999999994e-05, |
|
"loss": 4.3288, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.8599999999999995e-05, |
|
"loss": 4.2414, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9199999999999997e-05, |
|
"loss": 4.2314, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.98e-05, |
|
"loss": 4.119, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 5.04e-05, |
|
"loss": 4.3444, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 5.1e-05, |
|
"loss": 4.4085, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.1599999999999994e-05, |
|
"loss": 4.2942, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.2199999999999995e-05, |
|
"loss": 4.381, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.279999999999999e-05, |
|
"loss": 4.3951, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.339999999999999e-05, |
|
"loss": 4.0369, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.399999999999999e-05, |
|
"loss": 4.4064, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.459999999999999e-05, |
|
"loss": 4.5946, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.519999999999999e-05, |
|
"loss": 4.4538, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.5799999999999994e-05, |
|
"loss": 4.464, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 4.5133, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 4.5579, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.76e-05, |
|
"loss": 4.2087, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.82e-05, |
|
"loss": 4.0703, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.88e-05, |
|
"loss": 4.4371, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.94e-05, |
|
"loss": 4.2404, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.9999999999999995e-05, |
|
"loss": 4.3118, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.0599999999999996e-05, |
|
"loss": 4.253, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.12e-05, |
|
"loss": 4.3638, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.18e-05, |
|
"loss": 4.4452, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.239999999999999e-05, |
|
"loss": 4.4478, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.299999999999999e-05, |
|
"loss": 4.3519, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.359999999999999e-05, |
|
"loss": 4.1316, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.419999999999999e-05, |
|
"loss": 4.3832, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.479999999999999e-05, |
|
"loss": 4.4707, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.539999999999999e-05, |
|
"loss": 4.2211, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.599999999999999e-05, |
|
"loss": 4.4116, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.659999999999999e-05, |
|
"loss": 4.359, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.72e-05, |
|
"loss": 4.4433, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.78e-05, |
|
"loss": 4.2172, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.84e-05, |
|
"loss": 4.0784, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.9e-05, |
|
"loss": 4.1843, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.96e-05, |
|
"loss": 4.1996, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 7.02e-05, |
|
"loss": 4.5407, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 7.079999999999999e-05, |
|
"loss": 4.3013, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 7.139999999999999e-05, |
|
"loss": 4.3319, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 7.199999999999999e-05, |
|
"loss": 4.422, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.259999999999999e-05, |
|
"loss": 4.1384, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.319999999999999e-05, |
|
"loss": 4.2991, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.379999999999999e-05, |
|
"loss": 4.4639, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.439999999999999e-05, |
|
"loss": 4.1505, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.5e-05, |
|
"loss": 4.106, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.56e-05, |
|
"loss": 4.3523, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.62e-05, |
|
"loss": 4.3679, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.68e-05, |
|
"loss": 4.2785, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.74e-05, |
|
"loss": 4.5031, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.8e-05, |
|
"loss": 4.3363, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.86e-05, |
|
"loss": 4.4382, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.92e-05, |
|
"loss": 4.1609, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 7.98e-05, |
|
"loss": 4.456, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.04e-05, |
|
"loss": 4.432, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.1e-05, |
|
"loss": 4.2593, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.16e-05, |
|
"loss": 4.4301, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.22e-05, |
|
"loss": 4.4305, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.28e-05, |
|
"loss": 4.2845, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.34e-05, |
|
"loss": 4.1929, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.4e-05, |
|
"loss": 4.5414, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.459999999999998e-05, |
|
"loss": 4.4126, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.519999999999998e-05, |
|
"loss": 4.5683, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.579999999999998e-05, |
|
"loss": 4.4539, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.639999999999999e-05, |
|
"loss": 4.2047, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.699999999999999e-05, |
|
"loss": 4.5413, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.759999999999999e-05, |
|
"loss": 4.5694, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.819999999999999e-05, |
|
"loss": 4.3819, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.879999999999999e-05, |
|
"loss": 4.3633, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.939999999999999e-05, |
|
"loss": 4.3485, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.999999999999999e-05, |
|
"loss": 4.428, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.059999999999999e-05, |
|
"loss": 4.3302, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.12e-05, |
|
"loss": 4.1732, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.18e-05, |
|
"loss": 4.3756, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.24e-05, |
|
"loss": 4.5436, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.3e-05, |
|
"loss": 4.2929, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.36e-05, |
|
"loss": 4.0308, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.419999999999999e-05, |
|
"loss": 4.3842, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.479999999999999e-05, |
|
"loss": 4.3992, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.539999999999999e-05, |
|
"loss": 4.3533, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.599999999999999e-05, |
|
"loss": 4.0939, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.659999999999999e-05, |
|
"loss": 4.3203, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.719999999999999e-05, |
|
"loss": 3.944, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.779999999999999e-05, |
|
"loss": 4.3161, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.839999999999999e-05, |
|
"loss": 4.2308, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.9e-05, |
|
"loss": 4.3824, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.96e-05, |
|
"loss": 4.2629, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0001002, |
|
"loss": 4.3181, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0001008, |
|
"loss": 4.196, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0001014, |
|
"loss": 4.0841, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.000102, |
|
"loss": 4.514, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0001026, |
|
"loss": 4.3829, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010319999999999999, |
|
"loss": 4.3692, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010379999999999999, |
|
"loss": 4.4161, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010439999999999999, |
|
"loss": 4.2987, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010499999999999999, |
|
"loss": 4.3005, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010559999999999998, |
|
"loss": 4.4786, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010619999999999998, |
|
"loss": 4.4301, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010679999999999998, |
|
"loss": 4.4607, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010739999999999998, |
|
"loss": 4.3058, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010799999999999998, |
|
"loss": 4.3893, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00010859999999999998, |
|
"loss": 4.1137, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00010919999999999998, |
|
"loss": 4.3025, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00010979999999999999, |
|
"loss": 4.5062, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00011039999999999999, |
|
"loss": 4.3481, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00011099999999999999, |
|
"loss": 4.4715, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00011159999999999999, |
|
"loss": 4.1929, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00011219999999999999, |
|
"loss": 4.3975, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00011279999999999999, |
|
"loss": 4.6548, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00011339999999999999, |
|
"loss": 5.0553, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00011399999999999999, |
|
"loss": 4.3685, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001146, |
|
"loss": 4.5626, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001152, |
|
"loss": 4.8323, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001158, |
|
"loss": 4.5394, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001164, |
|
"loss": 4.5841, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.000117, |
|
"loss": 4.652, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001176, |
|
"loss": 4.4321, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001182, |
|
"loss": 4.5498, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001188, |
|
"loss": 4.3639, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001194, |
|
"loss": 4.4626, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00011999999999999999, |
|
"loss": 4.3329, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00012059999999999999, |
|
"loss": 4.3249, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00012119999999999999, |
|
"loss": 4.318, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00012179999999999999, |
|
"loss": 4.3869, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001224, |
|
"loss": 4.9422, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012299999999999998, |
|
"loss": 4.4169, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001236, |
|
"loss": 4.8081, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012419999999999998, |
|
"loss": 4.3622, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012479999999999997, |
|
"loss": 4.4941, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012539999999999999, |
|
"loss": 4.3123, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012599999999999997, |
|
"loss": 4.5494, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001266, |
|
"loss": 4.4142, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012719999999999997, |
|
"loss": 4.4588, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001278, |
|
"loss": 4.5321, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012839999999999998, |
|
"loss": 4.3559, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.000129, |
|
"loss": 4.1084, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012959999999999998, |
|
"loss": 4.5419, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001302, |
|
"loss": 4.3222, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00013079999999999998, |
|
"loss": 4.7097, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001314, |
|
"loss": 4.626, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00013199999999999998, |
|
"loss": 4.4566, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001326, |
|
"loss": 4.5153, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00013319999999999999, |
|
"loss": 4.2298, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001338, |
|
"loss": 4.4521, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001344, |
|
"loss": 4.6118, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.000135, |
|
"loss": 4.4091, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001356, |
|
"loss": 4.5025, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001362, |
|
"loss": 4.4919, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001368, |
|
"loss": 4.3356, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001374, |
|
"loss": 4.6275, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.000138, |
|
"loss": 4.3615, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001386, |
|
"loss": 4.5941, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001392, |
|
"loss": 4.5838, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00013979999999999998, |
|
"loss": 4.2655, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001404, |
|
"loss": 4.3444, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00014099999999999998, |
|
"loss": 4.3864, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00014159999999999997, |
|
"loss": 4.4261, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001422, |
|
"loss": 4.7173, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00014279999999999997, |
|
"loss": 4.2811, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001434, |
|
"loss": 4.5207, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00014399999999999998, |
|
"loss": 4.3863, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0001446, |
|
"loss": 4.7548, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00014519999999999998, |
|
"loss": 4.2792, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0001458, |
|
"loss": 5.5172, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00014639999999999998, |
|
"loss": 4.6362, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.000147, |
|
"loss": 7.8904, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00014759999999999998, |
|
"loss": 4.6706, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0001482, |
|
"loss": 4.6318, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00014879999999999998, |
|
"loss": 4.3568, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0001494, |
|
"loss": 4.6498, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00015, |
|
"loss": 4.2444, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00015059999999999997, |
|
"loss": 4.4818, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001512, |
|
"loss": 4.7729, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00015179999999999998, |
|
"loss": 4.2207, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001524, |
|
"loss": 4.9552, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00015299999999999998, |
|
"loss": 4.5542, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001536, |
|
"loss": 4.3937, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00015419999999999998, |
|
"loss": 4.4997, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001548, |
|
"loss": 4.4278, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00015539999999999998, |
|
"loss": 4.6136, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.000156, |
|
"loss": 4.8674, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00015659999999999998, |
|
"loss": 4.6685, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001572, |
|
"loss": 5.0145, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001578, |
|
"loss": 4.3919, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001584, |
|
"loss": 4.9681, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.000159, |
|
"loss": 4.5067, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001596, |
|
"loss": 4.2741, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001602, |
|
"loss": 4.6239, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001608, |
|
"loss": 4.2923, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001614, |
|
"loss": 4.4861, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.000162, |
|
"loss": 4.523, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001626, |
|
"loss": 4.4596, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001632, |
|
"loss": 4.259, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001638, |
|
"loss": 4.9241, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001644, |
|
"loss": 4.671, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.000165, |
|
"loss": 4.841, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001656, |
|
"loss": 4.6318, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001662, |
|
"loss": 4.5362, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001668, |
|
"loss": 4.4569, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001674, |
|
"loss": 4.7043, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.000168, |
|
"loss": 4.3861, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001686, |
|
"loss": 4.5491, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00016919999999999997, |
|
"loss": 4.297, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00016979999999999998, |
|
"loss": 4.7139, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017039999999999997, |
|
"loss": 4.3837, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017099999999999998, |
|
"loss": 4.8364, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017159999999999997, |
|
"loss": 4.851, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017219999999999998, |
|
"loss": 5.3491, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017279999999999997, |
|
"loss": 5.1197, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017339999999999996, |
|
"loss": 4.6506, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017399999999999997, |
|
"loss": 4.8303, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017459999999999996, |
|
"loss": 4.8478, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017519999999999998, |
|
"loss": 6.4638, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017579999999999996, |
|
"loss": 5.701, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017639999999999998, |
|
"loss": 4.922, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017699999999999997, |
|
"loss": 6.5197, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017759999999999998, |
|
"loss": 5.6574, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017819999999999997, |
|
"loss": 4.6945, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017879999999999998, |
|
"loss": 4.6706, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017939999999999997, |
|
"loss": 5.0316, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017999999999999998, |
|
"loss": 4.4729, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00018059999999999997, |
|
"loss": 4.4609, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00018119999999999999, |
|
"loss": 4.6328, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00018179999999999997, |
|
"loss": 4.6626, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0001824, |
|
"loss": 4.3457, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00018299999999999998, |
|
"loss": 4.2517, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0001836, |
|
"loss": 5.1203, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00018419999999999998, |
|
"loss": 4.8707, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0001848, |
|
"loss": 4.4209, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00018539999999999998, |
|
"loss": 4.4336, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.000186, |
|
"loss": 4.6532, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00018659999999999998, |
|
"loss": 4.2587, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0001872, |
|
"loss": 4.5758, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00018779999999999998, |
|
"loss": 4.7729, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00018839999999999997, |
|
"loss": 4.5811, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00018899999999999999, |
|
"loss": 4.648, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00018959999999999997, |
|
"loss": 4.4726, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0001902, |
|
"loss": 4.3312, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00019079999999999998, |
|
"loss": 4.325, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0001914, |
|
"loss": 4.4029, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00019199999999999998, |
|
"loss": 5.1371, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0001926, |
|
"loss": 4.5978, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00019319999999999998, |
|
"loss": 5.219, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0001938, |
|
"loss": 4.8118, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00019439999999999998, |
|
"loss": 4.7945, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.000195, |
|
"loss": 4.4908, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00019559999999999998, |
|
"loss": 4.9624, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001962, |
|
"loss": 4.496, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00019679999999999999, |
|
"loss": 4.7033, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001974, |
|
"loss": 4.6788, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.000198, |
|
"loss": 4.5631, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001986, |
|
"loss": 4.702, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001992, |
|
"loss": 5.5658, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001998, |
|
"loss": 4.7049, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0002004, |
|
"loss": 4.7927, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.000201, |
|
"loss": 4.7044, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002016, |
|
"loss": 4.6731, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002022, |
|
"loss": 4.8644, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002028, |
|
"loss": 5.1974, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00020339999999999998, |
|
"loss": 4.5775, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.000204, |
|
"loss": 5.4593, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00020459999999999999, |
|
"loss": 4.8787, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002052, |
|
"loss": 4.8025, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002058, |
|
"loss": 5.2866, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00020639999999999998, |
|
"loss": 5.9515, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00020699999999999996, |
|
"loss": 5.1083, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00020759999999999998, |
|
"loss": 4.6994, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00020819999999999996, |
|
"loss": 5.3215, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00020879999999999998, |
|
"loss": 4.5359, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00020939999999999997, |
|
"loss": 5.0289, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00020999999999999998, |
|
"loss": 4.9806, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021059999999999997, |
|
"loss": 4.5323, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021119999999999996, |
|
"loss": 5.1149, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021179999999999997, |
|
"loss": 4.779, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021239999999999996, |
|
"loss": 4.3883, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021299999999999997, |
|
"loss": 4.7019, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021359999999999996, |
|
"loss": 4.7448, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021419999999999998, |
|
"loss": 4.5466, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00021479999999999996, |
|
"loss": 4.8753, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021539999999999998, |
|
"loss": 4.4886, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021599999999999996, |
|
"loss": 4.5213, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021659999999999998, |
|
"loss": 5.0087, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021719999999999997, |
|
"loss": 6.115, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021779999999999998, |
|
"loss": 4.7595, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021839999999999997, |
|
"loss": 4.8554, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021899999999999998, |
|
"loss": 4.6204, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00021959999999999997, |
|
"loss": 4.7214, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00022019999999999999, |
|
"loss": 5.0148, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00022079999999999997, |
|
"loss": 4.7485, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0002214, |
|
"loss": 4.7558, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00022199999999999998, |
|
"loss": 4.5435, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002226, |
|
"loss": 4.7954, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022319999999999998, |
|
"loss": 4.7268, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002238, |
|
"loss": 4.6724, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022439999999999998, |
|
"loss": 4.4617, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.000225, |
|
"loss": 4.7142, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022559999999999998, |
|
"loss": 4.6315, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022619999999999997, |
|
"loss": 4.3959, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022679999999999998, |
|
"loss": 4.5737, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022739999999999997, |
|
"loss": 4.6512, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022799999999999999, |
|
"loss": 4.6704, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00022859999999999997, |
|
"loss": 4.8355, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002292, |
|
"loss": 4.6387, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00022979999999999997, |
|
"loss": 4.8218, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002304, |
|
"loss": 4.5275, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00023099999999999998, |
|
"loss": 4.7885, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002316, |
|
"loss": 4.6711, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00023219999999999998, |
|
"loss": 4.6913, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002328, |
|
"loss": 5.5985, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00023339999999999998, |
|
"loss": 4.501, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.000234, |
|
"loss": 5.082, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00023459999999999998, |
|
"loss": 4.4409, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002352, |
|
"loss": 5.9987, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00023579999999999999, |
|
"loss": 5.7972, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002364, |
|
"loss": 5.1902, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.000237, |
|
"loss": 5.4818, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002376, |
|
"loss": 4.9471, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002382, |
|
"loss": 5.6634, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002388, |
|
"loss": 5.0046, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002394, |
|
"loss": 4.6863, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00023999999999999998, |
|
"loss": 4.6132, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002406, |
|
"loss": 4.8202, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00024119999999999998, |
|
"loss": 4.604, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002418, |
|
"loss": 4.4995, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00024239999999999998, |
|
"loss": 5.1402, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.000243, |
|
"loss": 4.8212, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00024359999999999999, |
|
"loss": 4.763, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00024419999999999997, |
|
"loss": 5.0245, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002448, |
|
"loss": 4.5873, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00024539999999999995, |
|
"loss": 5.1268, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00024599999999999996, |
|
"loss": 4.3666, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002466, |
|
"loss": 4.9047, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002472, |
|
"loss": 4.7815, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00024779999999999995, |
|
"loss": 4.2988, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00024839999999999997, |
|
"loss": 4.8659, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.000249, |
|
"loss": 4.5983, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00024959999999999994, |
|
"loss": 4.5694, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00025019999999999996, |
|
"loss": 5.0518, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00025079999999999997, |
|
"loss": 4.6937, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002514, |
|
"loss": 4.7808, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00025199999999999995, |
|
"loss": 4.4995, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00025259999999999996, |
|
"loss": 5.3755, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002532, |
|
"loss": 4.5319, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002538, |
|
"loss": 4.3798, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00025439999999999995, |
|
"loss": 4.449, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00025499999999999996, |
|
"loss": 5.896, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002556, |
|
"loss": 5.1909, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002562, |
|
"loss": 4.8339, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00025679999999999995, |
|
"loss": 4.6261, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00025739999999999997, |
|
"loss": 4.5436, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.000258, |
|
"loss": 4.7635, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002586, |
|
"loss": 4.8065, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00025919999999999996, |
|
"loss": 4.8411, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00025979999999999997, |
|
"loss": 4.4299, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002604, |
|
"loss": 4.4418, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.000261, |
|
"loss": 4.808, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026159999999999996, |
|
"loss": 4.6162, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002622, |
|
"loss": 5.7287, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002628, |
|
"loss": 4.6771, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026339999999999995, |
|
"loss": 4.529, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026399999999999997, |
|
"loss": 4.6037, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002646, |
|
"loss": 4.5055, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002652, |
|
"loss": 5.2715, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00026579999999999996, |
|
"loss": 5.7413, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00026639999999999997, |
|
"loss": 5.028, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.000267, |
|
"loss": 4.8087, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002676, |
|
"loss": 4.8552, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00026819999999999996, |
|
"loss": 5.0898, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002688, |
|
"loss": 4.875, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002694, |
|
"loss": 4.9811, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00027, |
|
"loss": 4.7576, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00027059999999999996, |
|
"loss": 5.1758, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002712, |
|
"loss": 5.1518, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002718, |
|
"loss": 4.7805, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002724, |
|
"loss": 5.4872, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00027299999999999997, |
|
"loss": 4.6417, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002736, |
|
"loss": 4.643, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002742, |
|
"loss": 4.8394, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002748, |
|
"loss": 4.617, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00027539999999999997, |
|
"loss": 4.5665, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.000276, |
|
"loss": 4.9569, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002766, |
|
"loss": 4.7609, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002772, |
|
"loss": 4.5635, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002778, |
|
"loss": 4.691, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002784, |
|
"loss": 4.7105, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.000279, |
|
"loss": 4.5443, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00027959999999999997, |
|
"loss": 4.9041, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002802, |
|
"loss": 4.6443, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002808, |
|
"loss": 4.8053, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028139999999999996, |
|
"loss": 4.5651, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028199999999999997, |
|
"loss": 4.7821, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002826, |
|
"loss": 5.1903, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028319999999999994, |
|
"loss": 5.1694, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028379999999999996, |
|
"loss": 4.3456, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002844, |
|
"loss": 4.8221, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.000285, |
|
"loss": 4.6403, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028559999999999995, |
|
"loss": 4.5352, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028619999999999996, |
|
"loss": 4.5099, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002868, |
|
"loss": 4.5161, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00028739999999999994, |
|
"loss": 5.6125, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00028799999999999995, |
|
"loss": 4.8112, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00028859999999999997, |
|
"loss": 4.8629, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002892, |
|
"loss": 4.764, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00028979999999999994, |
|
"loss": 4.4091, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00029039999999999996, |
|
"loss": 4.6947, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00029099999999999997, |
|
"loss": 5.0368, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002916, |
|
"loss": 4.567, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00029219999999999995, |
|
"loss": 5.1349, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00029279999999999996, |
|
"loss": 4.7064, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002934, |
|
"loss": 5.4823, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.000294, |
|
"loss": 4.9341, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00029459999999999995, |
|
"loss": 5.279, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00029519999999999997, |
|
"loss": 4.8398, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002958, |
|
"loss": 4.7063, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002964, |
|
"loss": 4.7881, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00029699999999999996, |
|
"loss": 4.6494, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00029759999999999997, |
|
"loss": 4.833, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"eval_loss": 5.194062232971191, |
|
"eval_runtime": 747.3505, |
|
"eval_samples_per_second": 3.535, |
|
"eval_steps_per_second": 0.296, |
|
"eval_wer": 1.9524395081316939, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002982, |
|
"loss": 4.9335, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002988, |
|
"loss": 4.9661, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00029939999999999996, |
|
"loss": 4.9686, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0003, |
|
"loss": 4.7973, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002995645863570392, |
|
"loss": 4.7865, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002991291727140783, |
|
"loss": 5.0596, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00029869375907111753, |
|
"loss": 4.8275, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00029825834542815676, |
|
"loss": 4.6202, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00029782293178519587, |
|
"loss": 4.6348, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002973875181422351, |
|
"loss": 5.3447, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00029695210449927426, |
|
"loss": 5.0884, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002965166908563135, |
|
"loss": 4.7513, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00029608127721335266, |
|
"loss": 4.877, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002956458635703918, |
|
"loss": 4.4649, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00029521044992743105, |
|
"loss": 4.5183, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002947750362844702, |
|
"loss": 5.2663, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002943396226415094, |
|
"loss": 5.9906, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002939042089985486, |
|
"loss": 4.666, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002934687953555878, |
|
"loss": 5.5156, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00029303338171262695, |
|
"loss": 4.5457, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00029259796806966617, |
|
"loss": 5.0324, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00029216255442670534, |
|
"loss": 4.7233, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002917271407837445, |
|
"loss": 5.6246, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00029129172714078373, |
|
"loss": 4.8244, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002908563134978229, |
|
"loss": 4.9059, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00029042089985486207, |
|
"loss": 4.4507, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002899854862119013, |
|
"loss": 5.0691, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00028955007256894046, |
|
"loss": 5.1142, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002891146589259796, |
|
"loss": 4.7393, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00028867924528301885, |
|
"loss": 4.8084, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.000288243831640058, |
|
"loss": 4.7766, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002878084179970972, |
|
"loss": 5.0283, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002873730043541364, |
|
"loss": 4.6411, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002869375907111756, |
|
"loss": 4.8474, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00028650217706821475, |
|
"loss": 4.5725, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00028606676342525397, |
|
"loss": 4.5082, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00028563134978229314, |
|
"loss": 4.8596, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002851959361393323, |
|
"loss": 5.065, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00028476052249637153, |
|
"loss": 4.7098, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002843251088534107, |
|
"loss": 5.1196, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002838896952104499, |
|
"loss": 4.8682, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002834542815674891, |
|
"loss": 5.5616, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00028301886792452826, |
|
"loss": 4.684, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002825834542815675, |
|
"loss": 5.0173, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00028214804063860665, |
|
"loss": 5.1818, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002817126269956458, |
|
"loss": 4.6626, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00028127721335268504, |
|
"loss": 4.9806, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002808417997097242, |
|
"loss": 5.3829, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002804063860667634, |
|
"loss": 4.9341, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002799709724238026, |
|
"loss": 6.3752, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002795355587808418, |
|
"loss": 4.9254, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00027910014513788094, |
|
"loss": 4.783, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00027866473149492017, |
|
"loss": 5.1896, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00027822931785195933, |
|
"loss": 4.8299, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002777939042089985, |
|
"loss": 4.9769, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002773584905660377, |
|
"loss": 4.8809, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002769230769230769, |
|
"loss": 4.5533, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00027648766328011606, |
|
"loss": 4.6023, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002760522496371553, |
|
"loss": 4.9475, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00027561683599419446, |
|
"loss": 4.9687, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002751814223512336, |
|
"loss": 4.69, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00027474600870827285, |
|
"loss": 4.7413, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.000274310595065312, |
|
"loss": 4.9996, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002738751814223512, |
|
"loss": 4.3674, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002734397677793904, |
|
"loss": 5.0086, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002730043541364296, |
|
"loss": 4.6329, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00027256894049346875, |
|
"loss": 4.7313, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00027213352685050797, |
|
"loss": 4.7225, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00027169811320754714, |
|
"loss": 4.6538, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00027126269956458636, |
|
"loss": 4.5962, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00027082728592162553, |
|
"loss": 4.6124, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002703918722786647, |
|
"loss": 5.0632, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002699564586357039, |
|
"loss": 4.5618, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002695210449927431, |
|
"loss": 4.836, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00026908563134978226, |
|
"loss": 4.9042, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002686502177068215, |
|
"loss": 4.8837, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00026821480406386065, |
|
"loss": 5.2048, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002677793904208998, |
|
"loss": 4.6342, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00026734397677793904, |
|
"loss": 4.6749, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002669085631349782, |
|
"loss": 4.7553, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002664731494920174, |
|
"loss": 4.8569, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002660377358490566, |
|
"loss": 4.7583, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00026560232220609577, |
|
"loss": 4.7735, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00026516690856313494, |
|
"loss": 4.561, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00026473149492017416, |
|
"loss": 5.3574, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00026429608127721333, |
|
"loss": 4.7699, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002638606676342525, |
|
"loss": 4.9943, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002634252539912917, |
|
"loss": 4.5543, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002629898403483309, |
|
"loss": 4.8902, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00026255442670537006, |
|
"loss": 4.8949, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002621190130624093, |
|
"loss": 4.6346, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00026168359941944845, |
|
"loss": 4.4538, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002612481857764876, |
|
"loss": 5.4038, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00026081277213352684, |
|
"loss": 4.6285, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.000260377358490566, |
|
"loss": 4.6779, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002599419448476052, |
|
"loss": 4.552, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002595065312046444, |
|
"loss": 4.6429, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002590711175616836, |
|
"loss": 5.7606, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00025863570391872274, |
|
"loss": 6.3581, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00025820029027576197, |
|
"loss": 6.8859, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00025776487663280113, |
|
"loss": 4.6724, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00025732946298984036, |
|
"loss": 4.9873, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002568940493468795, |
|
"loss": 5.0404, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002564586357039187, |
|
"loss": 4.778, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002560232220609579, |
|
"loss": 4.7111, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002555878084179971, |
|
"loss": 4.8009, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00025515239477503626, |
|
"loss": 4.6809, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002547169811320755, |
|
"loss": 4.8737, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00025428156748911465, |
|
"loss": 4.7996, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002538461538461538, |
|
"loss": 4.9332, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00025341074020319304, |
|
"loss": 4.8498, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002529753265602322, |
|
"loss": 4.7182, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002525399129172714, |
|
"loss": 6.0865, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002521044992743106, |
|
"loss": 4.4399, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00025166908563134977, |
|
"loss": 4.6551, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00025123367198838894, |
|
"loss": 4.9617, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00025079825834542816, |
|
"loss": 4.8646, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00025036284470246733, |
|
"loss": 5.0519, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002499274310595065, |
|
"loss": 4.8651, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002494920174165457, |
|
"loss": 4.9591, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002490566037735849, |
|
"loss": 4.3819, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00024862119013062406, |
|
"loss": 4.329, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002481857764876633, |
|
"loss": 5.9491, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00024775036284470245, |
|
"loss": 5.1706, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002473149492017416, |
|
"loss": 5.0565, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00024687953555878084, |
|
"loss": 5.0496, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00024644412191582, |
|
"loss": 4.7394, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002460087082728592, |
|
"loss": 4.4442, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002455732946298984, |
|
"loss": 4.8163, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00024513788098693757, |
|
"loss": 4.612, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002447024673439768, |
|
"loss": 4.9465, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00024426705370101596, |
|
"loss": 4.5676, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00024383164005805513, |
|
"loss": 4.994, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00024339622641509433, |
|
"loss": 4.6503, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002429608127721335, |
|
"loss": 4.8686, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002425253991291727, |
|
"loss": 4.9115, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0002420899854862119, |
|
"loss": 4.5344, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00024165457184325106, |
|
"loss": 4.5513, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00024121915820029025, |
|
"loss": 4.5369, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00024078374455732945, |
|
"loss": 5.0302, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00024034833091436862, |
|
"loss": 4.6188, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0002399129172714078, |
|
"loss": 5.3999, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.000239477503628447, |
|
"loss": 5.2607, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0002390420899854862, |
|
"loss": 4.7053, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00023860667634252537, |
|
"loss": 4.7037, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00023817126269956457, |
|
"loss": 4.5346, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00023773584905660377, |
|
"loss": 4.3213, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00023730043541364293, |
|
"loss": 5.9242, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00023686502177068213, |
|
"loss": 5.0213, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00023642960812772133, |
|
"loss": 5.1531, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002359941944847605, |
|
"loss": 4.8283, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002355587808417997, |
|
"loss": 4.6261, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00023512336719883889, |
|
"loss": 4.6689, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00023468795355587805, |
|
"loss": 5.0152, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00023425253991291725, |
|
"loss": 4.5758, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00023381712626995645, |
|
"loss": 4.5943, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00023338171262699562, |
|
"loss": 4.6639, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002329462989840348, |
|
"loss": 4.8844, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.000232510885341074, |
|
"loss": 4.9718, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002320754716981132, |
|
"loss": 5.0065, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00023164005805515237, |
|
"loss": 4.713, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00023120464441219157, |
|
"loss": 4.4193, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00023076923076923076, |
|
"loss": 4.6658, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00023033381712626993, |
|
"loss": 4.5546, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00022989840348330913, |
|
"loss": 4.9011, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00022946298984034832, |
|
"loss": 4.826, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0002290275761973875, |
|
"loss": 4.7478, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0002285921625544267, |
|
"loss": 4.6145, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00022815674891146588, |
|
"loss": 5.2825, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00022772133526850505, |
|
"loss": 4.8788, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00022728592162554425, |
|
"loss": 4.7793, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00022685050798258345, |
|
"loss": 5.0238, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00022641509433962264, |
|
"loss": 4.7159, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002259796806966618, |
|
"loss": 4.7365, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.000225544267053701, |
|
"loss": 4.5953, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002251088534107402, |
|
"loss": 4.844, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00022467343976777937, |
|
"loss": 4.8889, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00022423802612481857, |
|
"loss": 4.7826, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00022380261248185776, |
|
"loss": 4.8066, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00022336719883889693, |
|
"loss": 4.8016, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00022293178519593613, |
|
"loss": 5.1085, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00022249637155297532, |
|
"loss": 4.293, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002220609579100145, |
|
"loss": 4.7588, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002216255442670537, |
|
"loss": 4.5345, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00022119013062409288, |
|
"loss": 4.9384, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00022075471698113205, |
|
"loss": 5.1311, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00022031930333817125, |
|
"loss": 4.5077, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00021988388969521044, |
|
"loss": 4.6148, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00021944847605224964, |
|
"loss": 4.7591, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002190130624092888, |
|
"loss": 5.0202, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.000218577648766328, |
|
"loss": 4.8682, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002181422351233672, |
|
"loss": 4.6642, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00021770682148040637, |
|
"loss": 4.8987, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00021727140783744556, |
|
"loss": 5.5827, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00021683599419448476, |
|
"loss": 4.66, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021640058055152393, |
|
"loss": 4.6692, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021596516690856313, |
|
"loss": 4.7535, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021552975326560232, |
|
"loss": 5.7668, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021509433962264146, |
|
"loss": 4.7838, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021465892597968069, |
|
"loss": 5.2674, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021422351233671988, |
|
"loss": 5.1239, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021378809869375908, |
|
"loss": 4.4763, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021335268505079825, |
|
"loss": 4.6522, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021291727140783744, |
|
"loss": 4.6981, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021248185776487664, |
|
"loss": 4.4445, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021204644412191578, |
|
"loss": 4.6095, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.000211611030478955, |
|
"loss": 4.5051, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0002111756168359942, |
|
"loss": 4.9519, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021074020319303334, |
|
"loss": 4.5414, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021030478955007256, |
|
"loss": 4.7941, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00020986937590711176, |
|
"loss": 4.3142, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0002094339622641509, |
|
"loss": 4.7656, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0002089985486211901, |
|
"loss": 4.8588, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00020856313497822932, |
|
"loss": 4.4229, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00020812772133526846, |
|
"loss": 4.9272, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00020769230769230766, |
|
"loss": 4.5207, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00020725689404934685, |
|
"loss": 4.4965, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00020682148040638608, |
|
"loss": 4.8384, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00020638606676342522, |
|
"loss": 4.5704, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020595065312046441, |
|
"loss": 4.487, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020551523947750364, |
|
"loss": 4.4206, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020507982583454278, |
|
"loss": 4.3743, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020464441219158197, |
|
"loss": 5.2636, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020420899854862117, |
|
"loss": 4.4234, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020377358490566034, |
|
"loss": 4.6968, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020333817126269953, |
|
"loss": 5.4371, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020290275761973873, |
|
"loss": 4.4697, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0002024673439767779, |
|
"loss": 4.819, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0002020319303338171, |
|
"loss": 4.3262, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0002015965166908563, |
|
"loss": 4.698, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00020116110304789546, |
|
"loss": 4.8936, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00020072568940493466, |
|
"loss": 5.0859, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00020029027576197385, |
|
"loss": 4.4665, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019985486211901305, |
|
"loss": 4.5697, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019941944847605222, |
|
"loss": 5.1158, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001989840348330914, |
|
"loss": 4.5841, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001985486211901306, |
|
"loss": 4.6718, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019811320754716978, |
|
"loss": 4.9392, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019767779390420897, |
|
"loss": 4.9908, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019724238026124817, |
|
"loss": 4.5999, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019680696661828734, |
|
"loss": 4.8487, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019637155297532653, |
|
"loss": 5.2399, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019593613933236573, |
|
"loss": 4.5827, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001955007256894049, |
|
"loss": 4.6606, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001950653120464441, |
|
"loss": 5.3387, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001946298984034833, |
|
"loss": 5.545, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019419448476052249, |
|
"loss": 5.0344, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019375907111756165, |
|
"loss": 5.9759, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019332365747460085, |
|
"loss": 4.5979, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019288824383164005, |
|
"loss": 4.7675, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019245283018867922, |
|
"loss": 4.6892, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001920174165457184, |
|
"loss": 4.8701, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001915820029027576, |
|
"loss": 4.6816, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019114658925979678, |
|
"loss": 4.3436, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019071117561683597, |
|
"loss": 4.7221, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00019027576197387517, |
|
"loss": 4.4863, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018984034833091434, |
|
"loss": 4.4107, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018940493468795353, |
|
"loss": 4.9736, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018896952104499273, |
|
"loss": 4.3843, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001885341074020319, |
|
"loss": 4.7796, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001880986937590711, |
|
"loss": 4.3747, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001876632801161103, |
|
"loss": 4.4827, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018722786647314948, |
|
"loss": 4.5165, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018679245283018865, |
|
"loss": 4.4969, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018635703918722785, |
|
"loss": 4.7664, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018592162554426704, |
|
"loss": 4.4828, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00018548621190130621, |
|
"loss": 4.7631, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001850507982583454, |
|
"loss": 4.2864, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001846153846153846, |
|
"loss": 4.6127, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018417997097242377, |
|
"loss": 4.6952, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018374455732946297, |
|
"loss": 4.8167, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018330914368650217, |
|
"loss": 4.6103, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018287373004354133, |
|
"loss": 4.5139, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018243831640058053, |
|
"loss": 4.5599, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018200290275761973, |
|
"loss": 4.8973, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018156748911465892, |
|
"loss": 4.4198, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001811320754716981, |
|
"loss": 4.5592, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001806966618287373, |
|
"loss": 4.8103, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00018026124818577648, |
|
"loss": 4.4989, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017982583454281565, |
|
"loss": 4.7375, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017939042089985485, |
|
"loss": 4.5348, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017895500725689404, |
|
"loss": 4.7231, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001785195936139332, |
|
"loss": 4.6088, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001780841799709724, |
|
"loss": 4.8997, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001776487663280116, |
|
"loss": 4.9077, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017721335268505077, |
|
"loss": 4.7907, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017677793904208997, |
|
"loss": 4.8741, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017634252539912916, |
|
"loss": 4.9389, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017590711175616833, |
|
"loss": 4.4176, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017547169811320753, |
|
"loss": 4.3583, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00017503628447024673, |
|
"loss": 4.8908, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00017460087082728592, |
|
"loss": 4.8744, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001741654571843251, |
|
"loss": 5.2567, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00017373004354136429, |
|
"loss": 4.4392, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00017329462989840348, |
|
"loss": 4.7618, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00017285921625544265, |
|
"loss": 4.2707, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00017242380261248185, |
|
"loss": 4.4303, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00017198838896952104, |
|
"loss": 4.8627, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001715529753265602, |
|
"loss": 5.733, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001711175616835994, |
|
"loss": 4.6037, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001706821480406386, |
|
"loss": 5.4275, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00017024673439767777, |
|
"loss": 4.9265, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016981132075471697, |
|
"loss": 4.5584, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016937590711175616, |
|
"loss": 4.7239, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016894049346879536, |
|
"loss": 4.3661, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016850507982583453, |
|
"loss": 4.5606, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016806966618287372, |
|
"loss": 4.9325, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016763425253991292, |
|
"loss": 4.6627, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001671988388969521, |
|
"loss": 4.933, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016676342525399128, |
|
"loss": 5.0272, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016632801161103048, |
|
"loss": 4.7472, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016589259796806965, |
|
"loss": 4.8959, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016545718432510884, |
|
"loss": 4.4994, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00016502177068214804, |
|
"loss": 5.1709, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001645863570391872, |
|
"loss": 4.6819, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001641509433962264, |
|
"loss": 4.7547, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001637155297532656, |
|
"loss": 4.5043, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00016328011611030477, |
|
"loss": 4.2965, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00016284470246734397, |
|
"loss": 4.3956, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00016240928882438316, |
|
"loss": 4.6692, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00016197387518142236, |
|
"loss": 4.7531, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00016153846153846153, |
|
"loss": 4.4669, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00016110304789550072, |
|
"loss": 4.6101, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00016066763425253992, |
|
"loss": 5.0766, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001602322206095791, |
|
"loss": 4.725, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00015979680696661828, |
|
"loss": 4.9691, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015936139332365748, |
|
"loss": 4.5378, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015892597968069665, |
|
"loss": 4.6755, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015849056603773584, |
|
"loss": 4.8338, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015805515239477504, |
|
"loss": 4.8006, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001576197387518142, |
|
"loss": 4.6393, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001571843251088534, |
|
"loss": 4.6717, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001567489114658926, |
|
"loss": 4.3868, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001563134978229318, |
|
"loss": 4.5879, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015587808417997096, |
|
"loss": 4.4318, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015544267053701016, |
|
"loss": 4.6803, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015500725689404936, |
|
"loss": 4.5354, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00015457184325108852, |
|
"loss": 4.3185, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015413642960812772, |
|
"loss": 4.5848, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015370101596516692, |
|
"loss": 4.4021, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015326560232220606, |
|
"loss": 4.5211, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015283018867924528, |
|
"loss": 5.1432, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015239477503628448, |
|
"loss": 5.2663, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015195936139332362, |
|
"loss": 5.1663, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015152394775036284, |
|
"loss": 4.4686, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015108853410740204, |
|
"loss": 4.3942, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015065312046444118, |
|
"loss": 4.5995, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00015021770682148038, |
|
"loss": 4.3086, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0001497822931785196, |
|
"loss": 4.7466, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00014934687953555877, |
|
"loss": 4.8963, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014891146589259794, |
|
"loss": 5.0766, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014847605224963713, |
|
"loss": 4.5171, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014804063860667633, |
|
"loss": 4.7791, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014760522496371552, |
|
"loss": 4.4509, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001471698113207547, |
|
"loss": 4.5231, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001467343976777939, |
|
"loss": 4.6349, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014629898403483308, |
|
"loss": 4.4544, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014586357039187225, |
|
"loss": 4.3388, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014542815674891145, |
|
"loss": 4.5616, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00014499274310595064, |
|
"loss": 4.6136, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001445573294629898, |
|
"loss": 4.1963, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.000144121915820029, |
|
"loss": 4.5643, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001436865021770682, |
|
"loss": 4.4824, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00014325108853410737, |
|
"loss": 4.5173, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00014281567489114657, |
|
"loss": 4.4166, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00014238026124818577, |
|
"loss": 4.4031, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00014194484760522496, |
|
"loss": 4.2849, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00014150943396226413, |
|
"loss": 4.4519, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00014107402031930333, |
|
"loss": 4.8794, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00014063860667634252, |
|
"loss": 4.1686, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001402031930333817, |
|
"loss": 4.1047, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001397677793904209, |
|
"loss": 4.4521, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00013933236574746008, |
|
"loss": 4.7197, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00013889695210449925, |
|
"loss": 4.439, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013846153846153845, |
|
"loss": 4.6244, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013802612481857764, |
|
"loss": 4.2425, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0001375907111756168, |
|
"loss": 4.508, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.000137155297532656, |
|
"loss": 4.7327, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0001367198838896952, |
|
"loss": 4.3981, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013628447024673437, |
|
"loss": 4.5901, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013584905660377357, |
|
"loss": 4.3026, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013541364296081276, |
|
"loss": 4.745, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013497822931785196, |
|
"loss": 4.4287, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013454281567489113, |
|
"loss": 4.421, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013410740203193032, |
|
"loss": 4.5041, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00013367198838896952, |
|
"loss": 4.484, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001332365747460087, |
|
"loss": 4.6316, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00013280116110304789, |
|
"loss": 4.5471, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00013236574746008708, |
|
"loss": 4.901, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00013193033381712625, |
|
"loss": 4.8479, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00013149492017416545, |
|
"loss": 5.0569, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00013105950653120464, |
|
"loss": 4.5231, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001306240928882438, |
|
"loss": 4.5042, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.000130188679245283, |
|
"loss": 4.9721, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001297532656023222, |
|
"loss": 4.4415, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00012931785195936137, |
|
"loss": 4.7974, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00012888243831640057, |
|
"loss": 4.4361, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00012844702467343976, |
|
"loss": 4.6191, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012801161103047896, |
|
"loss": 4.5298, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012757619738751813, |
|
"loss": 4.6714, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012714078374455732, |
|
"loss": 4.4432, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012670537010159652, |
|
"loss": 4.5082, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0001262699564586357, |
|
"loss": 4.5454, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012583454281567488, |
|
"loss": 4.9197, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012539912917271408, |
|
"loss": 4.4218, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012496371552975325, |
|
"loss": 4.8506, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012452830188679244, |
|
"loss": 4.5414, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00012409288824383164, |
|
"loss": 5.0392, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0001236574746008708, |
|
"loss": 4.4776, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00012322206095791, |
|
"loss": 4.8287, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0001227866473149492, |
|
"loss": 4.3984, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0001223512336719884, |
|
"loss": 4.3385, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00012191582002902757, |
|
"loss": 4.3623, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00012148040638606675, |
|
"loss": 4.3756, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00012104499274310594, |
|
"loss": 4.7014, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00012060957910014513, |
|
"loss": 4.699, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00012017416545718431, |
|
"loss": 4.4153, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0001197387518142235, |
|
"loss": 4.4669, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00011930333817126269, |
|
"loss": 4.7014, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00011886792452830188, |
|
"loss": 4.6546, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00011843251088534106, |
|
"loss": 4.3518, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011799709724238025, |
|
"loss": 4.7564, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011756168359941944, |
|
"loss": 4.3889, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011712626995645863, |
|
"loss": 4.7127, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011669085631349781, |
|
"loss": 4.354, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.000116255442670537, |
|
"loss": 4.3408, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011582002902757619, |
|
"loss": 4.6165, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011538461538461538, |
|
"loss": 4.4945, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011494920174165456, |
|
"loss": 4.3871, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011451378809869375, |
|
"loss": 4.2537, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011407837445573294, |
|
"loss": 4.7368, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011364296081277212, |
|
"loss": 4.7466, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00011320754716981132, |
|
"loss": 4.4367, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0001127721335268505, |
|
"loss": 4.5984, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00011233671988388969, |
|
"loss": 4.6279, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00011190130624092888, |
|
"loss": 4.4899, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00011146589259796806, |
|
"loss": 4.5431, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00011103047895500725, |
|
"loss": 4.5151, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00011059506531204644, |
|
"loss": 4.4568, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00011015965166908562, |
|
"loss": 4.644, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00010972423802612482, |
|
"loss": 4.4773, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.000109288824383164, |
|
"loss": 4.5586, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00010885341074020318, |
|
"loss": 4.1504, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00010841799709724238, |
|
"loss": 4.3842, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00010798258345428156, |
|
"loss": 4.5864, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010754716981132073, |
|
"loss": 4.3185, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010711175616835994, |
|
"loss": 4.4693, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010667634252539912, |
|
"loss": 4.3239, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010624092888243832, |
|
"loss": 4.4073, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.0001058055152394775, |
|
"loss": 4.4021, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010537010159651667, |
|
"loss": 4.2928, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010493468795355588, |
|
"loss": 4.7388, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010449927431059505, |
|
"loss": 4.2615, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010406386066763423, |
|
"loss": 4.1396, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010362844702467343, |
|
"loss": 4.36, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010319303338171261, |
|
"loss": 4.6646, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010275761973875182, |
|
"loss": 4.6852, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010232220609579099, |
|
"loss": 4.4552, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010188679245283017, |
|
"loss": 4.7696, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010145137880986937, |
|
"loss": 4.4537, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010101596516690855, |
|
"loss": 4.3338, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010058055152394773, |
|
"loss": 4.4076, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010014513788098693, |
|
"loss": 4.159, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.970972423802611e-05, |
|
"loss": 4.9478, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.92743105950653e-05, |
|
"loss": 4.5281, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.883889695210449e-05, |
|
"loss": 4.223, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.840348330914367e-05, |
|
"loss": 4.5062, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.796806966618286e-05, |
|
"loss": 4.4958, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.753265602322205e-05, |
|
"loss": 4.06, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.709724238026124e-05, |
|
"loss": 4.3861, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.666182873730043e-05, |
|
"loss": 4.263, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.622641509433961e-05, |
|
"loss": 4.2316, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.57910014513788e-05, |
|
"loss": 4.6462, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.535558780841799e-05, |
|
"loss": 4.3962, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.492017416545717e-05, |
|
"loss": 4.3858, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.448476052249636e-05, |
|
"loss": 4.4299, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.404934687953555e-05, |
|
"loss": 4.3167, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.361393323657474e-05, |
|
"loss": 4.3328, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.317851959361392e-05, |
|
"loss": 4.2086, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.274310595065311e-05, |
|
"loss": 4.6029, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.23076923076923e-05, |
|
"loss": 4.8252, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.187227866473149e-05, |
|
"loss": 4.7713, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.143686502177067e-05, |
|
"loss": 4.2025, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.100145137880986e-05, |
|
"loss": 4.5659, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.056603773584905e-05, |
|
"loss": 4.2216, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.013062409288824e-05, |
|
"loss": 4.3045, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.969521044992742e-05, |
|
"loss": 4.4816, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.92597968069666e-05, |
|
"loss": 4.3277, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.88243831640058e-05, |
|
"loss": 4.37, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.838896952104498e-05, |
|
"loss": 4.2816, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.795355587808417e-05, |
|
"loss": 4.5968, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.751814223512336e-05, |
|
"loss": 4.5067, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.708272859216254e-05, |
|
"loss": 4.1224, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.664731494920174e-05, |
|
"loss": 4.3888, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.621190130624092e-05, |
|
"loss": 4.4085, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.57764876632801e-05, |
|
"loss": 4.9815, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.53410740203193e-05, |
|
"loss": 4.5786, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.490566037735848e-05, |
|
"loss": 4.4196, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.447024673439768e-05, |
|
"loss": 4.5136, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.403483309143686e-05, |
|
"loss": 4.5883, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_loss": 4.580376625061035, |
|
"eval_runtime": 767.4671, |
|
"eval_samples_per_second": 3.442, |
|
"eval_steps_per_second": 0.288, |
|
"eval_wer": 1.9531535105117017, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.359941944847604e-05, |
|
"loss": 4.4247, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.316400580551524e-05, |
|
"loss": 4.3317, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.272859216255442e-05, |
|
"loss": 4.3181, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.22931785195936e-05, |
|
"loss": 4.6889, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.18577648766328e-05, |
|
"loss": 4.4723, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 8.142235123367198e-05, |
|
"loss": 4.2809, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 8.098693759071118e-05, |
|
"loss": 4.3144, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 8.055152394775036e-05, |
|
"loss": 4.2816, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 8.011611030478954e-05, |
|
"loss": 4.317, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.968069666182874e-05, |
|
"loss": 4.316, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.924528301886792e-05, |
|
"loss": 4.2896, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.88098693759071e-05, |
|
"loss": 4.3558, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.83744557329463e-05, |
|
"loss": 4.2193, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.793904208998548e-05, |
|
"loss": 4.2468, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.750362844702468e-05, |
|
"loss": 4.342, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.706821480406386e-05, |
|
"loss": 4.5863, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.663280116110303e-05, |
|
"loss": 4.2557, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.619738751814224e-05, |
|
"loss": 4.4134, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.576197387518142e-05, |
|
"loss": 4.5682, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.532656023222059e-05, |
|
"loss": 4.3113, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.48911465892598e-05, |
|
"loss": 4.5479, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.445573294629897e-05, |
|
"loss": 4.4953, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.402031930333816e-05, |
|
"loss": 4.8161, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.358490566037735e-05, |
|
"loss": 4.127, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.314949201741654e-05, |
|
"loss": 4.2629, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.271407837445572e-05, |
|
"loss": 4.204, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.22786647314949e-05, |
|
"loss": 4.3665, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.18432510885341e-05, |
|
"loss": 4.2541, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.140783744557328e-05, |
|
"loss": 4.4344, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.097242380261248e-05, |
|
"loss": 4.192, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.053701015965166e-05, |
|
"loss": 4.418, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.010159651669085e-05, |
|
"loss": 4.4248, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.966618287373004e-05, |
|
"loss": 4.8127, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.923076923076922e-05, |
|
"loss": 4.0747, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.87953555878084e-05, |
|
"loss": 4.1781, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.83599419448476e-05, |
|
"loss": 4.1634, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.792452830188678e-05, |
|
"loss": 4.5865, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.748911465892598e-05, |
|
"loss": 4.5169, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.705370101596516e-05, |
|
"loss": 4.3504, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.661828737300434e-05, |
|
"loss": 4.2245, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.618287373004354e-05, |
|
"loss": 4.4095, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.574746008708272e-05, |
|
"loss": 4.3194, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.53120464441219e-05, |
|
"loss": 4.3948, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.48766328011611e-05, |
|
"loss": 4.3705, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.444121915820028e-05, |
|
"loss": 4.7656, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.400580551523948e-05, |
|
"loss": 4.2077, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.357039187227866e-05, |
|
"loss": 4.268, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.313497822931784e-05, |
|
"loss": 4.4685, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.269956458635704e-05, |
|
"loss": 4.9569, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.226415094339622e-05, |
|
"loss": 4.5919, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.18287373004354e-05, |
|
"loss": 4.4872, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.13933236574746e-05, |
|
"loss": 4.219, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.095791001451378e-05, |
|
"loss": 4.2008, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.052249637155297e-05, |
|
"loss": 4.6235, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.0087082728592154e-05, |
|
"loss": 4.5256, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.965166908563134e-05, |
|
"loss": 4.4614, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.921625544267053e-05, |
|
"loss": 4.4202, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.878084179970972e-05, |
|
"loss": 4.1262, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.8345428156748904e-05, |
|
"loss": 4.4001, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.791001451378809e-05, |
|
"loss": 4.4339, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.747460087082728e-05, |
|
"loss": 4.4371, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.703918722786647e-05, |
|
"loss": 4.2648, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.660377358490566e-05, |
|
"loss": 4.6402, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.616835994194484e-05, |
|
"loss": 4.252, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.573294629898403e-05, |
|
"loss": 4.1555, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.529753265602322e-05, |
|
"loss": 4.2044, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.486211901306241e-05, |
|
"loss": 4.278, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.442670537010159e-05, |
|
"loss": 4.2826, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.399129172714078e-05, |
|
"loss": 4.2409, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.355587808417997e-05, |
|
"loss": 4.4354, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.312046444121916e-05, |
|
"loss": 4.2282, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.2685050798258335e-05, |
|
"loss": 4.5258, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.2249637155297524e-05, |
|
"loss": 3.9681, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.181422351233671e-05, |
|
"loss": 4.4695, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.137880986937591e-05, |
|
"loss": 4.4431, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.0943396226415085e-05, |
|
"loss": 4.6197, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.0507982583454274e-05, |
|
"loss": 4.5793, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.007256894049346e-05, |
|
"loss": 4.4406, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.963715529753265e-05, |
|
"loss": 4.2787, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.9201741654571834e-05, |
|
"loss": 4.106, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.8766328011611024e-05, |
|
"loss": 4.434, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.833091436865021e-05, |
|
"loss": 4.4248, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.78955007256894e-05, |
|
"loss": 4.7218, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.7460087082728584e-05, |
|
"loss": 4.2686, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.702467343976777e-05, |
|
"loss": 4.2947, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.658925979680696e-05, |
|
"loss": 4.2582, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.615384615384615e-05, |
|
"loss": 4.2652, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.5718432510885334e-05, |
|
"loss": 4.8073, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.528301886792452e-05, |
|
"loss": 4.2283, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.484760522496371e-05, |
|
"loss": 4.3468, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.44121915820029e-05, |
|
"loss": 4.2619, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.397677793904208e-05, |
|
"loss": 4.1893, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.354136429608127e-05, |
|
"loss": 4.2702, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.310595065312046e-05, |
|
"loss": 4.3322, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.267053701015965e-05, |
|
"loss": 4.3116, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.223512336719884e-05, |
|
"loss": 4.6978, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.179970972423802e-05, |
|
"loss": 4.4064, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.136429608127721e-05, |
|
"loss": 5.3322, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.09288824383164e-05, |
|
"loss": 4.75, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.049346879535559e-05, |
|
"loss": 4.291, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.005805515239477e-05, |
|
"loss": 4.9697, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.962264150943396e-05, |
|
"loss": 4.2999, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.918722786647315e-05, |
|
"loss": 4.236, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.875181422351234e-05, |
|
"loss": 4.1279, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.8316400580551515e-05, |
|
"loss": 4.295, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.788098693759071e-05, |
|
"loss": 4.0693, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.74455732946299e-05, |
|
"loss": 4.7203, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.701015965166908e-05, |
|
"loss": 4.3843, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.657474600870827e-05, |
|
"loss": 4.3651, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.613933236574745e-05, |
|
"loss": 4.1765, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.570391872278664e-05, |
|
"loss": 4.3239, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.526850507982583e-05, |
|
"loss": 4.728, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.483309143686502e-05, |
|
"loss": 4.3438, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.43976777939042e-05, |
|
"loss": 3.9835, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.396226415094339e-05, |
|
"loss": 4.4246, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.352685050798258e-05, |
|
"loss": 4.4157, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.309143686502177e-05, |
|
"loss": 4.4774, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.265602322206095e-05, |
|
"loss": 4.1395, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.222060957910014e-05, |
|
"loss": 3.9259, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.178519593613933e-05, |
|
"loss": 4.4353, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.134978229317852e-05, |
|
"loss": 4.4326, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.09143686502177e-05, |
|
"loss": 4.5233, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.047895500725689e-05, |
|
"loss": 4.1491, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.0043541364296077e-05, |
|
"loss": 4.3489, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.9608127721335266e-05, |
|
"loss": 4.705, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.9172714078374452e-05, |
|
"loss": 4.2625, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.873730043541364e-05, |
|
"loss": 4.344, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.830188679245283e-05, |
|
"loss": 4.274, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.7866473149492016e-05, |
|
"loss": 4.3003, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.7431059506531205e-05, |
|
"loss": 4.5258, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.699564586357039e-05, |
|
"loss": 4.0453, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.656023222060958e-05, |
|
"loss": 4.1659, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.6124818577648762e-05, |
|
"loss": 4.3705, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.5689404934687955e-05, |
|
"loss": 4.1862, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.5253991291727137e-05, |
|
"loss": 4.496, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.4818577648766326e-05, |
|
"loss": 4.1733, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.4383164005805512e-05, |
|
"loss": 4.204, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.39477503628447e-05, |
|
"loss": 4.274, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.3512336719883887e-05, |
|
"loss": 4.1716, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.3076923076923076e-05, |
|
"loss": 4.2, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.264150943396226e-05, |
|
"loss": 4.4644, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.220609579100145e-05, |
|
"loss": 4.2474, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.1770682148040636e-05, |
|
"loss": 4.4789, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.1335268505079825e-05, |
|
"loss": 4.5033, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.089985486211901e-05, |
|
"loss": 4.1848, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.04644412191582e-05, |
|
"loss": 4.5938, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.0029027576197386e-05, |
|
"loss": 5.8271, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9593613933236575e-05, |
|
"loss": 4.3477, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9158200290275757e-05, |
|
"loss": 4.6769, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.872278664731495e-05, |
|
"loss": 4.2925, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.8287373004354136e-05, |
|
"loss": 4.2285, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.785195936139332e-05, |
|
"loss": 4.0426, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.741654571843251e-05, |
|
"loss": 4.1833, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6981132075471696e-05, |
|
"loss": 4.0901, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6545718432510885e-05, |
|
"loss": 4.4645, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.611030478955007e-05, |
|
"loss": 4.3001, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.567489114658926e-05, |
|
"loss": 4.4954, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5239477503628446e-05, |
|
"loss": 4.2257, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4804063860667633e-05, |
|
"loss": 4.0976, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.436865021770682e-05, |
|
"loss": 4.3462, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3933236574746008e-05, |
|
"loss": 4.327, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3497822931785195e-05, |
|
"loss": 4.5508, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3062409288824381e-05, |
|
"loss": 4.4825, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2626995645863568e-05, |
|
"loss": 4.1438, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2191582002902756e-05, |
|
"loss": 4.3982, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.1756168359941943e-05, |
|
"loss": 4.4645, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.132075471698113e-05, |
|
"loss": 4.077, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0885341074020318e-05, |
|
"loss": 4.1337, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0449927431059506e-05, |
|
"loss": 4.2341, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0014513788098693e-05, |
|
"loss": 4.3717, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.579100145137879e-06, |
|
"loss": 4.2809, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.143686502177068e-06, |
|
"loss": 4.0888, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.708272859216255e-06, |
|
"loss": 4.2783, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.272859216255443e-06, |
|
"loss": 4.2921, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.83744557329463e-06, |
|
"loss": 4.0271, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.4020319303338166e-06, |
|
"loss": 4.2022, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.966618287373004e-06, |
|
"loss": 4.2548, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.5312046444121905e-06, |
|
"loss": 4.6275, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.095791001451378e-06, |
|
"loss": 4.0099, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.660377358490565e-06, |
|
"loss": 4.3623, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.224963715529753e-06, |
|
"loss": 4.3703, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.789550072568939e-06, |
|
"loss": 4.3294, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.354136429608128e-06, |
|
"loss": 4.2065, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.918722786647315e-06, |
|
"loss": 4.3322, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.483309143686502e-06, |
|
"loss": 4.2888, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.047895500725689e-06, |
|
"loss": 4.372, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.6124818577648764e-06, |
|
"loss": 4.1643, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.177068214804064e-06, |
|
"loss": 4.3376, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.741654571843251e-06, |
|
"loss": 4.4054, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1189, |
|
"total_flos": 0.0, |
|
"train_loss": 4.616779040648618, |
|
"train_runtime": 6389.1881, |
|
"train_samples_per_second": 4.467, |
|
"train_steps_per_second": 0.186 |
|
} |
|
], |
|
"max_steps": 1189, |
|
"num_train_epochs": 1, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|