{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 1784,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 4.8987, "step": 1},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 5.0275, "step": 2},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 5.1495, "step": 3},
    {"epoch": 0.0, "learning_rate": 6e-07, "loss": 4.6853, "step": 4},
    {"epoch": 0.0, "learning_rate": 1.2e-06, "loss": 4.7796, "step": 5},
    {"epoch": 0.0, "learning_rate": 1.8e-06, "loss": 4.8181, "step": 6},
    {"epoch": 0.0, "learning_rate": 2.4e-06, "loss": 4.668, "step": 7},
    {"epoch": 0.0, "learning_rate": 2.9999999999999997e-06, "loss": 4.8098, "step": 8},
    {"epoch": 0.01, "learning_rate": 3.6e-06, "loss": 5.0043, "step": 9},
    {"epoch": 0.01, "learning_rate": 4.2e-06, "loss": 4.6714, "step": 10},
    {"epoch": 0.01, "learning_rate": 4.8e-06, "loss": 4.7061, "step": 11},
    {"epoch": 0.01, "learning_rate": 5.399999999999999e-06, "loss": 4.6284, "step": 12},
    {"epoch": 0.01, "learning_rate": 5.999999999999999e-06, "loss": 4.7973, "step": 13},
    {"epoch": 0.01, "learning_rate": 6.599999999999999e-06, "loss": 4.4247, "step": 14},
    {"epoch": 0.01, "learning_rate": 7.2e-06, "loss": 4.5414, "step": 15},
    {"epoch": 0.01, "learning_rate": 7.799999999999998e-06, "loss": 4.6507, "step": 16},
    {"epoch": 0.01, "learning_rate": 8.4e-06, "loss": 4.6245, "step": 17},
    {"epoch": 0.01, "learning_rate": 8.999999999999999e-06, "loss": 4.4725, "step": 18},
    {"epoch": 0.01, "learning_rate": 9.6e-06, "loss": 4.4363, "step": 19},
    {"epoch": 0.01, "learning_rate": 1.02e-05, "loss": 4.7024, "step": 20},
    {"epoch": 0.01, "learning_rate": 1.0799999999999998e-05, "loss": 4.3845, "step": 21},
    {"epoch": 0.01, "learning_rate": 1.14e-05, "loss": 4.7751, "step": 22},
    {"epoch": 0.01, "learning_rate": 1.1999999999999999e-05, "loss": 4.5633, "step": 23},
    {"epoch": 0.01, "learning_rate": 1.26e-05, "loss": 4.315, "step": 24},
    {"epoch": 0.01, "learning_rate": 1.3199999999999997e-05, "loss": 4.4592, "step": 25},
    {"epoch": 0.01, "learning_rate": 1.3799999999999998e-05, "loss": 4.584, "step": 26},
    {"epoch": 0.02, "learning_rate": 1.44e-05, "loss": 4.3579, "step": 27},
    {"epoch": 0.02, "learning_rate": 1.4999999999999999e-05, "loss": 4.0938, "step": 28},
    {"epoch": 0.02, "learning_rate": 1.5599999999999996e-05, "loss": 4.4154, "step": 29},
    {"epoch": 0.02, "learning_rate": 1.6199999999999997e-05, "loss": 4.3189, "step": 30},
    {"epoch": 0.02, "learning_rate": 1.68e-05, "loss": 4.2552, "step": 31},
    {"epoch": 0.02, "learning_rate": 1.74e-05, "loss": 4.5623, "step": 32},
    {"epoch": 0.02, "learning_rate": 1.7999999999999997e-05, "loss": 4.2836, "step": 33},
    {"epoch": 0.02, "learning_rate": 1.8599999999999998e-05, "loss": 4.1511, "step": 34},
    {"epoch": 0.02, "learning_rate": 1.92e-05, "loss": 4.4044, "step": 35},
    {"epoch": 0.02, "learning_rate": 1.98e-05, "loss": 4.6801, "step": 36},
    {"epoch": 0.02, "learning_rate": 2.04e-05, "loss": 4.3039, "step": 37},
    {"epoch": 0.02, "learning_rate": 2.1e-05, "loss": 4.4422, "step": 38},
    {"epoch": 0.02, "learning_rate": 2.1599999999999996e-05, "loss": 4.5188, "step": 39},
    {"epoch": 0.02, "learning_rate": 2.2199999999999998e-05, "loss": 4.5726, "step": 40},
    {"epoch": 0.02, "learning_rate": 2.28e-05, "loss": 4.3135, "step": 41},
    {"epoch": 0.02, "learning_rate": 2.34e-05, "loss": 4.1679, "step": 42},
    {"epoch": 0.02, "learning_rate": 2.3999999999999997e-05, "loss": 4.4289, "step": 43},
    {"epoch": 0.02, "learning_rate": 2.4599999999999998e-05, "loss": 4.2429, "step": 44},
    {"epoch": 0.03, "learning_rate": 2.52e-05, "loss": 4.5192, "step": 45},
    {"epoch": 0.03, "learning_rate": 2.5799999999999997e-05, "loss": 4.5163, "step": 46},
    {"epoch": 0.03, "learning_rate": 2.6399999999999995e-05, "loss": 4.5619, "step": 47},
    {"epoch": 0.03, "learning_rate": 2.6999999999999996e-05, "loss": 4.8743, "step": 48},
    {"epoch": 0.03, "learning_rate": 2.7599999999999997e-05, "loss": 4.8881, "step": 49},
    {"epoch": 0.03, "learning_rate": 2.8199999999999998e-05, "loss": 4.8063, "step": 50},
    {"epoch": 0.03, "learning_rate": 2.88e-05, "loss": 4.2264, "step": 51},
    {"epoch": 0.03, "learning_rate": 2.94e-05, "loss": 4.4088, "step": 52},
    {"epoch": 0.03, "learning_rate": 2.9999999999999997e-05, "loss": 4.3993, "step": 53},
    {"epoch": 0.03, "learning_rate": 3.06e-05, "loss": 4.2467, "step": 54},
    {"epoch": 0.03, "learning_rate": 3.119999999999999e-05, "loss": 4.3273, "step": 55},
    {"epoch": 0.03, "learning_rate": 3.1799999999999994e-05, "loss": 4.419, "step": 56},
    {"epoch": 0.03, "learning_rate": 3.2399999999999995e-05, "loss": 4.3186, "step": 57},
    {"epoch": 0.03, "learning_rate": 3.2999999999999996e-05, "loss": 4.2985, "step": 58},
    {"epoch": 0.03, "learning_rate": 3.36e-05, "loss": 4.3233, "step": 59},
    {"epoch": 0.03, "learning_rate": 3.42e-05, "loss": 4.2305, "step": 60},
    {"epoch": 0.03, "learning_rate": 3.48e-05, "loss": 4.3408, "step": 61},
    {"epoch": 0.03, "learning_rate": 3.539999999999999e-05, "loss": 4.2246, "step": 62},
    {"epoch": 0.04, "learning_rate": 3.5999999999999994e-05, "loss": 4.601, "step": 63},
    {"epoch": 0.04, "learning_rate": 3.6599999999999995e-05, "loss": 4.0831, "step": 64},
    {"epoch": 0.04, "learning_rate": 3.7199999999999996e-05, "loss": 4.2891, "step": 65},
    {"epoch": 0.04, "learning_rate": 3.78e-05, "loss": 4.4117, "step": 66},
    {"epoch": 0.04, "learning_rate": 3.84e-05, "loss": 4.3885, "step": 67},
    {"epoch": 0.04, "learning_rate": 3.9e-05, "loss": 4.4074, "step": 68},
    {"epoch": 0.04, "learning_rate": 3.96e-05, "loss": 4.3227, "step": 69},
    {"epoch": 0.04, "learning_rate": 4.02e-05, "loss": 4.2538, "step": 70},
    {"epoch": 0.04, "learning_rate": 4.08e-05, "loss": 4.3241, "step": 71},
    {"epoch": 0.04, "learning_rate": 4.14e-05, "loss": 4.0814, "step": 72},
    {"epoch": 0.04, "learning_rate": 4.2e-05, "loss": 4.3634, "step": 73},
    {"epoch": 0.04, "learning_rate": 4.259999999999999e-05, "loss": 4.173, "step": 74},
    {"epoch": 0.04, "learning_rate": 4.319999999999999e-05, "loss": 4.6653, "step": 75},
    {"epoch": 0.04, "learning_rate": 4.3799999999999994e-05, "loss": 4.4373, "step": 76},
    {"epoch": 0.04, "learning_rate": 4.4399999999999995e-05, "loss": 4.253, "step": 77},
    {"epoch": 0.04, "learning_rate": 4.4999999999999996e-05, "loss": 4.1852, "step": 78},
    {"epoch": 0.04, "learning_rate": 4.56e-05, "loss": 4.2115, "step": 79},
    {"epoch": 0.04, "learning_rate": 4.62e-05, "loss": 4.541, "step": 80},
    {"epoch": 0.05, "learning_rate": 4.68e-05, "loss": 4.1658, "step": 81},
    {"epoch": 0.05, "learning_rate": 4.7399999999999993e-05, "loss": 4.3241, "step": 82},
    {"epoch": 0.05, "learning_rate": 4.7999999999999994e-05, "loss": 4.1587, "step": 83},
    {"epoch": 0.05, "learning_rate": 4.8599999999999995e-05, "loss": 4.3266, "step": 84},
    {"epoch": 0.05, "learning_rate": 4.9199999999999997e-05, "loss": 4.2094, "step": 85},
    {"epoch": 0.05, "learning_rate": 4.98e-05, "loss": 4.1966, "step": 86},
    {"epoch": 0.05, "learning_rate": 5.04e-05, "loss": 4.5955, "step": 87},
    {"epoch": 0.05, "learning_rate": 5.1e-05, "loss": 4.1396, "step": 88},
    {"epoch": 0.05, "learning_rate": 5.1599999999999994e-05, "loss": 4.5032, "step": 89},
    {"epoch": 0.05, "learning_rate": 5.2199999999999995e-05, "loss": 4.0923, "step": 90},
    {"epoch": 0.05, "learning_rate": 5.279999999999999e-05, "loss": 4.4655, "step": 91},
    {"epoch": 0.05, "learning_rate": 5.339999999999999e-05, "loss": 4.1286, "step": 92},
    {"epoch": 0.05, "learning_rate": 5.399999999999999e-05, "loss": 3.9898, "step": 93},
    {"epoch": 0.05, "learning_rate": 5.459999999999999e-05, "loss": 4.4139, "step": 94},
    {"epoch": 0.05, "learning_rate": 5.519999999999999e-05, "loss": 4.6238, "step": 95},
    {"epoch": 0.05, "learning_rate": 5.5799999999999994e-05, "loss": 4.8141, "step": 96},
    {"epoch": 0.05, "learning_rate": 5.6399999999999995e-05, "loss": 4.5297, "step": 97},
    {"epoch": 0.05, "learning_rate": 5.6999999999999996e-05, "loss": 4.4874, "step": 98},
    {"epoch": 0.06, "learning_rate": 5.76e-05, "loss": 4.7581, "step": 99},
    {"epoch": 0.06, "learning_rate": 5.82e-05, "loss": 4.7329, "step": 100},
    {"epoch": 0.06, "learning_rate": 5.88e-05, "loss": 4.4531, "step": 101},
    {"epoch": 0.06, "learning_rate": 5.94e-05, "loss": 4.3324, "step": 102},
    {"epoch": 0.06, "learning_rate": 5.9999999999999995e-05, "loss": 4.3949, "step": 103},
    {"epoch": 0.06, "learning_rate": 6.0599999999999996e-05, "loss": 4.2711, "step": 104},
    {"epoch": 0.06, "learning_rate": 6.12e-05, "loss": 4.294, "step": 105},
    {"epoch": 0.06, "learning_rate": 6.18e-05, "loss": 4.2988, "step": 106},
    {"epoch": 0.06, "learning_rate": 6.239999999999999e-05, "loss": 4.3157, "step": 107},
    {"epoch": 0.06, "learning_rate": 6.299999999999999e-05, "loss": 4.4341, "step": 108},
    {"epoch": 0.06, "learning_rate": 6.359999999999999e-05, "loss": 4.3025, "step": 109},
    {"epoch": 0.06, "learning_rate": 6.419999999999999e-05, "loss": 4.3431, "step": 110},
    {"epoch": 0.06, "learning_rate": 6.479999999999999e-05, "loss": 4.2753, "step": 111},
    {"epoch": 0.06, "learning_rate": 6.539999999999999e-05, "loss": 4.1674, "step": 112},
    {"epoch": 0.06, "learning_rate": 6.599999999999999e-05, "loss": 4.3238, "step": 113},
    {"epoch": 0.06, "learning_rate": 6.659999999999999e-05, "loss": 4.0734, "step": 114},
    {"epoch": 0.06, "learning_rate": 6.72e-05, "loss": 4.0799, "step": 115},
    {"epoch": 0.07, "learning_rate": 6.78e-05, "loss": 4.1991, "step": 116},
    {"epoch": 0.07, "learning_rate": 6.84e-05, "loss": 4.4108, "step": 117},
    {"epoch": 0.07, "learning_rate": 6.9e-05, "loss": 4.2132, "step": 118},
    {"epoch": 0.07, "learning_rate": 6.96e-05, "loss": 4.3399, "step": 119},
    {"epoch": 0.07, "learning_rate": 7.02e-05, "loss": 4.4068, "step": 120},
    {"epoch": 0.07, "learning_rate": 7.079999999999999e-05, "loss": 4.4561, "step": 121},
    {"epoch": 0.07, "learning_rate": 7.139999999999999e-05, "loss": 4.3531, "step": 122},
    {"epoch": 0.07, "learning_rate": 7.199999999999999e-05, "loss": 4.3516, "step": 123},
    {"epoch": 0.07, "learning_rate": 7.259999999999999e-05, "loss": 4.3174, "step": 124},
    {"epoch": 0.07, "learning_rate": 7.319999999999999e-05, "loss": 4.0512, "step": 125},
    {"epoch": 0.07, "learning_rate": 7.379999999999999e-05, "loss": 4.5767, "step": 126},
    {"epoch": 0.07, "learning_rate": 7.439999999999999e-05, "loss": 4.345, "step": 127},
    {"epoch": 0.07, "learning_rate": 7.5e-05, "loss": 4.3119, "step": 128},
    {"epoch": 0.07, "learning_rate": 7.56e-05, "loss": 4.2563, "step": 129},
    {"epoch": 0.07, "learning_rate": 7.62e-05, "loss": 4.4141, "step": 130},
    {"epoch": 0.07, "learning_rate": 7.68e-05, "loss": 4.6154, "step": 131},
    {"epoch": 0.07, "learning_rate": 7.74e-05, "loss": 4.4004, "step": 132},
    {"epoch": 0.07, "learning_rate": 7.8e-05, "loss": 4.3886, "step": 133},
    {"epoch": 0.08, "learning_rate": 7.86e-05, "loss": 4.3843, "step": 134},
    {"epoch": 0.08, "learning_rate": 7.92e-05, "loss": 4.2755, "step": 135},
    {"epoch": 0.08, "learning_rate": 7.98e-05, "loss": 4.5099, "step": 136},
    {"epoch": 0.08, "learning_rate": 8.04e-05, "loss": 4.2549, "step": 137},
    {"epoch": 0.08, "learning_rate": 8.1e-05, "loss": 4.4867, "step": 138},
    {"epoch": 0.08, "learning_rate": 8.16e-05, "loss": 4.6586, "step": 139},
    {"epoch": 0.08, "learning_rate": 8.22e-05, "loss": 4.3022, "step": 140},
    {"epoch": 0.08, "learning_rate": 8.28e-05, "loss": 4.5577, "step": 141},
    {"epoch": 0.08, "learning_rate": 8.34e-05, "loss": 4.6748, "step": 142},
    {"epoch": 0.08, "learning_rate": 8.4e-05, "loss": 4.3893, "step": 143},
    {"epoch": 0.08, "learning_rate": 8.459999999999998e-05, "loss": 4.5241, "step": 144},
    {"epoch": 0.08, "learning_rate": 8.519999999999998e-05, "loss": 5.097, "step": 145},
    {"epoch": 0.08, "learning_rate": 8.579999999999998e-05, "loss": 4.6905, "step": 146},
    {"epoch": 0.08, "learning_rate": 8.639999999999999e-05, "loss": 4.7197, "step": 147},
    {"epoch": 0.08, "learning_rate": 8.699999999999999e-05, "loss": 4.7719, "step": 148},
    {"epoch": 0.08, "learning_rate": 8.759999999999999e-05, "loss": 4.8233, "step": 149},
    {"epoch": 0.08, "learning_rate": 8.819999999999999e-05, "loss": 4.228, "step": 150},
    {"epoch": 0.08, "learning_rate": 8.879999999999999e-05, "loss": 4.1789, "step": 151},
    {"epoch": 0.09, "learning_rate": 8.939999999999999e-05, "loss": 4.607, "step": 152},
    {"epoch": 0.09, "learning_rate": 8.999999999999999e-05, "loss": 4.152, "step": 153},
    {"epoch": 0.09, "learning_rate": 9.059999999999999e-05, "loss": 4.6215, "step": 154},
    {"epoch": 0.09, "learning_rate": 9.12e-05, "loss": 4.4727, "step": 155},
    {"epoch": 0.09, "learning_rate": 9.18e-05, "loss": 4.7536, "step": 156},
    {"epoch": 0.09, "learning_rate": 9.24e-05, "loss": 4.5145, "step": 157},
    {"epoch": 0.09, "learning_rate": 9.3e-05, "loss": 4.4542, "step": 158},
    {"epoch": 0.09, "learning_rate": 9.36e-05, "loss": 4.4696, "step": 159},
    {"epoch": 0.09, "learning_rate": 9.419999999999999e-05, "loss": 4.9688, "step": 160},
    {"epoch": 0.09, "learning_rate": 9.479999999999999e-05, "loss": 4.4859, "step": 161},
    {"epoch": 0.09, "learning_rate": 9.539999999999999e-05, "loss": 4.741, "step": 162},
    {"epoch": 0.09, "learning_rate": 9.599999999999999e-05, "loss": 4.6048, "step": 163},
    {"epoch": 0.09, "learning_rate": 9.659999999999999e-05, "loss": 4.5725, "step": 164},
    {"epoch": 0.09, "learning_rate": 9.719999999999999e-05, "loss": 4.4576, "step": 165},
    {"epoch": 0.09, "learning_rate": 9.779999999999999e-05, "loss": 4.5854, "step": 166},
    {"epoch": 0.09, "learning_rate": 9.839999999999999e-05, "loss": 4.331, "step": 167},
    {"epoch": 0.09, "learning_rate": 9.9e-05, "loss": 4.1595, "step": 168},
    {"epoch": 0.09, "learning_rate": 9.96e-05, "loss": 4.2601, "step": 169},
    {"epoch": 0.1, "learning_rate": 0.0001002, "loss": 4.3851, "step": 170},
    {"epoch": 0.1, "learning_rate": 0.0001008, "loss": 4.5368, "step": 171},
    {"epoch": 0.1, "learning_rate": 0.0001014, "loss": 5.5086, "step": 172},
    {"epoch": 0.1, "learning_rate": 0.000102, "loss": 4.3675, "step": 173},
    {"epoch": 0.1, "learning_rate": 0.0001026, "loss": 4.5266, "step": 174},
    {"epoch": 0.1, "learning_rate": 0.00010319999999999999, "loss": 4.7312, "step": 175},
    {"epoch": 0.1, "learning_rate": 0.00010379999999999999, "loss": 4.3899, "step": 176},
    {"epoch": 0.1, "learning_rate": 0.00010439999999999999, "loss": 4.4549, "step": 177},
    {"epoch": 0.1, "learning_rate": 0.00010499999999999999, "loss": 4.3311, "step": 178},
    {"epoch": 0.1, "learning_rate": 0.00010559999999999998, "loss": 4.1532, "step": 179},
    {"epoch": 0.1, "learning_rate": 0.00010619999999999998, "loss": 4.6106, "step": 180},
    {"epoch": 0.1, "learning_rate": 0.00010679999999999998, "loss": 4.5489, "step": 181},
    {"epoch": 0.1, "learning_rate": 0.00010739999999999998, "loss": 4.3362, "step": 182},
    {"epoch": 0.1, "learning_rate": 0.00010799999999999998, "loss": 4.7618, "step": 183},
    {"epoch": 0.1, "learning_rate": 0.00010859999999999998, "loss": 4.6088, "step": 184},
    {"epoch": 0.1, "learning_rate": 0.00010919999999999998, "loss": 4.6477, "step": 185},
    {"epoch": 0.1, "learning_rate": 0.00010979999999999999, "loss": 4.4798, "step": 186},
    {"epoch": 0.1, "learning_rate": 0.00011039999999999999, "loss": 4.4516, "step": 187},
    {"epoch": 0.11, "learning_rate": 0.00011099999999999999, "loss": 4.6847, "step": 188},
    {"epoch": 0.11, "learning_rate": 0.00011159999999999999, "loss": 4.5905, "step": 189},
    {"epoch": 0.11, "learning_rate": 0.00011219999999999999, "loss": 4.4689, "step": 190},
    {"epoch": 0.11, "learning_rate": 0.00011279999999999999, "loss": 4.6269, "step": 191},
    {"epoch": 0.11, "learning_rate": 0.00011339999999999999, "loss": 4.4034, "step": 192},
    {"epoch": 0.11, "learning_rate": 0.00011399999999999999, "loss": 4.6338, "step": 193},
    {"epoch": 0.11, "learning_rate": 0.0001146, "loss": 4.4695, "step": 194},
    {"epoch": 0.11, "learning_rate": 0.0001152, "loss": 4.7839, "step": 195},
    {"epoch": 0.11, "learning_rate": 0.0001158, "loss": 5.239, "step": 196},
    {"epoch": 0.11, "learning_rate": 0.0001164, "loss": 5.0441, "step": 197},
    {"epoch": 0.11, "learning_rate": 0.000117, "loss": 4.5253, "step": 198},
    {"epoch": 0.11, "learning_rate": 0.0001176, "loss": 4.8081, "step": 199},
    {"epoch": 0.11, "learning_rate": 0.0001182, "loss": 4.772, "step": 200},
    {"epoch": 0.11, "learning_rate": 0.0001188, "loss": 4.6347, "step": 201},
    {"epoch": 0.11, "learning_rate": 0.0001194, "loss": 4.4527, "step": 202},
    {"epoch": 0.11, "learning_rate": 0.00011999999999999999, "loss": 4.5706, "step": 203},
    {"epoch": 0.11, "learning_rate": 0.00012059999999999999, "loss": 4.591, "step": 204},
    {"epoch": 0.11, "learning_rate": 0.00012119999999999999, "loss": 4.4277, "step": 205},
    {"epoch": 0.12, "learning_rate": 0.00012179999999999999, "loss": 4.8654, "step": 206},
    {"epoch": 0.12, "learning_rate": 0.0001224, "loss": 4.6914, "step": 207},
    {"epoch": 0.12, "learning_rate": 0.00012299999999999998, "loss": 4.6337, "step": 208},
    {"epoch": 0.12, "learning_rate": 0.0001236, "loss": 4.6592, "step": 209},
    {"epoch": 0.12, "learning_rate": 0.00012419999999999998, "loss": 4.6443, "step": 210},
    {"epoch": 0.12, "learning_rate": 0.00012479999999999997, "loss": 4.5077, "step": 211},
    {"epoch": 0.12, "learning_rate": 0.00012539999999999999, "loss": 4.8719, "step": 212},
    {"epoch": 0.12, "learning_rate": 0.00012599999999999997, "loss": 4.6238, "step": 213},
    {"epoch": 0.12, "learning_rate": 0.0001266, "loss": 4.316, "step": 214},
    {"epoch": 0.12, "learning_rate": 0.00012719999999999997, "loss": 5.0334, "step": 215},
    {"epoch": 0.12, "learning_rate": 0.0001278, "loss": 4.1594, "step": 216},
    {"epoch": 0.12, "learning_rate": 0.00012839999999999998, "loss": 4.616, "step": 217},
    {"epoch": 0.12, "learning_rate": 0.000129, "loss": 4.3934, "step": 218},
    {"epoch": 0.12, "learning_rate": 0.00012959999999999998, "loss": 4.7009, "step": 219},
    {"epoch": 0.12, "learning_rate": 0.0001302, "loss": 5.0774, "step": 220},
    {"epoch": 0.12, "learning_rate": 0.00013079999999999998, "loss": 4.5736, "step": 221},
    {"epoch": 0.12, "learning_rate": 0.0001314, "loss": 4.8936, "step": 222},
    {"epoch": 0.12, "learning_rate": 0.00013199999999999998, "loss": 4.3545, "step": 223},
    {"epoch": 0.13, "learning_rate": 0.0001326, "loss": 4.4807, "step": 224},
    {"epoch": 0.13, "learning_rate": 0.00013319999999999999, "loss": 4.67, "step": 225},
    {"epoch": 0.13, "learning_rate": 0.0001338, "loss": 4.7084, "step": 226},
    {"epoch": 0.13, "learning_rate": 0.0001344, "loss": 4.6215, "step": 227},
    {"epoch": 0.13, "learning_rate": 0.000135, "loss": 4.3859, "step": 228},
    {"epoch": 0.13, "learning_rate": 0.0001356, "loss": 4.4294, "step": 229},
    {"epoch": 0.13, "learning_rate": 0.0001362, "loss": 4.6407, "step": 230},
    {"epoch": 0.13, "learning_rate": 0.0001368, "loss": 4.4371, "step": 231},
    {"epoch": 0.13, "learning_rate": 0.0001374, "loss": 4.647, "step": 232},
    {"epoch": 0.13, "learning_rate": 0.000138, "loss": 4.6561, "step": 233},
    {"epoch": 0.13, "learning_rate": 0.0001386, "loss": 4.6821, "step": 234},
    {"epoch": 0.13, "learning_rate": 0.0001392, "loss": 4.4471, "step": 235},
    {"epoch": 0.13, "learning_rate": 0.00013979999999999998, "loss": 4.9001, "step": 236},
    {"epoch": 0.13, "learning_rate": 0.0001404, "loss": 4.5673, "step": 237},
    {"epoch": 0.13, "learning_rate": 0.00014099999999999998, "loss": 4.8226, "step": 238},
    {"epoch": 0.13, "learning_rate": 0.00014159999999999997, "loss": 4.6531, "step": 239},
    {"epoch": 0.13, "learning_rate": 0.0001422, "loss": 4.6618, "step": 240},
    {"epoch": 0.14, "learning_rate": 0.00014279999999999997, "loss": 4.6119, "step": 241},
    {"epoch": 0.14, "learning_rate": 0.0001434, "loss": 4.4451, "step": 242},
    {"epoch": 0.14, "learning_rate": 0.00014399999999999998, "loss": 6.0049, "step": 243},
    {"epoch": 0.14, "learning_rate": 0.0001446, "loss": 5.359, "step": 244},
    {"epoch": 0.14, "learning_rate": 0.00014519999999999998, "loss": 4.7498, "step": 245},
    {"epoch": 0.14, "learning_rate": 0.0001458, "loss": 4.7383, "step": 246},
    {"epoch": 0.14, "learning_rate": 0.00014639999999999998, "loss": 5.379, "step": 247},
    {"epoch": 0.14, "learning_rate": 0.000147, "loss": 5.0694, "step": 248},
    {"epoch": 0.14, "learning_rate": 0.00014759999999999998, "loss": 4.5803, "step": 249},
    {"epoch": 0.14, "learning_rate": 0.0001482, "loss": 5.1926, "step": 250},
    {"epoch": 0.14, "learning_rate": 0.00014879999999999998, "loss": 4.5571, "step": 251},
    {"epoch": 0.14, "learning_rate": 0.0001494, "loss": 5.7546, "step": 252},
    {"epoch": 0.14, "learning_rate": 0.00015, "loss": 4.9364, "step": 253},
    {"epoch": 0.14, "learning_rate": 0.00015059999999999997, "loss": 4.7004, "step": 254},
    {"epoch": 0.14, "learning_rate": 0.0001512, "loss": 5.0182, "step": 255},
    {"epoch": 0.14, "learning_rate": 0.00015179999999999998, "loss": 4.7166, "step": 256},
    {"epoch": 0.14, "learning_rate": 0.0001524, "loss": 4.5697, "step": 257},
    {"epoch": 0.14, "learning_rate": 0.00015299999999999998, "loss": 5.2672, "step": 258},
    {"epoch": 0.15, "learning_rate": 0.0001536, "loss": 4.8173, "step": 259},
    {"epoch": 0.15, "learning_rate": 0.00015419999999999998, "loss": 4.329, "step": 260},
    {"epoch": 0.15, "learning_rate": 0.0001548, "loss": 4.9393, "step": 261},
    {"epoch": 0.15, "learning_rate": 0.00015539999999999998, "loss": 4.7213, "step": 262},
    {"epoch": 0.15, "learning_rate": 0.000156, "loss": 4.9763, "step": 263},
    {"epoch": 0.15, "learning_rate": 0.00015659999999999998, "loss": 4.6642, "step": 264},
    {"epoch": 0.15, "learning_rate": 0.0001572, "loss": 4.8533, "step": 265},
    {"epoch": 0.15, "learning_rate": 0.0001578, "loss": 4.3969, "step": 266},
    {"epoch": 0.15, "learning_rate": 0.0001584, "loss": 4.7146, "step": 267},
    {"epoch": 0.15, "learning_rate": 0.000159, "loss": 4.6821, "step": 268},
    {"epoch": 0.15, "learning_rate": 0.0001596, "loss": 4.788, "step": 269},
    {"epoch": 0.15, "learning_rate": 0.0001602, "loss": 4.2517, "step": 270},
    {"epoch": 0.15, "learning_rate": 0.0001608, "loss": 4.5119, "step": 271},
    {"epoch": 0.15, "learning_rate": 0.0001614, "loss": 4.8131, "step": 272},
    {"epoch": 0.15, "learning_rate": 0.000162, "loss": 4.6499, "step": 273},
    {"epoch": 0.15, "learning_rate": 0.0001626, "loss": 5.3647, "step": 274},
    {"epoch": 0.15, "learning_rate": 0.0001632, "loss": 4.6299, "step": 275},
    {"epoch": 0.15, "learning_rate": 0.0001638, "loss": 5.581, "step": 276},
    {"epoch": 0.16, "learning_rate": 0.0001644, "loss": 4.5606, "step": 277},
    {"epoch": 0.16, "learning_rate": 0.000165, "loss": 4.7095, "step": 278},
    {"epoch": 0.16, "learning_rate": 0.0001656, "loss": 4.3864, "step": 279},
    {"epoch": 0.16, "learning_rate": 0.0001662, "loss": 4.5545, "step": 280},
    {"epoch": 0.16, "learning_rate": 0.0001668, "loss": 5.0875, "step": 281},
    {"epoch": 0.16, "learning_rate": 0.0001674, "loss": 5.2147, "step": 282},
    {"epoch": 0.16, "learning_rate": 0.000168, "loss": 4.8058, "step": 283},
    {"epoch": 0.16, "learning_rate": 0.0001686, "loss": 4.4864, "step": 284},
    {"epoch": 0.16, "learning_rate": 0.00016919999999999997, "loss": 4.9827, "step": 285},
    {"epoch": 0.16, "learning_rate": 0.00016979999999999998, "loss": 4.7352, "step": 286},
    {"epoch": 0.16, "learning_rate": 0.00017039999999999997, "loss": 4.7162, "step": 287},
    {"epoch": 0.16, "learning_rate": 0.00017099999999999998, "loss": 4.8099, "step": 288},
    {"epoch": 0.16, "learning_rate": 0.00017159999999999997, "loss": 4.5027, "step": 289},
    {"epoch": 0.16, "learning_rate": 0.00017219999999999998, "loss": 5.066, "step": 290},
    {"epoch": 0.16, "learning_rate": 0.00017279999999999997, "loss": 4.5522, "step": 291},
    {"epoch": 0.16, "learning_rate": 0.00017339999999999996, "loss": 5.6078, "step": 292},
    {"epoch": 0.16, "learning_rate": 0.00017399999999999997, "loss": 4.7149, "step": 293},
    {"epoch": 0.16, "learning_rate": 0.00017459999999999996, "loss": 4.8358, "step": 294},
    {"epoch": 0.17, "learning_rate": 0.00017519999999999998, "loss": 4.6981, "step": 295},
    {"epoch": 0.17, "learning_rate": 0.00017579999999999996, "loss": 7.6051, "step": 296},
    {"epoch": 0.17, "learning_rate": 0.00017639999999999998, "loss": 5.2167, "step": 297},
    {"epoch": 0.17, "learning_rate": 0.00017699999999999997, "loss": 5.1343, "step": 298},
    {"epoch": 0.17, "learning_rate": 0.00017759999999999998, "loss": 4.683, "step": 299},
    {"epoch": 0.17, "learning_rate": 0.00017819999999999997, "loss": 5.2883, "step": 300},
    {"epoch": 0.17, "learning_rate": 0.00017879999999999998, "loss": 5.2891, "step": 301},
    {"epoch": 0.17, "learning_rate": 0.00017939999999999997, "loss": 5.4221, "step": 302},
    {"epoch": 0.17, "learning_rate": 0.00017999999999999998, "loss": 4.9102, "step": 303},
    {"epoch": 0.17, "learning_rate": 0.00018059999999999997, "loss": 4.5784, "step": 304},
    {"epoch": 0.17, "learning_rate": 0.00018119999999999999, "loss": 5.5962, "step": 305},
    {"epoch": 0.17, "learning_rate": 0.00018179999999999997, "loss": 4.7911, "step": 306},
    {"epoch": 0.17, "learning_rate": 0.0001824, "loss": 4.673, "step": 307},
    {"epoch": 0.17, "learning_rate": 0.00018299999999999998, "loss": 6.4645, "step": 308},
    {"epoch": 0.17, "learning_rate": 0.0001836, "loss": 4.8379, "step": 309},
    {"epoch": 0.17, "learning_rate": 0.00018419999999999998, "loss": 5.2851, "step": 310},
    {"epoch": 0.17, "learning_rate": 0.0001848, "loss": 4.7361, "step": 311},
    {"epoch": 0.17, "learning_rate": 0.00018539999999999998, "loss": 5.0055, "step": 312},
    {"epoch": 0.18, "learning_rate": 0.000186, "loss": 4.7797, "step": 313},
    {"epoch": 0.18, "learning_rate": 0.00018659999999999998, "loss": 4.9896, "step": 314},
    {"epoch": 0.18, "learning_rate": 0.0001872, "loss": 4.7066, "step": 315},
    {"epoch": 0.18, "learning_rate": 0.00018779999999999998, "loss": 4.5315, "step": 316},
    {"epoch": 0.18, "learning_rate": 0.00018839999999999997, "loss": 4.5292, "step": 317},
    {"epoch": 0.18, "learning_rate": 0.00018899999999999999, "loss": 4.5059, "step": 318},
    {"epoch": 0.18, "learning_rate": 0.00018959999999999997, "loss": 5.0415, "step": 319},
    {"epoch": 0.18, "learning_rate": 0.0001902, "loss": 5.0882, "step": 320},
    {"epoch": 0.18, "learning_rate": 0.00019079999999999998, "loss": 5.3482, "step": 321},
    {"epoch": 0.18, "learning_rate": 0.0001914, "loss": 5.0749, "step": 322},
    {"epoch": 0.18, "learning_rate": 0.00019199999999999998, "loss": 4.9192, "step": 323},
    {"epoch": 0.18, "learning_rate": 0.0001926, "loss": 4.5151, "step": 324},
    {"epoch": 0.18, "learning_rate": 0.00019319999999999998, "loss": 4.8147, "step": 325},
    {"epoch": 0.18, "learning_rate": 0.0001938, "loss": 4.8692, "step": 326},
    {"epoch": 0.18, "learning_rate": 0.00019439999999999998, "loss": 5.3922, "step": 327},
    {"epoch": 0.18, "learning_rate": 0.000195, "loss": 5.1391, "step": 328},
    {"epoch": 0.18, "learning_rate": 0.00019559999999999998, "loss": 4.8038, "step": 329},
    {"epoch": 0.18, "learning_rate": 0.0001962, "loss": 5.0244, "step": 330},
    {"epoch": 0.19, "learning_rate": 0.00019679999999999999, "loss": 4.8476, "step": 331},
    {"epoch": 0.19, "learning_rate": 0.0001974, "loss": 5.0352, "step": 332},
    {"epoch": 0.19, "learning_rate": 0.000198, "loss": 5.9123, "step": 333},
    {"epoch": 0.19, "learning_rate": 0.0001986, "loss": 5.1061, "step": 334},
    {"epoch": 0.19, "learning_rate": 0.0001992, "loss": 5.058, "step": 335},
    {"epoch": 0.19, "learning_rate": 0.0001998, "loss": 5.1996, "step": 336},
    {"epoch": 0.19, "learning_rate": 0.0002004, "loss": 4.6912, "step": 337},
    {"epoch": 0.19, "learning_rate": 0.000201, "loss": 4.772, "step": 338},
    {"epoch": 0.19, "learning_rate": 0.0002016, "loss": 4.6496, "step": 339},
    {"epoch": 0.19, "learning_rate": 0.0002022, "loss": 4.9221, "step": 340},
    {"epoch": 0.19, "learning_rate": 0.0002028, "loss": 4.6008, "step": 341},
    {"epoch": 0.19, "learning_rate": 0.00020339999999999998, "loss": 4.7957, "step": 342},
    {"epoch": 0.19, "learning_rate": 0.000204, "loss": 4.9876, "step": 343},
    {"epoch": 0.19, "learning_rate": 0.00020459999999999999, "loss": 4.3914, "step": 344},
    {"epoch": 0.19, "learning_rate": 0.0002052, "loss": 5.8961, "step": 345},
    {"epoch": 0.19, "learning_rate": 0.0002058, "loss": 4.5013, "step": 346},
    {"epoch": 0.19, "learning_rate": 0.00020639999999999998, "loss": 5.2834, "step": 347},
    {"epoch": 0.2, "learning_rate": 0.00020699999999999996, "loss": 5.1501, "step": 348},
    {"epoch": 0.2, "learning_rate": 0.00020759999999999998, "loss": 5.3556, "step": 349},
    {"epoch": 0.2, "learning_rate": 0.00020819999999999996, "loss": 5.5409, "step": 350},
    {"epoch": 0.2, "learning_rate": 0.00020879999999999998, "loss": 4.9931, "step": 351},
    {"epoch": 0.2, "learning_rate": 0.00020939999999999997, "loss": 5.1523, "step": 352},
    {"epoch": 0.2, "learning_rate": 0.00020999999999999998, "loss": 4.9868, "step": 353},
    {"epoch": 0.2, "learning_rate": 0.00021059999999999997, "loss": 4.6934, "step": 354},
    {"epoch": 0.2, "learning_rate": 0.00021119999999999996, "loss": 4.5688, "step": 355},
    {"epoch": 0.2, "learning_rate": 0.00021179999999999997, "loss": 4.8821, "step": 356},
    {"epoch": 0.2, "learning_rate": 0.00021239999999999996, "loss": 5.2561, "step": 357},
    {"epoch": 0.2, "learning_rate": 0.00021299999999999997, "loss": 4.9584, "step": 358},
    {"epoch": 0.2, "learning_rate": 0.00021359999999999996, "loss": 5.2996, "step": 359},
    {"epoch": 0.2, "learning_rate": 0.00021419999999999998, "loss": 4.6552, "step": 360},
    {"epoch": 0.2, "learning_rate": 0.00021479999999999996, "loss": 4.9308, "step": 361},
    {"epoch": 0.2, "learning_rate": 0.00021539999999999998, "loss": 4.4198, "step": 362},
    {"epoch": 0.2, "learning_rate": 0.00021599999999999996, "loss": 5.0918, "step": 363},
    {"epoch": 0.2, "learning_rate": 0.00021659999999999998, "loss": 5.079, "step": 364},
    {"epoch": 0.2, "learning_rate": 0.00021719999999999997, "loss": 5.2672, "step": 365},
    {"epoch": 0.21, "learning_rate": 0.00021779999999999998, "loss": 5.1869, "step": 366},
    {"epoch": 0.21, "learning_rate": 0.00021839999999999997, "loss": 4.7009, "step": 367},
    {"epoch": 0.21, "learning_rate": 0.00021899999999999998, "loss": 4.6603, "step": 368},
    {"epoch": 0.21, "learning_rate": 0.00021959999999999997, "loss": 5.1922, "step": 369},
    {"epoch": 0.21, "learning_rate": 0.00022019999999999999, "loss": 4.9497, "step": 370},
    {"epoch": 0.21, "learning_rate": 0.00022079999999999997, "loss": 4.5666, "step": 371},
    {"epoch": 0.21, "learning_rate": 0.0002214, "loss": 5.4305, "step": 372},
    {"epoch": 0.21, "learning_rate": 0.00022199999999999998, "loss": 4.7896, "step": 373},
    {"epoch": 0.21, "learning_rate": 0.0002226, "loss": 5.4258, "step": 374},
    {"epoch": 0.21, "learning_rate": 0.00022319999999999998, "loss": 4.4568, "step": 375},
    {"epoch": 0.21, "learning_rate": 0.0002238, "loss": 4.7348, "step": 376},
    {"epoch": 0.21, "learning_rate": 0.00022439999999999998, "loss": 4.6844, "step": 377},
    {"epoch": 0.21, "learning_rate": 0.000225, "loss": 4.5267, "step": 378},
    {"epoch": 0.21, "learning_rate": 0.00022559999999999998, "loss": 4.7165, "step": 379},
    {"epoch": 0.21, "learning_rate": 0.00022619999999999997, "loss": 5.2593, "step": 380},
    {"epoch": 0.21, "learning_rate": 0.00022679999999999998, "loss": 4.3269, "step": 381},
    {"epoch": 0.21, "learning_rate": 0.00022739999999999997, "loss": 4.7356, "step": 382},
    {"epoch": 0.21, "learning_rate": 0.00022799999999999999, "loss": 4.5989, "step": 383},
    {"epoch": 0.22, "learning_rate": 0.00022859999999999997, "loss": 5.4692, "step": 384},
    {"epoch": 0.22, "learning_rate": 0.0002292, "loss": 5.0367, "step": 385},
    {"epoch": 0.22, "learning_rate": 0.00022979999999999997, "loss": 4.7516, "step": 386},
    {"epoch": 0.22, "learning_rate": 0.0002304, "loss": 5.9294, "step": 387},
    {"epoch": 0.22, "learning_rate": 0.00023099999999999998, "loss": 4.5957, "step": 388},
    {"epoch": 0.22, "learning_rate": 0.0002316, "loss": 4.7088, "step": 389},
    {"epoch": 0.22, "learning_rate": 0.00023219999999999998, "loss": 5.8763, "step": 390},
    {"epoch": 0.22, "learning_rate": 0.0002328, "loss": 4.4181, "step": 391},
    {"epoch": 0.22, "learning_rate": 0.00023339999999999998, "loss": 5.2992, "step": 392},
    {"epoch": 0.22, "learning_rate": 0.000234, "loss": 5.0853, "step": 393},
    {"epoch": 0.22, "learning_rate": 0.00023459999999999998, "loss": 4.912, "step": 394},
    {"epoch": 0.22, "learning_rate": 0.0002352, "loss": 5.1396, "step": 395},
    {"epoch": 0.22, "learning_rate": 0.00023579999999999999, "loss": 5.4524, "step": 396},
    {"epoch": 0.22, "learning_rate": 0.0002364, "loss": 5.438, "step": 397},
    {"epoch": 0.22, "learning_rate": 0.000237, "loss": 7.1359, "step": 398},
    {"epoch": 0.22, "learning_rate": 0.0002376, "loss": 10.3571, "step": 399},
    {"epoch": 0.22, "learning_rate": 0.0002382, "loss": 5.2293, "step": 400},
    {"epoch": 0.22, "learning_rate": 0.0002388, "loss": 4.9652, "step": 401},
    {"epoch": 0.23, "learning_rate": 0.0002394, "loss": 4.883, "step": 402},
    {"epoch": 0.23, "learning_rate": 0.00023999999999999998, "loss": 4.8143, "step": 403},
    {"epoch": 0.23, "learning_rate": 0.0002406, "loss": 5.0558, "step": 404},
    {"epoch": 0.23, "learning_rate": 0.00024119999999999998, "loss": 4.6178, "step": 405},
    {"epoch": 0.23, "learning_rate": 0.0002418, "loss": 5.0112, "step": 406},
    {"epoch": 0.23, "learning_rate": 0.00024239999999999998, "loss": 4.9684, "step": 407},
    {"epoch": 0.23, "learning_rate": 0.000243, "loss": 5.8897, "step": 408},
    {"epoch": 0.23, "learning_rate": 0.00024359999999999999, "loss": 5.3244, "step": 409},
    {"epoch": 0.23, "learning_rate": 0.00024419999999999997, "loss": 5.1039, "step": 410},
    {"epoch": 0.23, "learning_rate": 0.0002448, "loss": 5.0326, "step": 411},
    {"epoch": 0.23, "learning_rate": 0.00024539999999999995, "loss": 5.2395, "step": 412},
    {"epoch": 0.23, "learning_rate": 0.00024599999999999996, "loss": 5.1967, "step": 413},
    {"epoch": 0.23, "learning_rate": 0.0002466, "loss": 5.2712, "step": 414},
    {"epoch": 0.23, "learning_rate": 0.0002472, "loss": 5.3194, "step": 415},
    {"epoch": 0.23, "learning_rate": 0.00024779999999999995, "loss": 4.7941, "step": 416},
    {"epoch": 0.23, "learning_rate": 0.00024839999999999997, "loss": 5.6131, "step": 417},
    {"epoch": 0.23, "learning_rate": 0.000249, "loss": 4.8572, "step": 418},
    {"epoch": 0.23, "learning_rate": 0.00024959999999999994, "loss": 4.7795, "step": 419},
    {"epoch": 0.24, "learning_rate": 0.00025019999999999996, "loss": 4.9344, "step": 420},
    {"epoch": 0.24, "learning_rate": 0.00025079999999999997, "loss": 4.6473, "step": 421},
    {"epoch": 0.24, "learning_rate": 0.0002514, "loss": 4.8754, "step": 422},
    {"epoch": 0.24, "learning_rate": 0.00025199999999999995, "loss": 4.9176, "step": 423},
    {"epoch": 0.24, "learning_rate": 0.00025259999999999996, "loss": 5.5264, "step": 424},
    {"epoch": 0.24, "learning_rate": 0.0002532, "loss": 4.7957, "step": 425},
    {"epoch": 0.24, "learning_rate": 0.0002538, "loss": 4.7374, "step": 426},
    {"epoch": 0.24, "learning_rate": 0.00025439999999999995, "loss": 5.3779, "step": 427},
    {"epoch": 0.24, "learning_rate": 0.00025499999999999996, "loss": 6.1729, "step": 428},
    {"epoch": 0.24, "learning_rate": 0.0002556, "loss": 4.7028, "step": 429},
    {"epoch": 0.24, "learning_rate": 0.0002562, "loss": 4.7961, "step": 430},
    {"epoch": 0.24, "learning_rate": 0.00025679999999999995, "loss": 5.1186, "step": 431},
    {"epoch": 0.24, "learning_rate": 0.00025739999999999997, "loss": 4.7296, "step": 432},
    {"epoch": 0.24, "learning_rate": 0.000258, "loss": 4.7054, "step": 433},
    {"epoch": 0.24, "learning_rate": 0.0002586, "loss": 4.7615, "step": 434},
    {"epoch": 0.24, "learning_rate": 0.00025919999999999996, "loss": 4.8103, "step": 435},
    {"epoch": 0.24, "learning_rate": 0.00025979999999999997, "loss": 4.4192, "step": 436},
    {"epoch": 0.24, "learning_rate": 0.0002604, "loss": 4.9114, "step": 437},
    {"epoch": 0.25, "learning_rate": 0.000261, "loss": 4.9089, "step": 438},
    {"epoch": 0.25, "learning_rate": 0.00026159999999999996, "loss": 4.5249, "step": 439},
    {"epoch": 0.25, "learning_rate": 0.0002622, "loss": 4.8355, "step": 440},
    {"epoch": 0.25, "learning_rate": 0.0002628, "loss": 5.442, "step": 441},
    {"epoch": 0.25, "learning_rate": 0.00026339999999999995, "loss": 6.2262, "step": 442},
    {"epoch": 0.25, "learning_rate": 0.00026399999999999997, "loss": 4.7432, "step": 443},
    {"epoch": 0.25, "learning_rate": 0.0002646, "loss": 5.8577, "step": 444},
    {"epoch": 0.25, "learning_rate": 0.0002652, "loss": 4.7698, "step": 445},
    {"epoch": 0.25, "learning_rate": 0.00026579999999999996, "loss": 7.5969, "step": 446},
    {"epoch": 0.25, "learning_rate": 0.00026639999999999997, "loss": 5.9009, "step": 447},
    {"epoch": 0.25, "learning_rate": 0.000267, "loss": 5.2798, "step": 448},
    {"epoch": 0.25, "learning_rate": 0.0002676, "loss": 6.5801, "step": 449},
    {"epoch": 0.25, "learning_rate": 0.00026819999999999996, "loss": 4.91, "step": 450},
    {"epoch": 0.25, "learning_rate": 0.0002688, "loss": 8.2594, "step": 451},
    {"epoch": 0.25, "learning_rate": 0.0002694, "loss": 5.2204, "step": 452},
    {"epoch": 0.25, "learning_rate": 0.00027, "loss": 6.2953, "step": 453},
    {"epoch": 0.25, "learning_rate": 0.00027059999999999996, "loss": 5.6008, "step": 454},
    {"epoch": 0.26, "learning_rate": 0.0002712, "loss": 4.9293, "step": 455},
    {"epoch": 0.26, "learning_rate": 0.0002718, "loss": 4.8321, "step": 456},
    {"epoch": 0.26, "learning_rate": 0.0002724, "loss": 4.6382, "step": 457},
    {"epoch": 0.26, "learning_rate": 0.00027299999999999997, "loss": 5.6477, "step": 458},
    {"epoch": 0.26, "learning_rate": 0.0002736, "loss": 4.5272, "step": 459},
    {"epoch": 0.26, "learning_rate": 0.0002742, "loss": 5.0871, "step": 460},
    {"epoch": 0.26, "learning_rate": 0.0002748, "loss": 5.669, "step": 461},
    {"epoch": 0.26, "learning_rate": 0.00027539999999999997, "loss": 4.8767, "step": 462},
    {"epoch": 0.26, "learning_rate": 0.000276, "loss": 5.1443, "step": 463},
    {"epoch": 0.26, "learning_rate": 0.0002766, "loss": 4.8547, "step": 464},
    {"epoch": 0.26, "learning_rate": 0.0002772, "loss": 4.6289, "step": 465},
    {"epoch": 0.26, "learning_rate": 0.0002778, "loss": 5.0307, "step": 466},
    {"epoch": 0.26, "learning_rate": 0.0002784, "loss": 4.8807, "step": 467},
    {"epoch": 0.26, "learning_rate": 0.000279, "loss": 5.6747, "step": 468},
    {"epoch": 0.26, "learning_rate": 0.00027959999999999997, "loss": 4.6908, "step": 469},
    {"epoch": 0.26, "learning_rate": 0.0002802, "loss": 4.9543, "step": 470},
    {"epoch": 0.26, "learning_rate": 0.0002808, "loss": 5.3293, "step": 471},
    {"epoch": 0.26, "learning_rate": 0.00028139999999999996, "loss": 4.7712, "step": 472},
    {"epoch": 0.27, "learning_rate": 0.00028199999999999997, "loss": 4.7345, "step": 473},
    {"epoch": 0.27, "learning_rate": 0.0002826, "loss": 5.2528, "step": 474},
    {"epoch": 0.27, "learning_rate": 0.00028319999999999994, "loss": 5.1536, "step": 475},
    {"epoch": 0.27, "learning_rate": 0.00028379999999999996, "loss": 5.2428, "step": 476},
    {"epoch": 0.27, "learning_rate": 0.0002844, "loss": 5.6762, "step": 477},
    {"epoch": 0.27, "learning_rate": 0.000285, "loss": 4.6718, "step": 478},
    {"epoch": 0.27, "learning_rate": 0.00028559999999999995, "loss": 5.4173, "step": 479},
    {"epoch": 0.27, "learning_rate": 0.00028619999999999996, "loss": 5.4427, "step": 480},
    {"epoch": 0.27, "learning_rate": 0.0002868, "loss": 4.7836, "step": 481},
    {"epoch": 0.27, "learning_rate": 0.00028739999999999994, "loss": 6.2387, "step": 482},
    {"epoch": 0.27, "learning_rate": 0.00028799999999999995, "loss": 5.3981, "step": 483},
    {"epoch": 0.27, "learning_rate": 0.00028859999999999997, "loss": 4.9966, "step": 484},
    {"epoch": 0.27, "learning_rate": 0.0002892, "loss": 5.5966, "step": 485},
    {"epoch": 0.27, "learning_rate": 0.00028979999999999994, "loss": 5.8143, "step": 486},
    {"epoch": 0.27, "learning_rate": 0.00029039999999999996, "loss": 5.4283, "step": 487},
    {"epoch": 0.27, "learning_rate": 0.00029099999999999997, "loss": 5.1403, "step": 488},
    {"epoch": 0.27, "learning_rate": 0.0002916, "loss": 5.253, "step": 489},
    {"epoch": 0.27, "learning_rate": 0.00029219999999999995, "loss": 5.1972, "step": 490},
    {"epoch": 0.28, "learning_rate": 0.00029279999999999996, "loss": 5.3527, "step": 491},
    {"epoch": 0.28, "learning_rate": 0.0002934, "loss": 5.4327, "step": 492},
    {"epoch": 0.28, "learning_rate": 0.000294, "loss": 4.9888, "step": 493},
    {"epoch": 0.28, "learning_rate": 0.00029459999999999995, "loss": 6.1206, "step": 494},
    {"epoch": 0.28, "learning_rate": 0.00029519999999999997, "loss": 5.533, "step": 495},
    {"epoch": 0.28, "learning_rate": 0.0002958, "loss": 4.9783, "step": 496},
    {"epoch": 0.28, "learning_rate": 0.0002964, "loss": 4.6209, "step": 497},
    {"epoch": 0.28, "learning_rate": 0.00029699999999999996, "loss": 5.9688, "step": 498},
    {"epoch": 0.28, "learning_rate": 0.00029759999999999997, "loss": 5.1729, "step": 499},
    {"epoch": 0.28, "learning_rate": 0.0002982, "loss": 4.5471, "step": 500},
    {"epoch": 0.28, "eval_loss": 5.581608295440674, "eval_runtime": 900.7705, "eval_samples_per_second": 2.933, "eval_steps_per_second": 0.367, "eval_wer": 1.9376041253470846, "step": 500},
    {"epoch": 0.28, "learning_rate": 0.0002988, "loss": 5.933, "step": 501},
    {"epoch": 0.28, "learning_rate": 0.00029939999999999996, "loss": 5.1431, "step": 502},
    {"epoch": 0.28, "learning_rate": 0.0003, "loss": 5.2846, "step": 503},
    {"epoch": 0.28, "learning_rate": 0.0002997663551401869, "loss": 4.9981, "step": 504},
    {"epoch": 0.28, "learning_rate": 0.00029953271028037383, "loss": 5.6008, "step": 505},
    {"epoch": 0.28, "learning_rate": 0.00029929906542056073, "loss": 5.0449, "step": 506},
    {"epoch": 0.28, "learning_rate": 0.00029906542056074763, "loss": 4.8612, "step": 507},
    {"epoch": 0.28, "learning_rate": 0.0002988317757009346, "loss": 4.804, "step": 508},
    {"epoch": 0.29, "learning_rate": 0.0002985981308411215, "loss": 5.7526, "step": 509},
    {"epoch": 0.29, "learning_rate": 0.0002983644859813084, "loss": 5.2698, "step": 510},
    {"epoch": 0.29, "learning_rate": 0.0002981308411214953, "loss": 4.894, "step": 511},
    {"epoch": 0.29, "learning_rate": 0.00029789719626168224, "loss": 5.1537, "step": 512},
    {"epoch": 0.29, "learning_rate": 0.00029766355140186914, "loss": 5.4263, "step": 513},
    {"epoch": 0.29, "learning_rate": 0.00029742990654205604, "loss": 4.4888, "step": 514},
    {"epoch": 0.29, "learning_rate": 0.00029719626168224294, "loss": 4.6535, "step": 515},
    {"epoch": 0.29, "learning_rate": 0.0002969626168224299, "loss": 5.9521, "step": 516},
    {"epoch": 0.29, "learning_rate": 0.0002967289719626168, "loss": 4.8879, "step": 517},
    {"epoch": 0.29, "learning_rate": 0.0002964953271028037, "loss": 5.3439, "step": 518},
    {"epoch": 0.29, "learning_rate": 0.00029626168224299065, "loss": 4.9201, "step": 519},
    {"epoch": 0.29, "learning_rate": 0.00029602803738317755, "loss": 4.9309, "step": 520},
    {"epoch": 0.29, "learning_rate": 0.0002957943925233645, "loss": 4.6364, "step": 521},
    {"epoch": 0.29, "learning_rate": 0.00029556074766355135, "loss": 4.8853, "step": 522},
    {"epoch": 0.29, "learning_rate": 0.0002953271028037383, "loss": 5.3484, "step": 523},
    {"epoch": 0.29, "learning_rate": 0.0002950934579439252, "loss": 4.9527, "step": 524},
    {"epoch": 0.29, "learning_rate": 0.00029485981308411215, "loss": 6.347, "step": 525},
    {"epoch": 0.29, "learning_rate": 0.00029462616822429905, "loss": 5.2694, "step": 526},
    {"epoch": 0.3, "learning_rate": 0.00029439252336448596, "loss": 5.2538, "step": 527},
    {"epoch": 0.3, "learning_rate": 0.00029415887850467286, "loss": 4.54, "step": 528},
    {"epoch": 0.3, "learning_rate": 0.0002939252336448598, "loss": 5.0422, "step": 529},
    {"epoch": 0.3, "learning_rate": 0.0002936915887850467, "loss": 6.0477, "step": 530},
    {"epoch": 0.3, "learning_rate": 0.0002934579439252336, "loss": 4.5828, "step": 531},
    {"epoch": 0.3, "learning_rate": 0.00029322429906542056, "loss": 4.5899, "step": 532},
    {"epoch": 0.3, "learning_rate": 0.00029299065420560746, "loss": 5.068, "step": 533},
    {"epoch": 0.3, "learning_rate": 0.00029275700934579436, "loss": 4.5531, "step": 534},
    {"epoch": 0.3, "learning_rate": 0.00029252336448598126, "loss": 5.2529, "step": 535},
    {"epoch": 0.3, "learning_rate": 0.0002922897196261682, "loss": 4.7926, "step": 536},
    {"epoch": 0.3, "learning_rate": 0.0002920560747663551, "loss": 4.8982, "step": 537},
    {"epoch": 0.3, "learning_rate": 0.000291822429906542, "loss": 5.4155, "step": 538},
    {"epoch": 0.3, "learning_rate": 0.0002915887850467289, "loss": 5.0782, "step": 539},
    {"epoch": 0.3, "learning_rate": 0.00029135514018691587, "loss": 5.2239, "step": 540},
    {"epoch": 0.3, "learning_rate": 0.0002911214953271028, "loss": 5.1793, "step": 541},
    {"epoch": 0.3, "learning_rate": 0.0002908878504672897, "loss": 4.9732, "step": 542},
    {"epoch": 0.3, "learning_rate": 0.00029065420560747663, "loss": 5.5249, "step": 543},
    {"epoch": 0.3, "learning_rate": 0.00029042056074766353, "loss": 4.8778, "step": 544},
    {"epoch": 0.31, "learning_rate": 0.0002901869158878505, "loss": 5.0025, "step": 545},
    {"epoch": 0.31, "learning_rate": 0.00028995327102803733, "loss": 6.4381, "step": 546},
    {"epoch": 0.31, "learning_rate": 0.0002897196261682243, "loss": 5.6839, "step": 547},
    {"epoch": 0.31, "learning_rate": 0.0002894859813084112, "loss": 5.835, "step": 548},
    {"epoch": 0.31, "learning_rate": 0.00028925233644859814, "loss": 5.7503, "step": 549},
    {"epoch": 0.31, "learning_rate": 0.00028901869158878504, "loss": 4.9375, "step": 550},
    {"epoch": 0.31, "learning_rate": 0.00028878504672897194, "loss": 5.2374, "step": 551},
    {"epoch": 0.31, "learning_rate": 0.00028855140186915884, "loss": 5.7827, "step": 552},
    {"epoch": 0.31, "learning_rate": 0.0002883177570093458, "loss": 5.9669, "step": 553},
    {"epoch": 0.31, "learning_rate": 0.0002880841121495327, "loss": 4.8548, "step": 554},
    {"epoch": 0.31, "learning_rate": 0.0002878504672897196, "loss": 4.9032, "step": 555},
    {"epoch": 0.31, "learning_rate": 0.00028761682242990655, "loss": 5.6123, "step": 556},
    {"epoch": 0.31, "learning_rate": 0.00028738317757009345, "loss": 5.4261, "step": 557},
    {"epoch": 0.31, "learning_rate": 0.00028714953271028035, "loss": 5.0758, "step": 558},
    {"epoch": 0.31, "learning_rate": 0.00028691588785046725, "loss": 4.534, "step": 559},
    {"epoch": 0.31, "learning_rate": 0.0002866822429906542, "loss": 4.7744, "step": 560},
    {"epoch": 0.31, "learning_rate": 0.0002864485981308411, "loss": 5.2177, "step": 561},
    {"epoch": 0.32, "learning_rate": 0.000286214953271028, "loss": 6.0423, "step": 562},
    {"epoch": 0.32, "learning_rate": 0.0002859813084112149,
|
"loss": 4.5625, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028574766355140185, |
|
"loss": 5.242, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028551401869158875, |
|
"loss": 5.9316, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028528037383177565, |
|
"loss": 4.6705, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002850467289719626, |
|
"loss": 4.8982, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002848130841121495, |
|
"loss": 5.2801, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028457943925233646, |
|
"loss": 4.8128, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002843457943925233, |
|
"loss": 5.0719, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028411214953271026, |
|
"loss": 5.2668, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028387850467289716, |
|
"loss": 5.0351, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002836448598130841, |
|
"loss": 5.8277, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.000283411214953271, |
|
"loss": 4.7576, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002831775700934579, |
|
"loss": 4.9129, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002829439252336448, |
|
"loss": 5.08, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028271028037383177, |
|
"loss": 4.928, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028247663551401867, |
|
"loss": 4.8651, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00028224299065420557, |
|
"loss": 4.9735, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002820093457943925, |
|
"loss": 5.5964, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002817757009345794, |
|
"loss": 4.6658, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002815420560747663, |
|
"loss": 5.1068, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002813084112149532, |
|
"loss": 4.9917, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002810747663551402, |
|
"loss": 5.1377, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002808411214953271, |
|
"loss": 6.1242, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.000280607476635514, |
|
"loss": 4.6956, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002803738317757009, |
|
"loss": 5.6856, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00028014018691588784, |
|
"loss": 4.8435, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00027990654205607474, |
|
"loss": 5.375, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00027967289719626164, |
|
"loss": 5.1088, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002794392523364486, |
|
"loss": 5.0008, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002792056074766355, |
|
"loss": 5.0166, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00027897196261682244, |
|
"loss": 5.1512, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002787383177570093, |
|
"loss": 4.9898, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00027850467289719624, |
|
"loss": 4.821, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00027827102803738314, |
|
"loss": 4.9955, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002780373831775701, |
|
"loss": 7.4815, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.000277803738317757, |
|
"loss": 6.8128, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002775700934579439, |
|
"loss": 5.2313, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002773364485981308, |
|
"loss": 5.3007, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027710280373831775, |
|
"loss": 4.9507, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027686915887850465, |
|
"loss": 5.0799, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027663551401869155, |
|
"loss": 4.7422, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002764018691588785, |
|
"loss": 4.7912, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002761682242990654, |
|
"loss": 4.9524, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002759345794392523, |
|
"loss": 4.6615, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002757009345794392, |
|
"loss": 5.3252, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027546728971962616, |
|
"loss": 5.1719, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027523364485981306, |
|
"loss": 6.0747, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027499999999999996, |
|
"loss": 4.9708, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027476635514018686, |
|
"loss": 5.6665, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002745327102803738, |
|
"loss": 4.6714, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002742990654205607, |
|
"loss": 4.8929, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002740654205607476, |
|
"loss": 4.9788, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00027383177570093457, |
|
"loss": 5.2246, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00027359813084112147, |
|
"loss": 4.7412, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002733644859813084, |
|
"loss": 5.2217, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00027313084112149527, |
|
"loss": 4.6002, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002728971962616822, |
|
"loss": 4.6342, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002726635514018691, |
|
"loss": 5.5986, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002724299065420561, |
|
"loss": 4.9237, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.000272196261682243, |
|
"loss": 5.1799, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002719626168224299, |
|
"loss": 5.0955, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002717289719626168, |
|
"loss": 5.6877, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00027149532710280373, |
|
"loss": 5.0829, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00027126168224299063, |
|
"loss": 4.7393, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00027102803738317753, |
|
"loss": 4.7369, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002707943925233645, |
|
"loss": 5.3679, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002705607476635514, |
|
"loss": 6.4361, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002703271028037383, |
|
"loss": 4.7886, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002700934579439252, |
|
"loss": 4.9049, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00026985981308411214, |
|
"loss": 5.1811, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00026962616822429904, |
|
"loss": 4.7211, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026939252336448594, |
|
"loss": 4.9558, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026915887850467284, |
|
"loss": 4.738, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002689252336448598, |
|
"loss": 4.8307, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002686915887850467, |
|
"loss": 4.6711, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002684579439252336, |
|
"loss": 4.4691, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026822429906542055, |
|
"loss": 4.8473, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026799065420560745, |
|
"loss": 6.5801, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002677570093457944, |
|
"loss": 6.0626, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026752336448598125, |
|
"loss": 5.4207, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002672897196261682, |
|
"loss": 4.8942, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002670560747663551, |
|
"loss": 4.9021, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026682242990654206, |
|
"loss": 4.7175, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026658878504672896, |
|
"loss": 4.7728, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026635514018691586, |
|
"loss": 5.7365, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00026612149532710276, |
|
"loss": 6.7046, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002658878504672897, |
|
"loss": 5.2938, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002656542056074766, |
|
"loss": 5.2759, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002654205607476635, |
|
"loss": 6.0766, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026518691588785047, |
|
"loss": 5.2083, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026495327102803737, |
|
"loss": 5.1199, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026471962616822427, |
|
"loss": 4.78, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026448598130841117, |
|
"loss": 5.0556, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002642523364485981, |
|
"loss": 4.7203, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.000264018691588785, |
|
"loss": 4.7321, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002637850467289719, |
|
"loss": 6.6024, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002635514018691588, |
|
"loss": 5.0324, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002633177570093458, |
|
"loss": 4.9096, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002630841121495327, |
|
"loss": 5.4461, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002628504672897196, |
|
"loss": 4.8457, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026261682242990653, |
|
"loss": 4.8117, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026238317757009343, |
|
"loss": 5.1213, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002621495327102804, |
|
"loss": 5.0574, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00026191588785046723, |
|
"loss": 5.1551, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002616822429906542, |
|
"loss": 4.7985, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002614485981308411, |
|
"loss": 5.4033, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00026121495327102804, |
|
"loss": 4.612, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00026098130841121494, |
|
"loss": 4.9412, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00026074766355140184, |
|
"loss": 4.8942, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00026051401869158874, |
|
"loss": 5.4892, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002602803738317757, |
|
"loss": 4.7117, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002600467289719626, |
|
"loss": 4.6364, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002598130841121495, |
|
"loss": 4.8958, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00025957943925233645, |
|
"loss": 4.7709, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00025934579439252335, |
|
"loss": 4.5826, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00025911214953271025, |
|
"loss": 4.6646, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00025887850467289715, |
|
"loss": 4.8181, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002586448598130841, |
|
"loss": 5.2085, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.000258411214953271, |
|
"loss": 4.9686, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002581775700934579, |
|
"loss": 4.7409, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002579439252336448, |
|
"loss": 5.1911, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00025771028037383176, |
|
"loss": 5.0611, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00025747663551401866, |
|
"loss": 5.1322, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00025724299065420556, |
|
"loss": 4.9188, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002570093457943925, |
|
"loss": 4.7283, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002567757009345794, |
|
"loss": 4.7484, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00025654205607476637, |
|
"loss": 4.927, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002563084112149532, |
|
"loss": 4.749, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00025607476635514017, |
|
"loss": 4.5379, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00025584112149532707, |
|
"loss": 5.0992, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.000255607476635514, |
|
"loss": 5.0194, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002553738317757009, |
|
"loss": 4.7863, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002551401869158878, |
|
"loss": 4.9485, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002549065420560747, |
|
"loss": 4.9278, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002546728971962617, |
|
"loss": 4.4361, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002544392523364486, |
|
"loss": 4.9515, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002542056074766355, |
|
"loss": 5.1618, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00025397196261682243, |
|
"loss": 4.8154, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00025373831775700933, |
|
"loss": 4.929, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00025350467289719623, |
|
"loss": 4.6813, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00025327102803738313, |
|
"loss": 4.897, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002530373831775701, |
|
"loss": 5.4599, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.000252803738317757, |
|
"loss": 5.6072, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002525700934579439, |
|
"loss": 5.3548, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002523364485981308, |
|
"loss": 5.0716, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00025210280373831774, |
|
"loss": 5.1283, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00025186915887850464, |
|
"loss": 4.6845, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00025163551401869154, |
|
"loss": 5.046, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002514018691588785, |
|
"loss": 5.2705, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002511682242990654, |
|
"loss": 4.8732, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00025093457943925235, |
|
"loss": 4.9474, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002507009345794392, |
|
"loss": 5.0853, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00025046728971962615, |
|
"loss": 5.0843, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00025023364485981305, |
|
"loss": 4.5215, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00025, |
|
"loss": 4.7131, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002497663551401869, |
|
"loss": 5.1258, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002495327102803738, |
|
"loss": 4.6722, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002492990654205607, |
|
"loss": 4.6464, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00024906542056074766, |
|
"loss": 4.9733, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00024883177570093456, |
|
"loss": 4.9711, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024859813084112146, |
|
"loss": 4.7633, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002483644859813084, |
|
"loss": 5.4998, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002481308411214953, |
|
"loss": 4.9303, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002478971962616822, |
|
"loss": 5.1392, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002476635514018691, |
|
"loss": 4.3532, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024742990654205607, |
|
"loss": 5.0674, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024719626168224297, |
|
"loss": 4.6014, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024696261682242987, |
|
"loss": 4.7569, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024672897196261677, |
|
"loss": 4.6892, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002464953271028037, |
|
"loss": 4.6065, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002462616822429906, |
|
"loss": 6.3043, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002460280373831775, |
|
"loss": 4.7317, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002457943925233645, |
|
"loss": 4.8874, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002455607476635514, |
|
"loss": 4.7145, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024532710280373833, |
|
"loss": 4.6753, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002450934579439252, |
|
"loss": 4.6223, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024485981308411213, |
|
"loss": 5.2338, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00024462616822429903, |
|
"loss": 4.7033, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.000244392523364486, |
|
"loss": 5.5446, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002441588785046729, |
|
"loss": 4.7961, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002439252336448598, |
|
"loss": 5.8382, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024369158878504671, |
|
"loss": 5.0871, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024345794392523364, |
|
"loss": 6.1311, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024322429906542051, |
|
"loss": 5.2221, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024299065420560744, |
|
"loss": 4.9638, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024275700934579437, |
|
"loss": 5.1487, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002425233644859813, |
|
"loss": 4.647, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002422897196261682, |
|
"loss": 5.0244, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024205607476635512, |
|
"loss": 5.1103, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024182242990654205, |
|
"loss": 4.8882, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024158878504672895, |
|
"loss": 4.8263, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024135514018691585, |
|
"loss": 4.8644, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024112149532710278, |
|
"loss": 4.9268, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002408878504672897, |
|
"loss": 4.9957, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00024065420560747663, |
|
"loss": 5.0076, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002404205607476635, |
|
"loss": 5.1385, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00024018691588785043, |
|
"loss": 4.8003, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023995327102803736, |
|
"loss": 4.7509, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023971962616822429, |
|
"loss": 5.3028, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023948598130841119, |
|
"loss": 4.7583, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002392523364485981, |
|
"loss": 4.7361, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023901869158878504, |
|
"loss": 4.4698, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023878504672897194, |
|
"loss": 4.5434, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023855140186915884, |
|
"loss": 4.7767, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023831775700934577, |
|
"loss": 5.0727, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002380841121495327, |
|
"loss": 4.951, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023785046728971962, |
|
"loss": 5.0511, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002376168224299065, |
|
"loss": 4.7732, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023738317757009342, |
|
"loss": 5.1477, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023714953271028035, |
|
"loss": 4.6437, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023691588785046728, |
|
"loss": 4.7255, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023668224299065418, |
|
"loss": 4.9444, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002364485981308411, |
|
"loss": 4.581, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00023621495327102803, |
|
"loss": 4.5809, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023598130841121493, |
|
"loss": 4.8372, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023574766355140183, |
|
"loss": 5.2178, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023551401869158876, |
|
"loss": 4.8039, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023528037383177569, |
|
"loss": 4.8313, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002350467289719626, |
|
"loss": 5.0778, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023481308411214949, |
|
"loss": 4.9378, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002345794392523364, |
|
"loss": 4.7003, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023434579439252334, |
|
"loss": 4.5985, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023411214953271027, |
|
"loss": 4.6101, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023387850467289717, |
|
"loss": 5.1548, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002336448598130841, |
|
"loss": 5.0424, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023341121495327102, |
|
"loss": 5.1229, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023317757009345792, |
|
"loss": 5.0449, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023294392523364482, |
|
"loss": 5.2281, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023271028037383175, |
|
"loss": 4.9368, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00023247663551401868, |
|
"loss": 4.7728, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002322429906542056, |
|
"loss": 5.0216, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00023200934579439248, |
|
"loss": 4.8232, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002317757009345794, |
|
"loss": 5.4885, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00023154205607476633, |
|
"loss": 5.4549, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00023130841121495326, |
|
"loss": 5.1114, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00023107476635514016, |
|
"loss": 5.061, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00023084112149532709, |
|
"loss": 4.9769, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.000230607476635514, |
|
"loss": 4.9299, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002303738317757009, |
|
"loss": 5.6714, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002301401869158878, |
|
"loss": 5.0961, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00022990654205607474, |
|
"loss": 5.1892, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00022967289719626167, |
|
"loss": 4.7421, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002294392523364486, |
|
"loss": 4.9102, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00022920560747663547, |
|
"loss": 4.9065, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002289719626168224, |
|
"loss": 4.704, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00022873831775700932, |
|
"loss": 4.584, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00022850467289719625, |
|
"loss": 4.818, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00022827102803738315, |
|
"loss": 4.9744, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00022803738317757008, |
|
"loss": 5.1315, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.000227803738317757, |
|
"loss": 5.0164, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002275700934579439, |
|
"loss": 4.8607, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002273364485981308, |
|
"loss": 4.5535, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022710280373831773, |
|
"loss": 4.6626, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022686915887850466, |
|
"loss": 4.6536, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022663551401869158, |
|
"loss": 5.2567, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022640186915887846, |
|
"loss": 4.7812, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022616822429906539, |
|
"loss": 4.666, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002259345794392523, |
|
"loss": 4.7216, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022570093457943924, |
|
"loss": 4.8084, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022546728971962614, |
|
"loss": 4.6977, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022523364485981307, |
|
"loss": 5.9018, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.000225, |
|
"loss": 4.8642, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002247663551401869, |
|
"loss": 5.131, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022453271028037382, |
|
"loss": 4.7648, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022429906542056072, |
|
"loss": 5.4213, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022406542056074765, |
|
"loss": 4.8285, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00022383177570093458, |
|
"loss": 5.0544, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002235981308411215, |
|
"loss": 4.6938, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022336448598130838, |
|
"loss": 5.0009, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002231308411214953, |
|
"loss": 5.077, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022289719626168223, |
|
"loss": 4.9157, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022266355140186916, |
|
"loss": 4.471, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022242990654205606, |
|
"loss": 4.7155, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022219626168224296, |
|
"loss": 4.7203, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022196261682242988, |
|
"loss": 4.9135, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002217289719626168, |
|
"loss": 4.9905, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002214953271028037, |
|
"loss": 5.0028, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022126168224299064, |
|
"loss": 5.0444, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022102803738317757, |
|
"loss": 5.6934, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002207943925233645, |
|
"loss": 4.6798, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022056074766355137, |
|
"loss": 4.5773, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002203271028037383, |
|
"loss": 4.8751, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00022009345794392522, |
|
"loss": 4.4002, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00021985981308411215, |
|
"loss": 4.5314, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00021962616822429905, |
|
"loss": 4.4511, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021939252336448595, |
|
"loss": 5.081, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021915887850467288, |
|
"loss": 4.9182, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002189252336448598, |
|
"loss": 6.4733, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002186915887850467, |
|
"loss": 4.9352, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021845794392523363, |
|
"loss": 4.7182, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021822429906542056, |
|
"loss": 4.9097, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021799065420560748, |
|
"loss": 4.8379, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021775700934579436, |
|
"loss": 4.8323, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021752336448598128, |
|
"loss": 5.0515, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002172897196261682, |
|
"loss": 4.6746, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021705607476635514, |
|
"loss": 4.9048, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021682242990654204, |
|
"loss": 5.1008, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021658878504672894, |
|
"loss": 5.2501, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021635514018691587, |
|
"loss": 4.8127, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002161214953271028, |
|
"loss": 5.1189, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002158878504672897, |
|
"loss": 5.127, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021565420560747662, |
|
"loss": 4.8071, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00021542056074766355, |
|
"loss": 5.7193, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021518691588785047, |
|
"loss": 4.803, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021495327102803735, |
|
"loss": 4.8942, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021471962616822427, |
|
"loss": 5.032, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002144859813084112, |
|
"loss": 4.7539, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021425233644859813, |
|
"loss": 4.6788, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021401869158878503, |
|
"loss": 4.6206, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021378504672897193, |
|
"loss": 5.1516, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021355140186915886, |
|
"loss": 4.9009, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021331775700934578, |
|
"loss": 4.7841, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021308411214953268, |
|
"loss": 4.9146, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002128504672897196, |
|
"loss": 5.0889, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021261682242990654, |
|
"loss": 5.0272, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021238317757009347, |
|
"loss": 4.6575, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021214953271028034, |
|
"loss": 4.6819, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021191588785046727, |
|
"loss": 4.7811, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002116822429906542, |
|
"loss": 5.0642, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021144859813084112, |
|
"loss": 4.5642, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021121495327102802, |
|
"loss": 5.1497, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021098130841121492, |
|
"loss": 4.8615, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021074766355140185, |
|
"loss": 4.9596, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021051401869158877, |
|
"loss": 5.1478, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021028037383177567, |
|
"loss": 5.4373, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002100467289719626, |
|
"loss": 4.5459, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020981308411214953, |
|
"loss": 5.1358, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020957943925233646, |
|
"loss": 5.0646, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020934579439252333, |
|
"loss": 4.6713, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020911214953271026, |
|
"loss": 4.8727, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020887850467289718, |
|
"loss": 5.5929, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002086448598130841, |
|
"loss": 4.7889, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.000208411214953271, |
|
"loss": 4.9456, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002081775700934579, |
|
"loss": 5.0594, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020794392523364484, |
|
"loss": 4.6649, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020771028037383176, |
|
"loss": 6.2458, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00020747663551401867, |
|
"loss": 6.1455, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002072429906542056, |
|
"loss": 8.3342, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020700934579439252, |
|
"loss": 4.8155, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020677570093457945, |
|
"loss": 4.9064, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020654205607476632, |
|
"loss": 5.468, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020630841121495325, |
|
"loss": 4.964, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020607476635514017, |
|
"loss": 5.0216, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002058411214953271, |
|
"loss": 5.0149, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.000205607476635514, |
|
"loss": 4.8836, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002053738317757009, |
|
"loss": 4.5883, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020514018691588783, |
|
"loss": 4.8891, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020490654205607476, |
|
"loss": 4.7208, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020467289719626166, |
|
"loss": 5.1443, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020443925233644858, |
|
"loss": 4.6899, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002042056074766355, |
|
"loss": 4.7846, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020397196261682244, |
|
"loss": 4.7023, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002037383177570093, |
|
"loss": 4.8883, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020350467289719624, |
|
"loss": 5.2402, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020327102803738316, |
|
"loss": 4.5024, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002030373831775701, |
|
"loss": 5.3522, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.000202803738317757, |
|
"loss": 4.784, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002025700934579439, |
|
"loss": 4.3737, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020233644859813082, |
|
"loss": 4.6135, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020210280373831775, |
|
"loss": 4.9948, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020186915887850465, |
|
"loss": 4.7271, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020163551401869157, |
|
"loss": 4.5752, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002014018691588785, |
|
"loss": 4.8277, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020116822429906543, |
|
"loss": 5.2676, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002009345794392523, |
|
"loss": 4.5385, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020070093457943923, |
|
"loss": 4.6389, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020046728971962616, |
|
"loss": 4.5535, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020023364485981308, |
|
"loss": 4.8505, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019999999999999998, |
|
"loss": 4.9484, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019976635514018688, |
|
"loss": 4.9759, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0001995327102803738, |
|
"loss": 4.6066, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019929906542056074, |
|
"loss": 4.8184, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019906542056074764, |
|
"loss": 5.2863, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019883177570093456, |
|
"loss": 4.8743, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001985981308411215, |
|
"loss": 4.8111, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019836448598130842, |
|
"loss": 4.46, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001981308411214953, |
|
"loss": 4.6367, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019789719626168222, |
|
"loss": 4.8193, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019766355140186915, |
|
"loss": 4.7381, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019742990654205607, |
|
"loss": 4.6991, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019719626168224297, |
|
"loss": 5.7858, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019696261682242987, |
|
"loss": 4.8903, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001967289719626168, |
|
"loss": 4.8371, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019649532710280373, |
|
"loss": 4.7936, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019626168224299063, |
|
"loss": 4.5183, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019602803738317755, |
|
"loss": 4.8386, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019579439252336448, |
|
"loss": 5.5206, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001955607476635514, |
|
"loss": 4.3584, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019532710280373828, |
|
"loss": 4.7839, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001950934579439252, |
|
"loss": 4.8174, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019485981308411214, |
|
"loss": 4.7915, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019462616822429906, |
|
"loss": 4.406, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019439252336448596, |
|
"loss": 4.7956, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019415887850467286, |
|
"loss": 4.4982, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001939252336448598, |
|
"loss": 4.892, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019369158878504672, |
|
"loss": 4.8088, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019345794392523362, |
|
"loss": 5.399, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019322429906542055, |
|
"loss": 4.9746, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019299065420560747, |
|
"loss": 4.5356, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001927570093457944, |
|
"loss": 4.7247, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019252336448598127, |
|
"loss": 5.0142, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001922897196261682, |
|
"loss": 4.6553, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019205607476635513, |
|
"loss": 4.5916, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019182242990654205, |
|
"loss": 4.5721, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019158878504672895, |
|
"loss": 5.2081, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019135514018691585, |
|
"loss": 4.624, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019112149532710278, |
|
"loss": 5.0746, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001908878504672897, |
|
"loss": 4.4659, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001906542056074766, |
|
"loss": 4.8608, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019042056074766354, |
|
"loss": 4.8085, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00019018691588785046, |
|
"loss": 4.7054, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001899532710280374, |
|
"loss": 5.526, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018971962616822426, |
|
"loss": 4.6951, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001894859813084112, |
|
"loss": 5.1382, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018925233644859812, |
|
"loss": 5.1781, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018901869158878504, |
|
"loss": 4.6907, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018878504672897195, |
|
"loss": 4.7877, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018855140186915885, |
|
"loss": 4.8763, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018831775700934577, |
|
"loss": 4.3011, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001880841121495327, |
|
"loss": 4.3919, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001878504672897196, |
|
"loss": 4.9187, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018761682242990653, |
|
"loss": 4.909, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018738317757009345, |
|
"loss": 4.4464, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018714953271028038, |
|
"loss": 4.8126, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018691588785046725, |
|
"loss": 4.705, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018668224299065418, |
|
"loss": 4.6552, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001864485981308411, |
|
"loss": 4.9665, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018621495327102804, |
|
"loss": 4.5219, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018598130841121494, |
|
"loss": 4.9879, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018574766355140184, |
|
"loss": 4.9641, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018551401869158876, |
|
"loss": 5.1829, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001852803738317757, |
|
"loss": 5.1299, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001850467289719626, |
|
"loss": 4.7448, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018481308411214952, |
|
"loss": 4.4289, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018457943925233644, |
|
"loss": 4.7672, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018434579439252337, |
|
"loss": 5.3529, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018411214953271024, |
|
"loss": 4.8228, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018387850467289717, |
|
"loss": 4.6377, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 5.072754859924316, |
|
"eval_runtime": 903.6611, |
|
"eval_samples_per_second": 2.924, |
|
"eval_steps_per_second": 0.366, |
|
"eval_wer": 1.9177905593018643, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001836448598130841, |
|
"loss": 4.9653, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018341121495327103, |
|
"loss": 4.6313, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018317757009345793, |
|
"loss": 4.6301, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018294392523364483, |
|
"loss": 5.1457, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018271028037383175, |
|
"loss": 4.7742, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018247663551401868, |
|
"loss": 4.7146, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00018224299065420558, |
|
"loss": 5.1547, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001820093457943925, |
|
"loss": 5.2403, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00018177570093457944, |
|
"loss": 5.126, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00018154205607476636, |
|
"loss": 4.5986, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00018130841121495324, |
|
"loss": 4.8227, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00018107476635514016, |
|
"loss": 4.6896, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001808411214953271, |
|
"loss": 4.7199, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00018060747663551402, |
|
"loss": 5.0175, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00018037383177570092, |
|
"loss": 5.6869, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00018014018691588782, |
|
"loss": 4.8219, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017990654205607474, |
|
"loss": 4.7156, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017967289719626167, |
|
"loss": 4.7404, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017943925233644857, |
|
"loss": 5.3779, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001792056074766355, |
|
"loss": 5.596, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017897196261682243, |
|
"loss": 5.3479, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017873831775700935, |
|
"loss": 4.8651, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017850467289719623, |
|
"loss": 4.7978, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017827102803738315, |
|
"loss": 4.783, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017803738317757008, |
|
"loss": 4.9094, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.000177803738317757, |
|
"loss": 4.6664, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001775700934579439, |
|
"loss": 4.7607, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001773364485981308, |
|
"loss": 5.0478, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017710280373831773, |
|
"loss": 4.8521, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017686915887850466, |
|
"loss": 5.113, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017663551401869156, |
|
"loss": 4.6649, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001764018691588785, |
|
"loss": 4.8997, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017616822429906542, |
|
"loss": 4.8152, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017593457943925234, |
|
"loss": 4.3392, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017570093457943922, |
|
"loss": 4.5973, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017546728971962614, |
|
"loss": 4.8074, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017523364485981307, |
|
"loss": 4.4612, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.000175, |
|
"loss": 4.6995, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001747663551401869, |
|
"loss": 4.738, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001745327102803738, |
|
"loss": 5.6264, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017429906542056073, |
|
"loss": 5.0616, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017406542056074765, |
|
"loss": 4.74, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00017383177570093455, |
|
"loss": 5.2807, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017359813084112148, |
|
"loss": 5.0314, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001733644859813084, |
|
"loss": 5.445, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017313084112149533, |
|
"loss": 4.777, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001728971962616822, |
|
"loss": 4.6379, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017266355140186913, |
|
"loss": 4.6174, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017242990654205606, |
|
"loss": 4.9705, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.000172196261682243, |
|
"loss": 5.6703, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001719626168224299, |
|
"loss": 5.6907, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001717289719626168, |
|
"loss": 4.4903, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017149532710280372, |
|
"loss": 4.7383, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017126168224299064, |
|
"loss": 4.4566, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017102803738317754, |
|
"loss": 4.7452, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017079439252336447, |
|
"loss": 4.5495, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001705607476635514, |
|
"loss": 5.0378, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00017032710280373833, |
|
"loss": 4.6649, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001700934579439252, |
|
"loss": 5.0953, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00016985981308411213, |
|
"loss": 4.4979, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00016962616822429905, |
|
"loss": 4.7325, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016939252336448598, |
|
"loss": 4.6278, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016915887850467288, |
|
"loss": 5.0017, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016892523364485978, |
|
"loss": 4.364, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001686915887850467, |
|
"loss": 5.0188, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016845794392523363, |
|
"loss": 4.2547, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016822429906542053, |
|
"loss": 4.7862, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016799065420560746, |
|
"loss": 4.7286, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001677570093457944, |
|
"loss": 4.7978, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016752336448598132, |
|
"loss": 4.5335, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001672897196261682, |
|
"loss": 4.5555, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016705607476635512, |
|
"loss": 4.3638, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016682242990654204, |
|
"loss": 4.508, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016658878504672897, |
|
"loss": 5.2232, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016635514018691587, |
|
"loss": 4.7749, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016612149532710277, |
|
"loss": 5.182, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001658878504672897, |
|
"loss": 4.9489, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016565420560747662, |
|
"loss": 4.659, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00016542056074766352, |
|
"loss": 4.6653, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016518691588785045, |
|
"loss": 4.874, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016495327102803738, |
|
"loss": 4.9778, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001647196261682243, |
|
"loss": 4.8992, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016448598130841118, |
|
"loss": 4.3147, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001642523364485981, |
|
"loss": 4.7119, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016401869158878503, |
|
"loss": 4.7404, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016378504672897196, |
|
"loss": 4.8604, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016355140186915886, |
|
"loss": 4.8493, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016331775700934576, |
|
"loss": 5.0921, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001630841121495327, |
|
"loss": 4.937, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016285046728971962, |
|
"loss": 4.3971, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016261682242990652, |
|
"loss": 4.5391, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016238317757009344, |
|
"loss": 4.8539, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016214953271028037, |
|
"loss": 4.9911, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001619158878504673, |
|
"loss": 5.5408, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016168224299065417, |
|
"loss": 4.7559, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001614485981308411, |
|
"loss": 5.7429, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00016121495327102802, |
|
"loss": 5.2865, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00016098130841121495, |
|
"loss": 5.9881, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00016074766355140185, |
|
"loss": 4.863, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00016051401869158875, |
|
"loss": 5.001, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00016028037383177568, |
|
"loss": 4.4963, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001600467289719626, |
|
"loss": 4.7383, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001598130841121495, |
|
"loss": 4.771, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015957943925233643, |
|
"loss": 4.3538, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015934579439252336, |
|
"loss": 4.5339, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001591121495327103, |
|
"loss": 4.4357, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015887850467289716, |
|
"loss": 4.3471, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001586448598130841, |
|
"loss": 4.4512, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015841121495327101, |
|
"loss": 4.5906, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015817757009345794, |
|
"loss": 4.7666, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015794392523364484, |
|
"loss": 4.5706, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015771028037383174, |
|
"loss": 4.7768, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00015747663551401867, |
|
"loss": 4.7465, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001572429906542056, |
|
"loss": 5.0073, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001570093457943925, |
|
"loss": 5.1489, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015677570093457942, |
|
"loss": 4.6082, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015654205607476635, |
|
"loss": 4.7613, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015630841121495328, |
|
"loss": 4.6944, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015607476635514015, |
|
"loss": 4.5798, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015584112149532708, |
|
"loss": 4.564, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.000155607476635514, |
|
"loss": 4.6052, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015537383177570093, |
|
"loss": 4.5484, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015514018691588783, |
|
"loss": 4.5938, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015490654205607473, |
|
"loss": 4.4018, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015467289719626166, |
|
"loss": 5.1897, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001544392523364486, |
|
"loss": 4.3624, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001542056074766355, |
|
"loss": 4.3703, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015397196261682241, |
|
"loss": 5.0461, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015373831775700934, |
|
"loss": 4.3255, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015350467289719627, |
|
"loss": 5.0283, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015327102803738314, |
|
"loss": 4.823, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00015303738317757007, |
|
"loss": 4.5654, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.000152803738317757, |
|
"loss": 4.9188, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015257009345794392, |
|
"loss": 4.5866, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015233644859813082, |
|
"loss": 5.1037, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015210280373831772, |
|
"loss": 4.4932, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015186915887850465, |
|
"loss": 4.9721, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015163551401869158, |
|
"loss": 4.7306, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015140186915887848, |
|
"loss": 4.4082, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001511682242990654, |
|
"loss": 4.7153, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015093457943925233, |
|
"loss": 4.566, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015070093457943926, |
|
"loss": 4.9826, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015046728971962613, |
|
"loss": 5.618, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015023364485981306, |
|
"loss": 4.8602, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00015, |
|
"loss": 4.494, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00014976635514018691, |
|
"loss": 4.8555, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00014953271028037381, |
|
"loss": 4.8804, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00014929906542056074, |
|
"loss": 4.9153, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00014906542056074764, |
|
"loss": 5.914, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00014883177570093457, |
|
"loss": 4.6331, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014859813084112147, |
|
"loss": 4.8212, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001483644859813084, |
|
"loss": 5.0512, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014813084112149532, |
|
"loss": 4.592, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014789719626168225, |
|
"loss": 4.6896, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014766355140186915, |
|
"loss": 4.6735, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014742990654205608, |
|
"loss": 4.9311, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014719626168224298, |
|
"loss": 4.4262, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001469626168224299, |
|
"loss": 4.5413, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001467289719626168, |
|
"loss": 4.421, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014649532710280373, |
|
"loss": 4.5932, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014626168224299063, |
|
"loss": 4.6563, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014602803738317756, |
|
"loss": 4.6683, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014579439252336446, |
|
"loss": 4.7811, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001455607476635514, |
|
"loss": 4.3987, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014532710280373831, |
|
"loss": 4.4767, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014509345794392524, |
|
"loss": 4.7227, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014485981308411214, |
|
"loss": 4.6941, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00014462616822429907, |
|
"loss": 4.3973, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014439252336448597, |
|
"loss": 4.6582, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001441588785046729, |
|
"loss": 4.7733, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001439252336448598, |
|
"loss": 4.9257, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014369158878504672, |
|
"loss": 4.3115, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014345794392523362, |
|
"loss": 4.8651, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014322429906542055, |
|
"loss": 4.4354, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014299065420560745, |
|
"loss": 4.7014, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014275700934579438, |
|
"loss": 4.788, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001425233644859813, |
|
"loss": 4.7997, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014228971962616823, |
|
"loss": 5.054, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014205607476635513, |
|
"loss": 4.3161, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014182242990654206, |
|
"loss": 4.5677, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014158878504672896, |
|
"loss": 4.4084, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014135514018691589, |
|
"loss": 4.9911, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014112149532710279, |
|
"loss": 4.4559, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001408878504672897, |
|
"loss": 4.5061, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001406542056074766, |
|
"loss": 5.236, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00014042056074766354, |
|
"loss": 4.4686, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00014018691588785044, |
|
"loss": 4.8665, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013995327102803737, |
|
"loss": 4.6375, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001397196261682243, |
|
"loss": 4.5114, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013948598130841122, |
|
"loss": 4.8559, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013925233644859812, |
|
"loss": 4.7208, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013901869158878505, |
|
"loss": 5.0585, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013878504672897195, |
|
"loss": 4.9908, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013855140186915888, |
|
"loss": 4.6023, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013831775700934578, |
|
"loss": 4.9435, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001380841121495327, |
|
"loss": 4.6312, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001378504672897196, |
|
"loss": 4.3065, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013761682242990653, |
|
"loss": 4.8449, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013738317757009343, |
|
"loss": 4.7368, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013714953271028036, |
|
"loss": 4.3137, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013691588785046729, |
|
"loss": 4.7101, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001366822429906542, |
|
"loss": 4.6089, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001364485981308411, |
|
"loss": 4.5011, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00013621495327102804, |
|
"loss": 4.5113, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013598130841121494, |
|
"loss": 4.2968, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013574766355140187, |
|
"loss": 4.79, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013551401869158877, |
|
"loss": 4.8039, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001352803738317757, |
|
"loss": 4.5847, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001350467289719626, |
|
"loss": 4.553, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013481308411214952, |
|
"loss": 4.4305, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013457943925233642, |
|
"loss": 4.7628, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013434579439252335, |
|
"loss": 4.7811, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013411214953271028, |
|
"loss": 4.6595, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001338785046728972, |
|
"loss": 4.4837, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001336448598130841, |
|
"loss": 4.5001, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013341121495327103, |
|
"loss": 4.6376, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013317757009345793, |
|
"loss": 4.3984, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013294392523364486, |
|
"loss": 4.2217, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013271028037383176, |
|
"loss": 4.3163, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013247663551401869, |
|
"loss": 4.8204, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00013224299065420559, |
|
"loss": 4.4814, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001320093457943925, |
|
"loss": 4.6491, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001317757009345794, |
|
"loss": 4.7224, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00013154205607476634, |
|
"loss": 4.6153, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00013130841121495327, |
|
"loss": 5.1254, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001310747663551402, |
|
"loss": 4.6913, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001308411214953271, |
|
"loss": 4.4284, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00013060747663551402, |
|
"loss": 4.4841, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00013037383177570092, |
|
"loss": 4.3891, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00013014018691588785, |
|
"loss": 5.0009, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00012990654205607475, |
|
"loss": 4.694, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00012967289719626168, |
|
"loss": 5.124, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00012943925233644858, |
|
"loss": 4.7175, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001292056074766355, |
|
"loss": 4.4971, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0001289719626168224, |
|
"loss": 4.7049, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00012873831775700933, |
|
"loss": 4.6269, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00012850467289719626, |
|
"loss": 4.6521, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00012827102803738318, |
|
"loss": 4.3151, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00012803738317757008, |
|
"loss": 4.6416, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.000127803738317757, |
|
"loss": 4.9035, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001275700934579439, |
|
"loss": 4.5872, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012733644859813084, |
|
"loss": 4.9876, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012710280373831774, |
|
"loss": 4.964, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012686915887850467, |
|
"loss": 4.4855, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012663551401869157, |
|
"loss": 4.1817, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001264018691588785, |
|
"loss": 4.8327, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001261682242990654, |
|
"loss": 4.7371, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012593457943925232, |
|
"loss": 4.6397, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012570093457943925, |
|
"loss": 4.7792, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012546728971962618, |
|
"loss": 4.8694, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012523364485981308, |
|
"loss": 4.7286, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.000125, |
|
"loss": 4.8146, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001247663551401869, |
|
"loss": 4.7488, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012453271028037383, |
|
"loss": 4.7155, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012429906542056073, |
|
"loss": 4.9373, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012406542056074766, |
|
"loss": 4.4673, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00012383177570093456, |
|
"loss": 4.3828, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012359813084112148, |
|
"loss": 4.3086, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012336448598130838, |
|
"loss": 4.5156, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0001231308411214953, |
|
"loss": 5.0298, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012289719626168224, |
|
"loss": 4.3251, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012266355140186917, |
|
"loss": 4.6947, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012242990654205607, |
|
"loss": 4.6539, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.000122196261682243, |
|
"loss": 4.3909, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0001219626168224299, |
|
"loss": 5.1204, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012172897196261682, |
|
"loss": 4.6467, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012149532710280372, |
|
"loss": 4.7147, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012126168224299065, |
|
"loss": 4.6279, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012102803738317756, |
|
"loss": 4.6076, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012079439252336448, |
|
"loss": 4.4556, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012056074766355139, |
|
"loss": 4.5697, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012032710280373832, |
|
"loss": 4.3969, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00012009345794392522, |
|
"loss": 4.8635, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00011985981308411214, |
|
"loss": 4.4007, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00011962616822429906, |
|
"loss": 4.4937, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011939252336448597, |
|
"loss": 4.9556, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011915887850467288, |
|
"loss": 4.6646, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011892523364485981, |
|
"loss": 4.7646, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011869158878504671, |
|
"loss": 4.7527, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011845794392523364, |
|
"loss": 4.6587, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011822429906542055, |
|
"loss": 4.5145, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011799065420560747, |
|
"loss": 4.5064, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011775700934579438, |
|
"loss": 4.7615, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001175233644859813, |
|
"loss": 4.4567, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001172897196261682, |
|
"loss": 4.5432, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011705607476635513, |
|
"loss": 4.5347, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011682242990654205, |
|
"loss": 4.7786, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011658878504672896, |
|
"loss": 4.5019, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011635514018691587, |
|
"loss": 4.5446, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001161214953271028, |
|
"loss": 4.5309, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001158878504672897, |
|
"loss": 4.6369, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011565420560747663, |
|
"loss": 5.2667, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00011542056074766354, |
|
"loss": 6.2945, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011518691588785046, |
|
"loss": 4.3663, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011495327102803737, |
|
"loss": 4.3191, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001147196261682243, |
|
"loss": 4.1791, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001144859813084112, |
|
"loss": 4.237, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011425233644859812, |
|
"loss": 4.0311, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011401869158878504, |
|
"loss": 6.0924, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011378504672897195, |
|
"loss": 4.2445, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011355140186915887, |
|
"loss": 4.8752, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011331775700934579, |
|
"loss": 4.7493, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011308411214953269, |
|
"loss": 4.6395, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011285046728971962, |
|
"loss": 4.5663, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011261682242990653, |
|
"loss": 4.4646, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011238317757009345, |
|
"loss": 4.6283, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011214953271028036, |
|
"loss": 4.5374, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011191588785046729, |
|
"loss": 4.2707, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011168224299065419, |
|
"loss": 4.4542, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011144859813084112, |
|
"loss": 4.3583, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00011121495327102803, |
|
"loss": 4.3053, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00011098130841121494, |
|
"loss": 4.7099, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00011074766355140186, |
|
"loss": 4.3973, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00011051401869158878, |
|
"loss": 4.3032, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00011028037383177568, |
|
"loss": 4.8801, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00011004672897196261, |
|
"loss": 4.7108, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010981308411214952, |
|
"loss": 4.3154, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010957943925233644, |
|
"loss": 5.1603, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010934579439252335, |
|
"loss": 4.4641, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010911214953271028, |
|
"loss": 4.1332, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010887850467289718, |
|
"loss": 4.2032, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0001086448598130841, |
|
"loss": 4.4041, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010841121495327102, |
|
"loss": 4.5958, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010817757009345793, |
|
"loss": 4.3467, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010794392523364485, |
|
"loss": 4.6209, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010771028037383177, |
|
"loss": 4.2929, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010747663551401867, |
|
"loss": 4.1587, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0001072429906542056, |
|
"loss": 4.7793, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00010700934579439251, |
|
"loss": 4.7722, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010677570093457943, |
|
"loss": 4.8571, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010654205607476634, |
|
"loss": 4.2917, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010630841121495327, |
|
"loss": 4.9375, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010607476635514017, |
|
"loss": 4.5551, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001058411214953271, |
|
"loss": 4.6761, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010560747663551401, |
|
"loss": 4.3065, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010537383177570092, |
|
"loss": 4.6009, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010514018691588784, |
|
"loss": 4.7881, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010490654205607476, |
|
"loss": 4.6032, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010467289719626166, |
|
"loss": 4.806, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010443925233644859, |
|
"loss": 5.0689, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001042056074766355, |
|
"loss": 4.8513, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010397196261682242, |
|
"loss": 4.7286, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010373831775700933, |
|
"loss": 4.6413, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010350467289719626, |
|
"loss": 5.067, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010327102803738316, |
|
"loss": 4.7387, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00010303738317757009, |
|
"loss": 4.5827, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.000102803738317757, |
|
"loss": 5.3836, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010257009345794391, |
|
"loss": 4.4282, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010233644859813083, |
|
"loss": 4.5781, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010210280373831776, |
|
"loss": 4.6178, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010186915887850466, |
|
"loss": 4.6899, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010163551401869158, |
|
"loss": 4.6505, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0001014018691588785, |
|
"loss": 4.6107, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010116822429906541, |
|
"loss": 4.8563, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010093457943925232, |
|
"loss": 4.4199, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010070093457943925, |
|
"loss": 4.7724, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010046728971962615, |
|
"loss": 4.1903, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00010023364485981308, |
|
"loss": 4.6997, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.999999999999999e-05, |
|
"loss": 4.6855, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.97663551401869e-05, |
|
"loss": 4.997, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.953271028037382e-05, |
|
"loss": 4.4632, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.929906542056075e-05, |
|
"loss": 4.4651, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.906542056074765e-05, |
|
"loss": 4.2949, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.883177570093457e-05, |
|
"loss": 4.9577, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.859813084112149e-05, |
|
"loss": 4.8504, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.83644859813084e-05, |
|
"loss": 4.3042, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.813084112149531e-05, |
|
"loss": 4.581, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.789719626168224e-05, |
|
"loss": 4.3119, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.766355140186914e-05, |
|
"loss": 4.6041, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.742990654205607e-05, |
|
"loss": 4.6708, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.719626168224298e-05, |
|
"loss": 4.7258, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.69626168224299e-05, |
|
"loss": 4.2403, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.672897196261681e-05, |
|
"loss": 4.9588, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.649532710280374e-05, |
|
"loss": 4.6089, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.626168224299064e-05, |
|
"loss": 4.4039, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.602803738317756e-05, |
|
"loss": 4.3359, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.579439252336448e-05, |
|
"loss": 4.6864, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.556074766355139e-05, |
|
"loss": 4.5197, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.53271028037383e-05, |
|
"loss": 4.4822, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.509345794392523e-05, |
|
"loss": 4.4383, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.485981308411213e-05, |
|
"loss": 4.0791, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.462616822429906e-05, |
|
"loss": 4.5552, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.439252336448597e-05, |
|
"loss": 4.4294, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.415887850467289e-05, |
|
"loss": 4.6212, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.39252336448598e-05, |
|
"loss": 4.8217, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.369158878504673e-05, |
|
"loss": 4.4626, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.345794392523363e-05, |
|
"loss": 4.9503, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.322429906542055e-05, |
|
"loss": 4.6515, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.299065420560747e-05, |
|
"loss": 4.4508, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.275700934579438e-05, |
|
"loss": 4.7594, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.25233644859813e-05, |
|
"loss": 4.2903, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.228971962616822e-05, |
|
"loss": 4.349, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.205607476635512e-05, |
|
"loss": 4.4674, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.182242990654205e-05, |
|
"loss": 4.6315, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.158878504672896e-05, |
|
"loss": 4.9254, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.135514018691588e-05, |
|
"loss": 4.3552, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.112149532710279e-05, |
|
"loss": 4.8721, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.088785046728972e-05, |
|
"loss": 4.47, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.065420560747662e-05, |
|
"loss": 4.5815, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.042056074766354e-05, |
|
"loss": 4.7547, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.018691588785046e-05, |
|
"loss": 4.2618, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.995327102803737e-05, |
|
"loss": 4.7009, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.971962616822429e-05, |
|
"loss": 4.339, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.948598130841121e-05, |
|
"loss": 4.5113, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.925233644859811e-05, |
|
"loss": 4.2392, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.901869158878504e-05, |
|
"loss": 5.1986, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.878504672897195e-05, |
|
"loss": 4.5835, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.855140186915887e-05, |
|
"loss": 4.4664, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.831775700934578e-05, |
|
"loss": 4.4967, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.808411214953271e-05, |
|
"loss": 4.3904, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.785046728971961e-05, |
|
"loss": 4.1992, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.761682242990654e-05, |
|
"loss": 4.2952, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.738317757009345e-05, |
|
"loss": 4.7417, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.714953271028036e-05, |
|
"loss": 4.9638, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.691588785046728e-05, |
|
"loss": 4.522, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.66822429906542e-05, |
|
"loss": 4.6829, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.64485981308411e-05, |
|
"loss": 4.6061, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.621495327102803e-05, |
|
"loss": 4.6179, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.598130841121494e-05, |
|
"loss": 4.8331, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.574766355140186e-05, |
|
"loss": 4.3792, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.551401869158877e-05, |
|
"loss": 4.5551, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.52803738317757e-05, |
|
"loss": 4.7392, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.50467289719626e-05, |
|
"loss": 4.3609, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.481308411214953e-05, |
|
"loss": 4.4721, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.457943925233644e-05, |
|
"loss": 5.0827, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.434579439252335e-05, |
|
"loss": 4.5881, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.411214953271027e-05, |
|
"loss": 4.2625, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.38785046728972e-05, |
|
"loss": 4.5462, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.36448598130841e-05, |
|
"loss": 4.6827, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.341121495327102e-05, |
|
"loss": 4.9506, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.317757009345794e-05, |
|
"loss": 5.4046, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.294392523364485e-05, |
|
"loss": 4.555, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.271028037383176e-05, |
|
"loss": 5.0098, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.247663551401869e-05, |
|
"loss": 4.2566, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.224299065420559e-05, |
|
"loss": 4.4053, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.200934579439252e-05, |
|
"loss": 4.2827, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.177570093457943e-05, |
|
"loss": 4.6128, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.154205607476634e-05, |
|
"loss": 4.4264, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.130841121495326e-05, |
|
"loss": 4.6697, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.107476635514018e-05, |
|
"loss": 4.5781, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.084112149532708e-05, |
|
"loss": 5.115, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.060747663551401e-05, |
|
"loss": 4.3891, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.037383177570093e-05, |
|
"loss": 4.3513, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.014018691588784e-05, |
|
"loss": 4.3578, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.990654205607475e-05, |
|
"loss": 4.5404, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.967289719626168e-05, |
|
"loss": 4.839, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.943925233644858e-05, |
|
"loss": 4.5189, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.920560747663551e-05, |
|
"loss": 4.6335, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.897196261682242e-05, |
|
"loss": 4.8941, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.873831775700933e-05, |
|
"loss": 4.4223, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.850467289719625e-05, |
|
"loss": 4.3988, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.827102803738318e-05, |
|
"loss": 4.5387, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.803738317757008e-05, |
|
"loss": 4.3465, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.7803738317757e-05, |
|
"loss": 4.1828, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.757009345794392e-05, |
|
"loss": 4.3629, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.733644859813083e-05, |
|
"loss": 4.4213, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.710280373831774e-05, |
|
"loss": 4.9567, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.686915887850467e-05, |
|
"loss": 4.5918, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.663551401869157e-05, |
|
"loss": 4.6807, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.64018691588785e-05, |
|
"loss": 4.9541, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.616822429906541e-05, |
|
"loss": 4.5968, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.593457943925233e-05, |
|
"loss": 4.6322, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.570093457943924e-05, |
|
"loss": 4.4, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.546728971962617e-05, |
|
"loss": 4.4597, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.523364485981307e-05, |
|
"loss": 4.8758, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.5e-05, |
|
"loss": 4.3989, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.476635514018691e-05, |
|
"loss": 4.3877, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.453271028037382e-05, |
|
"loss": 4.5695, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.429906542056073e-05, |
|
"loss": 4.3639, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.406542056074766e-05, |
|
"loss": 4.1633, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.383177570093458e-05, |
|
"loss": 4.5091, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.359813084112149e-05, |
|
"loss": 4.1899, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.33644859813084e-05, |
|
"loss": 4.3316, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.313084112149532e-05, |
|
"loss": 4.2494, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.289719626168223e-05, |
|
"loss": 4.4593, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.266355140186916e-05, |
|
"loss": 4.6835, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.242990654205607e-05, |
|
"loss": 4.5329, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.219626168224298e-05, |
|
"loss": 4.0649, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.19626168224299e-05, |
|
"loss": 4.4632, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.172897196261681e-05, |
|
"loss": 4.3951, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.149532710280372e-05, |
|
"loss": 4.4505, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.126168224299065e-05, |
|
"loss": 4.9738, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.102803738317757e-05, |
|
"loss": 4.4743, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.079439252336448e-05, |
|
"loss": 4.11, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.056074766355139e-05, |
|
"loss": 4.3696, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.03271028037383e-05, |
|
"loss": 4.7327, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.009345794392522e-05, |
|
"loss": 4.6291, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.985981308411215e-05, |
|
"loss": 4.668, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.962616822429906e-05, |
|
"loss": 4.5731, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.939252336448597e-05, |
|
"loss": 4.2246, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.915887850467289e-05, |
|
"loss": 4.8637, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.89252336448598e-05, |
|
"loss": 4.5471, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.869158878504672e-05, |
|
"loss": 4.011, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.845794392523364e-05, |
|
"loss": 4.3462, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.822429906542056e-05, |
|
"loss": 4.3227, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.799065420560747e-05, |
|
"loss": 5.5984, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.775700934579438e-05, |
|
"loss": 4.7689, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.75233644859813e-05, |
|
"loss": 4.7399, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.728971962616821e-05, |
|
"loss": 4.2169, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.705607476635514e-05, |
|
"loss": 4.5932, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_loss": 4.684563636779785, |
|
"eval_runtime": 902.9412, |
|
"eval_samples_per_second": 2.926, |
|
"eval_steps_per_second": 0.367, |
|
"eval_wer": 1.962951209837366, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.682242990654205e-05, |
|
"loss": 4.5794, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.658878504672897e-05, |
|
"loss": 4.408, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.635514018691588e-05, |
|
"loss": 4.5533, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.612149532710279e-05, |
|
"loss": 4.6865, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.58878504672897e-05, |
|
"loss": 4.7087, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.565420560747663e-05, |
|
"loss": 4.5525, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.542056074766355e-05, |
|
"loss": 4.3097, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.518691588785046e-05, |
|
"loss": 4.4776, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.495327102803737e-05, |
|
"loss": 4.4956, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.471962616822429e-05, |
|
"loss": 4.4221, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.44859813084112e-05, |
|
"loss": 4.2104, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.425233644859813e-05, |
|
"loss": 4.4226, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.401869158878504e-05, |
|
"loss": 4.1319, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.378504672897196e-05, |
|
"loss": 4.3302, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.355140186915887e-05, |
|
"loss": 4.3782, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.331775700934578e-05, |
|
"loss": 4.7651, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.30841121495327e-05, |
|
"loss": 4.2555, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.285046728971962e-05, |
|
"loss": 4.5066, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.261682242990654e-05, |
|
"loss": 4.6007, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.238317757009345e-05, |
|
"loss": 4.5272, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.214953271028036e-05, |
|
"loss": 4.4501, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.191588785046728e-05, |
|
"loss": 4.6648, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.168224299065419e-05, |
|
"loss": 4.8737, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.144859813084112e-05, |
|
"loss": 4.248, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.121495327102803e-05, |
|
"loss": 4.2556, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.098130841121495e-05, |
|
"loss": 4.1853, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.074766355140186e-05, |
|
"loss": 4.3537, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.051401869158878e-05, |
|
"loss": 4.3641, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.0280373831775694e-05, |
|
"loss": 4.1877, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.004672897196261e-05, |
|
"loss": 4.3508, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.981308411214953e-05, |
|
"loss": 4.3853, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.957943925233644e-05, |
|
"loss": 4.3307, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.9345794392523356e-05, |
|
"loss": 4.8142, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.9112149532710276e-05, |
|
"loss": 4.3588, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.887850467289719e-05, |
|
"loss": 4.4076, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.86448598130841e-05, |
|
"loss": 4.09, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.8411214953271024e-05, |
|
"loss": 4.7277, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.817757009345794e-05, |
|
"loss": 4.655, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.794392523364485e-05, |
|
"loss": 4.3764, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.771028037383177e-05, |
|
"loss": 4.5638, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.7476635514018685e-05, |
|
"loss": 4.7757, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.72429906542056e-05, |
|
"loss": 4.1836, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.700934579439252e-05, |
|
"loss": 4.6114, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.677570093457943e-05, |
|
"loss": 4.7632, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.6542056074766346e-05, |
|
"loss": 5.0162, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.630841121495327e-05, |
|
"loss": 4.1637, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.607476635514018e-05, |
|
"loss": 4.6761, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.5841121495327094e-05, |
|
"loss": 4.6081, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.5607476635514014e-05, |
|
"loss": 4.9931, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.537383177570093e-05, |
|
"loss": 4.278, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.514018691588784e-05, |
|
"loss": 4.6468, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.490654205607476e-05, |
|
"loss": 4.4786, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.4672897196261676e-05, |
|
"loss": 4.3608, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.443925233644859e-05, |
|
"loss": 4.3711, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.420560747663551e-05, |
|
"loss": 4.5756, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.397196261682242e-05, |
|
"loss": 4.9567, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.373831775700934e-05, |
|
"loss": 4.5372, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.350467289719626e-05, |
|
"loss": 4.6352, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.327102803738317e-05, |
|
"loss": 4.356, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.3037383177570085e-05, |
|
"loss": 4.6309, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.2803738317757005e-05, |
|
"loss": 4.444, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.257009345794392e-05, |
|
"loss": 4.3915, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.233644859813083e-05, |
|
"loss": 4.557, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.210280373831775e-05, |
|
"loss": 4.4178, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.1869158878504666e-05, |
|
"loss": 4.7344, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.163551401869158e-05, |
|
"loss": 4.2944, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.14018691588785e-05, |
|
"loss": 4.0712, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.1168224299065414e-05, |
|
"loss": 4.4221, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.093457943925233e-05, |
|
"loss": 4.4791, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.070093457943925e-05, |
|
"loss": 4.3037, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.046728971962616e-05, |
|
"loss": 4.3704, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.0233644859813075e-05, |
|
"loss": 4.6434, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.9999999999999996e-05, |
|
"loss": 4.5905, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.976635514018691e-05, |
|
"loss": 4.2027, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.953271028037382e-05, |
|
"loss": 4.2243, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.929906542056074e-05, |
|
"loss": 4.4988, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.906542056074766e-05, |
|
"loss": 4.4496, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.883177570093457e-05, |
|
"loss": 4.7679, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.859813084112149e-05, |
|
"loss": 4.5786, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.8364485981308405e-05, |
|
"loss": 4.9594, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.813084112149532e-05, |
|
"loss": 4.5915, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.789719626168224e-05, |
|
"loss": 4.5056, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.766355140186915e-05, |
|
"loss": 4.3929, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.7429906542056066e-05, |
|
"loss": 4.2318, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.7196261682242986e-05, |
|
"loss": 4.3119, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.69626168224299e-05, |
|
"loss": 4.3862, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.6728971962616814e-05, |
|
"loss": 4.2969, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.6495327102803734e-05, |
|
"loss": 4.4787, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.626168224299065e-05, |
|
"loss": 4.3504, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.602803738317756e-05, |
|
"loss": 4.2523, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.579439252336448e-05, |
|
"loss": 4.3615, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.5560747663551395e-05, |
|
"loss": 4.2794, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.532710280373831e-05, |
|
"loss": 4.291, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.509345794392523e-05, |
|
"loss": 4.3676, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.485981308411214e-05, |
|
"loss": 4.4046, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.4626168224299057e-05, |
|
"loss": 4.5139, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.439252336448598e-05, |
|
"loss": 4.57, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.415887850467289e-05, |
|
"loss": 4.6402, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.3925233644859804e-05, |
|
"loss": 5.634, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.3691588785046725e-05, |
|
"loss": 4.7402, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.345794392523364e-05, |
|
"loss": 4.4763, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.322429906542055e-05, |
|
"loss": 4.1862, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.299065420560747e-05, |
|
"loss": 4.3362, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.2757009345794386e-05, |
|
"loss": 4.5014, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.25233644859813e-05, |
|
"loss": 4.155, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.228971962616822e-05, |
|
"loss": 4.4603, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.2056074766355134e-05, |
|
"loss": 4.5002, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.182242990654205e-05, |
|
"loss": 4.3655, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.158878504672897e-05, |
|
"loss": 4.5439, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.135514018691588e-05, |
|
"loss": 4.2401, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.1121495327102795e-05, |
|
"loss": 4.6235, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.0887850467289715e-05, |
|
"loss": 4.4492, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.065420560747663e-05, |
|
"loss": 4.6652, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.042056074766354e-05, |
|
"loss": 4.2725, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.018691588785046e-05, |
|
"loss": 4.3769, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.9953271028037377e-05, |
|
"loss": 4.3812, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.971962616822429e-05, |
|
"loss": 4.5223, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.948598130841121e-05, |
|
"loss": 4.3296, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.9252336448598124e-05, |
|
"loss": 4.3042, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.901869158878504e-05, |
|
"loss": 4.3898, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.878504672897196e-05, |
|
"loss": 4.8097, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.855140186915887e-05, |
|
"loss": 4.096, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8317757009345785e-05, |
|
"loss": 4.4866, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8084112149532706e-05, |
|
"loss": 4.2791, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.785046728971962e-05, |
|
"loss": 4.4026, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.761682242990653e-05, |
|
"loss": 4.6418, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.7383177570093454e-05, |
|
"loss": 4.4598, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.714953271028037e-05, |
|
"loss": 4.2184, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.691588785046729e-05, |
|
"loss": 4.4338, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.66822429906542e-05, |
|
"loss": 4.3216, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.6448598130841115e-05, |
|
"loss": 4.281, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.6214953271028035e-05, |
|
"loss": 4.4902, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.598130841121495e-05, |
|
"loss": 4.5964, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.574766355140186e-05, |
|
"loss": 4.2606, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.551401869158878e-05, |
|
"loss": 4.4497, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.5280373831775697e-05, |
|
"loss": 4.3028, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.504672897196261e-05, |
|
"loss": 4.2589, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.481308411214953e-05, |
|
"loss": 4.941, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4579439252336444e-05, |
|
"loss": 4.2086, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.434579439252336e-05, |
|
"loss": 4.643, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.411214953271028e-05, |
|
"loss": 4.5205, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.387850467289719e-05, |
|
"loss": 4.4125, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.3644859813084105e-05, |
|
"loss": 4.4828, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.3411214953271026e-05, |
|
"loss": 4.4454, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.317757009345794e-05, |
|
"loss": 4.7431, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.294392523364485e-05, |
|
"loss": 4.7787, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.2710280373831774e-05, |
|
"loss": 4.4746, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.247663551401869e-05, |
|
"loss": 5.2549, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.22429906542056e-05, |
|
"loss": 4.6796, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.200934579439252e-05, |
|
"loss": 4.4343, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.1775700934579435e-05, |
|
"loss": 4.4162, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.154205607476635e-05, |
|
"loss": 4.3306, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.130841121495327e-05, |
|
"loss": 4.4044, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.107476635514018e-05, |
|
"loss": 4.3585, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.0841121495327096e-05, |
|
"loss": 4.2039, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.0607476635514017e-05, |
|
"loss": 4.3417, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.037383177570093e-05, |
|
"loss": 4.5688, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.0140186915887847e-05, |
|
"loss": 4.3957, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9906542056074764e-05, |
|
"loss": 4.204, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9672897196261678e-05, |
|
"loss": 4.3833, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9439252336448595e-05, |
|
"loss": 4.4503, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9205607476635512e-05, |
|
"loss": 4.392, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8971962616822425e-05, |
|
"loss": 4.0731, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8738317757009342e-05, |
|
"loss": 4.4957, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.850467289719626e-05, |
|
"loss": 4.1702, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8271028037383173e-05, |
|
"loss": 4.4144, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.803738317757009e-05, |
|
"loss": 4.5028, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.7803738317757007e-05, |
|
"loss": 4.1839, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.757009345794392e-05, |
|
"loss": 4.2156, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.7336448598130838e-05, |
|
"loss": 4.4568, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.7102803738317755e-05, |
|
"loss": 4.4087, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.686915887850467e-05, |
|
"loss": 4.6661, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.6635514018691585e-05, |
|
"loss": 4.2527, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.6401869158878502e-05, |
|
"loss": 4.4357, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.6168224299065416e-05, |
|
"loss": 4.6565, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.5934579439252333e-05, |
|
"loss": 4.206, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.570093457943925e-05, |
|
"loss": 4.1303, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.5467289719626164e-05, |
|
"loss": 4.337, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.523364485981308e-05, |
|
"loss": 4.6861, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.4999999999999998e-05, |
|
"loss": 4.8476, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.476635514018691e-05, |
|
"loss": 4.0134, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.453271028037383e-05, |
|
"loss": 4.098, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.4299065420560745e-05, |
|
"loss": 4.5636, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.406542056074766e-05, |
|
"loss": 4.1809, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.3831775700934576e-05, |
|
"loss": 4.4536, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.3598130841121493e-05, |
|
"loss": 4.3799, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.3364485981308407e-05, |
|
"loss": 4.2094, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.3130841121495324e-05, |
|
"loss": 4.472, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.289719626168224e-05, |
|
"loss": 4.4078, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.2663551401869154e-05, |
|
"loss": 4.26, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.242990654205607e-05, |
|
"loss": 4.6382, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.219626168224299e-05, |
|
"loss": 4.4575, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.1962616822429902e-05, |
|
"loss": 4.6798, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.172897196261682e-05, |
|
"loss": 4.7406, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.1495327102803736e-05, |
|
"loss": 4.695, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.126168224299065e-05, |
|
"loss": 7.1009, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.1028037383177567e-05, |
|
"loss": 4.4314, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.0794392523364484e-05, |
|
"loss": 4.8768, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.0560747663551397e-05, |
|
"loss": 4.6938, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.0327102803738314e-05, |
|
"loss": 4.7121, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.009345794392523e-05, |
|
"loss": 5.2045, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.9859813084112145e-05, |
|
"loss": 4.4894, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.9626168224299062e-05, |
|
"loss": 4.1926, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.939252336448598e-05, |
|
"loss": 4.3933, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9158878504672893e-05, |
|
"loss": 4.382, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.892523364485981e-05, |
|
"loss": 4.2146, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.8691588785046727e-05, |
|
"loss": 4.1402, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.8457943925233644e-05, |
|
"loss": 4.6311, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.8224299065420557e-05, |
|
"loss": 4.8325, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7990654205607474e-05, |
|
"loss": 4.5718, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.775700934579439e-05, |
|
"loss": 4.2275, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7523364485981305e-05, |
|
"loss": 4.4351, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7289719626168222e-05, |
|
"loss": 4.9079, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.705607476635514e-05, |
|
"loss": 4.3129, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6822429906542053e-05, |
|
"loss": 4.3238, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.658878504672897e-05, |
|
"loss": 4.6123, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6355140186915887e-05, |
|
"loss": 4.749, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.61214953271028e-05, |
|
"loss": 4.8785, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5887850467289717e-05, |
|
"loss": 4.2084, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5654205607476634e-05, |
|
"loss": 4.2968, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5420560747663548e-05, |
|
"loss": 4.5638, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5186915887850465e-05, |
|
"loss": 4.5775, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4953271028037382e-05, |
|
"loss": 4.7003, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4719626168224297e-05, |
|
"loss": 4.3962, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4485981308411213e-05, |
|
"loss": 4.3603, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.425233644859813e-05, |
|
"loss": 4.31, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4018691588785045e-05, |
|
"loss": 4.7106, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.378504672897196e-05, |
|
"loss": 4.4806, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.3551401869158877e-05, |
|
"loss": 4.6771, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.3317757009345793e-05, |
|
"loss": 4.443, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.3084112149532708e-05, |
|
"loss": 4.0589, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.2850467289719625e-05, |
|
"loss": 4.2644, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.261682242990654e-05, |
|
"loss": 4.3072, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.2383177570093456e-05, |
|
"loss": 4.4704, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.2149532710280373e-05, |
|
"loss": 4.9955, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.1915887850467288e-05, |
|
"loss": 4.892, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.1682242990654203e-05, |
|
"loss": 4.0713, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.144859813084112e-05, |
|
"loss": 4.4093, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.1214953271028036e-05, |
|
"loss": 4.3263, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0981308411214951e-05, |
|
"loss": 4.2525, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0747663551401868e-05, |
|
"loss": 4.4634, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0514018691588783e-05, |
|
"loss": 4.4893, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0280373831775699e-05, |
|
"loss": 4.218, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0046728971962616e-05, |
|
"loss": 4.2345, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.813084112149531e-06, |
|
"loss": 4.4428, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.579439252336446e-06, |
|
"loss": 4.5344, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.345794392523363e-06, |
|
"loss": 4.4899, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.112149532710279e-06, |
|
"loss": 4.2383, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.878504672897196e-06, |
|
"loss": 4.4987, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.644859813084111e-06, |
|
"loss": 4.5145, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.411214953271026e-06, |
|
"loss": 4.0823, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.177570093457943e-06, |
|
"loss": 4.2956, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.943925233644859e-06, |
|
"loss": 4.5371, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.710280373831774e-06, |
|
"loss": 4.7801, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.476635514018691e-06, |
|
"loss": 4.5643, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.242990654205606e-06, |
|
"loss": 4.3877, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.0093457943925225e-06, |
|
"loss": 4.2976, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.775700934579439e-06, |
|
"loss": 4.2241, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.542056074766354e-06, |
|
"loss": 4.6761, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.30841121495327e-06, |
|
"loss": 4.3976, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.074766355140186e-06, |
|
"loss": 4.8187, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.841121495327102e-06, |
|
"loss": 4.203, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.607476635514018e-06, |
|
"loss": 4.2129, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.373831775700934e-06, |
|
"loss": 4.661, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.140186915887849e-06, |
|
"loss": 4.1663, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.9065420560747655e-06, |
|
"loss": 4.3171, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.672897196261682e-06, |
|
"loss": 4.3728, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.439252336448598e-06, |
|
"loss": 4.2334, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.205607476635513e-06, |
|
"loss": 4.537, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.971962616822429e-06, |
|
"loss": 4.1671, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.7383177570093455e-06, |
|
"loss": 4.2857, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.5046728971962613e-06, |
|
"loss": 4.2345, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.271028037383177e-06, |
|
"loss": 4.3355, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.037383177570093e-06, |
|
"loss": 4.3151, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.803738317757009e-06, |
|
"loss": 4.285, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.5700934579439247e-06, |
|
"loss": 4.3268, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.336448598130841e-06, |
|
"loss": 4.5804, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.1028037383177566e-06, |
|
"loss": 4.3359, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.8691588785046728e-06, |
|
"loss": 4.8936, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.6355140186915885e-06, |
|
"loss": 4.5387, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.4018691588785045e-06, |
|
"loss": 4.8575, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.1682242990654204e-06, |
|
"loss": 4.3431, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.345794392523364e-07, |
|
"loss": 4.9246, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.009345794392522e-07, |
|
"loss": 3.7122, |
|
"step": 1784 |
|
}, |
|
    {
      "epoch": 1.0,
      "step": 1784,
      "total_flos": 0.0,
      "train_loss": 4.771062652893665,
      "train_runtime": 8121.4369,
      "train_samples_per_second": 3.514,
      "train_steps_per_second": 0.22
    }
  ],
  "max_steps": 1784,
  "num_train_epochs": 1,
  "total_flos": 0.0,
  "trial_name": null,
  "trial_params": null
}