|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9987389659520807, |
|
"global_step": 594, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 4.8251, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6e-07, |
|
"loss": 4.9877, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2e-06, |
|
"loss": 4.8754, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.8e-06, |
|
"loss": 4.8216, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.4e-06, |
|
"loss": 4.7633, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.9999999999999997e-06, |
|
"loss": 4.7562, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6e-06, |
|
"loss": 4.8158, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2e-06, |
|
"loss": 4.6936, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8e-06, |
|
"loss": 4.7747, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.399999999999999e-06, |
|
"loss": 4.6495, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.999999999999999e-06, |
|
"loss": 4.6263, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.599999999999999e-06, |
|
"loss": 4.6435, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.2e-06, |
|
"loss": 4.5393, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.799999999999998e-06, |
|
"loss": 4.4934, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.4e-06, |
|
"loss": 4.3827, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.999999999999999e-06, |
|
"loss": 4.528, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.6e-06, |
|
"loss": 4.5121, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.02e-05, |
|
"loss": 4.5744, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0799999999999998e-05, |
|
"loss": 4.3183, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.14e-05, |
|
"loss": 4.5438, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.1999999999999999e-05, |
|
"loss": 4.4622, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.26e-05, |
|
"loss": 4.4327, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3199999999999997e-05, |
|
"loss": 4.4276, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3799999999999998e-05, |
|
"loss": 4.4719, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.44e-05, |
|
"loss": 4.2464, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4999999999999999e-05, |
|
"loss": 4.664, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5599999999999996e-05, |
|
"loss": 4.3188, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6199999999999997e-05, |
|
"loss": 4.202, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.68e-05, |
|
"loss": 4.226, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.74e-05, |
|
"loss": 4.3916, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.7999999999999997e-05, |
|
"loss": 4.4153, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8599999999999998e-05, |
|
"loss": 4.2902, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.92e-05, |
|
"loss": 4.2797, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.98e-05, |
|
"loss": 4.2387, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.04e-05, |
|
"loss": 4.2934, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.1e-05, |
|
"loss": 4.4521, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.1599999999999996e-05, |
|
"loss": 4.2516, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.2199999999999998e-05, |
|
"loss": 4.4088, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.28e-05, |
|
"loss": 4.5498, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.34e-05, |
|
"loss": 4.2121, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.3999999999999997e-05, |
|
"loss": 4.2337, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.4599999999999998e-05, |
|
"loss": 4.4752, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.52e-05, |
|
"loss": 4.1934, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.5799999999999997e-05, |
|
"loss": 4.3419, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.6399999999999995e-05, |
|
"loss": 4.4241, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.6999999999999996e-05, |
|
"loss": 4.4737, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.7599999999999997e-05, |
|
"loss": 4.3846, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 4.7292, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 4.7684, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.88e-05, |
|
"loss": 4.4745, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.94e-05, |
|
"loss": 4.193, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.9999999999999997e-05, |
|
"loss": 4.2115, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.06e-05, |
|
"loss": 4.2324, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.119999999999999e-05, |
|
"loss": 4.3067, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.1799999999999994e-05, |
|
"loss": 4.167, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.2399999999999995e-05, |
|
"loss": 4.3703, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.2999999999999996e-05, |
|
"loss": 4.1812, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.36e-05, |
|
"loss": 4.4778, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.42e-05, |
|
"loss": 4.2518, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.48e-05, |
|
"loss": 4.0458, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.539999999999999e-05, |
|
"loss": 4.2996, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.5999999999999994e-05, |
|
"loss": 4.3556, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.6599999999999995e-05, |
|
"loss": 4.2308, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.7199999999999996e-05, |
|
"loss": 4.2086, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.78e-05, |
|
"loss": 4.2349, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.84e-05, |
|
"loss": 4.1813, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.9e-05, |
|
"loss": 4.1444, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.96e-05, |
|
"loss": 4.1388, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.02e-05, |
|
"loss": 4.2267, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.08e-05, |
|
"loss": 4.2124, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.14e-05, |
|
"loss": 4.3128, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.2e-05, |
|
"loss": 4.2821, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.259999999999999e-05, |
|
"loss": 4.0836, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.319999999999999e-05, |
|
"loss": 4.2887, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.3799999999999994e-05, |
|
"loss": 4.2636, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.4399999999999995e-05, |
|
"loss": 4.2405, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.4999999999999996e-05, |
|
"loss": 4.2182, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.56e-05, |
|
"loss": 4.2002, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.62e-05, |
|
"loss": 4.1308, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.68e-05, |
|
"loss": 4.3264, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7399999999999993e-05, |
|
"loss": 4.2728, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7999999999999994e-05, |
|
"loss": 4.2262, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.8599999999999995e-05, |
|
"loss": 4.3323, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9199999999999997e-05, |
|
"loss": 4.2886, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.98e-05, |
|
"loss": 4.2094, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 5.04e-05, |
|
"loss": 4.1616, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.1e-05, |
|
"loss": 4.4014, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.1599999999999994e-05, |
|
"loss": 4.2411, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.2199999999999995e-05, |
|
"loss": 4.2378, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.279999999999999e-05, |
|
"loss": 4.2782, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.339999999999999e-05, |
|
"loss": 4.2438, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.399999999999999e-05, |
|
"loss": 4.3678, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.459999999999999e-05, |
|
"loss": 4.2209, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.519999999999999e-05, |
|
"loss": 4.4583, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.5799999999999994e-05, |
|
"loss": 4.3543, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 4.3987, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 4.3792, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.76e-05, |
|
"loss": 4.2989, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.82e-05, |
|
"loss": 4.5377, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.88e-05, |
|
"loss": 4.1863, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.94e-05, |
|
"loss": 4.3579, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.9999999999999995e-05, |
|
"loss": 4.1935, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.0599999999999996e-05, |
|
"loss": 4.206, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.12e-05, |
|
"loss": 4.1263, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.18e-05, |
|
"loss": 4.1788, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.239999999999999e-05, |
|
"loss": 4.1409, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.299999999999999e-05, |
|
"loss": 4.2692, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.359999999999999e-05, |
|
"loss": 4.3054, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.419999999999999e-05, |
|
"loss": 4.4333, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.479999999999999e-05, |
|
"loss": 4.2029, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.539999999999999e-05, |
|
"loss": 4.2235, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.599999999999999e-05, |
|
"loss": 4.256, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.659999999999999e-05, |
|
"loss": 4.2251, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.72e-05, |
|
"loss": 4.2755, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.78e-05, |
|
"loss": 4.269, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.84e-05, |
|
"loss": 4.2454, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.9e-05, |
|
"loss": 4.3376, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.96e-05, |
|
"loss": 4.1226, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.02e-05, |
|
"loss": 4.1287, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.079999999999999e-05, |
|
"loss": 4.1267, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.139999999999999e-05, |
|
"loss": 4.2983, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.199999999999999e-05, |
|
"loss": 4.4717, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.259999999999999e-05, |
|
"loss": 4.193, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.319999999999999e-05, |
|
"loss": 4.2596, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.379999999999999e-05, |
|
"loss": 4.2809, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.439999999999999e-05, |
|
"loss": 4.1158, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.5e-05, |
|
"loss": 4.1375, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.56e-05, |
|
"loss": 4.1286, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.62e-05, |
|
"loss": 4.2348, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.68e-05, |
|
"loss": 4.2981, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.74e-05, |
|
"loss": 4.2467, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.8e-05, |
|
"loss": 4.2083, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.86e-05, |
|
"loss": 4.2554, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.92e-05, |
|
"loss": 4.3453, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.98e-05, |
|
"loss": 4.3405, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.04e-05, |
|
"loss": 4.2022, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.1e-05, |
|
"loss": 4.182, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.16e-05, |
|
"loss": 4.284, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.22e-05, |
|
"loss": 4.224, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.28e-05, |
|
"loss": 4.4008, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.34e-05, |
|
"loss": 4.2829, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.4e-05, |
|
"loss": 4.3976, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.459999999999998e-05, |
|
"loss": 4.4763, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.519999999999998e-05, |
|
"loss": 4.3198, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.579999999999998e-05, |
|
"loss": 4.3517, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.639999999999999e-05, |
|
"loss": 4.7129, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.699999999999999e-05, |
|
"loss": 4.6215, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.759999999999999e-05, |
|
"loss": 4.5996, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.819999999999999e-05, |
|
"loss": 4.6987, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.879999999999999e-05, |
|
"loss": 4.577, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.939999999999999e-05, |
|
"loss": 4.3062, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 8.999999999999999e-05, |
|
"loss": 4.2302, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.059999999999999e-05, |
|
"loss": 4.2216, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.12e-05, |
|
"loss": 4.12, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.18e-05, |
|
"loss": 4.1832, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.24e-05, |
|
"loss": 4.1754, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.3e-05, |
|
"loss": 4.274, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.36e-05, |
|
"loss": 4.1414, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.419999999999999e-05, |
|
"loss": 4.0708, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.479999999999999e-05, |
|
"loss": 4.2012, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.539999999999999e-05, |
|
"loss": 4.2286, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.599999999999999e-05, |
|
"loss": 4.1608, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.659999999999999e-05, |
|
"loss": 4.1439, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.719999999999999e-05, |
|
"loss": 4.1617, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.779999999999999e-05, |
|
"loss": 4.2501, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.839999999999999e-05, |
|
"loss": 4.322, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.9e-05, |
|
"loss": 4.1637, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.96e-05, |
|
"loss": 4.3875, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001002, |
|
"loss": 4.1984, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0001008, |
|
"loss": 4.2334, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0001014, |
|
"loss": 4.0895, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.000102, |
|
"loss": 4.1526, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0001026, |
|
"loss": 4.1554, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00010319999999999999, |
|
"loss": 4.174, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00010379999999999999, |
|
"loss": 4.3213, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00010439999999999999, |
|
"loss": 4.3565, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00010499999999999999, |
|
"loss": 4.2848, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00010559999999999998, |
|
"loss": 4.1378, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00010619999999999998, |
|
"loss": 4.2663, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00010679999999999998, |
|
"loss": 4.1552, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00010739999999999998, |
|
"loss": 4.4146, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00010799999999999998, |
|
"loss": 4.212, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00010859999999999998, |
|
"loss": 4.2198, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00010919999999999998, |
|
"loss": 4.0986, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00010979999999999999, |
|
"loss": 4.4803, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00011039999999999999, |
|
"loss": 4.2863, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00011099999999999999, |
|
"loss": 4.292, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00011159999999999999, |
|
"loss": 4.3438, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00011219999999999999, |
|
"loss": 4.4794, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00011279999999999999, |
|
"loss": 4.1904, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00011339999999999999, |
|
"loss": 4.4741, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00011399999999999999, |
|
"loss": 4.3827, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0001146, |
|
"loss": 4.5862, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001152, |
|
"loss": 4.5182, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001158, |
|
"loss": 4.306, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001164, |
|
"loss": 4.3188, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.000117, |
|
"loss": 4.7074, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001176, |
|
"loss": 4.5491, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001182, |
|
"loss": 4.8521, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0001188, |
|
"loss": 4.7298, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0001194, |
|
"loss": 4.2078, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00011999999999999999, |
|
"loss": 4.2913, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00012059999999999999, |
|
"loss": 4.4183, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00012119999999999999, |
|
"loss": 4.1983, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00012179999999999999, |
|
"loss": 4.2838, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0001224, |
|
"loss": 4.2377, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00012299999999999998, |
|
"loss": 4.3051, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0001236, |
|
"loss": 4.681, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00012419999999999998, |
|
"loss": 4.3501, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00012479999999999997, |
|
"loss": 4.3868, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00012539999999999999, |
|
"loss": 4.3917, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00012599999999999997, |
|
"loss": 4.3078, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001266, |
|
"loss": 4.3063, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00012719999999999997, |
|
"loss": 4.3981, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001278, |
|
"loss": 4.2747, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00012839999999999998, |
|
"loss": 4.3503, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.000129, |
|
"loss": 4.182, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00012959999999999998, |
|
"loss": 4.437, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001302, |
|
"loss": 4.2125, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00013079999999999998, |
|
"loss": 4.3465, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001314, |
|
"loss": 4.3094, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00013199999999999998, |
|
"loss": 4.386, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001326, |
|
"loss": 4.3321, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00013319999999999999, |
|
"loss": 4.351, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001338, |
|
"loss": 4.6107, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001344, |
|
"loss": 4.2463, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.000135, |
|
"loss": 4.1284, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001356, |
|
"loss": 4.2243, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0001362, |
|
"loss": 4.491, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0001368, |
|
"loss": 4.4179, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0001374, |
|
"loss": 4.1993, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.000138, |
|
"loss": 4.291, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0001386, |
|
"loss": 4.7274, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0001392, |
|
"loss": 4.2559, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013979999999999998, |
|
"loss": 4.5068, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001404, |
|
"loss": 4.2703, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00014099999999999998, |
|
"loss": 4.2862, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00014159999999999997, |
|
"loss": 4.2675, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001422, |
|
"loss": 4.3457, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00014279999999999997, |
|
"loss": 4.3633, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001434, |
|
"loss": 4.5277, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00014399999999999998, |
|
"loss": 4.4545, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001446, |
|
"loss": 4.3021, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00014519999999999998, |
|
"loss": 4.6181, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001458, |
|
"loss": 4.681, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00014639999999999998, |
|
"loss": 4.5904, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.000147, |
|
"loss": 4.819, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00014759999999999998, |
|
"loss": 4.7076, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001482, |
|
"loss": 4.5274, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00014879999999999998, |
|
"loss": 4.6464, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001494, |
|
"loss": 4.3735, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00015, |
|
"loss": 4.4111, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00015059999999999997, |
|
"loss": 4.5762, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001512, |
|
"loss": 4.3417, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00015179999999999998, |
|
"loss": 4.5584, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001524, |
|
"loss": 4.5966, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00015299999999999998, |
|
"loss": 4.3418, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001536, |
|
"loss": 4.3593, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00015419999999999998, |
|
"loss": 4.3958, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0001548, |
|
"loss": 4.558, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00015539999999999998, |
|
"loss": 4.4078, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.000156, |
|
"loss": 4.3026, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00015659999999999998, |
|
"loss": 4.6461, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0001572, |
|
"loss": 4.0745, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001578, |
|
"loss": 4.3821, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001584, |
|
"loss": 4.3248, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.000159, |
|
"loss": 4.4178, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001596, |
|
"loss": 4.3037, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001602, |
|
"loss": 4.4218, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001608, |
|
"loss": 4.3435, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001614, |
|
"loss": 4.2526, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.000162, |
|
"loss": 4.5418, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001626, |
|
"loss": 4.4274, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001632, |
|
"loss": 4.3375, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001638, |
|
"loss": 4.4932, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001644, |
|
"loss": 4.3847, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.000165, |
|
"loss": 4.5048, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0001656, |
|
"loss": 4.4597, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0001662, |
|
"loss": 4.4021, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0001668, |
|
"loss": 4.4614, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0001674, |
|
"loss": 4.5178, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.000168, |
|
"loss": 4.4294, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0001686, |
|
"loss": 4.3767, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00016919999999999997, |
|
"loss": 4.3318, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00016979999999999998, |
|
"loss": 4.594, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00017039999999999997, |
|
"loss": 4.2769, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00017099999999999998, |
|
"loss": 4.8188, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00017159999999999997, |
|
"loss": 4.1727, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00017219999999999998, |
|
"loss": 4.4774, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00017279999999999997, |
|
"loss": 4.6156, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00017339999999999996, |
|
"loss": 4.5715, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00017399999999999997, |
|
"loss": 4.3004, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00017459999999999996, |
|
"loss": 4.457, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00017519999999999998, |
|
"loss": 4.5484, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00017579999999999996, |
|
"loss": 4.6633, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00017639999999999998, |
|
"loss": 4.4057, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00017699999999999997, |
|
"loss": 5.0306, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00017759999999999998, |
|
"loss": 4.8782, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00017819999999999997, |
|
"loss": 4.7462, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00017879999999999998, |
|
"loss": 5.1954, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00017939999999999997, |
|
"loss": 4.601, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00017999999999999998, |
|
"loss": 4.5284, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00018059999999999997, |
|
"loss": 4.6252, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00018119999999999999, |
|
"loss": 4.521, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00018179999999999997, |
|
"loss": 4.5921, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0001824, |
|
"loss": 4.5414, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00018299999999999998, |
|
"loss": 4.5384, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0001836, |
|
"loss": 4.7024, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00018419999999999998, |
|
"loss": 4.5066, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0001848, |
|
"loss": 4.4578, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00018539999999999998, |
|
"loss": 4.2657, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.000186, |
|
"loss": 4.5986, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00018659999999999998, |
|
"loss": 4.5689, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001872, |
|
"loss": 4.624, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00018779999999999998, |
|
"loss": 4.3777, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00018839999999999997, |
|
"loss": 4.6881, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00018899999999999999, |
|
"loss": 4.4102, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00018959999999999997, |
|
"loss": 4.8632, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001902, |
|
"loss": 4.5565, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019079999999999998, |
|
"loss": 4.6379, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001914, |
|
"loss": 4.2919, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019199999999999998, |
|
"loss": 4.4188, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001926, |
|
"loss": 4.6448, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019319999999999998, |
|
"loss": 4.4343, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001938, |
|
"loss": 4.8242, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00019439999999999998, |
|
"loss": 4.3897, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.000195, |
|
"loss": 4.7531, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00019559999999999998, |
|
"loss": 4.5004, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001962, |
|
"loss": 4.3962, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00019679999999999999, |
|
"loss": 4.4023, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001974, |
|
"loss": 4.5184, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.000198, |
|
"loss": 4.3142, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001986, |
|
"loss": 4.5157, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001992, |
|
"loss": 4.5152, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001998, |
|
"loss": 4.4877, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0002004, |
|
"loss": 4.44, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.000201, |
|
"loss": 4.6629, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002016, |
|
"loss": 4.4032, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002022, |
|
"loss": 4.4355, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002028, |
|
"loss": 4.5324, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00020339999999999998, |
|
"loss": 4.4673, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.000204, |
|
"loss": 4.8003, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00020459999999999999, |
|
"loss": 4.6395, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002052, |
|
"loss": 4.6675, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002058, |
|
"loss": 4.8611, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00020639999999999998, |
|
"loss": 4.363, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00020699999999999996, |
|
"loss": 4.3723, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00020759999999999998, |
|
"loss": 4.7011, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00020819999999999996, |
|
"loss": 4.5094, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00020879999999999998, |
|
"loss": 4.6712, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00020939999999999997, |
|
"loss": 4.629, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00020999999999999998, |
|
"loss": 4.5946, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00021059999999999997, |
|
"loss": 4.517, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021119999999999996, |
|
"loss": 4.4386, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021179999999999997, |
|
"loss": 4.5275, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021239999999999996, |
|
"loss": 4.3692, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021299999999999997, |
|
"loss": 4.5466, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021359999999999996, |
|
"loss": 4.9401, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00021419999999999998, |
|
"loss": 4.3218, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00021479999999999996, |
|
"loss": 4.6933, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00021539999999999998, |
|
"loss": 4.6379, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00021599999999999996, |
|
"loss": 4.377, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00021659999999999998, |
|
"loss": 4.898, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00021719999999999997, |
|
"loss": 4.4985, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00021779999999999998, |
|
"loss": 4.5904, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00021839999999999997, |
|
"loss": 4.4725, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00021899999999999998, |
|
"loss": 4.4717, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00021959999999999997, |
|
"loss": 4.3563, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00022019999999999999, |
|
"loss": 4.4058, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00022079999999999997, |
|
"loss": 4.7998, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0002214, |
|
"loss": 4.5314, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00022199999999999998, |
|
"loss": 4.398, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0002226, |
|
"loss": 4.4327, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00022319999999999998, |
|
"loss": 4.7163, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0002238, |
|
"loss": 4.7412, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00022439999999999998, |
|
"loss": 4.6339, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.000225, |
|
"loss": 4.5151, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00022559999999999998, |
|
"loss": 4.7596, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00022619999999999997, |
|
"loss": 4.3111, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00022679999999999998, |
|
"loss": 4.5822, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00022739999999999997, |
|
"loss": 4.573, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00022799999999999999, |
|
"loss": 4.3831, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00022859999999999997, |
|
"loss": 4.5107, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002292, |
|
"loss": 4.4021, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00022979999999999997, |
|
"loss": 4.7495, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002304, |
|
"loss": 4.8891, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00023099999999999998, |
|
"loss": 5.0482, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002316, |
|
"loss": 4.5507, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00023219999999999998, |
|
"loss": 4.5954, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0002328, |
|
"loss": 4.4654, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023339999999999998, |
|
"loss": 4.5147, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.000234, |
|
"loss": 4.6645, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023459999999999998, |
|
"loss": 4.6974, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0002352, |
|
"loss": 4.4298, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023579999999999999, |
|
"loss": 4.8421, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002364, |
|
"loss": 4.6812, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.000237, |
|
"loss": 5.1311, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002376, |
|
"loss": 5.5849, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002382, |
|
"loss": 4.9822, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002388, |
|
"loss": 5.4755, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002394, |
|
"loss": 5.0482, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00023999999999999998, |
|
"loss": 4.7492, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002406, |
|
"loss": 4.5452, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00024119999999999998, |
|
"loss": 4.8564, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002418, |
|
"loss": 4.568, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00024239999999999998, |
|
"loss": 4.5792, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.000243, |
|
"loss": 4.8879, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00024359999999999999, |
|
"loss": 4.5071, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00024419999999999997, |
|
"loss": 4.9798, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002448, |
|
"loss": 4.5812, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00024539999999999995, |
|
"loss": 4.4708, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00024599999999999996, |
|
"loss": 4.7339, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002466, |
|
"loss": 4.467, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0002472, |
|
"loss": 4.7394, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00024779999999999995, |
|
"loss": 4.5679, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00024839999999999997, |
|
"loss": 4.5506, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.000249, |
|
"loss": 4.5689, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00024959999999999994, |
|
"loss": 4.1599, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00025019999999999996, |
|
"loss": 4.8572, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00025079999999999997, |
|
"loss": 4.722, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0002514, |
|
"loss": 4.2483, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00025199999999999995, |
|
"loss": 4.3537, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00025259999999999996, |
|
"loss": 4.6733, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0002532, |
|
"loss": 4.4947, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0002538, |
|
"loss": 4.6257, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00025439999999999995, |
|
"loss": 4.5752, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00025499999999999996, |
|
"loss": 4.286, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0002556, |
|
"loss": 4.2402, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0002562, |
|
"loss": 4.8508, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00025679999999999995, |
|
"loss": 4.4226, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00025739999999999997, |
|
"loss": 4.8964, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.000258, |
|
"loss": 4.3863, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0002586, |
|
"loss": 4.5225, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00025919999999999996, |
|
"loss": 4.6005, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00025979999999999997, |
|
"loss": 4.6333, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0002604, |
|
"loss": 4.4055, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.000261, |
|
"loss": 4.5249, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00026159999999999996, |
|
"loss": 4.528, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0002622, |
|
"loss": 4.4824, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0002628, |
|
"loss": 4.9304, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00026339999999999995, |
|
"loss": 4.9839, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00026399999999999997, |
|
"loss": 4.8169, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0002646, |
|
"loss": 4.9251, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0002652, |
|
"loss": 4.7229, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00026579999999999996, |
|
"loss": 4.453, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00026639999999999997, |
|
"loss": 4.8808, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.000267, |
|
"loss": 4.5888, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0002676, |
|
"loss": 4.671, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00026819999999999996, |
|
"loss": 5.2919, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0002688, |
|
"loss": 4.4541, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0002694, |
|
"loss": 4.5015, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00027, |
|
"loss": 4.6347, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00027059999999999996, |
|
"loss": 4.4529, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0002712, |
|
"loss": 4.7072, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002718, |
|
"loss": 4.6178, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002724, |
|
"loss": 4.7541, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00027299999999999997, |
|
"loss": 4.8252, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002736, |
|
"loss": 4.7205, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002742, |
|
"loss": 4.7718, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002748, |
|
"loss": 4.5784, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00027539999999999997, |
|
"loss": 4.7469, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.000276, |
|
"loss": 4.4427, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0002766, |
|
"loss": 4.5062, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0002772, |
|
"loss": 4.6286, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0002778, |
|
"loss": 4.8309, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0002784, |
|
"loss": 4.5841, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.000279, |
|
"loss": 4.581, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00027959999999999997, |
|
"loss": 4.5023, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0002802, |
|
"loss": 4.5518, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0002808, |
|
"loss": 4.3897, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00028139999999999996, |
|
"loss": 4.4474, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00028199999999999997, |
|
"loss": 4.4435, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.0002826, |
|
"loss": 4.5926, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00028319999999999994, |
|
"loss": 4.5936, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00028379999999999996, |
|
"loss": 4.5352, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.0002844, |
|
"loss": 4.7309, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.000285, |
|
"loss": 4.6586, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00028559999999999995, |
|
"loss": 4.6348, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00028619999999999996, |
|
"loss": 4.3622, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0002868, |
|
"loss": 4.3913, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00028739999999999994, |
|
"loss": 4.8266, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00028799999999999995, |
|
"loss": 4.8246, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00028859999999999997, |
|
"loss": 4.9268, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0002892, |
|
"loss": 4.881, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00028979999999999994, |
|
"loss": 4.6537, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00029039999999999996, |
|
"loss": 4.5566, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00029099999999999997, |
|
"loss": 4.5838, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.0002916, |
|
"loss": 4.5166, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00029219999999999995, |
|
"loss": 4.6484, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00029279999999999996, |
|
"loss": 4.3926, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0002934, |
|
"loss": 4.9063, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.000294, |
|
"loss": 4.5744, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00029459999999999995, |
|
"loss": 4.2873, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00029519999999999997, |
|
"loss": 4.6052, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0002958, |
|
"loss": 4.7127, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0002964, |
|
"loss": 4.8242, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00029699999999999996, |
|
"loss": 4.4981, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00029759999999999997, |
|
"loss": 4.5186, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0002982, |
|
"loss": 5.0847, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0002988, |
|
"loss": 4.7686, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_loss": 4.900937080383301, |
|
"eval_runtime": 769.6154, |
|
"eval_samples_per_second": 3.433, |
|
"eval_steps_per_second": 0.287, |
|
"eval_wer": 1.9500793335977786, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00029939999999999996, |
|
"loss": 4.7283, |
|
"step": 501 |
|
}, |
|
{
"epoch": 0.84,
"learning_rate": 0.0003,
"loss": 4.4173,
"step": 502
},
{
"epoch": 0.85,
"learning_rate": 0.00029680851063829784,
"loss": 4.5777,
"step": 503
},
{
"epoch": 0.85,
"learning_rate": 0.0002936170212765957,
"loss": 4.8135,
"step": 504
},
{
"epoch": 0.85,
"learning_rate": 0.0002904255319148936,
"loss": 4.7635,
"step": 505
},
{
"epoch": 0.85,
"learning_rate": 0.0002872340425531915,
"loss": 4.7398,
"step": 506
},
{
"epoch": 0.85,
"learning_rate": 0.00028404255319148934,
"loss": 4.7005,
"step": 507
},
{
"epoch": 0.85,
"learning_rate": 0.0002808510638297872,
"loss": 4.7717,
"step": 508
},
{
"epoch": 0.86,
"learning_rate": 0.00027765957446808506,
"loss": 4.5064,
"step": 509
},
{
"epoch": 0.86,
"learning_rate": 0.000274468085106383,
"loss": 4.806,
"step": 510
},
{
"epoch": 0.86,
"learning_rate": 0.00027127659574468084,
"loss": 4.6994,
"step": 511
},
{
"epoch": 0.86,
"learning_rate": 0.0002680851063829787,
"loss": 4.5931,
"step": 512
},
{
"epoch": 0.86,
"learning_rate": 0.00026489361702127656,
"loss": 4.4888,
"step": 513
},
{
"epoch": 0.86,
"learning_rate": 0.0002617021276595745,
"loss": 4.4274,
"step": 514
},
{
"epoch": 0.87,
"learning_rate": 0.0002585106382978723,
"loss": 4.5197,
"step": 515
},
{
"epoch": 0.87,
"learning_rate": 0.0002553191489361702,
"loss": 4.4446,
"step": 516
},
{
"epoch": 0.87,
"learning_rate": 0.00025212765957446806,
"loss": 4.7041,
"step": 517
},
{
"epoch": 0.87,
"learning_rate": 0.0002489361702127659,
"loss": 4.5883,
"step": 518
},
{
"epoch": 0.87,
"learning_rate": 0.00024574468085106384,
"loss": 4.5084,
"step": 519
},
{
"epoch": 0.87,
"learning_rate": 0.00024255319148936167,
"loss": 4.5682,
"step": 520
},
{
"epoch": 0.88,
"learning_rate": 0.00023936170212765956,
"loss": 4.7571,
"step": 521
},
{
"epoch": 0.88,
"learning_rate": 0.00023617021276595742,
"loss": 4.9358,
"step": 522
},
{
"epoch": 0.88,
"learning_rate": 0.00023297872340425529,
"loss": 4.4314,
"step": 523
},
{
"epoch": 0.88,
"learning_rate": 0.00022978723404255317,
"loss": 4.1877,
"step": 524
},
{
"epoch": 0.88,
"learning_rate": 0.00022659574468085106,
"loss": 4.7371,
"step": 525
},
{
"epoch": 0.88,
"learning_rate": 0.0002234042553191489,
"loss": 4.6611,
"step": 526
},
{
"epoch": 0.89,
"learning_rate": 0.00022021276595744679,
"loss": 4.6147,
"step": 527
},
{
"epoch": 0.89,
"learning_rate": 0.00021702127659574468,
"loss": 4.4807,
"step": 528
},
{
"epoch": 0.89,
"learning_rate": 0.00021382978723404254,
"loss": 4.8237,
"step": 529
},
{
"epoch": 0.89,
"learning_rate": 0.0002106382978723404,
"loss": 4.3209,
"step": 530
},
{
"epoch": 0.89,
"learning_rate": 0.0002074468085106383,
"loss": 4.4169,
"step": 531
},
{
"epoch": 0.89,
"learning_rate": 0.00020425531914893615,
"loss": 4.6349,
"step": 532
},
{
"epoch": 0.9,
"learning_rate": 0.00020106382978723404,
"loss": 4.6437,
"step": 533
},
{
"epoch": 0.9,
"learning_rate": 0.00019787234042553187,
"loss": 4.4318,
"step": 534
},
{
"epoch": 0.9,
"learning_rate": 0.00019468085106382976,
"loss": 4.3929,
"step": 535
},
{
"epoch": 0.9,
"learning_rate": 0.00019148936170212765,
"loss": 4.5963,
"step": 536
},
{
"epoch": 0.9,
"learning_rate": 0.0001882978723404255,
"loss": 4.474,
"step": 537
},
{
"epoch": 0.9,
"learning_rate": 0.0001851063829787234,
"loss": 4.396,
"step": 538
},
{
"epoch": 0.91,
"learning_rate": 0.00018191489361702126,
"loss": 4.7928,
"step": 539
},
{
"epoch": 0.91,
"learning_rate": 0.00017872340425531912,
"loss": 4.569,
"step": 540
},
{
"epoch": 0.91,
"learning_rate": 0.000175531914893617,
"loss": 4.4839,
"step": 541
},
{
"epoch": 0.91,
"learning_rate": 0.0001723404255319149,
"loss": 4.6734,
"step": 542
},
{
"epoch": 0.91,
"learning_rate": 0.00016914893617021274,
"loss": 4.405,
"step": 543
},
{
"epoch": 0.91,
"learning_rate": 0.00016595744680851062,
"loss": 4.5196,
"step": 544
},
{
"epoch": 0.92,
"learning_rate": 0.00016276595744680849,
"loss": 4.4101,
"step": 545
},
{
"epoch": 0.92,
"learning_rate": 0.00015957446808510637,
"loss": 4.9518,
"step": 546
},
{
"epoch": 0.92,
"learning_rate": 0.00015638297872340426,
"loss": 4.4228,
"step": 547
},
{
"epoch": 0.92,
"learning_rate": 0.0001531914893617021,
"loss": 5.044,
"step": 548
},
{
"epoch": 0.92,
"learning_rate": 0.00015,
"loss": 5.1572,
"step": 549
},
{
"epoch": 0.92,
"learning_rate": 0.00014680851063829785,
"loss": 4.4465,
"step": 550
},
{
"epoch": 0.93,
"learning_rate": 0.00014361702127659574,
"loss": 4.5287,
"step": 551
},
{
"epoch": 0.93,
"learning_rate": 0.0001404255319148936,
"loss": 4.616,
"step": 552
},
{
"epoch": 0.93,
"learning_rate": 0.0001372340425531915,
"loss": 4.5176,
"step": 553
},
{
"epoch": 0.93,
"learning_rate": 0.00013404255319148935,
"loss": 4.3372,
"step": 554
},
{
"epoch": 0.93,
"learning_rate": 0.00013085106382978724,
"loss": 4.4698,
"step": 555
},
{
"epoch": 0.93,
"learning_rate": 0.0001276595744680851,
"loss": 4.4738,
"step": 556
},
{
"epoch": 0.94,
"learning_rate": 0.00012446808510638296,
"loss": 4.6455,
"step": 557
},
{
"epoch": 0.94,
"learning_rate": 0.00012127659574468084,
"loss": 4.5513,
"step": 558
},
{
"epoch": 0.94,
"learning_rate": 0.00011808510638297871,
"loss": 4.3848,
"step": 559
},
{
"epoch": 0.94,
"learning_rate": 0.00011489361702127659,
"loss": 4.3127,
"step": 560
},
{
"epoch": 0.94,
"learning_rate": 0.00011170212765957445,
"loss": 4.7444,
"step": 561
},
{
"epoch": 0.94,
"learning_rate": 0.00010851063829787234,
"loss": 4.4381,
"step": 562
},
{
"epoch": 0.95,
"learning_rate": 0.0001053191489361702,
"loss": 4.4416,
"step": 563
},
{
"epoch": 0.95,
"learning_rate": 0.00010212765957446807,
"loss": 4.8204,
"step": 564
},
{
"epoch": 0.95,
"learning_rate": 9.893617021276594e-05,
"loss": 4.5756,
"step": 565
},
{
"epoch": 0.95,
"learning_rate": 9.574468085106382e-05,
"loss": 4.5617,
"step": 566
},
{
"epoch": 0.95,
"learning_rate": 9.25531914893617e-05,
"loss": 4.4047,
"step": 567
},
{
"epoch": 0.96,
"learning_rate": 8.936170212765956e-05,
"loss": 4.4505,
"step": 568
},
{
"epoch": 0.96,
"learning_rate": 8.617021276595745e-05,
"loss": 4.356,
"step": 569
},
{
"epoch": 0.96,
"learning_rate": 8.297872340425531e-05,
"loss": 4.4439,
"step": 570
},
{
"epoch": 0.96,
"learning_rate": 7.978723404255319e-05,
"loss": 4.4821,
"step": 571
},
{
"epoch": 0.96,
"learning_rate": 7.659574468085105e-05,
"loss": 4.3846,
"step": 572
},
{
"epoch": 0.96,
"learning_rate": 7.340425531914892e-05,
"loss": 4.4297,
"step": 573
},
{
"epoch": 0.97,
"learning_rate": 7.02127659574468e-05,
"loss": 4.3423,
"step": 574
},
{
"epoch": 0.97,
"learning_rate": 6.702127659574467e-05,
"loss": 4.3857,
"step": 575
},
{
"epoch": 0.97,
"learning_rate": 6.382978723404255e-05,
"loss": 4.3895,
"step": 576
},
{
"epoch": 0.97,
"learning_rate": 6.063829787234042e-05,
"loss": 4.5989,
"step": 577
},
{
"epoch": 0.97,
"learning_rate": 5.7446808510638294e-05,
"loss": 4.2233,
"step": 578
},
{
"epoch": 0.97,
"learning_rate": 5.425531914893617e-05,
"loss": 4.3258,
"step": 579
},
{
"epoch": 0.98,
"learning_rate": 5.106382978723404e-05,
"loss": 4.4218,
"step": 580
},
{
"epoch": 0.98,
"learning_rate": 4.787234042553191e-05,
"loss": 4.5631,
"step": 581
},
{
"epoch": 0.98,
"learning_rate": 4.468085106382978e-05,
"loss": 4.3969,
"step": 582
},
{
"epoch": 0.98,
"learning_rate": 4.1489361702127656e-05,
"loss": 4.203,
"step": 583
},
{
"epoch": 0.98,
"learning_rate": 3.8297872340425525e-05,
"loss": 4.3834,
"step": 584
},
{
"epoch": 0.98,
"learning_rate": 3.51063829787234e-05,
"loss": 4.4053,
"step": 585
},
{
"epoch": 0.99,
"learning_rate": 3.1914893617021275e-05,
"loss": 4.3702,
"step": 586
},
{
"epoch": 0.99,
"learning_rate": 2.8723404255319147e-05,
"loss": 4.4538,
"step": 587
},
{
"epoch": 0.99,
"learning_rate": 2.553191489361702e-05,
"loss": 4.617,
"step": 588
},
{
"epoch": 0.99,
"learning_rate": 2.234042553191489e-05,
"loss": 4.4198,
"step": 589
},
{
"epoch": 0.99,
"learning_rate": 1.9148936170212762e-05,
"loss": 4.2778,
"step": 590
},
{
"epoch": 0.99,
"learning_rate": 1.5957446808510637e-05,
"loss": 4.5074,
"step": 591
},
{
"epoch": 1.0,
"learning_rate": 1.276595744680851e-05,
"loss": 4.901,
"step": 592
},
{
"epoch": 1.0,
"learning_rate": 9.574468085106381e-06,
"loss": 4.704,
"step": 593
},
{
"epoch": 1.0,
"learning_rate": 6.382978723404255e-06,
"loss": 4.5343,
"step": 594
},
{
"epoch": 1.0,
"step": 594,
"total_flos": 0.0,
"train_loss": 4.477476079857309,
"train_runtime": 5476.9762,
"train_samples_per_second": 5.211,
"train_steps_per_second": 0.108
}
],
"max_steps": 594,
"num_train_epochs": 1,
"total_flos": 0.0,
"trial_name": null,
"trial_params": null
}