{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9987389659520807,
  "global_step": 297,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 6e-07, "loss": 4.7698, "step": 1},
    {"epoch": 0.01, "learning_rate": 1.2e-06, "loss": 4.8469, "step": 2},
    {"epoch": 0.01, "learning_rate": 1.8e-06, "loss": 4.9086, "step": 3},
    {"epoch": 0.01, "learning_rate": 2.4e-06, "loss": 4.8415, "step": 4},
    {"epoch": 0.02, "learning_rate": 2.9999999999999997e-06, "loss": 4.7132, "step": 5},
    {"epoch": 0.02, "learning_rate": 2.9999999999999997e-06, "loss": 4.8118, "step": 6},
    {"epoch": 0.02, "learning_rate": 3.6e-06, "loss": 4.7937, "step": 7},
    {"epoch": 0.03, "learning_rate": 4.2e-06, "loss": 4.8404, "step": 8},
    {"epoch": 0.03, "learning_rate": 4.8e-06, "loss": 4.7309, "step": 9},
    {"epoch": 0.03, "learning_rate": 5.399999999999999e-06, "loss": 4.5415, "step": 10},
    {"epoch": 0.04, "learning_rate": 5.999999999999999e-06, "loss": 4.6528, "step": 11},
    {"epoch": 0.04, "learning_rate": 6.599999999999999e-06, "loss": 4.662, "step": 12},
    {"epoch": 0.04, "learning_rate": 7.2e-06, "loss": 4.5529, "step": 13},
    {"epoch": 0.05, "learning_rate": 7.799999999999998e-06, "loss": 4.474, "step": 14},
    {"epoch": 0.05, "learning_rate": 8.4e-06, "loss": 4.4883, "step": 15},
    {"epoch": 0.05, "learning_rate": 8.999999999999999e-06, "loss": 4.4004, "step": 16},
    {"epoch": 0.06, "learning_rate": 9.6e-06, "loss": 4.4632, "step": 17},
    {"epoch": 0.06, "learning_rate": 1.02e-05, "loss": 4.4696, "step": 18},
    {"epoch": 0.06, "learning_rate": 1.0799999999999998e-05, "loss": 4.4129, "step": 19},
    {"epoch": 0.07, "learning_rate": 1.14e-05, "loss": 4.4643, "step": 20},
    {"epoch": 0.07, "learning_rate": 1.1999999999999999e-05, "loss": 4.4646, "step": 21},
    {"epoch": 0.07, "learning_rate": 1.26e-05, "loss": 4.4311, "step": 22},
    {"epoch": 0.08, "learning_rate": 1.3199999999999997e-05, "loss": 4.306, "step": 23},
    {"epoch": 0.08, "learning_rate": 1.3799999999999998e-05, "loss": 4.4385, "step": 24},
    {"epoch": 0.08, "learning_rate": 1.44e-05, "loss": 4.276, "step": 25},
    {"epoch": 0.09, "learning_rate": 1.4999999999999999e-05, "loss": 4.5521, "step": 26},
    {"epoch": 0.09, "learning_rate": 1.5599999999999996e-05, "loss": 4.3171, "step": 27},
    {"epoch": 0.09, "learning_rate": 1.6199999999999997e-05, "loss": 4.2827, "step": 28},
    {"epoch": 0.1, "learning_rate": 1.68e-05, "loss": 4.1956, "step": 29},
    {"epoch": 0.1, "learning_rate": 1.74e-05, "loss": 4.3854, "step": 30},
    {"epoch": 0.1, "learning_rate": 1.7999999999999997e-05, "loss": 4.3831, "step": 31},
    {"epoch": 0.11, "learning_rate": 1.8599999999999998e-05, "loss": 4.2882, "step": 32},
    {"epoch": 0.11, "learning_rate": 1.92e-05, "loss": 4.3278, "step": 33},
    {"epoch": 0.11, "learning_rate": 1.98e-05, "loss": 4.2851, "step": 34},
    {"epoch": 0.12, "learning_rate": 2.04e-05, "loss": 4.2957, "step": 35},
    {"epoch": 0.12, "learning_rate": 2.1e-05, "loss": 4.2894, "step": 36},
    {"epoch": 0.12, "learning_rate": 2.1599999999999996e-05, "loss": 4.3836, "step": 37},
    {"epoch": 0.13, "learning_rate": 2.2199999999999998e-05, "loss": 4.3498, "step": 38},
    {"epoch": 0.13, "learning_rate": 2.28e-05, "loss": 4.3839, "step": 39},
    {"epoch": 0.13, "learning_rate": 2.34e-05, "loss": 4.2473, "step": 40},
    {"epoch": 0.14, "learning_rate": 2.3999999999999997e-05, "loss": 4.3261, "step": 41},
    {"epoch": 0.14, "learning_rate": 2.4599999999999998e-05, "loss": 4.3655, "step": 42},
    {"epoch": 0.14, "learning_rate": 2.52e-05, "loss": 4.1703, "step": 43},
    {"epoch": 0.15, "learning_rate": 2.5799999999999997e-05, "loss": 4.468, "step": 44},
    {"epoch": 0.15, "learning_rate": 2.6399999999999995e-05, "loss": 4.4049, "step": 45},
    {"epoch": 0.15, "learning_rate": 2.6999999999999996e-05, "loss": 4.4439, "step": 46},
    {"epoch": 0.16, "learning_rate": 2.7599999999999997e-05, "loss": 4.4014, "step": 47},
    {"epoch": 0.16, "learning_rate": 2.8199999999999998e-05, "loss": 4.5049, "step": 48},
    {"epoch": 0.16, "learning_rate": 2.88e-05, "loss": 4.5962, "step": 49},
    {"epoch": 0.17, "learning_rate": 2.94e-05, "loss": 4.3296, "step": 50},
    {"epoch": 0.17, "learning_rate": 2.9999999999999997e-05, "loss": 4.2871, "step": 51},
    {"epoch": 0.17, "learning_rate": 3.06e-05, "loss": 4.1845, "step": 52},
    {"epoch": 0.18, "learning_rate": 3.119999999999999e-05, "loss": 4.163, "step": 53},
    {"epoch": 0.18, "learning_rate": 3.1799999999999994e-05, "loss": 4.1515, "step": 54},
    {"epoch": 0.18, "learning_rate": 3.2399999999999995e-05, "loss": 4.1247, "step": 55},
    {"epoch": 0.19, "learning_rate": 3.2999999999999996e-05, "loss": 4.1402, "step": 56},
    {"epoch": 0.19, "learning_rate": 3.36e-05, "loss": 4.2412, "step": 57},
    {"epoch": 0.2, "learning_rate": 3.42e-05, "loss": 4.2467, "step": 58},
    {"epoch": 0.2, "learning_rate": 3.48e-05, "loss": 4.2223, "step": 59},
    {"epoch": 0.2, "learning_rate": 3.539999999999999e-05, "loss": 4.1693, "step": 60},
    {"epoch": 0.21, "learning_rate": 3.5999999999999994e-05, "loss": 4.2163, "step": 61},
    {"epoch": 0.21, "learning_rate": 3.6599999999999995e-05, "loss": 4.1301, "step": 62},
    {"epoch": 0.21, "learning_rate": 3.7199999999999996e-05, "loss": 4.2289, "step": 63},
    {"epoch": 0.22, "learning_rate": 3.78e-05, "loss": 4.1746, "step": 64},
    {"epoch": 0.22, "learning_rate": 3.84e-05, "loss": 4.2473, "step": 65},
    {"epoch": 0.22, "learning_rate": 3.9e-05, "loss": 4.2665, "step": 66},
    {"epoch": 0.23, "learning_rate": 3.96e-05, "loss": 4.2199, "step": 67},
    {"epoch": 0.23, "learning_rate": 4.02e-05, "loss": 4.246, "step": 68},
    {"epoch": 0.23, "learning_rate": 4.08e-05, "loss": 4.1884, "step": 69},
    {"epoch": 0.24, "learning_rate": 4.14e-05, "loss": 4.1402, "step": 70},
    {"epoch": 0.24, "learning_rate": 4.2e-05, "loss": 4.1451, "step": 71},
    {"epoch": 0.24, "learning_rate": 4.259999999999999e-05, "loss": 4.2652, "step": 72},
    {"epoch": 0.25, "learning_rate": 4.319999999999999e-05, "loss": 4.1772, "step": 73},
    {"epoch": 0.25, "learning_rate": 4.3799999999999994e-05, "loss": 4.1788, "step": 74},
    {"epoch": 0.25, "learning_rate": 4.4399999999999995e-05, "loss": 4.2676, "step": 75},
    {"epoch": 0.26, "learning_rate": 4.4999999999999996e-05, "loss": 4.1542, "step": 76},
    {"epoch": 0.26, "learning_rate": 4.56e-05, "loss": 4.1175, "step": 77},
    {"epoch": 0.26, "learning_rate": 4.62e-05, "loss": 4.1771, "step": 78},
    {"epoch": 0.27, "learning_rate": 4.68e-05, "loss": 4.0612, "step": 79},
    {"epoch": 0.27, "learning_rate": 4.7399999999999993e-05, "loss": 4.2617, "step": 80},
    {"epoch": 0.27, "learning_rate": 4.7999999999999994e-05, "loss": 4.1813, "step": 81},
    {"epoch": 0.28, "learning_rate": 4.8599999999999995e-05, "loss": 4.1929, "step": 82},
    {"epoch": 0.28, "learning_rate": 4.9199999999999997e-05, "loss": 4.1566, "step": 83},
    {"epoch": 0.28, "learning_rate": 4.98e-05, "loss": 4.2066, "step": 84},
    {"epoch": 0.29, "learning_rate": 5.04e-05, "loss": 4.297, "step": 85},
    {"epoch": 0.29, "learning_rate": 5.1e-05, "loss": 4.1838, "step": 86},
    {"epoch": 0.29, "learning_rate": 5.1599999999999994e-05, "loss": 4.2046, "step": 87},
    {"epoch": 0.3, "learning_rate": 5.2199999999999995e-05, "loss": 4.2594, "step": 88},
    {"epoch": 0.3, "learning_rate": 5.279999999999999e-05, "loss": 4.2559, "step": 89},
    {"epoch": 0.3, "learning_rate": 5.339999999999999e-05, "loss": 4.2763, "step": 90},
    {"epoch": 0.31, "learning_rate": 5.399999999999999e-05, "loss": 4.2853, "step": 91},
    {"epoch": 0.31, "learning_rate": 5.459999999999999e-05, "loss": 4.3723, "step": 92},
    {"epoch": 0.31, "learning_rate": 5.519999999999999e-05, "loss": 4.333, "step": 93},
    {"epoch": 0.32, "learning_rate": 5.5799999999999994e-05, "loss": 4.2612, "step": 94},
    {"epoch": 0.32, "learning_rate": 5.6399999999999995e-05, "loss": 4.2223, "step": 95},
    {"epoch": 0.32, "learning_rate": 5.6999999999999996e-05, "loss": 4.3846, "step": 96},
    {"epoch": 0.33, "learning_rate": 5.76e-05, "loss": 4.5903, "step": 97},
    {"epoch": 0.33, "learning_rate": 5.82e-05, "loss": 4.4963, "step": 98},
    {"epoch": 0.33, "learning_rate": 5.88e-05, "loss": 4.5083, "step": 99},
    {"epoch": 0.34, "learning_rate": 5.94e-05, "loss": 4.5454, "step": 100},
    {"epoch": 0.34, "learning_rate": 5.9999999999999995e-05, "loss": 4.146, "step": 101},
    {"epoch": 0.34, "learning_rate": 6.0599999999999996e-05, "loss": 4.2164, "step": 102},
    {"epoch": 0.35, "learning_rate": 6.12e-05, "loss": 4.2249, "step": 103},
    {"epoch": 0.35, "learning_rate": 6.18e-05, "loss": 4.1094, "step": 104},
    {"epoch": 0.35, "learning_rate": 6.239999999999999e-05, "loss": 4.2254, "step": 105},
    {"epoch": 0.36, "learning_rate": 6.299999999999999e-05, "loss": 4.1199, "step": 106},
    {"epoch": 0.36, "learning_rate": 6.359999999999999e-05, "loss": 4.0813, "step": 107},
    {"epoch": 0.36, "learning_rate": 6.419999999999999e-05, "loss": 4.1843, "step": 108},
    {"epoch": 0.37, "learning_rate": 6.479999999999999e-05, "loss": 4.2144, "step": 109},
    {"epoch": 0.37, "learning_rate": 6.539999999999999e-05, "loss": 4.1853, "step": 110},
    {"epoch": 0.37, "learning_rate": 6.599999999999999e-05, "loss": 4.256, "step": 111},
    {"epoch": 0.38, "learning_rate": 6.659999999999999e-05, "loss": 4.1764, "step": 112},
    {"epoch": 0.38, "learning_rate": 6.72e-05, "loss": 4.1245, "step": 113},
    {"epoch": 0.38, "learning_rate": 6.78e-05, "loss": 4.1798, "step": 114},
    {"epoch": 0.39, "learning_rate": 6.84e-05, "loss": 4.1431, "step": 115},
    {"epoch": 0.39, "learning_rate": 6.9e-05, "loss": 4.1608, "step": 116},
    {"epoch": 0.39, "learning_rate": 6.96e-05, "loss": 4.09, "step": 117},
    {"epoch": 0.4, "learning_rate": 7.02e-05, "loss": 4.1146, "step": 118},
    {"epoch": 0.4, "learning_rate": 7.079999999999999e-05, "loss": 4.1764, "step": 119},
    {"epoch": 0.4, "learning_rate": 7.139999999999999e-05, "loss": 4.181, "step": 120},
    {"epoch": 0.41, "learning_rate": 7.199999999999999e-05, "loss": 4.1816, "step": 121},
    {"epoch": 0.41, "learning_rate": 7.259999999999999e-05, "loss": 4.2696, "step": 122},
    {"epoch": 0.41, "learning_rate": 7.319999999999999e-05, "loss": 4.1079, "step": 123},
    {"epoch": 0.42, "learning_rate": 7.379999999999999e-05, "loss": 4.2193, "step": 124},
    {"epoch": 0.42, "learning_rate": 7.439999999999999e-05, "loss": 4.2735, "step": 125},
    {"epoch": 0.42, "learning_rate": 7.5e-05, "loss": 4.1796, "step": 126},
    {"epoch": 0.43, "learning_rate": 7.56e-05, "loss": 4.2039, "step": 127},
    {"epoch": 0.43, "learning_rate": 7.62e-05, "loss": 4.1144, "step": 128},
    {"epoch": 0.43, "learning_rate": 7.68e-05, "loss": 4.2619, "step": 129},
    {"epoch": 0.44, "learning_rate": 7.74e-05, "loss": 4.3677, "step": 130},
    {"epoch": 0.44, "learning_rate": 7.8e-05, "loss": 4.2429, "step": 131},
    {"epoch": 0.44, "learning_rate": 7.86e-05, "loss": 4.1074, "step": 132},
    {"epoch": 0.45, "learning_rate": 7.92e-05, "loss": 4.2802, "step": 133},
    {"epoch": 0.45, "learning_rate": 7.98e-05, "loss": 4.1459, "step": 134},
    {"epoch": 0.45, "learning_rate": 8.04e-05, "loss": 4.3063, "step": 135},
    {"epoch": 0.46, "learning_rate": 8.1e-05, "loss": 4.2316, "step": 136},
    {"epoch": 0.46, "learning_rate": 8.16e-05, "loss": 4.1519, "step": 137},
    {"epoch": 0.46, "learning_rate": 8.22e-05, "loss": 4.0775, "step": 138},
    {"epoch": 0.47, "learning_rate": 8.28e-05, "loss": 4.1537, "step": 139},
    {"epoch": 0.47, "learning_rate": 8.34e-05, "loss": 4.1356, "step": 140},
    {"epoch": 0.47, "learning_rate": 8.4e-05, "loss": 4.3637, "step": 141},
    {"epoch": 0.48, "learning_rate": 8.459999999999998e-05, "loss": 4.156, "step": 142},
    {"epoch": 0.48, "learning_rate": 8.519999999999998e-05, "loss": 4.2406, "step": 143},
    {"epoch": 0.48, "learning_rate": 8.579999999999998e-05, "loss": 4.3063, "step": 144},
    {"epoch": 0.49, "learning_rate": 8.639999999999999e-05, "loss": 4.4181, "step": 145},
    {"epoch": 0.49, "learning_rate": 8.699999999999999e-05, "loss": 4.173, "step": 146},
    {"epoch": 0.49, "learning_rate": 8.759999999999999e-05, "loss": 4.4079, "step": 147},
    {"epoch": 0.5, "learning_rate": 8.819999999999999e-05, "loss": 4.4766, "step": 148},
    {"epoch": 0.5, "learning_rate": 8.879999999999999e-05, "loss": 4.4345, "step": 149},
    {"epoch": 0.5, "learning_rate": 8.939999999999999e-05, "loss": 4.5567, "step": 150},
    {"epoch": 0.51, "learning_rate": 8.999999999999999e-05, "loss": 4.2932, "step": 151},
    {"epoch": 0.51, "learning_rate": 9.059999999999999e-05, "loss": 4.2495, "step": 152},
    {"epoch": 0.51, "learning_rate": 9.12e-05, "loss": 4.2386, "step": 153},
    {"epoch": 0.52, "learning_rate": 9.18e-05, "loss": 4.1652, "step": 154},
    {"epoch": 0.52, "learning_rate": 9.24e-05, "loss": 4.1926, "step": 155},
    {"epoch": 0.52, "learning_rate": 9.3e-05, "loss": 4.2151, "step": 156},
    {"epoch": 0.53, "learning_rate": 9.36e-05, "loss": 4.1392, "step": 157},
    {"epoch": 0.53, "learning_rate": 9.419999999999999e-05, "loss": 4.165, "step": 158},
    {"epoch": 0.53, "learning_rate": 9.479999999999999e-05, "loss": 4.2136, "step": 159},
    {"epoch": 0.54, "learning_rate": 9.539999999999999e-05, "loss": 4.1931, "step": 160},
    {"epoch": 0.54, "learning_rate": 9.599999999999999e-05, "loss": 4.2008, "step": 161},
    {"epoch": 0.54, "learning_rate": 9.659999999999999e-05, "loss": 4.1198, "step": 162},
    {"epoch": 0.55, "learning_rate": 9.719999999999999e-05, "loss": 4.2464, "step": 163},
    {"epoch": 0.55, "learning_rate": 9.779999999999999e-05, "loss": 4.1406, "step": 164},
    {"epoch": 0.55, "learning_rate": 9.839999999999999e-05, "loss": 4.2095, "step": 165},
    {"epoch": 0.56, "learning_rate": 9.9e-05, "loss": 4.1875, "step": 166},
    {"epoch": 0.56, "learning_rate": 9.96e-05, "loss": 4.152, "step": 167},
    {"epoch": 0.56, "learning_rate": 0.0001002, "loss": 4.1881, "step": 168},
    {"epoch": 0.57, "learning_rate": 0.0001008, "loss": 4.2114, "step": 169},
    {"epoch": 0.57, "learning_rate": 0.0001014, "loss": 4.1061, "step": 170},
    {"epoch": 0.58, "learning_rate": 0.000102, "loss": 4.1188, "step": 171},
    {"epoch": 0.58, "learning_rate": 0.0001026, "loss": 4.1931, "step": 172},
    {"epoch": 0.58, "learning_rate": 0.00010319999999999999, "loss": 4.0806, "step": 173},
    {"epoch": 0.59, "learning_rate": 0.00010379999999999999, "loss": 4.1139, "step": 174},
    {"epoch": 0.59, "learning_rate": 0.00010439999999999999, "loss": 4.3033, "step": 175},
    {"epoch": 0.59, "learning_rate": 0.00010499999999999999, "loss": 4.1447, "step": 176},
    {"epoch": 0.6, "learning_rate": 0.00010559999999999998, "loss": 4.2387, "step": 177},
    {"epoch": 0.6, "learning_rate": 0.00010619999999999998, "loss": 4.1149, "step": 178},
    {"epoch": 0.6, "learning_rate": 0.00010679999999999998, "loss": 4.1095, "step": 179},
    {"epoch": 0.61, "learning_rate": 0.00010739999999999998, "loss": 4.3137, "step": 180},
    {"epoch": 0.61, "learning_rate": 0.00010799999999999998, "loss": 4.1593, "step": 181},
    {"epoch": 0.61, "learning_rate": 0.00010859999999999998, "loss": 4.0903, "step": 182},
    {"epoch": 0.62, "learning_rate": 0.00010919999999999998, "loss": 4.1328, "step": 183},
    {"epoch": 0.62, "learning_rate": 0.00010979999999999999, "loss": 4.1959, "step": 184},
    {"epoch": 0.62, "learning_rate": 0.00011039999999999999, "loss": 4.2055, "step": 185},
    {"epoch": 0.63, "learning_rate": 0.00011099999999999999, "loss": 4.1536, "step": 186},
    {"epoch": 0.63, "learning_rate": 0.00011159999999999999, "loss": 4.195, "step": 187},
    {"epoch": 0.63, "learning_rate": 0.00011219999999999999, "loss": 4.1625, "step": 188},
    {"epoch": 0.64, "learning_rate": 0.00011279999999999999, "loss": 4.1326, "step": 189},
    {"epoch": 0.64, "learning_rate": 0.00011339999999999999, "loss": 4.2294, "step": 190},
    {"epoch": 0.64, "learning_rate": 0.00011399999999999999, "loss": 4.3466, "step": 191},
    {"epoch": 0.65, "learning_rate": 0.0001146, "loss": 4.3242, "step": 192},
    {"epoch": 0.65, "learning_rate": 0.0001152, "loss": 4.3581, "step": 193},
    {"epoch": 0.65, "learning_rate": 0.0001158, "loss": 4.3301, "step": 194},
    {"epoch": 0.66, "learning_rate": 0.0001164, "loss": 4.2756, "step": 195},
    {"epoch": 0.66, "learning_rate": 0.000117, "loss": 4.3115, "step": 196},
    {"epoch": 0.66, "learning_rate": 0.0001176, "loss": 4.1626, "step": 197},
    {"epoch": 0.67, "learning_rate": 0.0001182, "loss": 4.6198, "step": 198},
    {"epoch": 0.67, "learning_rate": 0.0001188, "loss": 4.4561, "step": 199},
    {"epoch": 0.67, "learning_rate": 0.0001194, "loss": 4.4628, "step": 200},
    {"epoch": 0.68, "learning_rate": 0.00011999999999999999, "loss": 4.2635, "step": 201},
    {"epoch": 0.68, "learning_rate": 0.00012059999999999999, "loss": 4.2424, "step": 202},
    {"epoch": 0.68, "learning_rate": 0.00012119999999999999, "loss": 4.1895, "step": 203},
    {"epoch": 0.69, "learning_rate": 0.00012179999999999999, "loss": 4.133, "step": 204},
    {"epoch": 0.69, "learning_rate": 0.0001224, "loss": 4.1853, "step": 205},
    {"epoch": 0.69, "learning_rate": 0.00012299999999999998, "loss": 4.1186, "step": 206},
    {"epoch": 0.7, "learning_rate": 0.0001236, "loss": 4.3218, "step": 207},
    {"epoch": 0.7, "learning_rate": 0.00012419999999999998, "loss": 4.19, "step": 208},
    {"epoch": 0.7, "learning_rate": 0.00012479999999999997, "loss": 4.1701, "step": 209},
    {"epoch": 0.71, "learning_rate": 0.00012539999999999999, "loss": 4.2564, "step": 210},
    {"epoch": 0.71, "learning_rate": 0.00012599999999999997, "loss": 4.0469, "step": 211},
    {"epoch": 0.71, "learning_rate": 0.0001266, "loss": 4.1559, "step": 212},
    {"epoch": 0.72, "learning_rate": 0.00012719999999999997, "loss": 4.3674, "step": 213},
    {"epoch": 0.72, "learning_rate": 0.0001278, "loss": 4.172, "step": 214},
    {"epoch": 0.72, "learning_rate": 0.00012839999999999998, "loss": 4.2285, "step": 215},
    {"epoch": 0.73, "learning_rate": 0.000129, "loss": 4.182, "step": 216},
    {"epoch": 0.73, "learning_rate": 0.00012959999999999998, "loss": 4.1601, "step": 217},
    {"epoch": 0.73, "learning_rate": 0.0001302, "loss": 4.0804, "step": 218},
    {"epoch": 0.74, "learning_rate": 0.00013079999999999998, "loss": 4.0997, "step": 219},
    {"epoch": 0.74, "learning_rate": 0.0001314, "loss": 4.1931, "step": 220},
    {"epoch": 0.74, "learning_rate": 0.00013199999999999998, "loss": 4.1306, "step": 221},
    {"epoch": 0.75, "learning_rate": 0.0001326, "loss": 4.0679, "step": 222},
    {"epoch": 0.75, "learning_rate": 0.00013319999999999999, "loss": 4.1208, "step": 223},
    {"epoch": 0.75, "learning_rate": 0.0001338, "loss": 4.1506, "step": 224},
    {"epoch": 0.76, "learning_rate": 0.0001344, "loss": 4.1266, "step": 225},
    {"epoch": 0.76, "learning_rate": 0.000135, "loss": 4.2373, "step": 226},
    {"epoch": 0.76, "learning_rate": 0.0001356, "loss": 4.1706, "step": 227},
    {"epoch": 0.77, "learning_rate": 0.0001362, "loss": 4.063, "step": 228},
    {"epoch": 0.77, "learning_rate": 0.0001368, "loss": 4.1355, "step": 229},
    {"epoch": 0.77, "learning_rate": 0.0001374, "loss": 4.1105, "step": 230},
    {"epoch": 0.78, "learning_rate": 0.000138, "loss": 4.2918, "step": 231},
    {"epoch": 0.78, "learning_rate": 0.0001386, "loss": 4.1313, "step": 232},
    {"epoch": 0.78, "learning_rate": 0.0001392, "loss": 4.2142, "step": 233},
    {"epoch": 0.79, "learning_rate": 0.00013979999999999998, "loss": 4.2723, "step": 234},
    {"epoch": 0.79, "learning_rate": 0.0001404, "loss": 4.24, "step": 235},
    {"epoch": 0.79, "learning_rate": 0.00014099999999999998, "loss": 4.0821, "step": 236},
    {"epoch": 0.8, "learning_rate": 0.00014159999999999997, "loss": 4.177, "step": 237},
    {"epoch": 0.8, "learning_rate": 0.0001422, "loss": 4.2761, "step": 238},
    {"epoch": 0.8, "learning_rate": 0.00014279999999999997, "loss": 4.1533, "step": 239},
    {"epoch": 0.81, "learning_rate": 0.0001434, "loss": 4.1522, "step": 240},
    {"epoch": 0.81, "learning_rate": 0.00014399999999999998, "loss": 4.3344, "step": 241},
    {"epoch": 0.81, "learning_rate": 0.0001446, "loss": 4.3554, "step": 242},
    {"epoch": 0.82, "learning_rate": 0.00014519999999999998, "loss": 4.1236, "step": 243},
    {"epoch": 0.82, "learning_rate": 0.0001458, "loss": 4.1599, "step": 244},
    {"epoch": 0.82, "learning_rate": 0.00014639999999999998, "loss": 4.1697, "step": 245},
    {"epoch": 0.83, "learning_rate": 0.000147, "loss": 4.2965, "step": 246},
    {"epoch": 0.83, "learning_rate": 0.00014759999999999998, "loss": 4.3114, "step": 247},
    {"epoch": 0.83, "learning_rate": 0.0001482, "loss": 4.2331, "step": 248},
    {"epoch": 0.84, "learning_rate": 0.00014879999999999998, "loss": 4.4326, "step": 249},
    {"epoch": 0.84, "learning_rate": 0.0001494, "loss": 4.1969, "step": 250},
    {"epoch": 0.84, "learning_rate": 0.00015, "loss": 4.3411, "step": 251},
    {"epoch": 0.85, "learning_rate": 0.00015059999999999997, "loss": 4.2298, "step": 252},
    {"epoch": 0.85, "learning_rate": 0.0001512, "loss": 4.2068, "step": 253},
    {"epoch": 0.85, "learning_rate": 0.00015179999999999998, "loss": 4.1757, "step": 254},
    {"epoch": 0.86, "learning_rate": 0.0001524, "loss": 4.1387, "step": 255},
    {"epoch": 0.86, "learning_rate": 0.00015299999999999998, "loss": 4.1108, "step": 256},
    {"epoch": 0.86, "learning_rate": 0.0001536, "loss": 4.3753, "step": 257},
    {"epoch": 0.87, "learning_rate": 0.00015419999999999998, "loss": 4.1422, "step": 258},
    {"epoch": 0.87, "learning_rate": 0.0001548, "loss": 4.2031, "step": 259},
    {"epoch": 0.87, "learning_rate": 0.00015539999999999998, "loss": 4.1135, "step": 260},
    {"epoch": 0.88, "learning_rate": 0.000156, "loss": 4.2013, "step": 261},
    {"epoch": 0.88, "learning_rate": 0.00015659999999999998, "loss": 4.2562, "step": 262},
    {"epoch": 0.88, "learning_rate": 0.0001572, "loss": 4.1273, "step": 263},
    {"epoch": 0.89, "learning_rate": 0.0001578, "loss": 4.1866, "step": 264},
    {"epoch": 0.89, "learning_rate": 0.0001584, "loss": 4.1688, "step": 265},
    {"epoch": 0.89, "learning_rate": 0.000159, "loss": 4.3492, "step": 266},
    {"epoch": 0.9, "learning_rate": 0.0001596, "loss": 4.1599, "step": 267},
    {"epoch": 0.9, "learning_rate": 0.0001602, "loss": 4.1327, "step": 268},
    {"epoch": 0.9, "learning_rate": 0.0001608, "loss": 4.0783, "step": 269},
    {"epoch": 0.91, "learning_rate": 0.0001614, "loss": 4.2492, "step": 270},
    {"epoch": 0.91, "learning_rate": 0.000162, "loss": 4.0783, "step": 271},
    {"epoch": 0.91, "learning_rate": 0.0001626, "loss": 4.0342, "step": 272},
    {"epoch": 0.92, "learning_rate": 0.0001632, "loss": 4.1312, "step": 273},
    {"epoch": 0.92, "learning_rate": 0.0001638, "loss": 4.1231, "step": 274},
    {"epoch": 0.92, "learning_rate": 0.0001644, "loss": 4.1969, "step": 275},
    {"epoch": 0.93, "learning_rate": 0.000165, "loss": 4.1583, "step": 276},
    {"epoch": 0.93, "learning_rate": 0.0001656, "loss": 4.161, "step": 277},
    {"epoch": 0.93, "learning_rate": 0.0001662, "loss": 4.0859, "step": 278},
    {"epoch": 0.94, "learning_rate": 0.0001668, "loss": 4.1179, "step": 279},
    {"epoch": 0.94, "learning_rate": 0.0001674, "loss": 4.1002, "step": 280},
    {"epoch": 0.94, "learning_rate": 0.000168, "loss": 4.2117, "step": 281},
    {"epoch": 0.95, "learning_rate": 0.0001686, "loss": 4.0938, "step": 282},
    {"epoch": 0.95, "learning_rate": 0.00016919999999999997, "loss": 4.2846, "step": 283},
    {"epoch": 0.96, "learning_rate": 0.00016979999999999998, "loss": 4.1019, "step": 284},
    {"epoch": 0.96, "learning_rate": 0.00017039999999999997, "loss": 4.2145, "step": 285},
    {"epoch": 0.96, "learning_rate": 0.00017099999999999998, "loss": 4.1841, "step": 286},
    {"epoch": 0.97, "learning_rate": 0.00017159999999999997, "loss": 4.1436, "step": 287},
    {"epoch": 0.97, "learning_rate": 0.00017219999999999998, "loss": 4.178, "step": 288},
    {"epoch": 0.97, "learning_rate": 0.00017279999999999997, "loss": 4.1402, "step": 289},
    {"epoch": 0.98, "learning_rate": 0.00017339999999999996, "loss": 4.2526, "step": 290},
    {"epoch": 0.98, "learning_rate": 0.00017399999999999997, "loss": 4.2094, "step": 291},
    {"epoch": 0.98, "learning_rate": 0.00017459999999999996, "loss": 4.3642, "step": 292},
    {"epoch": 0.99, "learning_rate": 0.00017519999999999998, "loss": 15.0924, "step": 293},
    {"epoch": 0.99, "learning_rate": 0.00017579999999999996, "loss": 6.5611, "step": 294},
    {"epoch": 0.99, "learning_rate": 0.00017639999999999998, "loss": 4.5139, "step": 295},
    {"epoch": 1.0, "learning_rate": 0.00017699999999999997, "loss": 4.8705, "step": 296},
    {"epoch": 1.0, "learning_rate": 0.00017759999999999998, "loss": 4.5291, "step": 297},
    {"epoch": 1.0, "step": 297, "total_flos": 0.0, "train_loss": 4.303745459225844, "train_runtime": 4795.9642, "train_samples_per_second": 5.95, "train_steps_per_second": 0.062}
  ],
  "max_steps": 297,
  "num_train_epochs": 1,
  "total_flos": 0.0,
  "trial_name": null,
  "trial_params": null
}