{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 892,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 4.7935, "step": 1},
    {"epoch": 0.0, "learning_rate": 6e-07, "loss": 5.0643, "step": 2},
    {"epoch": 0.0, "learning_rate": 1.2e-06, "loss": 4.9205, "step": 3},
    {"epoch": 0.0, "learning_rate": 1.8e-06, "loss": 4.9075, "step": 4},
    {"epoch": 0.01, "learning_rate": 2.4e-06, "loss": 4.7267, "step": 5},
    {"epoch": 0.01, "learning_rate": 2.9999999999999997e-06, "loss": 4.8143, "step": 6},
    {"epoch": 0.01, "learning_rate": 3.6e-06, "loss": 4.7767, "step": 7},
    {"epoch": 0.01, "learning_rate": 4.2e-06, "loss": 4.6701, "step": 8},
    {"epoch": 0.01, "learning_rate": 4.2e-06, "loss": 4.7857, "step": 9},
    {"epoch": 0.01, "learning_rate": 4.8e-06, "loss": 4.6694, "step": 10},
    {"epoch": 0.01, "learning_rate": 5.399999999999999e-06, "loss": 4.7207, "step": 11},
    {"epoch": 0.01, "learning_rate": 5.999999999999999e-06, "loss": 4.6639, "step": 12},
    {"epoch": 0.01, "learning_rate": 6.599999999999999e-06, "loss": 4.6055, "step": 13},
    {"epoch": 0.02, "learning_rate": 7.2e-06, "loss": 4.5577, "step": 14},
    {"epoch": 0.02, "learning_rate": 7.799999999999998e-06, "loss": 4.4804, "step": 15},
    {"epoch": 0.02, "learning_rate": 8.4e-06, "loss": 4.5974, "step": 16},
    {"epoch": 0.02, "learning_rate": 8.999999999999999e-06, "loss": 4.5684, "step": 17},
    {"epoch": 0.02, "learning_rate": 9.6e-06, "loss": 4.678, "step": 18},
    {"epoch": 0.02, "learning_rate": 1.02e-05, "loss": 4.3706, "step": 19},
    {"epoch": 0.02, "learning_rate": 1.0799999999999998e-05, "loss": 4.5311, "step": 20},
    {"epoch": 0.02, "learning_rate": 1.14e-05, "loss": 4.4249, "step": 21},
    {"epoch": 0.02, "learning_rate": 1.1999999999999999e-05, "loss": 4.415, "step": 22},
    {"epoch": 0.03, "learning_rate": 1.26e-05, "loss": 4.433, "step": 23},
    {"epoch": 0.03, "learning_rate": 1.3199999999999997e-05, "loss": 4.5236, "step": 24},
    {"epoch": 0.03, "learning_rate": 1.3799999999999998e-05, "loss": 4.4252, "step": 25},
    {"epoch": 0.03, "learning_rate": 1.44e-05, "loss": 4.5983, "step": 26},
    {"epoch": 0.03, "learning_rate": 1.4999999999999999e-05, "loss": 4.3082, "step": 27},
    {"epoch": 0.03, "learning_rate": 1.5599999999999996e-05, "loss": 4.2028, "step": 28},
    {"epoch": 0.03, "learning_rate": 1.6199999999999997e-05, "loss": 4.2437, "step": 29},
    {"epoch": 0.03, "learning_rate": 1.68e-05, "loss": 4.45, "step": 30},
    {"epoch": 0.03, "learning_rate": 1.74e-05, "loss": 4.4441, "step": 31},
    {"epoch": 0.04, "learning_rate": 1.7999999999999997e-05, "loss": 4.2113, "step": 32},
    {"epoch": 0.04, "learning_rate": 1.8599999999999998e-05, "loss": 4.3137, "step": 33},
    {"epoch": 0.04, "learning_rate": 1.92e-05, "loss": 4.2111, "step": 34},
    {"epoch": 0.04, "learning_rate": 1.98e-05, "loss": 4.4028, "step": 35},
    {"epoch": 0.04, "learning_rate": 2.04e-05, "loss": 4.3463, "step": 36},
    {"epoch": 0.04, "learning_rate": 2.1e-05, "loss": 4.3769, "step": 37},
    {"epoch": 0.04, "learning_rate": 2.1599999999999996e-05, "loss": 4.3872, "step": 38},
    {"epoch": 0.04, "learning_rate": 2.2199999999999998e-05, "loss": 4.5496, "step": 39},
    {"epoch": 0.04, "learning_rate": 2.28e-05, "loss": 4.3342, "step": 40},
    {"epoch": 0.05, "learning_rate": 2.34e-05, "loss": 4.2224, "step": 41},
    {"epoch": 0.05, "learning_rate": 2.3999999999999997e-05, "loss": 4.3825, "step": 42},
    {"epoch": 0.05, "learning_rate": 2.4599999999999998e-05, "loss": 4.1646, "step": 43},
    {"epoch": 0.05, "learning_rate": 2.52e-05, "loss": 4.3405, "step": 44},
    {"epoch": 0.05, "learning_rate": 2.5799999999999997e-05, "loss": 4.3905, "step": 45},
    {"epoch": 0.05, "learning_rate": 2.6399999999999995e-05, "loss": 4.5426, "step": 46},
    {"epoch": 0.05, "learning_rate": 2.6999999999999996e-05, "loss": 4.4031, "step": 47},
    {"epoch": 0.05, "learning_rate": 2.7599999999999997e-05, "loss": 4.7509, "step": 48},
    {"epoch": 0.05, "learning_rate": 2.8199999999999998e-05, "loss": 4.9197, "step": 49},
    {"epoch": 0.06, "learning_rate": 2.88e-05, "loss": 4.562, "step": 50},
    {"epoch": 0.06, "learning_rate": 2.94e-05, "loss": 4.1801, "step": 51},
    {"epoch": 0.06, "learning_rate": 2.9999999999999997e-05, "loss": 4.3261, "step": 52},
    {"epoch": 0.06, "learning_rate": 3.06e-05, "loss": 4.1817, "step": 53},
    {"epoch": 0.06, "learning_rate": 3.119999999999999e-05, "loss": 4.2294, "step": 54},
    {"epoch": 0.06, "learning_rate": 3.1799999999999994e-05, "loss": 4.2412, "step": 55},
    {"epoch": 0.06, "learning_rate": 3.2399999999999995e-05, "loss": 4.319, "step": 56},
    {"epoch": 0.06, "learning_rate": 3.2999999999999996e-05, "loss": 4.3087, "step": 57},
    {"epoch": 0.07, "learning_rate": 3.36e-05, "loss": 4.5114, "step": 58},
    {"epoch": 0.07, "learning_rate": 3.42e-05, "loss": 4.2629, "step": 59},
    {"epoch": 0.07, "learning_rate": 3.48e-05, "loss": 4.136, "step": 60},
    {"epoch": 0.07, "learning_rate": 3.539999999999999e-05, "loss": 4.3773, "step": 61},
    {"epoch": 0.07, "learning_rate": 3.5999999999999994e-05, "loss": 4.3272, "step": 62},
    {"epoch": 0.07, "learning_rate": 3.6599999999999995e-05, "loss": 4.1657, "step": 63},
    {"epoch": 0.07, "learning_rate": 3.7199999999999996e-05, "loss": 4.321, "step": 64},
    {"epoch": 0.07, "learning_rate": 3.78e-05, "loss": 4.1184, "step": 65},
    {"epoch": 0.07, "learning_rate": 3.84e-05, "loss": 4.348, "step": 66},
    {"epoch": 0.08, "learning_rate": 3.9e-05, "loss": 4.1546, "step": 67},
    {"epoch": 0.08, "learning_rate": 3.96e-05, "loss": 4.1783, "step": 68},
    {"epoch": 0.08, "learning_rate": 4.02e-05, "loss": 4.1314, "step": 69},
    {"epoch": 0.08, "learning_rate": 4.08e-05, "loss": 4.2552, "step": 70},
    {"epoch": 0.08, "learning_rate": 4.14e-05, "loss": 4.4319, "step": 71},
    {"epoch": 0.08, "learning_rate": 4.2e-05, "loss": 4.3253, "step": 72},
    {"epoch": 0.08, "learning_rate": 4.259999999999999e-05, "loss": 4.2573, "step": 73},
    {"epoch": 0.08, "learning_rate": 4.319999999999999e-05, "loss": 4.2875, "step": 74},
    {"epoch": 0.08, "learning_rate": 4.3799999999999994e-05, "loss": 4.1211, "step": 75},
    {"epoch": 0.09, "learning_rate": 4.4399999999999995e-05, "loss": 4.3568, "step": 76},
    {"epoch": 0.09, "learning_rate": 4.4999999999999996e-05, "loss": 4.2824, "step": 77},
    {"epoch": 0.09, "learning_rate": 4.56e-05, "loss": 4.2062, "step": 78},
    {"epoch": 0.09, "learning_rate": 4.62e-05, "loss": 4.0674, "step": 79},
    {"epoch": 0.09, "learning_rate": 4.68e-05, "loss": 4.3285, "step": 80},
    {"epoch": 0.09, "learning_rate": 4.7399999999999993e-05, "loss": 4.3773, "step": 81},
    {"epoch": 0.09, "learning_rate": 4.7999999999999994e-05, "loss": 4.1549, "step": 82},
    {"epoch": 0.09, "learning_rate": 4.8599999999999995e-05, "loss": 4.4001, "step": 83},
    {"epoch": 0.09, "learning_rate": 4.9199999999999997e-05, "loss": 4.3308, "step": 84},
    {"epoch": 0.1, "learning_rate": 4.98e-05, "loss": 4.2605, "step": 85},
    {"epoch": 0.1, "learning_rate": 5.04e-05, "loss": 4.2488, "step": 86},
    {"epoch": 0.1, "learning_rate": 5.1e-05, "loss": 4.255, "step": 87},
    {"epoch": 0.1, "learning_rate": 5.1599999999999994e-05, "loss": 4.2311, "step": 88},
    {"epoch": 0.1, "learning_rate": 5.2199999999999995e-05, "loss": 4.3115, "step": 89},
    {"epoch": 0.1, "learning_rate": 5.279999999999999e-05, "loss": 4.246, "step": 90},
    {"epoch": 0.1, "learning_rate": 5.339999999999999e-05, "loss": 4.4423, "step": 91},
    {"epoch": 0.1, "learning_rate": 5.399999999999999e-05, "loss": 4.389, "step": 92},
    {"epoch": 0.1, "learning_rate": 5.459999999999999e-05, "loss": 4.113, "step": 93},
    {"epoch": 0.11, "learning_rate": 5.519999999999999e-05, "loss": 4.3143, "step": 94},
    {"epoch": 0.11, "learning_rate": 5.5799999999999994e-05, "loss": 4.6075, "step": 95},
    {"epoch": 0.11, "learning_rate": 5.6399999999999995e-05, "loss": 4.4951, "step": 96},
    {"epoch": 0.11, "learning_rate": 5.6999999999999996e-05, "loss": 4.319, "step": 97},
    {"epoch": 0.11, "learning_rate": 5.76e-05, "loss": 4.5888, "step": 98},
    {"epoch": 0.11, "learning_rate": 5.82e-05, "loss": 4.3275, "step": 99},
    {"epoch": 0.11, "learning_rate": 5.88e-05, "loss": 4.3674, "step": 100},
    {"epoch": 0.11, "learning_rate": 5.94e-05, "loss": 4.2166, "step": 101},
    {"epoch": 0.11, "learning_rate": 5.9999999999999995e-05, "loss": 4.262, "step": 102},
    {"epoch": 0.12, "learning_rate": 6.0599999999999996e-05, "loss": 4.342, "step": 103},
    {"epoch": 0.12, "learning_rate": 6.12e-05, "loss": 4.2719, "step": 104},
    {"epoch": 0.12, "learning_rate": 6.18e-05, "loss": 4.1079, "step": 105},
    {"epoch": 0.12, "learning_rate": 6.239999999999999e-05, "loss": 4.3223, "step": 106},
    {"epoch": 0.12, "learning_rate": 6.299999999999999e-05, "loss": 4.3472, "step": 107},
    {"epoch": 0.12, "learning_rate": 6.359999999999999e-05, "loss": 4.4634, "step": 108},
    {"epoch": 0.12, "learning_rate": 6.419999999999999e-05, "loss": 4.0303, "step": 109},
    {"epoch": 0.12, "learning_rate": 6.479999999999999e-05, "loss": 4.1659, "step": 110},
    {"epoch": 0.12, "learning_rate": 6.539999999999999e-05, "loss": 4.3407, "step": 111},
    {"epoch": 0.13, "learning_rate": 6.599999999999999e-05, "loss": 4.2267, "step": 112},
    {"epoch": 0.13, "learning_rate": 6.659999999999999e-05, "loss": 4.1988, "step": 113},
    {"epoch": 0.13, "learning_rate": 6.72e-05, "loss": 4.1583, "step": 114},
    {"epoch": 0.13, "learning_rate": 6.78e-05, "loss": 4.2206, "step": 115},
    {"epoch": 0.13, "learning_rate": 6.84e-05, "loss": 3.9665, "step": 116},
    {"epoch": 0.13, "learning_rate": 6.9e-05, "loss": 4.2874, "step": 117},
    {"epoch": 0.13, "learning_rate": 6.96e-05, "loss": 4.1614, "step": 118},
    {"epoch": 0.13, "learning_rate": 7.02e-05, "loss": 4.278, "step": 119},
    {"epoch": 0.13, "learning_rate": 7.079999999999999e-05, "loss": 4.33, "step": 120},
    {"epoch": 0.14, "learning_rate": 7.139999999999999e-05, "loss": 4.2586, "step": 121},
    {"epoch": 0.14, "learning_rate": 7.199999999999999e-05, "loss": 4.2284, "step": 122},
    {"epoch": 0.14, "learning_rate": 7.259999999999999e-05, "loss": 4.0917, "step": 123},
    {"epoch": 0.14, "learning_rate": 7.319999999999999e-05, "loss": 4.3557, "step": 124},
    {"epoch": 0.14, "learning_rate": 7.379999999999999e-05, "loss": 4.2795, "step": 125},
    {"epoch": 0.14, "learning_rate": 7.439999999999999e-05, "loss": 4.3276, "step": 126},
    {"epoch": 0.14, "learning_rate": 7.5e-05, "loss": 4.3446, "step": 127},
    {"epoch": 0.14, "learning_rate": 7.56e-05, "loss": 4.2295, "step": 128},
    {"epoch": 0.14, "learning_rate": 7.62e-05, "loss": 4.1818, "step": 129},
    {"epoch": 0.15, "learning_rate": 7.68e-05, "loss": 4.3607, "step": 130},
    {"epoch": 0.15, "learning_rate": 7.74e-05, "loss": 4.2731, "step": 131},
    {"epoch": 0.15, "learning_rate": 7.8e-05, "loss": 4.3785, "step": 132},
    {"epoch": 0.15, "learning_rate": 7.86e-05, "loss": 4.3502, "step": 133},
    {"epoch": 0.15, "learning_rate": 7.92e-05, "loss": 4.3146, "step": 134},
    {"epoch": 0.15, "learning_rate": 7.98e-05, "loss": 4.1596, "step": 135},
    {"epoch": 0.15, "learning_rate": 8.04e-05, "loss": 4.2668, "step": 136},
    {"epoch": 0.15, "learning_rate": 8.1e-05, "loss": 4.5135, "step": 137},
    {"epoch": 0.15, "learning_rate": 8.16e-05, "loss": 4.4041, "step": 138},
    {"epoch": 0.16, "learning_rate": 8.22e-05, "loss": 4.3569, "step": 139},
    {"epoch": 0.16, "learning_rate": 8.28e-05, "loss": 4.1605, "step": 140},
    {"epoch": 0.16, "learning_rate": 8.34e-05, "loss": 4.4837, "step": 141},
    {"epoch": 0.16, "learning_rate": 8.4e-05, "loss": 4.5306, "step": 142},
    {"epoch": 0.16, "learning_rate": 8.459999999999998e-05, "loss": 4.3956, "step": 143},
    {"epoch": 0.16, "learning_rate": 8.519999999999998e-05, "loss": 4.3278, "step": 144},
    {"epoch": 0.16, "learning_rate": 8.579999999999998e-05, "loss": 4.6011, "step": 145},
    {"epoch": 0.16, "learning_rate": 8.639999999999999e-05, "loss": 4.4849, "step": 146},
    {"epoch": 0.16, "learning_rate": 8.699999999999999e-05, "loss": 4.5009, "step": 147},
    {"epoch": 0.17, "learning_rate": 8.759999999999999e-05, "loss": 4.5048, "step": 148},
    {"epoch": 0.17, "learning_rate": 8.819999999999999e-05, "loss": 4.6745, "step": 149},
    {"epoch": 0.17, "learning_rate": 8.879999999999999e-05, "loss": 4.407, "step": 150},
    {"epoch": 0.17, "learning_rate": 8.939999999999999e-05, "loss": 4.4633, "step": 151},
    {"epoch": 0.17, "learning_rate": 8.999999999999999e-05, "loss": 4.321, "step": 152},
    {"epoch": 0.17, "learning_rate": 9.059999999999999e-05, "loss": 4.4206, "step": 153},
    {"epoch": 0.17, "learning_rate": 9.12e-05, "loss": 4.1797, "step": 154},
    {"epoch": 0.17, "learning_rate": 9.18e-05, "loss": 4.1876, "step": 155},
    {"epoch": 0.17, "learning_rate": 9.24e-05, "loss": 4.2306, "step": 156},
    {"epoch": 0.18, "learning_rate": 9.3e-05, "loss": 4.4635, "step": 157},
    {"epoch": 0.18, "learning_rate": 9.36e-05, "loss": 4.4991, "step": 158},
    {"epoch": 0.18, "learning_rate": 9.419999999999999e-05, "loss": 4.5696, "step": 159},
    {"epoch": 0.18, "learning_rate": 9.479999999999999e-05, "loss": 4.2998, "step": 160},
    {"epoch": 0.18, "learning_rate": 9.539999999999999e-05, "loss": 4.334, "step": 161},
    {"epoch": 0.18, "learning_rate": 9.599999999999999e-05, "loss": 4.2896, "step": 162},
    {"epoch": 0.18, "learning_rate": 9.659999999999999e-05, "loss": 4.2937, "step": 163},
    {"epoch": 0.18, "learning_rate": 9.719999999999999e-05, "loss": 4.4709, "step": 164},
    {"epoch": 0.18, "learning_rate": 9.779999999999999e-05, "loss": 4.3272, "step": 165},
    {"epoch": 0.19, "learning_rate": 9.839999999999999e-05, "loss": 4.3714, "step": 166},
    {"epoch": 0.19, "learning_rate": 9.9e-05, "loss": 4.3042, "step": 167},
    {"epoch": 0.19, "learning_rate": 9.96e-05, "loss": 4.1345, "step": 168},
    {"epoch": 0.19, "learning_rate": 0.0001002, "loss": 4.2959, "step": 169},
    {"epoch": 0.19, "learning_rate": 0.0001008, "loss": 4.2594, "step": 170},
    {"epoch": 0.19, "learning_rate": 0.0001014, "loss": 4.4797, "step": 171},
    {"epoch": 0.19, "learning_rate": 0.000102, "loss": 4.5183, "step": 172},
    {"epoch": 0.19, "learning_rate": 0.0001026, "loss": 4.3362, "step": 173},
    {"epoch": 0.2, "learning_rate": 0.00010319999999999999, "loss": 4.2346, "step": 174},
    {"epoch": 0.2, "learning_rate": 0.00010379999999999999, "loss": 4.3054, "step": 175},
    {"epoch": 0.2, "learning_rate": 0.00010439999999999999, "loss": 4.1806, "step": 176},
    {"epoch": 0.2, "learning_rate": 0.00010499999999999999, "loss": 4.2475, "step": 177},
    {"epoch": 0.2, "learning_rate": 0.00010559999999999998, "loss": 4.2608, "step": 178},
    {"epoch": 0.2, "learning_rate": 0.00010619999999999998, "loss": 4.432, "step": 179},
    {"epoch": 0.2, "learning_rate": 0.00010679999999999998, "loss": 4.3326, "step": 180},
    {"epoch": 0.2, "learning_rate": 0.00010739999999999998, "loss": 4.3451, "step": 181},
    {"epoch": 0.2, "learning_rate": 0.00010799999999999998, "loss": 4.2187, "step": 182},
    {"epoch": 0.21, "learning_rate": 0.00010859999999999998, "loss": 4.3603, "step": 183},
    {"epoch": 0.21, "learning_rate": 0.00010919999999999998, "loss": 4.4496, "step": 184},
    {"epoch": 0.21, "learning_rate": 0.00010979999999999999, "loss": 4.3975, "step": 185},
    {"epoch": 0.21, "learning_rate": 0.00011039999999999999, "loss": 4.2659, "step": 186},
    {"epoch": 0.21, "learning_rate": 0.00011099999999999999, "loss": 4.4568, "step": 187},
    {"epoch": 0.21, "learning_rate": 0.00011159999999999999, "loss": 4.151, "step": 188},
    {"epoch": 0.21, "learning_rate": 0.00011219999999999999, "loss": 4.2923, "step": 189},
    {"epoch": 0.21, "learning_rate": 0.00011279999999999999, "loss": 4.4521, "step": 190},
    {"epoch": 0.21, "learning_rate": 0.00011339999999999999, "loss": 4.4017, "step": 191},
    {"epoch": 0.22, "learning_rate": 0.00011399999999999999, "loss": 4.4227, "step": 192},
    {"epoch": 0.22, "learning_rate": 0.0001146, "loss": 4.6037, "step": 193},
    {"epoch": 0.22, "learning_rate": 0.0001152, "loss": 4.2636, "step": 194},
    {"epoch": 0.22, "learning_rate": 0.0001158, "loss": 4.7476, "step": 195},
    {"epoch": 0.22, "learning_rate": 0.0001164, "loss": 4.7315, "step": 196},
    {"epoch": 0.22, "learning_rate": 0.000117, "loss": 4.6674, "step": 197},
    {"epoch": 0.22, "learning_rate": 0.0001176, "loss": 5.1666, "step": 198},
    {"epoch": 0.22, "learning_rate": 0.0001182, "loss": 5.1316, "step": 199},
    {"epoch": 0.22, "learning_rate": 0.0001188, "loss": 4.7765, "step": 200},
    {"epoch": 0.23, "learning_rate": 0.0001194, "loss": 4.3566, "step": 201},
    {"epoch": 0.23, "learning_rate": 0.00011999999999999999, "loss": 4.4518, "step": 202},
    {"epoch": 0.23, "learning_rate": 0.00012059999999999999, "loss": 4.4986, "step": 203},
    {"epoch": 0.23, "learning_rate": 0.00012119999999999999, "loss": 4.3431, "step": 204},
    {"epoch": 0.23, "learning_rate": 0.00012179999999999999, "loss": 4.238, "step": 205},
    {"epoch": 0.23, "learning_rate": 0.0001224, "loss": 4.2508, "step": 206},
    {"epoch": 0.23, "learning_rate": 0.00012299999999999998, "loss": 4.3551, "step": 207},
    {"epoch": 0.23, "learning_rate": 0.0001236, "loss": 4.1455, "step": 208},
    {"epoch": 0.23, "learning_rate": 0.00012419999999999998, "loss": 4.3884, "step": 209},
    {"epoch": 0.24, "learning_rate": 0.00012479999999999997, "loss": 4.3141, "step": 210},
    {"epoch": 0.24, "learning_rate": 0.00012539999999999999, "loss": 4.4313, "step": 211},
    {"epoch": 0.24, "learning_rate": 0.00012599999999999997, "loss": 4.4131, "step": 212},
    {"epoch": 0.24, "learning_rate": 0.0001266, "loss": 4.3574, "step": 213},
    {"epoch": 0.24, "learning_rate": 0.00012719999999999997, "loss": 4.5215, "step": 214},
    {"epoch": 0.24, "learning_rate": 0.0001278, "loss": 4.1939, "step": 215},
    {"epoch": 0.24, "learning_rate": 0.00012839999999999998, "loss": 4.3366, "step": 216},
    {"epoch": 0.24, "learning_rate": 0.000129, "loss": 4.4412, "step": 217},
    {"epoch": 0.24, "learning_rate": 0.00012959999999999998, "loss": 4.3818, "step": 218},
    {"epoch": 0.25, "learning_rate": 0.0001302, "loss": 4.4043, "step": 219},
    {"epoch": 0.25, "learning_rate": 0.00013079999999999998, "loss": 4.4044, "step": 220},
    {"epoch": 0.25, "learning_rate": 0.0001314, "loss": 4.208, "step": 221},
    {"epoch": 0.25, "learning_rate": 0.00013199999999999998, "loss": 4.3869, "step": 222},
    {"epoch": 0.25, "learning_rate": 0.0001326, "loss": 4.4836, "step": 223},
    {"epoch": 0.25, "learning_rate": 0.00013319999999999999, "loss": 4.367, "step": 224},
    {"epoch": 0.25, "learning_rate": 0.0001338, "loss": 4.63, "step": 225},
    {"epoch": 0.25, "learning_rate": 0.0001344, "loss": 4.5293, "step": 226},
    {"epoch": 0.25, "learning_rate": 0.000135, "loss": 4.487, "step": 227},
    {"epoch": 0.26, "learning_rate": 0.0001356, "loss": 4.4011, "step": 228},
    {"epoch": 0.26, "learning_rate": 0.0001362, "loss": 4.2859, "step": 229},
    {"epoch": 0.26, "learning_rate": 0.0001368, "loss": 4.4017, "step": 230},
    {"epoch": 0.26, "learning_rate": 0.0001374, "loss": 4.6719, "step": 231},
    {"epoch": 0.26, "learning_rate": 0.000138, "loss": 4.5177, "step": 232},
    {"epoch": 0.26, "learning_rate": 0.0001386, "loss": 4.3213, "step": 233},
    {"epoch": 0.26, "learning_rate": 0.0001392, "loss": 4.5602, "step": 234},
    {"epoch": 0.26, "learning_rate": 0.00013979999999999998, "loss": 4.5167, "step": 235},
    {"epoch": 0.26, "learning_rate": 0.0001404, "loss": 4.3494, "step": 236},
    {"epoch": 0.27, "learning_rate": 0.00014099999999999998, "loss": 4.2911, "step": 237},
    {"epoch": 0.27, "learning_rate": 0.00014159999999999997, "loss": 4.6164, "step": 238},
    {"epoch": 0.27, "learning_rate": 0.0001422, "loss": 4.3868, "step": 239},
    {"epoch": 0.27, "learning_rate": 0.00014279999999999997, "loss": 4.4622, "step": 240},
    {"epoch": 0.27, "learning_rate": 0.0001434, "loss": 4.5795, "step": 241},
    {"epoch": 0.27, "learning_rate": 0.00014399999999999998, "loss": 4.6158, "step": 242},
    {"epoch": 0.27, "learning_rate": 0.0001446, "loss": 4.786, "step": 243},
    {"epoch": 0.27, "learning_rate": 0.00014519999999999998, "loss": 5.3147, "step": 244},
    {"epoch": 0.27, "learning_rate": 0.0001458, "loss": 4.7131, "step": 245},
    {"epoch": 0.28, "learning_rate": 0.00014639999999999998, "loss": 4.5343, "step": 246},
    {"epoch": 0.28, "learning_rate": 0.000147, "loss": 5.0141, "step": 247},
    {"epoch": 0.28, "learning_rate": 0.00014759999999999998, "loss": 4.8573, "step": 248},
    {"epoch": 0.28, "learning_rate": 0.0001482, "loss": 5.0538, "step": 249},
    {"epoch": 0.28, "learning_rate": 0.00014879999999999998, "loss": 4.4255, "step": 250},
    {"epoch": 0.28, "learning_rate": 0.0001494, "loss": 4.7096, "step": 251},
    {"epoch": 0.28, "learning_rate": 0.00015, "loss": 4.4132, "step": 252},
    {"epoch": 0.28, "learning_rate": 0.00015059999999999997, "loss": 4.5483, "step": 253},
    {"epoch": 0.28, "learning_rate": 0.0001512, "loss": 4.5101, "step": 254},
    {"epoch": 0.29, "learning_rate": 0.00015179999999999998, "loss": 4.4749, "step": 255},
    {"epoch": 0.29, "learning_rate": 0.0001524, "loss": 4.4696, "step": 256},
    {"epoch": 0.29, "learning_rate": 0.00015299999999999998, "loss": 4.5023, "step": 257},
    {"epoch": 0.29, "learning_rate": 0.0001536, "loss": 4.2994, "step": 258},
    {"epoch": 0.29, "learning_rate": 0.00015419999999999998, "loss": 4.4836, "step": 259},
    {"epoch": 0.29, "learning_rate": 0.0001548, "loss": 4.4787, "step": 260},
    {"epoch": 0.29, "learning_rate": 0.00015539999999999998, "loss": 4.5402, "step": 261},
    {"epoch": 0.29, "learning_rate": 0.000156, "loss": 4.4271, "step": 262},
    {"epoch": 0.29, "learning_rate": 0.00015659999999999998, "loss": 4.3551, "step": 263},
    {"epoch": 0.3, "learning_rate": 0.0001572, "loss": 4.356, "step": 264},
    {"epoch": 0.3, "learning_rate": 0.0001578, "loss": 4.6543, "step": 265},
    {"epoch": 0.3, "learning_rate": 0.0001584, "loss": 4.7402, "step": 266},
    {"epoch": 0.3, "learning_rate": 0.000159, "loss": 4.3412, "step": 267},
    {"epoch": 0.3, "learning_rate": 0.0001596, "loss": 4.7831, "step": 268},
    {"epoch": 0.3, "learning_rate": 0.0001602, "loss": 4.3223, "step": 269},
    {"epoch": 0.3, "learning_rate": 0.0001608, "loss": 4.7522, "step": 270},
    {"epoch": 0.3, "learning_rate": 0.0001614, "loss": 4.4004, "step": 271},
    {"epoch": 0.3, "learning_rate": 0.000162, "loss": 4.3634, "step": 272},
    {"epoch": 0.31, "learning_rate": 0.0001626, "loss": 4.5498, "step": 273},
    {"epoch": 0.31, "learning_rate": 0.0001632, "loss": 4.2597, "step": 274},
    {"epoch": 0.31, "learning_rate": 0.0001638, "loss": 4.6339, "step": 275},
    {"epoch": 0.31, "learning_rate": 0.0001644, "loss": 4.6287, "step": 276},
    {"epoch": 0.31, "learning_rate": 0.000165, "loss": 4.4704, "step": 277},
    {"epoch": 0.31, "learning_rate": 0.0001656, "loss": 4.2664, "step": 278},
    {"epoch": 0.31, "learning_rate": 0.0001662, "loss": 4.3407, "step": 279},
    {"epoch": 0.31, "learning_rate": 0.0001668, "loss": 4.609, "step": 280},
    {"epoch": 0.32, "learning_rate": 0.0001674, "loss": 4.3177, "step": 281},
    {"epoch": 0.32, "learning_rate": 0.000168, "loss": 4.4474, "step": 282},
    {"epoch": 0.32, "learning_rate": 0.0001686, "loss": 4.1915, "step": 283},
    {"epoch": 0.32, "learning_rate": 0.00016919999999999997, "loss": 4.472, "step": 284},
    {"epoch": 0.32, "learning_rate": 0.00016979999999999998, "loss": 4.5332, "step": 285},
    {"epoch": 0.32, "learning_rate": 0.00017039999999999997, "loss": 4.4325, "step": 286},
    {"epoch": 0.32, "learning_rate": 0.00017099999999999998, "loss": 4.6275, "step": 287},
    {"epoch": 0.32, "learning_rate": 0.00017159999999999997, "loss": 4.6464, "step": 288},
    {"epoch": 0.32, "learning_rate": 0.00017219999999999998, "loss": 4.6767, "step": 289},
    {"epoch": 0.33, "learning_rate": 0.00017279999999999997, "loss": 4.5076, "step": 290},
    {"epoch": 0.33, "learning_rate": 0.00017339999999999996, "loss": 4.5415, "step": 291},
    {"epoch": 0.33, "learning_rate": 0.00017399999999999997, "loss": 4.346, "step": 292},
    {"epoch": 0.33, "learning_rate": 0.00017459999999999996, "loss": 5.3879, "step": 293},
    {"epoch": 0.33, "learning_rate": 0.00017519999999999998, "loss": 4.4703, "step": 294},
    {"epoch": 0.33, "learning_rate": 0.00017579999999999996, "loss": 4.5646, "step": 295},
    {"epoch": 0.33, "learning_rate": 0.00017639999999999998, "loss": 5.0213, "step": 296},
    {"epoch": 0.33, "learning_rate": 0.00017699999999999997, "loss": 4.9791, "step": 297},
    {"epoch": 0.33, "learning_rate": 0.00017759999999999998, "loss": 4.6661, "step": 298},
    {"epoch": 0.34, "learning_rate": 0.00017819999999999997, "loss": 5.1077, "step": 299},
    {"epoch": 0.34, "learning_rate": 0.00017879999999999998, "loss": 4.7887, "step": 300},
    {"epoch": 0.34, "learning_rate": 0.00017939999999999997, "loss": 4.2862, "step": 301},
    {"epoch": 0.34, "learning_rate": 0.00017999999999999998, "loss": 4.3871, "step": 302},
    {"epoch": 0.34, "learning_rate": 0.00018059999999999997, "loss": 4.4105, "step": 303},
    {"epoch": 0.34, "learning_rate": 0.00018119999999999999, "loss": 4.4237, "step": 304},
    {"epoch": 0.34, "learning_rate": 0.00018179999999999997, "loss": 4.2553, "step": 305},
    {"epoch": 0.34, "learning_rate": 0.0001824, "loss": 4.4235, "step": 306},
    {"epoch": 0.34, "learning_rate": 0.00018299999999999998, "loss": 4.297, "step": 307},
    {"epoch": 0.35, "learning_rate": 0.0001836, "loss": 4.7458, "step": 308},
    {"epoch": 0.35, "learning_rate": 0.00018419999999999998, "loss": 4.5352, "step": 309},
    {"epoch": 0.35, "learning_rate": 0.0001848, "loss": 4.5423, "step": 310},
    {"epoch": 0.35, "learning_rate": 0.00018539999999999998, "loss": 4.663, "step": 311},
    {"epoch": 0.35, "learning_rate": 0.000186, "loss": 4.4946, "step": 312},
    {"epoch": 0.35, "learning_rate": 0.00018659999999999998, "loss": 4.437, "step": 313},
    {"epoch": 0.35, "learning_rate": 0.0001872, "loss": 4.5147, "step": 314},
    {"epoch": 0.35, "learning_rate": 0.00018779999999999998, "loss": 4.3544, "step": 315},
    {"epoch": 0.35, "learning_rate": 0.00018839999999999997, "loss": 4.6494, "step": 316},
    {"epoch": 0.36, "learning_rate": 0.00018899999999999999, "loss": 4.3586, "step": 317},
    {"epoch": 0.36, "learning_rate": 0.00018959999999999997, "loss": 4.5048, "step": 318},
    {"epoch": 0.36, "learning_rate": 0.0001902, "loss": 4.633, "step": 319},
    {"epoch": 0.36, "learning_rate": 0.00019079999999999998, "loss": 4.3602, "step": 320},
    {"epoch": 0.36, "learning_rate": 0.0001914, "loss": 4.6183, "step": 321},
    {"epoch": 0.36, "learning_rate": 0.00019199999999999998, "loss": 4.3107, "step": 322},
    {"epoch": 0.36, "learning_rate": 0.0001926, "loss": 4.573, "step": 323},
    {"epoch": 0.36, "learning_rate": 0.00019319999999999998, "loss": 4.8246, "step": 324},
    {"epoch": 0.36, "learning_rate": 0.0001938, "loss": 4.7945, "step": 325},
    {"epoch": 0.37, "learning_rate": 0.00019439999999999998, "loss": 4.5823, "step": 326},
    {"epoch": 0.37, "learning_rate": 0.000195, "loss": 4.2816, "step": 327},
    {"epoch": 0.37, "learning_rate": 0.00019559999999999998, "loss": 4.3848, "step": 328},
    {"epoch": 0.37, "learning_rate": 0.0001962, "loss": 4.911, "step": 329},
    {"epoch": 0.37, "learning_rate": 0.00019679999999999999, "loss": 4.8547, "step": 330},
    {"epoch": 0.37, "learning_rate": 0.0001974, "loss": 4.4584, "step": 331},
    {"epoch": 0.37, "learning_rate": 0.000198, "loss": 4.5381, "step": 332},
    {"epoch": 0.37, "learning_rate": 0.0001986, "loss": 4.6458, "step": 333},
    {"epoch": 0.37, "learning_rate": 0.0001992, "loss": 4.7019, "step": 334},
    {"epoch": 0.38, "learning_rate": 0.0001998, "loss": 4.6578, "step": 335},
    {"epoch": 0.38, "learning_rate": 0.0002004, "loss": 4.4093, "step": 336},
    {"epoch": 0.38, "learning_rate": 0.000201, "loss": 4.3511, "step": 337},
    {"epoch": 0.38, "learning_rate": 0.0002016, "loss": 4.5368, "step": 338},
    {"epoch": 0.38, "learning_rate": 0.0002022, "loss": 4.4722, "step": 339},
    {"epoch": 0.38, "learning_rate": 0.0002028, "loss": 4.5231, "step": 340},
    {"epoch": 0.38, "learning_rate": 0.00020339999999999998, "loss": 5.0982, "step": 341},
    {"epoch": 0.38, "learning_rate": 0.000204, "loss": 4.6772, "step": 342},
    {"epoch": 0.38, "learning_rate": 0.00020459999999999999, "loss": 4.6195, "step": 343},
    {"epoch": 0.39, "learning_rate": 0.0002052, "loss": 4.5202, "step": 344},
    {"epoch": 0.39, "learning_rate": 0.0002058, "loss": 4.5568, "step": 345},
    {"epoch": 0.39, "learning_rate": 0.00020639999999999998, "loss": 4.7313, "step": 346},
    {"epoch": 0.39, "learning_rate": 0.00020699999999999996, "loss": 5.9084, "step": 347},
    {"epoch": 0.39, "learning_rate": 0.00020759999999999998, "loss": 4.6661, "step": 348},
    {"epoch": 0.39, "learning_rate": 0.00020819999999999996, "loss": 4.9368, "step": 349},
    {"epoch": 0.39, "learning_rate": 0.00020879999999999998, "loss": 5.0985, "step": 350},
    {"epoch": 0.39, "learning_rate": 0.00020939999999999997, "loss": 4.8769, "step": 351},
    {"epoch": 0.39, "learning_rate": 0.00020999999999999998, "loss": 4.4071, "step": 352},
    {"epoch": 0.4, "learning_rate": 0.00021059999999999997, "loss": 4.4793, "step": 353},
    {"epoch": 0.4, "learning_rate": 0.00021119999999999996, "loss": 4.5593, "step": 354},
    {"epoch": 0.4, "learning_rate": 0.00021179999999999997, "loss": 5.1268, "step": 355},
    {"epoch": 0.4, "learning_rate": 0.00021239999999999996, "loss": 4.5701, "step": 356},
    {"epoch": 0.4, "learning_rate": 0.00021299999999999997, "loss": 4.723, "step": 357},
    {"epoch": 0.4, "learning_rate": 0.00021359999999999996, "loss": 4.9217, "step": 358},
    {"epoch": 0.4, "learning_rate": 0.00021419999999999998, "loss": 4.5271, "step": 359},
    {"epoch": 0.4, "learning_rate": 0.00021479999999999996, "loss": 4.6167, "step": 360},
    {"epoch": 0.4, "learning_rate": 0.00021539999999999998, "loss": 4.6068, "step": 361},
    {"epoch": 0.41, "learning_rate": 0.00021599999999999996, "loss": 4.3911, "step": 362},
    {"epoch": 0.41, "learning_rate": 0.00021659999999999998, "loss": 4.3834, "step": 363},
    {"epoch": 0.41, "learning_rate": 0.00021719999999999997, "loss": 4.8017, "step": 364},
    {"epoch": 0.41, "learning_rate": 0.00021779999999999998, "loss": 4.3505, "step": 365},
    {"epoch": 0.41, "learning_rate": 0.00021839999999999997, "loss": 4.9693, "step": 366},
    {"epoch": 0.41, "learning_rate": 0.00021899999999999998, "loss": 4.5851, "step": 367},
    {"epoch": 0.41, "learning_rate": 0.00021959999999999997, "loss": 4.4264, "step": 368},
    {"epoch": 0.41, "learning_rate": 0.00022019999999999999, "loss": 4.5813, "step": 369},
    {"epoch": 0.41, "learning_rate": 0.00022079999999999997, "loss": 4.4873, "step": 370},
    {"epoch": 0.42, "learning_rate": 0.0002214, "loss": 4.4055, "step": 371},
    {"epoch": 0.42, "learning_rate": 0.00022199999999999998, "loss": 4.7036, "step": 372},
    {"epoch": 0.42, "learning_rate": 0.0002226, "loss": 4.5203, "step": 373},
    {"epoch": 0.42, "learning_rate": 0.00022319999999999998, "loss": 4.9871, "step": 374},
    {"epoch": 0.42, "learning_rate": 0.0002238, "loss": 4.3301, "step": 375},
    {"epoch": 0.42, "learning_rate": 0.00022439999999999998, "loss": 4.7857, "step": 376},
    {"epoch": 0.42, "learning_rate": 0.000225, "loss": 4.4468, "step": 377},
    {"epoch": 0.42, "learning_rate": 0.00022559999999999998, "loss": 4.5666, "step": 378},
    {"epoch": 0.42, "learning_rate": 0.00022619999999999997, "loss": 4.4932, "step": 379},
    {"epoch": 0.43, "learning_rate": 0.00022679999999999998, "loss": 4.5757, "step": 380},
    {"epoch": 0.43, "learning_rate": 0.00022739999999999997, "loss": 4.5106, "step": 381},
    {"epoch": 0.43, "learning_rate": 0.00022799999999999999, "loss": 4.54, "step": 382},
    {"epoch": 0.43, "learning_rate": 0.00022859999999999997, "loss": 5.2259, "step": 383},
    {"epoch": 0.43, "learning_rate": 0.0002292, "loss": 4.4351, "step": 384},
    {"epoch": 0.43, "learning_rate": 0.00022979999999999997, "loss": 4.3878, "step": 385},
    {"epoch": 0.43, "learning_rate": 0.0002304, "loss": 4.6613, "step": 386},
    {"epoch": 0.43, "learning_rate": 0.00023099999999999998, "loss": 4.5201, "step": 387},
    {"epoch": 0.43, "learning_rate": 0.0002316, "loss": 4.5499, "step": 388},
    {"epoch": 0.44, "learning_rate": 0.00023219999999999998, "loss": 4.5244, "step": 389},
    {"epoch": 0.44, "learning_rate": 0.0002328, "loss": 4.6239, "step": 390},
    {"epoch": 0.44, "learning_rate": 0.00023339999999999998, "loss": 4.8577, "step": 391},
    {"epoch": 0.44, "learning_rate": 0.000234, "loss": 4.775, "step": 392},
    {"epoch": 0.44, "learning_rate": 0.00023459999999999998, "loss": 4.8676, "step": 393},
    {"epoch": 0.44, "learning_rate": 0.0002352, "loss": 4.7473, "step": 394},
    {"epoch": 0.44, "learning_rate": 0.00023579999999999999, "loss": 5.1698, "step": 395},
    {"epoch": 0.44, "learning_rate": 0.0002364, "loss": 5.0435, "step": 396},
    {"epoch": 0.45, "learning_rate": 0.000237, "loss": 5.0145, "step": 397},
    {"epoch": 0.45, "learning_rate": 0.0002376, "loss": 4.9902, "step": 398},
    {"epoch": 0.45, "learning_rate": 0.0002382, "loss": 4.8242, "step": 399},
    {"epoch": 0.45, "learning_rate": 0.0002388, "loss": 4.968, "step": 400},
    {"epoch": 0.45, "learning_rate": 0.0002394, "loss": 4.7138, "step": 401},
    {"epoch": 0.45, "learning_rate": 0.00023999999999999998, "loss": 4.6469, "step": 402},
    {"epoch": 0.45, "learning_rate": 0.0002406, "loss": 5.0462, "step": 403},
    {"epoch": 0.45, "learning_rate": 0.00024119999999999998, "loss": 4.5933, "step": 404},
    {"epoch": 0.45, "learning_rate": 0.0002418, "loss": 4.6843, "step": 405},
    {"epoch": 0.46, "learning_rate": 0.00024239999999999998, "loss": 4.6204, "step": 406},
    {"epoch": 0.46, "learning_rate": 0.000243, "loss": 4.4525, "step": 407},
    {"epoch": 0.46, "learning_rate": 0.00024359999999999999, "loss": 4.5578, "step": 408},
    {"epoch": 0.46, "learning_rate": 0.00024419999999999997, "loss": 4.6749, "step": 409},
    {"epoch": 0.46, "learning_rate": 0.0002448, "loss": 4.6923, "step": 410},
    {"epoch": 0.46, "learning_rate": 0.00024539999999999995, "loss": 4.5342, "step": 411},
    {"epoch": 0.46, "learning_rate": 0.00024599999999999996, "loss": 4.5524, "step": 412},
    {"epoch": 0.46, "learning_rate": 0.0002466, "loss": 4.9933, "step": 413},
    {"epoch": 0.46, "learning_rate": 0.0002472, "loss": 4.2736, "step": 414},
    {"epoch": 0.47, "learning_rate": 0.00024779999999999995, "loss": 4.549, "step": 415},
    {"epoch": 0.47, "learning_rate": 0.00024839999999999997, "loss": 4.579, "step": 416},
    {"epoch": 0.47, "learning_rate": 0.000249, "loss": 4.5159, "step": 417},
    {"epoch": 0.47, "learning_rate": 0.00024959999999999994, "loss": 4.5826, "step": 418},
    {"epoch": 0.47, "learning_rate": 0.00025019999999999996, "loss": 4.4912, "step": 419},
    {"epoch": 0.47, "learning_rate": 0.00025079999999999997, "loss": 4.472, "step": 420},
    {"epoch": 0.47, "learning_rate": 0.0002514, "loss": 4.5307, "step": 421},
    {"epoch": 0.47, "learning_rate": 0.00025199999999999995, "loss": 4.8471, "step": 422},
    {"epoch": 0.47, "learning_rate": 0.00025259999999999996, "loss": 4.715, "step": 423},
    {"epoch": 0.48, "learning_rate": 0.0002532, "loss": 4.7253, "step": 424},
    {"epoch": 0.48, "learning_rate": 0.0002538, "loss": 4.6058, "step": 425},
    {"epoch": 0.48, "learning_rate": 0.00025439999999999995, "loss": 4.5831, "step": 426},
    {"epoch": 0.48, "learning_rate": 0.00025499999999999996, "loss": 4.6162, "step": 427},
    {"epoch": 0.48, "learning_rate": 0.0002556, "loss": 4.8844, "step": 428},
    {"epoch": 0.48, "learning_rate": 0.0002562, "loss": 4.6076, "step": 429},
    {"epoch": 0.48, "learning_rate": 0.00025679999999999995, "loss": 4.5971, "step": 430},
    {"epoch": 0.48, "learning_rate": 0.00025739999999999997, "loss": 4.8466, "step": 431},
    {"epoch": 0.48, "learning_rate": 0.000258, "loss": 4.9865, "step": 432},
    {"epoch": 0.49, "learning_rate": 0.0002586, "loss": 4.4494, "step": 433},
    {"epoch": 0.49, "learning_rate": 0.00025919999999999996, "loss": 4.6771, "step": 434},
    {"epoch": 0.49, "learning_rate": 0.00025979999999999997, "loss": 4.5576, "step": 435},
    {"epoch": 0.49, "learning_rate": 0.0002604, "loss": 4.8061, "step": 436},
    {"epoch": 0.49, "learning_rate": 0.000261, "loss": 5.0413, "step": 437},
    {"epoch": 0.49, "learning_rate": 0.00026159999999999996, "loss": 4.6549, "step": 438},
    {"epoch": 0.49, "learning_rate": 0.0002622, "loss": 4.5003, "step": 439},
    {"epoch": 0.49, "learning_rate": 0.0002628, "loss": 4.6389, "step": 440},
    {"epoch": 0.49, "learning_rate": 0.00026339999999999995, "loss": 4.8199, "step": 441},
    {"epoch": 0.5, "learning_rate": 0.00026399999999999997, "loss": 4.4152, "step": 442},
    {"epoch": 0.5, "learning_rate": 0.0002646, "loss": 4.6895, "step": 443},
    {"epoch": 0.5, "learning_rate": 0.0002652, "loss": 5.2644, "step": 444},
    {"epoch": 0.5, "learning_rate": 0.00026579999999999996, "loss": 4.6618, "step": 445},
    {"epoch": 0.5, "learning_rate": 0.00026639999999999997, "loss": 4.4472, "step": 446},
    {"epoch": 0.5, "learning_rate": 0.000267, "loss": 4.656, "step": 447},
    {"epoch": 0.5, "learning_rate": 0.0002676, "loss": 6.3588, "step": 448},
    {"epoch": 0.5, "learning_rate": 0.00026819999999999996, "loss": 5.6505, "step": 449},
    {"epoch": 0.5, "learning_rate": 0.0002688, "loss": 7.89, "step": 450},
    {"epoch": 0.51, "learning_rate": 0.0002694, "loss": 4.7483, "step": 451},
    {"epoch": 0.51, "learning_rate": 0.00027, "loss": 4.8522, "step": 452},
    {"epoch": 0.51, "learning_rate": 0.00027059999999999996, "loss": 4.8658, "step": 453},
    {"epoch": 0.51, "learning_rate": 0.0002712, "loss": 4.5986, "step": 454},
    {"epoch": 0.51, "learning_rate": 0.0002718, "loss": 4.8901, "step": 455},
    {"epoch": 0.51, "learning_rate": 0.0002724, "loss": 4.9632, "step": 456},
    {"epoch": 0.51, "learning_rate": 0.00027299999999999997, "loss": 4.6942, "step": 457},
    {"epoch": 0.51, "learning_rate": 0.0002736, "loss": 4.7098, "step": 458},
    {"epoch": 0.51, "learning_rate": 0.0002742, "loss": 4.726, "step": 459},
    {"epoch": 0.52, "learning_rate": 0.0002748, "loss": 4.8856, "step": 460},
    {"epoch": 0.52, "learning_rate": 0.00027539999999999997, "loss": 4.4117, "step": 461},
    {"epoch": 0.52, "learning_rate": 0.000276, "loss": 4.7871, "step": 462},
    {"epoch": 0.52, "learning_rate": 0.0002766, "loss": 4.755, "step": 463},
    {"epoch": 0.52, "learning_rate": 0.0002772, "loss": 4.4392, "step": 464},
    {"epoch": 0.52, "learning_rate": 0.0002778, "loss": 4.5389, "step": 465},
    {"epoch": 0.52, "learning_rate": 0.0002784, "loss": 4.839, "step": 466},
    {"epoch": 0.52, "learning_rate": 0.000279, "loss": 4.5853, "step": 467},
    {"epoch": 0.52, "learning_rate": 0.00027959999999999997, "loss": 4.7598, "step": 468},
    {"epoch": 0.53, "learning_rate": 0.0002802, "loss": 4.9769, "step": 469},
    {"epoch": 0.53, "learning_rate": 0.0002808, "loss": 4.5845, "step": 470},
    {"epoch": 0.53, "learning_rate": 0.00028139999999999996, "loss": 4.3553, "step": 471},
    {"epoch": 0.53, "learning_rate": 0.00028199999999999997, "loss": 4.5508, "step": 472},
    {"epoch": 0.53, "learning_rate": 0.0002826, "loss": 5.4725, "step": 473},
    {"epoch": 0.53, "learning_rate": 0.00028319999999999994, "loss": 4.7822, "step": 474},
    {"epoch": 0.53, "learning_rate": 0.00028379999999999996, "loss": 4.7387, "step": 475},
    {"epoch": 0.53, "learning_rate": 0.0002844, "loss": 4.7934, "step": 476},
    {"epoch": 0.53, "learning_rate": 0.000285, "loss": 4.951, "step": 477},
    {"epoch": 0.54, "learning_rate": 0.00028559999999999995, "loss": 4.5306, "step": 478},
    {"epoch": 0.54, "learning_rate": 0.00028619999999999996, "loss": 4.6858, "step": 479},
    {"epoch": 0.54, "learning_rate": 0.0002868, "loss": 4.6976, "step": 480},
    {"epoch": 0.54, "learning_rate": 0.00028739999999999994, "loss": 4.573, "step": 481},
    {"epoch": 0.54, "learning_rate": 0.00028799999999999995, "loss": 4.7996, "step": 482},
    {"epoch": 0.54, "learning_rate": 0.00028859999999999997, "loss": 4.5807, "step": 483},
    {"epoch": 0.54, "learning_rate": 0.0002892, "loss": 4.788, "step": 484},
    {"epoch": 0.54, "learning_rate": 0.00028979999999999994, "loss": 4.8346, "step": 485},
    {"epoch": 0.54, "learning_rate": 0.00029039999999999996, "loss": 4.467, "step": 486},
    {"epoch": 0.55, "learning_rate": 0.00029099999999999997, "loss": 4.5186, "step": 487},
    {"epoch": 0.55, "learning_rate": 0.0002916, "loss": 4.4779, "step": 488},
    {"epoch": 0.55, "learning_rate": 0.00029219999999999995, "loss": 4.8221, "step": 489},
    {"epoch": 0.55, "learning_rate": 0.00029279999999999996, "loss": 4.7609, "step": 490},
    {"epoch": 0.55, "learning_rate": 0.0002934, "loss": 4.3691, "step": 491},
    {"epoch": 0.55, "learning_rate": 0.000294, "loss": 4.9645, "step": 492},
    {"epoch": 0.55, "learning_rate": 0.00029459999999999995, "loss": 5.023, "step": 493},
    {"epoch": 0.55, "learning_rate": 0.00029519999999999997, "loss": 4.7208, "step": 494},
    {"epoch": 0.55, "learning_rate": 0.0002958, "loss": 4.9856, "step": 495},
    {"epoch": 0.56, "learning_rate": 0.0002964, "loss": 4.6846, "step": 496},
    {"epoch": 0.56, "learning_rate": 0.00029699999999999996, "loss": 4.314, "step": 497},
    {"epoch": 0.56, "learning_rate": 0.00029759999999999997, "loss": 5.0479, "step": 498},
    {"epoch": 0.56, "learning_rate": 0.0002982, "loss": 4.9042, "step": 499},
    {"epoch": 0.56, "learning_rate": 0.0002988, "loss": 4.4011, "step": 500},
    {"epoch": 0.56, "eval_loss": 4.9983367919921875, "eval_runtime": 875.6736, "eval_samples_per_second": 3.017, "eval_steps_per_second": 0.378, "eval_wer": 1.95178500595002, "step": 500},
    {"epoch": 0.56, "learning_rate": 0.00029939999999999996, "loss": 5.1628, "step": 501},
    {"epoch": 0.56, "learning_rate": 0.0003, "loss": 4.4468, "step": 502},
    {"epoch": 0.56, "learning_rate": 0.000299234693877551, "loss": 4.9952, "step": 503},
    {"epoch": 0.57, "learning_rate": 0.00029846938775510205, "loss": 4.8411, "step": 504},
    {"epoch": 0.57, "learning_rate": 0.00029770408163265304, "loss": 4.7504, "step": 505},
    {"epoch": 0.57, "learning_rate": 0.0002969387755102041, "loss": 4.6484, "step": 506},
    {"epoch": 0.57, "learning_rate": 0.00029617346938775506, "loss": 4.9217, "step": 507},
    {"epoch": 0.57, "learning_rate": 0.0002954081632653061, "loss": 4.715, "step": 508},
    {"epoch": 0.57, "learning_rate": 0.0002946428571428571, "loss": 4.8663, "step": 509},
    {"epoch": 0.57, "learning_rate": 0.0002938775510204081, "loss": 4.9294, "step": 510},
    {"epoch": 0.57, "learning_rate": 0.00029311224489795917, "loss": 4.619, "step": 511},
    {"epoch": 0.57, "learning_rate": 0.0002923469387755102, "loss": 4.6691, "step": 512},
    {"epoch": 0.58, "learning_rate": 0.0002915816326530612, "loss": 5.4484, "step": 513},
    {"epoch": 0.58, "learning_rate": 0.00029081632653061223, "loss": 4.6958, "step": 514},
    {"epoch": 0.58, "learning_rate": 0.0002900510204081632, "loss": 4.7626, "step": 515},
    {"epoch": 0.58, "learning_rate": 0.00028928571428571425, "loss": 4.5196, "step": 516},
    {"epoch": 0.58, "learning_rate": 0.0002885204081632653, "loss": 4.7934, "step": 517},
    {"epoch": 0.58, "learning_rate": 0.0002877551020408163, "loss": 4.7005, "step": 518},
    {"epoch": 0.58, "learning_rate": 0.0002869897959183673, "loss": 5.0122, "step": 519},
    {"epoch": 0.58, "learning_rate": 0.00028622448979591836, "loss": 4.819, "step": 520},
    {"epoch": 0.58, "learning_rate": 0.0002854591836734694, "loss": 4.6823, "step": 521},
    {"epoch": 0.59, "learning_rate": 0.0002846938775510204, "loss": 4.5671, "step": 522},
    {"epoch": 0.59, "learning_rate": 0.00028392857142857137, "loss": 4.7677, "step": 523},
    {"epoch": 0.59, "learning_rate": 0.0002831632653061224, "loss": 4.9105, "step": 524},
    {"epoch": 0.59, "learning_rate": 0.00028239795918367345, "loss": 5.1979, "step": 525},
    {"epoch": 0.59, "learning_rate": 0.0002816326530612245, "loss": 4.5759, "step": 526},
    {"epoch": 0.59, "learning_rate": 0.00028086734693877547, "loss": 4.7022, "step": 527},
    {"epoch": 0.59, "learning_rate": 0.0002801020408163265, "loss": 4.659, "step": 528},
    {"epoch": 0.59, "learning_rate": 0.00027933673469387755, "loss": 4.7747, "step": 529},
    {"epoch": 0.59, "learning_rate": 0.00027857142857142854, "loss": 4.9831, "step": 530},
    {"epoch": 0.6, "learning_rate": 0.0002778061224489796, "loss": 5.1574, "step": 531},
    {"epoch": 0.6, "learning_rate": 0.00027704081632653056, "loss": 4.5261, "step": 532},
    {"epoch": 0.6, "learning_rate": 0.0002762755102040816, "loss": 4.731, "step": 533},
    {"epoch": 0.6, "learning_rate": 0.00027551020408163264, "loss": 4.4291, "step": 534},
    {"epoch": 0.6, "learning_rate": 0.0002747448979591837, "loss": 4.7664, "step": 535},
    {"epoch": 0.6, "learning_rate": 0.00027397959183673466, "loss": 4.79, "step": 536},
    {"epoch": 0.6, "learning_rate": 0.0002732142857142857, "loss": 4.8222, "step": 537},
    {"epoch": 0.6, "learning_rate": 0.0002724489795918367, "loss": 4.8898, "step": 538},
    {"epoch": 0.6, "learning_rate": 0.00027168367346938773, "loss": 4.513, "step": 539},
    {"epoch": 0.61, "learning_rate": 0.0002709183673469387, "loss": 4.7453, "step": 540},
    {"epoch": 0.61, "learning_rate": 0.00027015306122448975, "loss": 4.8368, "step": 541},
    {"epoch": 0.61, "learning_rate": 0.0002693877551020408, "loss": 4.7447, "step": 542},
    {"epoch": 0.61, "learning_rate": 0.00026862244897959183, "loss": 4.8438, "step": 543},
    {"epoch": 0.61, "learning_rate": 0.00026785714285714287, "loss": 5.2433, "step": 544},
    {"epoch": 0.61, "learning_rate": 0.00026709183673469386, "loss": 5.3212, "step": 545},
    {"epoch": 0.61, "learning_rate": 0.0002663265306122449, "loss": 5.1616, "step": 546},
    {"epoch": 0.61, "learning_rate": 0.0002655612244897959, "loss": 5.7126, "step": 547},
    {"epoch": 0.61, "learning_rate": 0.0002647959183673469, "loss": 8.5064, "step": 548},
    {"epoch": 0.62, "learning_rate": 0.0002640306122448979, "loss": 4.518, "step": 549},
    {"epoch": 0.62, "learning_rate": 0.00026326530612244894, "loss": 5.9617, "step": 550},
    {"epoch": 0.62, "learning_rate": 0.0002625, "loss": 4.7309, "step": 551},
    {"epoch": 0.62, "learning_rate": 0.000261734693877551, "loss": 4.7899, "step": 552},
    {"epoch": 0.62, "learning_rate": 0.000260969387755102, "loss": 4.7302, "step": 553},
    {"epoch": 0.62, "learning_rate": 0.00026020408163265305, "loss": 4.5338, "step": 554},
    {"epoch": 0.62, "learning_rate": 0.00025943877551020403, "loss": 4.5405, "step": 555},
    {"epoch": 0.62, "learning_rate": 0.00025867346938775507, "loss": 4.6252, "step": 556},
    {"epoch": 0.62, "learning_rate": 0.0002579081632653061, "loss": 4.4507, "step": 557},
    {"epoch": 0.63, "learning_rate": 0.0002571428571428571, "loss": 4.6526, "step": 558},
    {"epoch": 0.63, "learning_rate": 0.00025637755102040814, "loss": 4.5054, "step": 559},
    {"epoch": 0.63, "learning_rate": 0.0002556122448979592, "loss": 4.7053, "step": 560},
    {"epoch": 0.63, "learning_rate": 0.0002548469387755102, "loss": 4.9174, "step": 561},
    {"epoch": 0.63, "learning_rate": 0.0002540816326530612, "loss": 4.6808, "step": 562},
    {"epoch": 0.63, "learning_rate": 0.0002533163265306122,
|
"loss": 4.6278, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0002525510204081632, |
|
"loss": 4.6145, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00025178571428571426, |
|
"loss": 4.6322, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0002510204081632653, |
|
"loss": 4.6631, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0002502551020408163, |
|
"loss": 4.6991, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00024948979591836733, |
|
"loss": 4.4694, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00024872448979591837, |
|
"loss": 4.5414, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00024795918367346935, |
|
"loss": 4.6524, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0002471938775510204, |
|
"loss": 4.5379, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0002464285714285714, |
|
"loss": 4.5682, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0002456632653061224, |
|
"loss": 4.4738, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00024489795918367346, |
|
"loss": 4.5086, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00024413265306122447, |
|
"loss": 5.1461, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00024336734693877548, |
|
"loss": 4.4839, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002426020408163265, |
|
"loss": 4.6375, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00024183673469387753, |
|
"loss": 4.8622, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00024107142857142857, |
|
"loss": 4.1603, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00024030612244897956, |
|
"loss": 4.847, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00023954081632653057, |
|
"loss": 4.7087, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002387755102040816, |
|
"loss": 4.492, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00023801020408163265, |
|
"loss": 4.7726, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00023724489795918366, |
|
"loss": 4.5406, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023647959183673467, |
|
"loss": 4.8607, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023571428571428569, |
|
"loss": 4.6273, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023494897959183673, |
|
"loss": 4.7729, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023418367346938774, |
|
"loss": 4.5205, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023341836734693875, |
|
"loss": 4.484, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023265306122448976, |
|
"loss": 4.6765, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0002318877551020408, |
|
"loss": 4.5714, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023112244897959181, |
|
"loss": 5.1497, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00023035714285714285, |
|
"loss": 5.0901, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00022959183673469384, |
|
"loss": 4.5892, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00022882653061224488, |
|
"loss": 4.7668, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002280612244897959, |
|
"loss": 4.556, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00022729591836734693, |
|
"loss": 4.5433, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00022653061224489791, |
|
"loss": 5.3044, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00022576530612244895, |
|
"loss": 5.2622, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.000225, |
|
"loss": 4.5595, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.000224234693877551, |
|
"loss": 4.678, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00022346938775510205, |
|
"loss": 4.5891, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00022270408163265303, |
|
"loss": 4.6629, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00022193877551020407, |
|
"loss": 4.582, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00022117346938775508, |
|
"loss": 4.5646, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00022040816326530612, |
|
"loss": 4.6629, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002196428571428571, |
|
"loss": 4.886, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00021887755102040815, |
|
"loss": 4.5766, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00021811224489795916, |
|
"loss": 4.5959, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002173469387755102, |
|
"loss": 4.6078, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002165816326530612, |
|
"loss": 4.481, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00021581632653061222, |
|
"loss": 4.7335, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00021505102040816324, |
|
"loss": 4.5534, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00021428571428571427, |
|
"loss": 4.9747, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002135204081632653, |
|
"loss": 4.5713, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002127551020408163, |
|
"loss": 4.5623, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002119897959183673, |
|
"loss": 4.3925, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00021122448979591835, |
|
"loss": 4.3779, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002104591836734694, |
|
"loss": 4.3789, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0002096938775510204, |
|
"loss": 4.5137, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0002089285714285714, |
|
"loss": 4.7276, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00020816326530612243, |
|
"loss": 4.49, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00020739795918367347, |
|
"loss": 4.6151, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00020663265306122448, |
|
"loss": 4.4768, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0002058673469387755, |
|
"loss": 4.8337, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0002051020408163265, |
|
"loss": 4.637, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00020433673469387754, |
|
"loss": 4.5035, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00020357142857142856, |
|
"loss": 4.3709, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00020280612244897957, |
|
"loss": 4.5253, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00020204081632653058, |
|
"loss": 5.0382, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00020127551020408162, |
|
"loss": 4.6007, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00020051020408163263, |
|
"loss": 4.7532, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00019974489795918367, |
|
"loss": 4.5891, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00019897959183673466, |
|
"loss": 4.4885, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0001982142857142857, |
|
"loss": 4.636, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0001974489795918367, |
|
"loss": 4.5268, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00019668367346938775, |
|
"loss": 4.6191, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00019591836734693873, |
|
"loss": 4.3703, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00019515306122448977, |
|
"loss": 4.7706, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001943877551020408, |
|
"loss": 4.5403, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00019362244897959182, |
|
"loss": 4.4686, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00019285714285714286, |
|
"loss": 5.3291, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00019209183673469385, |
|
"loss": 5.1482, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001913265306122449, |
|
"loss": 4.4342, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001905612244897959, |
|
"loss": 4.2783, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00018979591836734694, |
|
"loss": 4.6092, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018903061224489793, |
|
"loss": 4.4284, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018826530612244896, |
|
"loss": 4.4722, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018749999999999998, |
|
"loss": 4.8462, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018673469387755102, |
|
"loss": 4.7313, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018596938775510203, |
|
"loss": 4.8217, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018520408163265304, |
|
"loss": 4.7025, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018443877551020405, |
|
"loss": 4.6611, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001836734693877551, |
|
"loss": 4.8046, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001829081632653061, |
|
"loss": 4.5228, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00018214285714285712, |
|
"loss": 4.6526, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00018137755102040813, |
|
"loss": 4.42, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00018061224489795917, |
|
"loss": 4.6764, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0001798469387755102, |
|
"loss": 4.4985, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00017908163265306122, |
|
"loss": 4.6511, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0001783163265306122, |
|
"loss": 4.3388, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00017755102040816325, |
|
"loss": 4.3057, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00017678571428571428, |
|
"loss": 4.981, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0001760204081632653, |
|
"loss": 4.5279, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001752551020408163, |
|
"loss": 4.316, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00017448979591836732, |
|
"loss": 4.615, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00017372448979591836, |
|
"loss": 4.3881, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00017295918367346937, |
|
"loss": 4.5504, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001721938775510204, |
|
"loss": 4.6914, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001714285714285714, |
|
"loss": 4.6686, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00017066326530612244, |
|
"loss": 4.3039, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00016989795918367345, |
|
"loss": 4.4468, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001691326530612245, |
|
"loss": 4.3383, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016836734693877547, |
|
"loss": 4.4286, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016760204081632651, |
|
"loss": 4.3745, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016683673469387753, |
|
"loss": 4.9497, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016607142857142857, |
|
"loss": 4.3681, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016530612244897955, |
|
"loss": 4.21, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0001645408163265306, |
|
"loss": 4.2797, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016377551020408163, |
|
"loss": 4.4616, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016301020408163264, |
|
"loss": 4.677, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00016224489795918368, |
|
"loss": 4.3418, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00016147959183673467, |
|
"loss": 4.627, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0001607142857142857, |
|
"loss": 4.6252, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00015994897959183672, |
|
"loss": 4.6252, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00015918367346938776, |
|
"loss": 4.5539, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00015841836734693874, |
|
"loss": 4.5075, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00015765306122448978, |
|
"loss": 4.5828, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0001568877551020408, |
|
"loss": 4.5769, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00015612244897959183, |
|
"loss": 4.8288, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00015535714285714285, |
|
"loss": 4.3714, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00015459183673469386, |
|
"loss": 4.5635, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00015382653061224487, |
|
"loss": 4.3405, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0001530612244897959, |
|
"loss": 5.1176, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00015229591836734692, |
|
"loss": 5.0306, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00015153061224489794, |
|
"loss": 4.527, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00015076530612244895, |
|
"loss": 4.9182, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00015, |
|
"loss": 4.534, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00014923469387755103, |
|
"loss": 4.8521, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00014846938775510204, |
|
"loss": 4.6326, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00014770408163265305, |
|
"loss": 4.2919, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00014693877551020406, |
|
"loss": 4.562, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0001461734693877551, |
|
"loss": 4.346, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00014540816326530611, |
|
"loss": 4.3445, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00014464285714285713, |
|
"loss": 4.7065, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00014387755102040814, |
|
"loss": 4.6758, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00014311224489795918, |
|
"loss": 4.659, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0001423469387755102, |
|
"loss": 4.6311, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0001415816326530612, |
|
"loss": 4.5146, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00014081632653061224, |
|
"loss": 4.5062, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00014005102040816326, |
|
"loss": 4.5418, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00013928571428571427, |
|
"loss": 4.3461, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00013852040816326528, |
|
"loss": 4.3042, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00013775510204081632, |
|
"loss": 4.6424, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00013698979591836733, |
|
"loss": 4.7171, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00013622448979591834, |
|
"loss": 4.2692, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00013545918367346936, |
|
"loss": 4.8031, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.0001346938775510204, |
|
"loss": 4.359, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00013392857142857144, |
|
"loss": 4.1378, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00013316326530612245, |
|
"loss": 4.4067, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00013239795918367346, |
|
"loss": 4.6956, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00013163265306122447, |
|
"loss": 4.2525, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0001308673469387755, |
|
"loss": 4.396, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00013010204081632652, |
|
"loss": 4.5159, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00012933673469387754, |
|
"loss": 4.3099, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00012857142857142855, |
|
"loss": 4.6627, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.0001278061224489796, |
|
"loss": 4.4086, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.0001270408163265306, |
|
"loss": 4.3537, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.0001262755102040816, |
|
"loss": 4.303, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00012551020408163265, |
|
"loss": 4.5849, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00012474489795918366, |
|
"loss": 4.9582, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00012397959183673468, |
|
"loss": 4.5712, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.0001232142857142857, |
|
"loss": 4.3395, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00012244897959183673, |
|
"loss": 4.5477, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00012168367346938774, |
|
"loss": 4.2483, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00012091836734693877, |
|
"loss": 4.423, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00012015306122448978, |
|
"loss": 4.4887, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0001193877551020408, |
|
"loss": 4.4166, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00011862244897959183, |
|
"loss": 4.3083, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00011785714285714284, |
|
"loss": 4.5407, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00011709183673469387, |
|
"loss": 4.7349, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00011632653061224488, |
|
"loss": 4.4709, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00011556122448979591, |
|
"loss": 4.1676, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00011479591836734692, |
|
"loss": 4.4411, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00011403061224489795, |
|
"loss": 4.3631, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00011326530612244896, |
|
"loss": 4.8217, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0001125, |
|
"loss": 4.4436, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00011173469387755102, |
|
"loss": 4.5294, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00011096938775510204, |
|
"loss": 4.8539, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00011020408163265306, |
|
"loss": 4.4265, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00010943877551020407, |
|
"loss": 4.4376, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0001086734693877551, |
|
"loss": 4.419, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00010790816326530611, |
|
"loss": 4.4154, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010714285714285714, |
|
"loss": 4.3623, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010637755102040815, |
|
"loss": 4.5445, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010561224489795918, |
|
"loss": 4.6167, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0001048469387755102, |
|
"loss": 4.4716, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010408163265306121, |
|
"loss": 4.4618, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010331632653061224, |
|
"loss": 4.3187, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010255102040816325, |
|
"loss": 4.3345, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010178571428571428, |
|
"loss": 4.3305, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00010102040816326529, |
|
"loss": 4.3747, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.00010025510204081632, |
|
"loss": 4.3711, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.948979591836733e-05, |
|
"loss": 4.2536, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.872448979591835e-05, |
|
"loss": 4.6226, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.795918367346937e-05, |
|
"loss": 4.3495, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.71938775510204e-05, |
|
"loss": 4.3953, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.642857142857143e-05, |
|
"loss": 4.4752, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.566326530612244e-05, |
|
"loss": 4.4922, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.489795918367347e-05, |
|
"loss": 4.4253, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.413265306122448e-05, |
|
"loss": 4.2783, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.336734693877551e-05, |
|
"loss": 4.5397, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.260204081632652e-05, |
|
"loss": 4.6664, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.183673469387755e-05, |
|
"loss": 4.1661, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.107142857142856e-05, |
|
"loss": 4.1963, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.030612244897958e-05, |
|
"loss": 4.8492, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.954081632653061e-05, |
|
"loss": 4.1818, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.877551020408162e-05, |
|
"loss": 4.5544, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.801020408163265e-05, |
|
"loss": 4.3203, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.724489795918366e-05, |
|
"loss": 4.5191, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.647959183673469e-05, |
|
"loss": 4.4704, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.57142857142857e-05, |
|
"loss": 4.4288, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.494897959183672e-05, |
|
"loss": 4.5226, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.418367346938774e-05, |
|
"loss": 4.2287, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.341836734693876e-05, |
|
"loss": 4.3641, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.265306122448978e-05, |
|
"loss": 4.2329, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.188775510204081e-05, |
|
"loss": 4.471, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.112244897959184e-05, |
|
"loss": 4.3837, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.035714285714285e-05, |
|
"loss": 4.267, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.959183673469388e-05, |
|
"loss": 4.4014, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.882653061224489e-05, |
|
"loss": 4.3438, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.806122448979592e-05, |
|
"loss": 4.542, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.729591836734693e-05, |
|
"loss": 4.3979, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.653061224489796e-05, |
|
"loss": 4.5004, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.576530612244897e-05, |
|
"loss": 4.6188, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.5e-05, |
|
"loss": 4.3081, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.423469387755102e-05, |
|
"loss": 4.5427, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.346938775510203e-05, |
|
"loss": 4.5724, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 7.270408163265306e-05, |
|
"loss": 4.996, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 7.193877551020407e-05, |
|
"loss": 4.6813, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 7.11734693877551e-05, |
|
"loss": 4.4014, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 7.040816326530612e-05, |
|
"loss": 4.3033, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.964285714285713e-05, |
|
"loss": 4.4386, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.887755102040816e-05, |
|
"loss": 4.4227, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.811224489795917e-05, |
|
"loss": 4.1591, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.73469387755102e-05, |
|
"loss": 4.4486, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.658163265306122e-05, |
|
"loss": 4.3162, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.581632653061224e-05, |
|
"loss": 4.2139, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.505102040816326e-05, |
|
"loss": 4.5535, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.428571428571427e-05, |
|
"loss": 4.1688, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.35204081632653e-05, |
|
"loss": 4.519, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.275510204081633e-05, |
|
"loss": 4.4361, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.198979591836734e-05, |
|
"loss": 4.1804, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.122448979591836e-05, |
|
"loss": 4.2692, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.045918367346938e-05, |
|
"loss": 4.2193, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.96938775510204e-05, |
|
"loss": 4.2585, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.892857142857142e-05, |
|
"loss": 4.3902, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.816326530612244e-05, |
|
"loss": 4.2958, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.739795918367346e-05, |
|
"loss": 4.2298, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.663265306122448e-05, |
|
"loss": 4.3006, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.586734693877551e-05, |
|
"loss": 4.3207, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.510204081632653e-05, |
|
"loss": 4.1856, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.433673469387755e-05, |
|
"loss": 4.346, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.357142857142857e-05, |
|
"loss": 4.3479, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.280612244897959e-05, |
|
"loss": 4.6312, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.204081632653061e-05, |
|
"loss": 4.4575, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.1275510204081626e-05, |
|
"loss": 4.3191, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.0510204081632645e-05, |
|
"loss": 4.1908, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.9744897959183664e-05, |
|
"loss": 4.2245, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.897959183673468e-05, |
|
"loss": 4.4717, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.8214285714285716e-05, |
|
"loss": 4.1618, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.7448979591836735e-05, |
|
"loss": 4.2323, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.6683673469387754e-05, |
|
"loss": 4.4067, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.591836734693877e-05, |
|
"loss": 4.2481, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.515306122448979e-05, |
|
"loss": 4.2898, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.438775510204081e-05, |
|
"loss": 4.3192, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.362244897959183e-05, |
|
"loss": 4.1546, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.285714285714285e-05, |
|
"loss": 4.6285, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.209183673469387e-05, |
|
"loss": 4.1972, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.132653061224489e-05, |
|
"loss": 4.541, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.056122448979592e-05, |
|
"loss": 4.518, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.979591836734694e-05, |
|
"loss": 4.3209, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.903061224489796e-05, |
|
"loss": 4.5801, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.826530612244898e-05, |
|
"loss": 4.4673, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.75e-05, |
|
"loss": 4.6158, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.6734693877551016e-05, |
|
"loss": 5.7137, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.5969387755102035e-05, |
|
"loss": 4.3646, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.520408163265306e-05, |
|
"loss": 4.9115, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.443877551020408e-05, |
|
"loss": 4.6824, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.36734693877551e-05, |
|
"loss": 4.4915, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.290816326530612e-05, |
|
"loss": 4.3668, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.214285714285714e-05, |
|
"loss": 4.4653, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.137755102040816e-05, |
|
"loss": 4.2881, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.061224489795918e-05, |
|
"loss": 4.2501, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.98469387755102e-05, |
|
"loss": 4.2627, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.908163265306122e-05, |
|
"loss": 4.357, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.831632653061224e-05, |
|
"loss": 4.5015, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.7551020408163265e-05, |
|
"loss": 4.3649, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.6785714285714284e-05, |
|
"loss": 4.1966, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.6020408163265303e-05, |
|
"loss": 4.334, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.5255102040816323e-05, |
|
"loss": 4.6841, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.448979591836734e-05, |
|
"loss": 4.2356, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.3724489795918367e-05, |
|
"loss": 4.4959, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.2959183673469387e-05, |
|
"loss": 4.6351, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.2193877551020406e-05, |
|
"loss": 4.449, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.1428571428571425e-05, |
|
"loss": 4.2843, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.0663265306122444e-05, |
|
"loss": 4.367, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.989795918367347e-05, |
|
"loss": 4.2656, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.913265306122449e-05, |
|
"loss": 4.273, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.8367346938775508e-05, |
|
"loss": 4.4547, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.760204081632653e-05, |
|
"loss": 4.3189, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.683673469387755e-05, |
|
"loss": 4.3244, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.607142857142857e-05, |
|
"loss": 4.381, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.530612244897959e-05, |
|
"loss": 4.3439, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.454081632653061e-05, |
|
"loss": 4.4997, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3775510204081633e-05, |
|
"loss": 4.332, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3010204081632652e-05, |
|
"loss": 4.2311, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.224489795918367e-05, |
|
"loss": 4.3291, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.1479591836734693e-05, |
|
"loss": 4.3849, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0714285714285712e-05, |
|
"loss": 4.9003, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.948979591836735e-06, |
|
"loss": 4.171, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.183673469387754e-06, |
|
"loss": 4.3592, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.418367346938775e-06, |
|
"loss": 4.2932, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.653061224489796e-06, |
|
"loss": 4.3174, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.887755102040816e-06, |
|
"loss": 4.3816, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.122448979591835e-06, |
|
"loss": 4.4197, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.357142857142856e-06, |
|
"loss": 4.2511, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.591836734693877e-06, |
|
"loss": 4.4924, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.826530612244898e-06, |
|
"loss": 4.4857, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.0612244897959177e-06, |
|
"loss": 4.3505, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.2959183673469385e-06, |
|
"loss": 4.5082, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5306122448979589e-06, |
|
"loss": 4.0072, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 892, |
|
"total_flos": 0.0, |
|
"train_loss": 4.5466820565574375, |
|
"train_runtime": 5895.7357, |
|
"train_samples_per_second": 4.84, |
|
"train_steps_per_second": 0.151 |
|
} |
|
], |
|
"max_steps": 892, |
|
"num_train_epochs": 1, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |