|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"global_step": 1115, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 8.7788, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0.0, |
|
"loss": 8.857, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6e-07, |
|
"loss": 8.7704, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2e-06, |
|
"loss": 8.6839, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.8e-06, |
|
"loss": 8.7517, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.4e-06, |
|
"loss": 8.3985, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.9999999999999997e-06, |
|
"loss": 8.4278, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.6e-06, |
|
"loss": 8.0949, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.2e-06, |
|
"loss": 7.8106, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.8e-06, |
|
"loss": 7.4337, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.399999999999999e-06, |
|
"loss": 7.22, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.999999999999999e-06, |
|
"loss": 6.9989, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.599999999999999e-06, |
|
"loss": 6.8282, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.2e-06, |
|
"loss": 6.6831, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.799999999999998e-06, |
|
"loss": 6.4666, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.4e-06, |
|
"loss": 6.2614, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.999999999999999e-06, |
|
"loss": 6.1788, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.6e-06, |
|
"loss": 6.0435, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.02e-05, |
|
"loss": 5.8492, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.0799999999999998e-05, |
|
"loss": 5.8448, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.14e-05, |
|
"loss": 5.6986, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1999999999999999e-05, |
|
"loss": 5.673, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.26e-05, |
|
"loss": 5.4552, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.3199999999999997e-05, |
|
"loss": 5.4442, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.3799999999999998e-05, |
|
"loss": 5.2917, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.44e-05, |
|
"loss": 5.29, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.4999999999999999e-05, |
|
"loss": 5.1356, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.5599999999999996e-05, |
|
"loss": 5.1709, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.6199999999999997e-05, |
|
"loss": 4.9962, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.68e-05, |
|
"loss": 5.1114, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.74e-05, |
|
"loss": 5.1065, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.7999999999999997e-05, |
|
"loss": 4.9779, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.8599999999999998e-05, |
|
"loss": 4.9887, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.92e-05, |
|
"loss": 4.9495, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.98e-05, |
|
"loss": 4.8838, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.04e-05, |
|
"loss": 4.9061, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.1e-05, |
|
"loss": 4.9915, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.1599999999999996e-05, |
|
"loss": 4.8265, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.2199999999999998e-05, |
|
"loss": 4.9207, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.28e-05, |
|
"loss": 4.9041, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.34e-05, |
|
"loss": 4.8838, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.3999999999999997e-05, |
|
"loss": 4.9159, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.4599999999999998e-05, |
|
"loss": 4.8392, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.52e-05, |
|
"loss": 4.9044, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.5799999999999997e-05, |
|
"loss": 4.8615, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.6399999999999995e-05, |
|
"loss": 4.9846, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.6999999999999996e-05, |
|
"loss": 4.8641, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.7599999999999997e-05, |
|
"loss": 4.8026, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 4.8971, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.88e-05, |
|
"loss": 4.6302, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.94e-05, |
|
"loss": 6.4289, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.9999999999999997e-05, |
|
"loss": 6.2769, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.06e-05, |
|
"loss": 5.9356, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.119999999999999e-05, |
|
"loss": 5.4623, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.1799999999999994e-05, |
|
"loss": 5.1183, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.2399999999999995e-05, |
|
"loss": 4.866, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.2999999999999996e-05, |
|
"loss": 4.8544, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.36e-05, |
|
"loss": 4.8141, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.42e-05, |
|
"loss": 4.8047, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.48e-05, |
|
"loss": 4.8444, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.539999999999999e-05, |
|
"loss": 4.8777, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.5999999999999994e-05, |
|
"loss": 4.7374, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.6599999999999995e-05, |
|
"loss": 4.8447, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.7199999999999996e-05, |
|
"loss": 4.7271, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.78e-05, |
|
"loss": 4.691, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.84e-05, |
|
"loss": 4.7024, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.9e-05, |
|
"loss": 4.6623, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.96e-05, |
|
"loss": 4.6917, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.02e-05, |
|
"loss": 4.7466, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.08e-05, |
|
"loss": 4.6977, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.14e-05, |
|
"loss": 4.6366, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.2e-05, |
|
"loss": 4.6913, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.259999999999999e-05, |
|
"loss": 4.5726, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.319999999999999e-05, |
|
"loss": 4.6529, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.3799999999999994e-05, |
|
"loss": 4.7602, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.4399999999999995e-05, |
|
"loss": 4.6657, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.4999999999999996e-05, |
|
"loss": 4.5708, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.56e-05, |
|
"loss": 4.6321, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.62e-05, |
|
"loss": 4.5891, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.68e-05, |
|
"loss": 4.695, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.7399999999999993e-05, |
|
"loss": 4.5544, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.7999999999999994e-05, |
|
"loss": 4.6184, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.8599999999999995e-05, |
|
"loss": 4.6035, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.9199999999999997e-05, |
|
"loss": 4.6037, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.98e-05, |
|
"loss": 4.6785, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.04e-05, |
|
"loss": 4.4987, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.1e-05, |
|
"loss": 4.6057, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.1599999999999994e-05, |
|
"loss": 4.5589, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.2199999999999995e-05, |
|
"loss": 4.6228, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.279999999999999e-05, |
|
"loss": 4.6534, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.339999999999999e-05, |
|
"loss": 4.6014, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.399999999999999e-05, |
|
"loss": 4.5941, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.459999999999999e-05, |
|
"loss": 4.5625, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.519999999999999e-05, |
|
"loss": 4.6271, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.5799999999999994e-05, |
|
"loss": 4.6085, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 4.5724, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 4.6734, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.76e-05, |
|
"loss": 4.7297, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.82e-05, |
|
"loss": 4.601, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.88e-05, |
|
"loss": 4.5277, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.94e-05, |
|
"loss": 5.9227, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.9999999999999995e-05, |
|
"loss": 5.8449, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 6.0599999999999996e-05, |
|
"loss": 5.5435, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 6.12e-05, |
|
"loss": 5.1179, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 6.18e-05, |
|
"loss": 4.9785, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 6.239999999999999e-05, |
|
"loss": 4.8924, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 6.299999999999999e-05, |
|
"loss": 4.7039, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 6.359999999999999e-05, |
|
"loss": 4.7125, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 6.419999999999999e-05, |
|
"loss": 4.6977, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 6.479999999999999e-05, |
|
"loss": 4.6824, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.539999999999999e-05, |
|
"loss": 4.6191, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.599999999999999e-05, |
|
"loss": 4.6363, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.659999999999999e-05, |
|
"loss": 4.6369, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.72e-05, |
|
"loss": 4.5246, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.78e-05, |
|
"loss": 4.5943, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.84e-05, |
|
"loss": 4.5545, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.9e-05, |
|
"loss": 4.5898, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.96e-05, |
|
"loss": 4.5237, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 7.02e-05, |
|
"loss": 4.5241, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 7.079999999999999e-05, |
|
"loss": 4.5408, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 7.139999999999999e-05, |
|
"loss": 4.4809, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 7.199999999999999e-05, |
|
"loss": 4.5278, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 7.259999999999999e-05, |
|
"loss": 4.4764, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 7.319999999999999e-05, |
|
"loss": 4.4472, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 7.379999999999999e-05, |
|
"loss": 4.5971, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 7.439999999999999e-05, |
|
"loss": 4.4813, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 7.5e-05, |
|
"loss": 4.5371, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 7.56e-05, |
|
"loss": 4.5538, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.62e-05, |
|
"loss": 4.5222, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.68e-05, |
|
"loss": 4.5567, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.74e-05, |
|
"loss": 4.5519, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.8e-05, |
|
"loss": 4.3829, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.86e-05, |
|
"loss": 4.3992, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.92e-05, |
|
"loss": 4.4999, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.98e-05, |
|
"loss": 4.5518, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.04e-05, |
|
"loss": 4.48, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.1e-05, |
|
"loss": 4.5714, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.16e-05, |
|
"loss": 4.4858, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.22e-05, |
|
"loss": 4.4397, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.28e-05, |
|
"loss": 4.5127, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.34e-05, |
|
"loss": 4.5398, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.4e-05, |
|
"loss": 4.5412, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.459999999999998e-05, |
|
"loss": 4.6863, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.519999999999998e-05, |
|
"loss": 4.5778, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.579999999999998e-05, |
|
"loss": 4.4345, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.639999999999999e-05, |
|
"loss": 4.4066, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 8.699999999999999e-05, |
|
"loss": 4.3866, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 8.759999999999999e-05, |
|
"loss": 4.6398, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 8.819999999999999e-05, |
|
"loss": 4.4105, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 8.879999999999999e-05, |
|
"loss": 4.4866, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8.939999999999999e-05, |
|
"loss": 6.0924, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8.999999999999999e-05, |
|
"loss": 5.8545, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.059999999999999e-05, |
|
"loss": 5.3625, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.12e-05, |
|
"loss": 4.9468, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.18e-05, |
|
"loss": 4.8548, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.24e-05, |
|
"loss": 4.7858, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.3e-05, |
|
"loss": 4.6516, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.36e-05, |
|
"loss": 4.6538, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.419999999999999e-05, |
|
"loss": 4.5987, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.479999999999999e-05, |
|
"loss": 4.6631, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.539999999999999e-05, |
|
"loss": 4.4868, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.599999999999999e-05, |
|
"loss": 4.5287, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.659999999999999e-05, |
|
"loss": 4.5528, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.719999999999999e-05, |
|
"loss": 4.5033, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.779999999999999e-05, |
|
"loss": 4.5908, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.839999999999999e-05, |
|
"loss": 4.4931, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.9e-05, |
|
"loss": 4.519, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.96e-05, |
|
"loss": 4.5015, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0001002, |
|
"loss": 4.3689, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0001008, |
|
"loss": 4.5586, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0001014, |
|
"loss": 4.4859, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.000102, |
|
"loss": 4.3494, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0001026, |
|
"loss": 4.4217, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00010319999999999999, |
|
"loss": 4.3855, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00010379999999999999, |
|
"loss": 4.4697, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00010439999999999999, |
|
"loss": 4.4196, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00010499999999999999, |
|
"loss": 4.438, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010559999999999998, |
|
"loss": 4.3658, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00010619999999999998, |
|
"loss": 4.4174, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010679999999999998, |
|
"loss": 4.4003, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00010739999999999998, |
|
"loss": 4.5487, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00010799999999999998, |
|
"loss": 4.35, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00010859999999999998, |
|
"loss": 4.5236, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00010919999999999998, |
|
"loss": 4.4523, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00010979999999999999, |
|
"loss": 4.4892, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00011039999999999999, |
|
"loss": 4.3919, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00011099999999999999, |
|
"loss": 4.4118, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00011159999999999999, |
|
"loss": 4.5025, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00011219999999999999, |
|
"loss": 4.4032, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00011279999999999999, |
|
"loss": 4.4338, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.00011339999999999999, |
|
"loss": 4.4694, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.00011399999999999999, |
|
"loss": 4.3931, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.0001146, |
|
"loss": 4.3904, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.0001152, |
|
"loss": 4.4158, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.0001158, |
|
"loss": 4.4491, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.0001164, |
|
"loss": 4.4745, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.000117, |
|
"loss": 4.4689, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0001176, |
|
"loss": 4.4095, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0001182, |
|
"loss": 4.461, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.0001188, |
|
"loss": 4.2483, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.0001194, |
|
"loss": 5.7607, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.00011999999999999999, |
|
"loss": 5.7126, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.00012059999999999999, |
|
"loss": 5.3124, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.00012119999999999999, |
|
"loss": 4.9813, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.00012179999999999999, |
|
"loss": 4.7145, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.0001224, |
|
"loss": 4.7888, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.00012299999999999998, |
|
"loss": 4.6485, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.0001236, |
|
"loss": 4.6296, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.00012419999999999998, |
|
"loss": 4.4968, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.00012479999999999997, |
|
"loss": 4.514, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.00012539999999999999, |
|
"loss": 4.5182, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.00012599999999999997, |
|
"loss": 4.534, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.0001266, |
|
"loss": 4.5318, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.00012719999999999997, |
|
"loss": 4.4489, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.0001278, |
|
"loss": 4.3867, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.00012839999999999998, |
|
"loss": 4.5466, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.000129, |
|
"loss": 4.3823, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.00012959999999999998, |
|
"loss": 4.3786, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.0001302, |
|
"loss": 4.4267, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.00013079999999999998, |
|
"loss": 4.433, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.0001314, |
|
"loss": 4.4993, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.00013199999999999998, |
|
"loss": 4.3747, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0001326, |
|
"loss": 4.3646, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.00013319999999999999, |
|
"loss": 5.5865, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0001338, |
|
"loss": 5.3673, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0001344, |
|
"loss": 5.1522, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.000135, |
|
"loss": 4.7286, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.0001356, |
|
"loss": 4.6139, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.0001362, |
|
"loss": 4.5005, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.0001368, |
|
"loss": 4.4463, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.0001374, |
|
"loss": 4.4191, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.000138, |
|
"loss": 4.3203, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.0001386, |
|
"loss": 4.4397, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.0001392, |
|
"loss": 4.3437, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.00013979999999999998, |
|
"loss": 4.3979, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.0001404, |
|
"loss": 4.4071, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.00014099999999999998, |
|
"loss": 4.3001, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.00014159999999999997, |
|
"loss": 4.36, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.0001422, |
|
"loss": 4.3493, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.00014279999999999997, |
|
"loss": 4.4577, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.0001434, |
|
"loss": 4.2136, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.00014399999999999998, |
|
"loss": 4.2935, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.0001446, |
|
"loss": 4.2193, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.00014519999999999998, |
|
"loss": 4.3509, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.0001458, |
|
"loss": 4.2148, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.00014639999999999998, |
|
"loss": 4.2056, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.000147, |
|
"loss": 4.2163, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.00014759999999999998, |
|
"loss": 4.2646, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.0001482, |
|
"loss": 4.1475, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.00014879999999999998, |
|
"loss": 4.1661, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.0001494, |
|
"loss": 4.0668, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.00015, |
|
"loss": 4.0718, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.00015059999999999997, |
|
"loss": 4.1521, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.0001512, |
|
"loss": 4.0864, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.00015179999999999998, |
|
"loss": 4.0792, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.0001524, |
|
"loss": 4.0559, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.00015299999999999998, |
|
"loss": 4.0964, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.0001536, |
|
"loss": 4.1454, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.00015419999999999998, |
|
"loss": 4.0752, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.0001548, |
|
"loss": 4.0471, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.00015539999999999998, |
|
"loss": 4.1324, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.000156, |
|
"loss": 4.0548, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00015659999999999998, |
|
"loss": 3.9025, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.0001572, |
|
"loss": 4.1022, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.0001578, |
|
"loss": 3.984, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.0001584, |
|
"loss": 4.0924, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.000159, |
|
"loss": 3.967, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.0001596, |
|
"loss": 4.06, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.0001602, |
|
"loss": 3.8391, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.0001608, |
|
"loss": 3.8725, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.0001614, |
|
"loss": 3.947, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.000162, |
|
"loss": 3.8932, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.0001626, |
|
"loss": 3.6977, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.0001632, |
|
"loss": 5.8216, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.0001638, |
|
"loss": 5.3449, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.0001644, |
|
"loss": 4.9906, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.000165, |
|
"loss": 4.7723, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.0001656, |
|
"loss": 4.5841, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.0001662, |
|
"loss": 4.5655, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001668, |
|
"loss": 4.5075, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001674, |
|
"loss": 4.3671, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.000168, |
|
"loss": 4.4138, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.0001686, |
|
"loss": 4.3305, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00016919999999999997, |
|
"loss": 4.2737, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00016979999999999998, |
|
"loss": 4.3387, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00017039999999999997, |
|
"loss": 4.218, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00017099999999999998, |
|
"loss": 4.2599, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00017159999999999997, |
|
"loss": 4.2436, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00017219999999999998, |
|
"loss": 4.1241, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00017279999999999997, |
|
"loss": 4.2546, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00017339999999999996, |
|
"loss": 4.1435, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00017399999999999997, |
|
"loss": 4.0901, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00017459999999999996, |
|
"loss": 4.1774, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00017519999999999998, |
|
"loss": 4.1511, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00017579999999999996, |
|
"loss": 4.1222, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00017639999999999998, |
|
"loss": 4.0986, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00017699999999999997, |
|
"loss": 4.0735, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00017759999999999998, |
|
"loss": 4.0722, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00017819999999999997, |
|
"loss": 4.162, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00017879999999999998, |
|
"loss": 4.1414, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00017939999999999997, |
|
"loss": 4.1157, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00017999999999999998, |
|
"loss": 4.0798, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00018059999999999997, |
|
"loss": 4.1804, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00018119999999999999, |
|
"loss": 4.0958, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00018179999999999997, |
|
"loss": 4.0506, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.0001824, |
|
"loss": 4.1091, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00018299999999999998, |
|
"loss": 4.1218, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.0001836, |
|
"loss": 4.1636, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00018419999999999998, |
|
"loss": 4.08, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.0001848, |
|
"loss": 4.0701, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00018539999999999998, |
|
"loss": 3.9545, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.000186, |
|
"loss": 3.8738, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00018659999999999998, |
|
"loss": 4.0669, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.0001872, |
|
"loss": 4.0076, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00018779999999999998, |
|
"loss": 4.0221, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00018839999999999997, |
|
"loss": 3.9464, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00018899999999999999, |
|
"loss": 3.771, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00018959999999999997, |
|
"loss": 3.8719, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.0001902, |
|
"loss": 3.9347, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00019079999999999998, |
|
"loss": 3.8062, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.0001914, |
|
"loss": 3.7106, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00019199999999999998, |
|
"loss": 3.8163, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.0001926, |
|
"loss": 3.6449, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00019319999999999998, |
|
"loss": 5.7573, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.0001938, |
|
"loss": 5.6261, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00019439999999999998, |
|
"loss": 5.3271, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.000195, |
|
"loss": 4.9718, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00019559999999999998, |
|
"loss": 4.8634, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00019559999999999998, |
|
"loss": 5.2309, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.0001962, |
|
"loss": 5.1864, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00019679999999999999, |
|
"loss": 7.7546, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.0001974, |
|
"loss": 7.6626, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.000198, |
|
"loss": 7.7001, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.0001986, |
|
"loss": 7.4637, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.0001992, |
|
"loss": 7.272, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.0001998, |
|
"loss": 7.0608, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.0002004, |
|
"loss": 6.811, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.000201, |
|
"loss": 6.8756, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.0002016, |
|
"loss": 6.0821, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.0002022, |
|
"loss": 5.2635, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.0002028, |
|
"loss": 5.1346, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00020339999999999998, |
|
"loss": 4.9469, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.000204, |
|
"loss": 4.7854, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00020459999999999999, |
|
"loss": 4.4968, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.0002052, |
|
"loss": 4.5434, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.0002058, |
|
"loss": 4.4593, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00020639999999999998, |
|
"loss": 4.4982, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00020699999999999996, |
|
"loss": 4.5314, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00020759999999999998, |
|
"loss": 4.4604, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00020819999999999996, |
|
"loss": 4.3669, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00020879999999999998, |
|
"loss": 4.2684, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00020939999999999997, |
|
"loss": 4.3108, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00020999999999999998, |
|
"loss": 4.3765, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00021059999999999997, |
|
"loss": 4.5628, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00021119999999999996, |
|
"loss": 4.3952, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00021179999999999997, |
|
"loss": 4.2404, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00021239999999999996, |
|
"loss": 4.1993, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00021299999999999997, |
|
"loss": 4.24, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00021359999999999996, |
|
"loss": 4.1699, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00021419999999999998, |
|
"loss": 4.2247, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00021479999999999996, |
|
"loss": 4.2105, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00021539999999999998, |
|
"loss": 4.0637, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00021599999999999996, |
|
"loss": 4.1486, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00021659999999999998, |
|
"loss": 4.0632, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00021719999999999997, |
|
"loss": 4.0394, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00021779999999999998, |
|
"loss": 4.1295, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00021839999999999997, |
|
"loss": 4.1444, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00021899999999999998, |
|
"loss": 4.2003, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00021959999999999997, |
|
"loss": 4.0887, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 0.00022019999999999999, |
|
"loss": 3.9279, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 0.00022079999999999997, |
|
"loss": 3.9932, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 0.0002214, |
|
"loss": 3.7584, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 0.0002214, |
|
"loss": 3.4602, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 0.00022199999999999998, |
|
"loss": 5.1713, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 0.0002226, |
|
"loss": 4.9749, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 0.00022319999999999998, |
|
"loss": 4.77, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 0.0002238, |
|
"loss": 4.525, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 0.00022439999999999998, |
|
"loss": 4.4688, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 0.000225, |
|
"loss": 4.3824, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 0.00022559999999999998, |
|
"loss": 4.4258, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 0.00022619999999999997, |
|
"loss": 4.4924, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 0.00022679999999999998, |
|
"loss": 4.3423, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 0.00022739999999999997, |
|
"loss": 4.439, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 0.00022799999999999999, |
|
"loss": 4.2055, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 0.00022859999999999997, |
|
"loss": 4.1395, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 0.0002292, |
|
"loss": 4.1919, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 0.00022979999999999997, |
|
"loss": 4.1744, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 0.0002304, |
|
"loss": 4.1852, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 0.00023099999999999998, |
|
"loss": 4.1599, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 0.0002316, |
|
"loss": 4.1255, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 0.00023219999999999998, |
|
"loss": 4.1317, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 0.0002328, |
|
"loss": 4.1679, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 0.00023339999999999998, |
|
"loss": 4.1555, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 0.000234, |
|
"loss": 4.1475, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 0.00023459999999999998, |
|
"loss": 4.1145, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 0.0002352, |
|
"loss": 4.0143, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 0.00023579999999999999, |
|
"loss": 4.1586, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 0.0002364, |
|
"loss": 4.0736, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 0.000237, |
|
"loss": 4.1268, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 0.0002376, |
|
"loss": 4.1166, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 0.0002382, |
|
"loss": 4.0384, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 0.0002388, |
|
"loss": 3.9957, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 0.0002394, |
|
"loss": 4.0793, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 0.00023999999999999998, |
|
"loss": 4.1288, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 0.0002406, |
|
"loss": 3.9214, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 0.00024119999999999998, |
|
"loss": 4.0041, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 0.0002418, |
|
"loss": 4.0388, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 0.00024239999999999998, |
|
"loss": 3.989, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 0.000243, |
|
"loss": 4.0942, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 0.00024359999999999999, |
|
"loss": 3.9554, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 0.00024419999999999997, |
|
"loss": 4.0312, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 0.0002448, |
|
"loss": 3.9571, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 0.00024539999999999995, |
|
"loss": 4.0576, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 0.00024599999999999996, |
|
"loss": 3.9382, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 0.0002466, |
|
"loss": 3.8844, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 0.0002472, |
|
"loss": 3.8587, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 0.00024779999999999995, |
|
"loss": 3.8616, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 0.00024839999999999997, |
|
"loss": 3.8594, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 0.000249, |
|
"loss": 3.698, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 0.00024959999999999994, |
|
"loss": 3.7176, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 0.00025019999999999996, |
|
"loss": 3.5745, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 0.00025079999999999997, |
|
"loss": 3.5771, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 0.0002514, |
|
"loss": 3.1711, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 0.00025199999999999995, |
|
"loss": 5.6223, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 0.00025259999999999996, |
|
"loss": 5.5827, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 0.0002532, |
|
"loss": 5.1421, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 0.0002538, |
|
"loss": 4.695, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 0.00025439999999999995, |
|
"loss": 4.5779, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 0.00025499999999999996, |
|
"loss": 4.5219, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 0.0002556, |
|
"loss": 4.2696, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 0.0002562, |
|
"loss": 4.1901, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 0.00025679999999999995, |
|
"loss": 4.1369, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 0.00025739999999999997, |
|
"loss": 4.0599, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 0.000258, |
|
"loss": 4.1342, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 0.0002586, |
|
"loss": 4.1208, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 0.00025919999999999996, |
|
"loss": 4.0045, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 0.00025979999999999997, |
|
"loss": 4.0492, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 0.0002604, |
|
"loss": 4.0202, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 0.000261, |
|
"loss": 3.9223, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 0.00026159999999999996, |
|
"loss": 3.9732, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 0.0002622, |
|
"loss": 3.9046, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 0.0002628, |
|
"loss": 3.9394, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 0.00026339999999999995, |
|
"loss": 3.8511, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 0.00026399999999999997, |
|
"loss": 3.7997, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.0002646, |
|
"loss": 3.6102, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.0002652, |
|
"loss": 3.2133, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.00026579999999999996, |
|
"loss": 5.3176, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 0.00026639999999999997, |
|
"loss": 5.2469, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 0.000267, |
|
"loss": 4.9693, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 0.0002676, |
|
"loss": 4.5121, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 0.00026819999999999996, |
|
"loss": 4.5022, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 0.0002688, |
|
"loss": 4.2283, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 0.0002694, |
|
"loss": 4.0816, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 0.00027, |
|
"loss": 4.091, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 0.00027059999999999996, |
|
"loss": 4.0024, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 0.0002712, |
|
"loss": 3.9794, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 0.0002718, |
|
"loss": 3.8077, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 0.0002724, |
|
"loss": 3.86, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 0.00027299999999999997, |
|
"loss": 3.6727, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 0.0002736, |
|
"loss": 3.7923, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 0.0002742, |
|
"loss": 3.7753, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 0.0002748, |
|
"loss": 3.7465, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 0.00027539999999999997, |
|
"loss": 3.712, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 0.000276, |
|
"loss": 3.5746, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 0.0002766, |
|
"loss": 3.6256, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 0.0002772, |
|
"loss": 3.5238, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 0.0002778, |
|
"loss": 3.4411, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 0.0002784, |
|
"loss": 3.5065, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 0.000279, |
|
"loss": 3.5197, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 0.00027959999999999997, |
|
"loss": 3.4927, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 0.0002802, |
|
"loss": 3.5846, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 0.0002808, |
|
"loss": 3.4224, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 0.00028139999999999996, |
|
"loss": 3.4171, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 0.00028199999999999997, |
|
"loss": 3.3167, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 0.0002826, |
|
"loss": 3.4456, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 0.00028319999999999994, |
|
"loss": 3.4501, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 0.00028379999999999996, |
|
"loss": 3.3677, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 0.0002844, |
|
"loss": 3.2927, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 0.000285, |
|
"loss": 3.3387, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 0.00028559999999999995, |
|
"loss": 3.3384, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 0.00028619999999999996, |
|
"loss": 3.2779, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 0.0002868, |
|
"loss": 3.2192, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 0.00028739999999999994, |
|
"loss": 3.2844, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 0.00028799999999999995, |
|
"loss": 3.178, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 0.00028859999999999997, |
|
"loss": 3.179, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 0.0002892, |
|
"loss": 3.1362, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 0.00028979999999999994, |
|
"loss": 3.1162, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 0.00029039999999999996, |
|
"loss": 3.0807, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 0.00029099999999999997, |
|
"loss": 3.0098, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 0.0002916, |
|
"loss": 2.8276, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 0.00029219999999999995, |
|
"loss": 2.8082, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 0.00029279999999999996, |
|
"loss": 2.7419, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 0.0002934, |
|
"loss": 2.6181, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 0.000294, |
|
"loss": 2.5094, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 0.00029459999999999995, |
|
"loss": 2.455, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 0.00029519999999999997, |
|
"loss": 2.1926, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 0.0002958, |
|
"loss": 6.3152, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 0.0002964, |
|
"loss": 5.1198, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 0.00029699999999999996, |
|
"loss": 4.7265, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 0.00029759999999999997, |
|
"loss": 4.3539, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"eval_loss": 4.050220966339111, |
|
"eval_runtime": 594.0081, |
|
"eval_samples_per_second": 4.448, |
|
"eval_steps_per_second": 0.557, |
|
"eval_wer": 1.7867314557715193, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 0.0002982, |
|
"loss": 4.1344, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 0.0002988, |
|
"loss": 4.2398, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 0.00029939999999999996, |
|
"loss": 4.0204, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 0.0003, |
|
"loss": 4.032, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 0.0002995121951219512, |
|
"loss": 3.9013, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 0.0002990243902439024, |
|
"loss": 3.7927, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 0.0002985365853658536, |
|
"loss": 3.7671, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 0.00029804878048780484, |
|
"loss": 3.7971, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 0.00029756097560975606, |
|
"loss": 3.7401, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 0.0002970731707317073, |
|
"loss": 3.5998, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 0.0002965853658536585, |
|
"loss": 3.6632, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 0.0002960975609756097, |
|
"loss": 3.6384, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 0.0002956097560975609, |
|
"loss": 3.6145, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 0.0002951219512195122, |
|
"loss": 3.5137, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 0.00029463414634146336, |
|
"loss": 3.5113, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 0.0002941463414634146, |
|
"loss": 3.4298, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 0.00029365853658536585, |
|
"loss": 3.3688, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 0.00029317073170731706, |
|
"loss": 3.3792, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 0.0002926829268292683, |
|
"loss": 3.4692, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 0.0002921951219512195, |
|
"loss": 3.3661, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 0.0002917073170731707, |
|
"loss": 3.4411, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 0.00029121951219512193, |
|
"loss": 3.509, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 0.00029073170731707315, |
|
"loss": 3.3465, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 0.00029024390243902437, |
|
"loss": 3.2509, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 0.0002897560975609756, |
|
"loss": 3.1902, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 0.0002892682926829268, |
|
"loss": 3.2894, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 0.000288780487804878, |
|
"loss": 3.2102, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 0.00028829268292682923, |
|
"loss": 2.9926, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 0.00028780487804878045, |
|
"loss": 3.1141, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 0.00028731707317073167, |
|
"loss": 2.9693, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 0.0002868292682926829, |
|
"loss": 3.0834, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 0.0002863414634146341, |
|
"loss": 3.0293, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 0.00028585365853658537, |
|
"loss": 2.9632, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 0.00028536585365853654, |
|
"loss": 2.8581, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 0.00028487804878048775, |
|
"loss": 2.6841, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 0.000284390243902439, |
|
"loss": 2.7088, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 0.0002839024390243902, |
|
"loss": 2.8209, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 0.00028341463414634146, |
|
"loss": 2.5995, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 0.0002829268292682927, |
|
"loss": 2.7186, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 0.00028243902439024384, |
|
"loss": 2.3942, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 0.0002819512195121951, |
|
"loss": 2.4166, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 0.0002814634146341463, |
|
"loss": 2.1934, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 0.00028097560975609754, |
|
"loss": 2.0537, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 0.00028048780487804876, |
|
"loss": 1.8673, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 0.00028, |
|
"loss": 1.7229, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 0.0002795121951219512, |
|
"loss": 1.6225, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 0.0002790243902439024, |
|
"loss": 6.0962, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 0.0002785365853658536, |
|
"loss": 5.1113, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 0.00027804878048780484, |
|
"loss": 4.4441, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 0.00027756097560975606, |
|
"loss": 3.93, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 0.0002770731707317073, |
|
"loss": 3.6004, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 0.00027658536585365855, |
|
"loss": 3.4879, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 0.0002760975609756097, |
|
"loss": 3.3519, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 0.00027560975609756093, |
|
"loss": 3.2981, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 0.0002751219512195122, |
|
"loss": 3.1892, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 0.00027463414634146336, |
|
"loss": 3.1885, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 0.00027414634146341463, |
|
"loss": 2.9167, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 0.00027365853658536585, |
|
"loss": 2.8781, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 0.000273170731707317, |
|
"loss": 2.8923, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 0.0002726829268292683, |
|
"loss": 2.6902, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 0.0002721951219512195, |
|
"loss": 2.6606, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 0.0002717073170731707, |
|
"loss": 2.5854, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 0.00027121951219512193, |
|
"loss": 2.4889, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 0.00027073170731707315, |
|
"loss": 2.3526, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 0.00027024390243902437, |
|
"loss": 2.3056, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 0.0002697560975609756, |
|
"loss": 2.2212, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 0.0002692682926829268, |
|
"loss": 2.1994, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 0.000268780487804878, |
|
"loss": 2.1812, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 0.00026829268292682924, |
|
"loss": 2.058, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 0.00026780487804878045, |
|
"loss": 1.9894, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 0.0002673170731707317, |
|
"loss": 2.0365, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 0.0002668292682926829, |
|
"loss": 1.9143, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 0.0002663414634146341, |
|
"loss": 1.7815, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 0.0002658536585365854, |
|
"loss": 1.728, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 0.00026536585365853654, |
|
"loss": 1.7936, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 0.0002648780487804878, |
|
"loss": 1.7649, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 0.000264390243902439, |
|
"loss": 1.6668, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 0.0002639024390243902, |
|
"loss": 1.5596, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 0.00026341463414634146, |
|
"loss": 1.5518, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 0.0002629268292682927, |
|
"loss": 1.5524, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 0.0002624390243902439, |
|
"loss": 1.426, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 0.0002619512195121951, |
|
"loss": 1.4211, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 0.0002614634146341463, |
|
"loss": 1.4552, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 0.00026097560975609754, |
|
"loss": 1.3657, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 0.00026048780487804876, |
|
"loss": 1.3157, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 0.00026, |
|
"loss": 1.2056, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 0.0002595121951219512, |
|
"loss": 1.1727, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 0.0002590243902439024, |
|
"loss": 1.1081, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 0.00025853658536585363, |
|
"loss": 1.1073, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 0.00025804878048780484, |
|
"loss": 1.0629, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 0.00025756097560975606, |
|
"loss": 1.0085, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 0.0002570731707317073, |
|
"loss": 0.9956, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 0.0002565853658536585, |
|
"loss": 0.9413, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 0.0002560975609756097, |
|
"loss": 0.9303, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 0.000255609756097561, |
|
"loss": 0.917, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 0.0002551219512195122, |
|
"loss": 1.081, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 0.00025463414634146336, |
|
"loss": 9.2655, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 0.00025414634146341463, |
|
"loss": 6.3665, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 0.00025365853658536585, |
|
"loss": 4.6114, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 0.00025317073170731707, |
|
"loss": 3.6587, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 0.0002526829268292683, |
|
"loss": 3.0331, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 0.0002521951219512195, |
|
"loss": 2.6576, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 0.0002517073170731707, |
|
"loss": 2.4141, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 0.00025121951219512194, |
|
"loss": 2.2096, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 0.00025073170731707315, |
|
"loss": 2.0265, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 0.00025024390243902437, |
|
"loss": 1.9003, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 0.0002497560975609756, |
|
"loss": 1.7156, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 0.0002492682926829268, |
|
"loss": 1.6201, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 0.000248780487804878, |
|
"loss": 1.4503, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 0.00024829268292682924, |
|
"loss": 1.4161, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 0.00024780487804878045, |
|
"loss": 1.2728, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 0.00024731707317073167, |
|
"loss": 1.2627, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 0.0002468292682926829, |
|
"loss": 1.301, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 0.00024634146341463416, |
|
"loss": 1.1987, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 0.0002458536585365853, |
|
"loss": 1.0851, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 0.00024536585365853654, |
|
"loss": 1.207, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 0.0002448780487804878, |
|
"loss": 1.0918, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 0.00024439024390243897, |
|
"loss": 1.1469, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 0.00024390243902439022, |
|
"loss": 0.9882, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 0.00024341463414634146, |
|
"loss": 1.0057, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 0.00024292682926829268, |
|
"loss": 0.9869, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 0.00024243902439024387, |
|
"loss": 0.9118, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 0.0002419512195121951, |
|
"loss": 0.8895, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 0.00024146341463414633, |
|
"loss": 0.7842, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 0.00024097560975609755, |
|
"loss": 0.8535, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 0.00024048780487804876, |
|
"loss": 0.9101, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 0.00023999999999999998, |
|
"loss": 0.9084, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 0.0002395121951219512, |
|
"loss": 0.8409, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 0.0002390243902439024, |
|
"loss": 0.8037, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 0.00023853658536585366, |
|
"loss": 0.7994, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 0.00023804878048780485, |
|
"loss": 0.8201, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 0.00023756097560975606, |
|
"loss": 0.7229, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 0.0002370731707317073, |
|
"loss": 0.7606, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 0.0002365853658536585, |
|
"loss": 0.7244, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 0.00023609756097560974, |
|
"loss": 0.7032, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 0.00023560975609756096, |
|
"loss": 0.6894, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 0.00023512195121951215, |
|
"loss": 0.7164, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 0.0002346341463414634, |
|
"loss": 0.6915, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 0.0002341463414634146, |
|
"loss": 0.7157, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 0.00023365853658536583, |
|
"loss": 0.7308, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 0.00023317073170731704, |
|
"loss": 0.6428, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 0.0002326829268292683, |
|
"loss": 0.6652, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 0.00023219512195121948, |
|
"loss": 0.6669, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 0.0002317073170731707, |
|
"loss": 0.6834, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 0.00023121951219512194, |
|
"loss": 0.703, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 0.00023073170731707315, |
|
"loss": 0.792, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 0.00023024390243902437, |
|
"loss": 7.3397, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 0.0002297560975609756, |
|
"loss": 6.4753, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 0.00022926829268292683, |
|
"loss": 4.5254, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 0.00022878048780487802, |
|
"loss": 3.1011, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 0.00022829268292682924, |
|
"loss": 2.1706, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 0.00022780487804878048, |
|
"loss": 1.8403, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 0.00022731707317073167, |
|
"loss": 1.7287, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 0.00022682926829268292, |
|
"loss": 1.5158, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 0.00022634146341463413, |
|
"loss": 1.2994, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 0.00022585365853658532, |
|
"loss": 1.099, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 0.00022536585365853657, |
|
"loss": 0.9544, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 0.00022487804878048778, |
|
"loss": 0.97, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 0.000224390243902439, |
|
"loss": 0.9019, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 0.00022390243902439022, |
|
"loss": 0.873, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 0.00022341463414634146, |
|
"loss": 0.8081, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 0.00022292682926829265, |
|
"loss": 0.8081, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 0.00022243902439024387, |
|
"loss": 0.697, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 0.0002219512195121951, |
|
"loss": 0.663, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 0.0002214634146341463, |
|
"loss": 0.6758, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 0.00022097560975609755, |
|
"loss": 0.6657, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 0.00022048780487804876, |
|
"loss": 0.5622, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.00021999999999999995, |
|
"loss": 0.607, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0002195121951219512, |
|
"loss": 0.6879, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.00021902439024390241, |
|
"loss": 3.5432, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 0.00021853658536585366, |
|
"loss": 3.2184, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 0.00021804878048780485, |
|
"loss": 1.9385, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 0.0002175609756097561, |
|
"loss": 1.2234, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 0.0002170731707317073, |
|
"loss": 1.2986, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 0.0002165853658536585, |
|
"loss": 1.1385, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 0.00021609756097560974, |
|
"loss": 1.0251, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 0.00021560975609756096, |
|
"loss": 0.8948, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 0.00021512195121951218, |
|
"loss": 0.6973, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 0.0002146341463414634, |
|
"loss": 0.7551, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 0.00021414634146341464, |
|
"loss": 0.6294, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 0.00021365853658536583, |
|
"loss": 0.6347, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 0.00021317073170731704, |
|
"loss": 0.6635, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 0.0002126829268292683, |
|
"loss": 0.5707, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 0.00021219512195121948, |
|
"loss": 0.5126, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 0.00021170731707317072, |
|
"loss": 0.6055, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 0.00021121951219512194, |
|
"loss": 0.5535, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 0.00021073170731707313, |
|
"loss": 0.5278, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 0.00021024390243902437, |
|
"loss": 0.5102, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 0.0002097560975609756, |
|
"loss": 0.5113, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 0.0002092682926829268, |
|
"loss": 0.4592, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 0.00020878048780487802, |
|
"loss": 0.4637, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 0.00020829268292682927, |
|
"loss": 0.4902, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 0.00020780487804878046, |
|
"loss": 0.4212, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 0.00020731707317073167, |
|
"loss": 0.4117, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 0.00020682926829268292, |
|
"loss": 0.4027, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 0.0002063414634146341, |
|
"loss": 0.4618, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 0.00020585365853658535, |
|
"loss": 0.3627, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 0.00020536585365853657, |
|
"loss": 0.4113, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 0.0002048780487804878, |
|
"loss": 0.3982, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 0.000204390243902439, |
|
"loss": 0.3762, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 0.00020390243902439022, |
|
"loss": 0.3667, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 0.00020341463414634146, |
|
"loss": 0.3957, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 0.00020292682926829265, |
|
"loss": 0.3483, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 0.0002024390243902439, |
|
"loss": 0.3318, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 0.00020195121951219511, |
|
"loss": 0.4213, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 0.0002014634146341463, |
|
"loss": 0.368, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 0.00020097560975609755, |
|
"loss": 0.3552, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 0.00020048780487804877, |
|
"loss": 0.3594, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 0.00019999999999999998, |
|
"loss": 0.3226, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 0.0001995121951219512, |
|
"loss": 0.3709, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 0.00019902439024390244, |
|
"loss": 0.3987, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 0.00019853658536585363, |
|
"loss": 0.3575, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 0.00019804878048780485, |
|
"loss": 0.3317, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 0.0001975609756097561, |
|
"loss": 0.2524, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 0.00019707317073170728, |
|
"loss": 0.3431, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 0.00019658536585365853, |
|
"loss": 0.3492, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 0.00019609756097560974, |
|
"loss": 0.2618, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 0.00019560975609756093, |
|
"loss": 0.3719, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 0.00019512195121951218, |
|
"loss": 0.3291, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 0.0001946341463414634, |
|
"loss": 3.3472, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 0.0001941463414634146, |
|
"loss": 2.2989, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 0.00019365853658536583, |
|
"loss": 1.2984, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 0.00019317073170731707, |
|
"loss": 0.9822, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 0.0001926829268292683, |
|
"loss": 0.9058, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 0.00019219512195121948, |
|
"loss": 0.8659, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 0.00019170731707317072, |
|
"loss": 0.7585, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 0.00019121951219512194, |
|
"loss": 0.7159, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 0.00019073170731707316, |
|
"loss": 0.643, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 0.00019024390243902437, |
|
"loss": 0.5668, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 0.00018975609756097562, |
|
"loss": 0.5185, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 0.0001892682926829268, |
|
"loss": 0.5032, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 0.00018878048780487803, |
|
"loss": 0.478, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 0.00018829268292682927, |
|
"loss": 0.4333, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 0.00018780487804878046, |
|
"loss": 0.4642, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 0.0001873170731707317, |
|
"loss": 0.4117, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 0.00018682926829268292, |
|
"loss": 0.4252, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 0.0001863414634146341, |
|
"loss": 0.3947, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 0.00018585365853658535, |
|
"loss": 0.4007, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 0.00018536585365853657, |
|
"loss": 0.4026, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 0.0001848780487804878, |
|
"loss": 0.3837, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 0.000184390243902439, |
|
"loss": 0.3415, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 0.00018390243902439025, |
|
"loss": 0.4264, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 0.00018341463414634144, |
|
"loss": 0.3209, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 0.00018292682926829266, |
|
"loss": 0.3521, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 0.0001824390243902439, |
|
"loss": 0.3439, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 0.0001819512195121951, |
|
"loss": 0.3283, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 0.00018146341463414633, |
|
"loss": 0.3709, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 0.00018097560975609755, |
|
"loss": 0.3629, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 0.00018048780487804877, |
|
"loss": 0.3261, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 0.00017999999999999998, |
|
"loss": 0.3054, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 0.0001795121951219512, |
|
"loss": 0.3341, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 0.00017902439024390245, |
|
"loss": 0.3435, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 0.00017853658536585363, |
|
"loss": 0.3293, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 0.00017804878048780485, |
|
"loss": 0.3783, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 0.0001775609756097561, |
|
"loss": 0.3352, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 0.00017707317073170729, |
|
"loss": 0.275, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 0.00017658536585365853, |
|
"loss": 0.3101, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 0.00017609756097560975, |
|
"loss": 0.322, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 0.00017560975609756094, |
|
"loss": 0.2583, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 0.00017512195121951218, |
|
"loss": 0.2986, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 0.0001746341463414634, |
|
"loss": 0.2965, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 0.00017414634146341461, |
|
"loss": 0.3305, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 0.00017365853658536583, |
|
"loss": 0.2637, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 0.00017317073170731708, |
|
"loss": 0.2815, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 0.00017268292682926826, |
|
"loss": 0.3337, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 0.00017219512195121948, |
|
"loss": 0.3071, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 0.00017170731707317073, |
|
"loss": 0.2576, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 0.00017121951219512192, |
|
"loss": 0.2627, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 0.00017073170731707316, |
|
"loss": 0.2915, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 0.00017024390243902438, |
|
"loss": 1.9187, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 0.00016975609756097557, |
|
"loss": 1.4796, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 0.0001692682926829268, |
|
"loss": 0.9913, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 0.00016878048780487803, |
|
"loss": 0.6812, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 0.00016829268292682927, |
|
"loss": 0.6404, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 0.00016780487804878046, |
|
"loss": 0.6294, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 0.0001673170731707317, |
|
"loss": 0.5524, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 0.00016682926829268292, |
|
"loss": 0.4834, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 0.0001663414634146341, |
|
"loss": 0.4867, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 0.00016585365853658536, |
|
"loss": 0.4394, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 0.00016536585365853657, |
|
"loss": 0.4228, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 0.0001648780487804878, |
|
"loss": 0.3963, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 0.000164390243902439, |
|
"loss": 0.3693, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 0.00016390243902439025, |
|
"loss": 0.3423, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 0.00016341463414634144, |
|
"loss": 0.3681, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 0.00016292682926829266, |
|
"loss": 0.3444, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 0.0001624390243902439, |
|
"loss": 0.3625, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 0.0001619512195121951, |
|
"loss": 0.335, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 0.00016146341463414634, |
|
"loss": 0.3001, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 0.00016097560975609755, |
|
"loss": 0.3523, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 0.00016048780487804874, |
|
"loss": 0.3084, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 0.00015999999999999999, |
|
"loss": 0.3502, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 0.0001595121951219512, |
|
"loss": 0.3246, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 0.00015902439024390242, |
|
"loss": 0.3061, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 0.00015853658536585364, |
|
"loss": 0.3581, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 0.00015804878048780488, |
|
"loss": 0.3109, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 0.00015756097560975607, |
|
"loss": 0.2814, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 0.0001570731707317073, |
|
"loss": 0.2509, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 0.00015658536585365853, |
|
"loss": 0.3046, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 0.00015609756097560975, |
|
"loss": 0.2995, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 0.00015560975609756097, |
|
"loss": 0.2679, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 0.00015512195121951218, |
|
"loss": 0.3044, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 0.00015463414634146343, |
|
"loss": 0.286, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 0.00015414634146341462, |
|
"loss": 0.2883, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 0.00015365853658536583, |
|
"loss": 0.3195, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 0.00015317073170731708, |
|
"loss": 0.284, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 0.00015268292682926827, |
|
"loss": 0.2843, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 0.0001521951219512195, |
|
"loss": 0.2142, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 0.00015170731707317073, |
|
"loss": 0.2466, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 0.00015121951219512192, |
|
"loss": 0.2987, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 0.00015073170731707316, |
|
"loss": 0.2849, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 0.00015024390243902438, |
|
"loss": 0.2537, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 0.0001497560975609756, |
|
"loss": 0.2193, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 0.0001492682926829268, |
|
"loss": 0.2516, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 0.00014878048780487803, |
|
"loss": 0.2595, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 0.00014829268292682925, |
|
"loss": 0.2648, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 0.00014780487804878046, |
|
"loss": 0.2563, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 0.00014731707317073168, |
|
"loss": 0.2177, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 0.00014682926829268292, |
|
"loss": 0.2533, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 0.00014634146341463414, |
|
"loss": 0.292, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 0.00014585365853658536, |
|
"loss": 1.3898, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 0.00014536585365853657, |
|
"loss": 1.0223, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 0.0001448780487804878, |
|
"loss": 0.6619, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 0.000144390243902439, |
|
"loss": 0.5068, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 0.00014390243902439023, |
|
"loss": 0.473, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 0.00014341463414634144, |
|
"loss": 0.4847, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 0.00014292682926829269, |
|
"loss": 0.4595, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 0.00014243902439024388, |
|
"loss": 0.3926, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 0.0001419512195121951, |
|
"loss": 0.3979, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 0.00014146341463414634, |
|
"loss": 0.3735, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 0.00014097560975609755, |
|
"loss": 0.3384, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 0.00014048780487804877, |
|
"loss": 0.3536, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 0.00014, |
|
"loss": 0.3151, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 0.0001395121951219512, |
|
"loss": 0.3147, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 0.00013902439024390242, |
|
"loss": 0.3133, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 0.00013853658536585364, |
|
"loss": 0.255, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 0.00013804878048780486, |
|
"loss": 0.2868, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 0.0001375609756097561, |
|
"loss": 0.2783, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 0.00013707317073170732, |
|
"loss": 0.2877, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 0.0001365853658536585, |
|
"loss": 0.2775, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 0.00013609756097560975, |
|
"loss": 0.2804, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 0.00013560975609756097, |
|
"loss": 0.2744, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 0.00013512195121951218, |
|
"loss": 0.2385, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 0.0001346341463414634, |
|
"loss": 0.2474, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 0.00013414634146341462, |
|
"loss": 0.2529, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 0.00013365853658536586, |
|
"loss": 0.2812, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 0.00013317073170731705, |
|
"loss": 0.2556, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 0.00013268292682926827, |
|
"loss": 0.2324, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 0.0001321951219512195, |
|
"loss": 0.2562, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 0.00013170731707317073, |
|
"loss": 0.2345, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 0.00013121951219512195, |
|
"loss": 0.2582, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 0.00013073170731707316, |
|
"loss": 0.2171, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 0.00013024390243902438, |
|
"loss": 0.2607, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 0.0001297560975609756, |
|
"loss": 0.227, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 0.00012926829268292681, |
|
"loss": 0.2222, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 0.00012878048780487803, |
|
"loss": 0.2358, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 0.00012829268292682925, |
|
"loss": 0.235, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 0.0001278048780487805, |
|
"loss": 0.2213, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 0.00012731707317073168, |
|
"loss": 0.2178, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 0.00012682926829268293, |
|
"loss": 0.2105, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 0.00012634146341463414, |
|
"loss": 0.2181, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 0.00012585365853658536, |
|
"loss": 0.2648, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 0.00012536585365853658, |
|
"loss": 0.2673, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 0.0001248780487804878, |
|
"loss": 0.2179, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 0.000124390243902439, |
|
"loss": 0.1762, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 0.00012390243902439023, |
|
"loss": 0.2107, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 0.00012341463414634144, |
|
"loss": 0.2415, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 0.00012292682926829266, |
|
"loss": 0.265, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 0.0001224390243902439, |
|
"loss": 0.2246, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 0.00012195121951219511, |
|
"loss": 0.3635, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 0.00012146341463414634, |
|
"loss": 0.7013, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 0.00012097560975609756, |
|
"loss": 0.536, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 0.00012048780487804877, |
|
"loss": 0.398, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 0.00011999999999999999, |
|
"loss": 0.3576, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 0.0001195121951219512, |
|
"loss": 0.3146, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 0.00011902439024390242, |
|
"loss": 0.3146, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 0.00011853658536585365, |
|
"loss": 0.3472, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 0.00011804878048780487, |
|
"loss": 0.2634, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 0.00011756097560975607, |
|
"loss": 0.3482, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 0.0001170731707317073, |
|
"loss": 0.2643, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 0.00011658536585365852, |
|
"loss": 0.2699, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 0.00011609756097560974, |
|
"loss": 0.2764, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 0.00011560975609756097, |
|
"loss": 0.2324, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 0.00011512195121951219, |
|
"loss": 0.2683, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 0.00011463414634146342, |
|
"loss": 0.2209, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 0.00011414634146341462, |
|
"loss": 0.2287, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 0.00011365853658536584, |
|
"loss": 0.2358, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 0.00011317073170731707, |
|
"loss": 0.2162, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 0.00011268292682926828, |
|
"loss": 0.2329, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 0.0001121951219512195, |
|
"loss": 0.2235, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 0.00011170731707317073, |
|
"loss": 0.2409, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 0.00011121951219512193, |
|
"loss": 0.1879, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 0.00011073170731707315, |
|
"loss": 0.263, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 0.00011024390243902438, |
|
"loss": 0.4579, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 0.0001097560975609756, |
|
"loss": 0.4033, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 0.00010926829268292683, |
|
"loss": 0.313, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 0.00010878048780487805, |
|
"loss": 0.2394, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 0.00010829268292682925, |
|
"loss": 0.2394, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 0.00010780487804878048, |
|
"loss": 0.2715, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 0.0001073170731707317, |
|
"loss": 0.24, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 0.00010682926829268291, |
|
"loss": 0.2086, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 0.00010634146341463414, |
|
"loss": 0.2325, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 0.00010585365853658536, |
|
"loss": 0.1837, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 0.00010536585365853656, |
|
"loss": 0.1641, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 0.0001048780487804878, |
|
"loss": 0.169, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 0.00010439024390243901, |
|
"loss": 0.1444, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 0.00010390243902439023, |
|
"loss": 0.1955, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 0.00010341463414634146, |
|
"loss": 0.1712, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 0.00010292682926829268, |
|
"loss": 0.1723, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 0.0001024390243902439, |
|
"loss": 0.1577, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 0.00010195121951219511, |
|
"loss": 0.1411, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 0.00010146341463414633, |
|
"loss": 0.147, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 0.00010097560975609756, |
|
"loss": 0.1688, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 0.00010048780487804877, |
|
"loss": 0.1425, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 9.999999999999999e-05, |
|
"loss": 0.184, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 9.951219512195122e-05, |
|
"loss": 0.1604, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 9.902439024390243e-05, |
|
"loss": 0.1326, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 9.853658536585364e-05, |
|
"loss": 0.1505, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 9.804878048780487e-05, |
|
"loss": 0.1165, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 9.756097560975609e-05, |
|
"loss": 0.1217, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 9.70731707317073e-05, |
|
"loss": 0.1376, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 9.658536585365854e-05, |
|
"loss": 0.1416, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 9.609756097560974e-05, |
|
"loss": 0.1483, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 9.560975609756097e-05, |
|
"loss": 0.1441, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 9.512195121951219e-05, |
|
"loss": 0.1459, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 9.46341463414634e-05, |
|
"loss": 0.138, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 9.414634146341463e-05, |
|
"loss": 0.1196, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 9.365853658536585e-05, |
|
"loss": 0.1039, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 9.317073170731706e-05, |
|
"loss": 0.1461, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 9.268292682926829e-05, |
|
"loss": 0.156, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 9.21951219512195e-05, |
|
"loss": 0.1005, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 9.170731707317072e-05, |
|
"loss": 0.1227, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 9.121951219512195e-05, |
|
"loss": 0.1158, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 9.073170731707317e-05, |
|
"loss": 0.1069, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 9.024390243902438e-05, |
|
"loss": 0.1131, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 8.97560975609756e-05, |
|
"loss": 0.0864, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 8.926829268292682e-05, |
|
"loss": 0.0965, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 8.878048780487805e-05, |
|
"loss": 0.0969, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 8.829268292682926e-05, |
|
"loss": 0.1018, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 8.780487804878047e-05, |
|
"loss": 0.0772, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 8.73170731707317e-05, |
|
"loss": 0.0811, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 8.682926829268292e-05, |
|
"loss": 0.108, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 8.634146341463413e-05, |
|
"loss": 0.1689, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 8.585365853658536e-05, |
|
"loss": 0.4178, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 8.536585365853658e-05, |
|
"loss": 0.3293, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 8.487804878048778e-05, |
|
"loss": 0.3048, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 8.439024390243901e-05, |
|
"loss": 0.2403, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 8.390243902439023e-05, |
|
"loss": 0.2498, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 8.341463414634146e-05, |
|
"loss": 0.1854, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 8.292682926829268e-05, |
|
"loss": 0.1997, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 8.24390243902439e-05, |
|
"loss": 0.2298, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 8.195121951219513e-05, |
|
"loss": 0.1632, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 8.146341463414633e-05, |
|
"loss": 0.1737, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 8.097560975609755e-05, |
|
"loss": 0.1601, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 8.048780487804878e-05, |
|
"loss": 0.1663, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 7.999999999999999e-05, |
|
"loss": 0.187, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 7.951219512195121e-05, |
|
"loss": 0.1445, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 7.902439024390244e-05, |
|
"loss": 0.1499, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 7.853658536585364e-05, |
|
"loss": 0.1406, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 7.804878048780487e-05, |
|
"loss": 0.1639, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 7.756097560975609e-05, |
|
"loss": 0.1295, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 7.707317073170731e-05, |
|
"loss": 0.1336, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 7.658536585365854e-05, |
|
"loss": 0.1529, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 7.609756097560976e-05, |
|
"loss": 0.1371, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 7.560975609756096e-05, |
|
"loss": 0.1307, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 7.512195121951219e-05, |
|
"loss": 0.1316, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 7.46341463414634e-05, |
|
"loss": 0.1358, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 7.414634146341462e-05, |
|
"loss": 0.1215, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 7.365853658536584e-05, |
|
"loss": 0.1351, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 7.317073170731707e-05, |
|
"loss": 0.121, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 7.268292682926829e-05, |
|
"loss": 0.1392, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 7.21951219512195e-05, |
|
"loss": 0.106, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 7.170731707317072e-05, |
|
"loss": 0.1224, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 7.121951219512194e-05, |
|
"loss": 0.1107, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 7.073170731707317e-05, |
|
"loss": 0.1096, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 7.024390243902439e-05, |
|
"loss": 0.1294, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 6.97560975609756e-05, |
|
"loss": 0.1206, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 6.926829268292682e-05, |
|
"loss": 0.1198, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 6.878048780487805e-05, |
|
"loss": 0.1112, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 6.829268292682925e-05, |
|
"loss": 0.1085, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 6.780487804878048e-05, |
|
"loss": 0.089, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 6.73170731707317e-05, |
|
"loss": 0.0981, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 6.682926829268293e-05, |
|
"loss": 0.1129, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 6.634146341463413e-05, |
|
"loss": 0.1026, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 6.585365853658536e-05, |
|
"loss": 0.0927, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 6.536585365853658e-05, |
|
"loss": 0.0987, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 6.48780487804878e-05, |
|
"loss": 0.1299, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.439024390243902e-05, |
|
"loss": 0.0781, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.390243902439025e-05, |
|
"loss": 0.1048, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.341463414634146e-05, |
|
"loss": 0.0572, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 6.292682926829268e-05, |
|
"loss": 0.1033, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 6.24390243902439e-05, |
|
"loss": 0.1029, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.195121951219511e-05, |
|
"loss": 0.1846, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.146341463414633e-05, |
|
"loss": 0.3495, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.0975609756097554e-05, |
|
"loss": 0.3143, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.048780487804878e-05, |
|
"loss": 0.2743, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 5.9999999999999995e-05, |
|
"loss": 0.1964, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 5.951219512195121e-05, |
|
"loss": 0.2446, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 5.9024390243902435e-05, |
|
"loss": 0.1735, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 5.853658536585365e-05, |
|
"loss": 0.2045, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 5.804878048780487e-05, |
|
"loss": 0.1399, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"eval_loss": 0.36502909660339355, |
|
"eval_runtime": 567.0865, |
|
"eval_samples_per_second": 4.659, |
|
"eval_steps_per_second": 0.584, |
|
"eval_wer": 0.11207854026180088, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 5.756097560975609e-05, |
|
"loss": 0.1683, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 5.707317073170731e-05, |
|
"loss": 0.1527, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 5.6585365853658533e-05, |
|
"loss": 0.1248, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 5.609756097560975e-05, |
|
"loss": 0.1299, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 5.560975609756097e-05, |
|
"loss": 0.1563, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 5.512195121951219e-05, |
|
"loss": 0.1239, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 5.4634146341463415e-05, |
|
"loss": 0.1481, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 5.4146341463414625e-05, |
|
"loss": 0.1316, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 5.365853658536585e-05, |
|
"loss": 0.1313, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 5.317073170731707e-05, |
|
"loss": 0.1185, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 5.268292682926828e-05, |
|
"loss": 0.1288, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 5.2195121951219506e-05, |
|
"loss": 0.1273, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 5.170731707317073e-05, |
|
"loss": 0.1418, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 5.121951219512195e-05, |
|
"loss": 0.1133, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 5.0731707317073163e-05, |
|
"loss": 0.1178, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 5.024390243902439e-05, |
|
"loss": 0.097, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.975609756097561e-05, |
|
"loss": 0.1185, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.926829268292682e-05, |
|
"loss": 0.1349, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.8780487804878045e-05, |
|
"loss": 0.1152, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.829268292682927e-05, |
|
"loss": 0.1227, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.7804878048780485e-05, |
|
"loss": 0.1017, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.73170731707317e-05, |
|
"loss": 0.0982, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 4.6829268292682926e-05, |
|
"loss": 0.1289, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 4.634146341463414e-05, |
|
"loss": 0.0997, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.585365853658536e-05, |
|
"loss": 0.0879, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.536585365853658e-05, |
|
"loss": 0.0836, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.48780487804878e-05, |
|
"loss": 0.1062, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.4390243902439024e-05, |
|
"loss": 0.0964, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.3902439024390234e-05, |
|
"loss": 0.0981, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 4.341463414634146e-05, |
|
"loss": 0.068, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 4.292682926829268e-05, |
|
"loss": 0.1051, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 4.243902439024389e-05, |
|
"loss": 0.0968, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 4.1951219512195115e-05, |
|
"loss": 0.1041, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.146341463414634e-05, |
|
"loss": 0.0835, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.097560975609756e-05, |
|
"loss": 0.1127, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 4.048780487804877e-05, |
|
"loss": 0.1029, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 3.9999999999999996e-05, |
|
"loss": 0.0855, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 3.951219512195122e-05, |
|
"loss": 0.0733, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 3.902439024390244e-05, |
|
"loss": 0.0936, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 3.8536585365853654e-05, |
|
"loss": 0.0868, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 3.804878048780488e-05, |
|
"loss": 0.094, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 3.7560975609756095e-05, |
|
"loss": 0.1837, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 3.707317073170731e-05, |
|
"loss": 0.3259, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 3.6585365853658535e-05, |
|
"loss": 0.2573, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 3.609756097560975e-05, |
|
"loss": 0.2468, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 3.560975609756097e-05, |
|
"loss": 0.2143, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 3.512195121951219e-05, |
|
"loss": 0.1886, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 3.463414634146341e-05, |
|
"loss": 0.2147, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 3.4146341463414627e-05, |
|
"loss": 0.182, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 3.365853658536585e-05, |
|
"loss": 0.1671, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 3.317073170731707e-05, |
|
"loss": 0.15, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 3.268292682926829e-05, |
|
"loss": 0.1564, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 3.219512195121951e-05, |
|
"loss": 0.1517, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 3.170731707317073e-05, |
|
"loss": 0.165, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 3.121951219512195e-05, |
|
"loss": 0.1202, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 3.0731707317073165e-05, |
|
"loss": 0.1199, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 3.024390243902439e-05, |
|
"loss": 0.1425, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.9756097560975606e-05, |
|
"loss": 0.1322, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 2.9268292682926826e-05, |
|
"loss": 0.1356, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 2.8780487804878046e-05, |
|
"loss": 0.1227, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.8292682926829267e-05, |
|
"loss": 0.1052, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.7804878048780484e-05, |
|
"loss": 0.1255, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.7317073170731707e-05, |
|
"loss": 0.0951, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.6829268292682924e-05, |
|
"loss": 0.1074, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.634146341463414e-05, |
|
"loss": 0.1143, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.5853658536585365e-05, |
|
"loss": 0.1112, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.5365853658536582e-05, |
|
"loss": 0.1014, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.4878048780487805e-05, |
|
"loss": 0.0987, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.4390243902439022e-05, |
|
"loss": 0.0907, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.3902439024390243e-05, |
|
"loss": 0.1075, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.3414634146341463e-05, |
|
"loss": 0.1121, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.292682926829268e-05, |
|
"loss": 0.0914, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.24390243902439e-05, |
|
"loss": 0.1099, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.1951219512195117e-05, |
|
"loss": 0.0843, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.146341463414634e-05, |
|
"loss": 0.1039, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.0975609756097558e-05, |
|
"loss": 0.1008, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.048780487804878e-05, |
|
"loss": 0.0954, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.9999999999999998e-05, |
|
"loss": 0.0986, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.951219512195122e-05, |
|
"loss": 0.0843, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.902439024390244e-05, |
|
"loss": 0.0825, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.8536585365853656e-05, |
|
"loss": 0.0804, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.8048780487804876e-05, |
|
"loss": 0.0896, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.7560975609756096e-05, |
|
"loss": 0.0916, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.7073170731707313e-05, |
|
"loss": 0.0811, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.6585365853658534e-05, |
|
"loss": 0.0858, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.6097560975609754e-05, |
|
"loss": 0.0718, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.5609756097560974e-05, |
|
"loss": 0.0726, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.5121951219512194e-05, |
|
"loss": 0.0752, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.4634146341463413e-05, |
|
"loss": 0.1133, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.4146341463414633e-05, |
|
"loss": 0.0928, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.3658536585365854e-05, |
|
"loss": 0.1092, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.317073170731707e-05, |
|
"loss": 0.1081, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.2682926829268291e-05, |
|
"loss": 0.2064, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.2195121951219511e-05, |
|
"loss": 0.1688, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.1707317073170731e-05, |
|
"loss": 0.189, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.121951219512195e-05, |
|
"loss": 0.1708, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.073170731707317e-05, |
|
"loss": 0.1589, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.024390243902439e-05, |
|
"loss": 0.1198, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 9.75609756097561e-06, |
|
"loss": 0.1327, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 9.268292682926828e-06, |
|
"loss": 0.1172, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 8.780487804878048e-06, |
|
"loss": 0.1378, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 8.292682926829267e-06, |
|
"loss": 0.1146, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 7.804878048780487e-06, |
|
"loss": 0.1076, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 7.3170731707317065e-06, |
|
"loss": 0.0927, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 6.829268292682927e-06, |
|
"loss": 0.104, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 6.3414634146341454e-06, |
|
"loss": 0.1055, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 5.853658536585366e-06, |
|
"loss": 0.0898, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 5.365853658536585e-06, |
|
"loss": 0.1014, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 4.878048780487805e-06, |
|
"loss": 0.1008, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 4.390243902439024e-06, |
|
"loss": 0.0901, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 3.9024390243902435e-06, |
|
"loss": 0.1198, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 3.4146341463414634e-06, |
|
"loss": 0.0944, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.926829268292683e-06, |
|
"loss": 0.0886, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.4390243902439023e-06, |
|
"loss": 0.0888, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.9512195121951218e-06, |
|
"loss": 0.2069, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 1115, |
|
"total_flos": 0.0, |
|
"train_loss": 2.498668275362574, |
|
"train_runtime": 25980.5507, |
|
"train_samples_per_second": 5.492, |
|
"train_steps_per_second": 0.043 |
|
} |
|
], |
|
"max_steps": 1115, |
|
"num_train_epochs": 5, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|