{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.996876951905059,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 3.6509,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 3.8724,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 5e-06,
      "loss": 3.5813,
      "step": 3
    },
    {
      "epoch": 0.0,
      "learning_rate": 6.666666666666667e-06,
      "loss": 3.7054,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.333333333333334e-06,
      "loss": 3.5469,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 1e-05,
      "loss": 3.3915,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 2.9396,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 2.9861,
      "step": 8
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.5e-05,
      "loss": 2.5603,
      "step": 9
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.4441,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 2.076,
      "step": 11
    },
    {
      "epoch": 0.01,
      "learning_rate": 2e-05,
      "loss": 1.6749,
      "step": 12
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.1666666666666667e-05,
      "loss": 1.8523,
      "step": 13
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.4658,
      "step": 14
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.5e-05,
      "loss": 1.439,
      "step": 15
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 1.2239,
      "step": 16
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.8333333333333335e-05,
      "loss": 1.1319,
      "step": 17
    },
    {
      "epoch": 0.02,
      "learning_rate": 3e-05,
      "loss": 1.1712,
      "step": 18
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.1666666666666666e-05,
      "loss": 0.8938,
      "step": 19
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.8565,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.5e-05,
      "loss": 0.9579,
      "step": 21
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 0.9346,
      "step": 22
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.8333333333333334e-05,
      "loss": 0.972,
      "step": 23
    },
    {
      "epoch": 0.03,
      "learning_rate": 4e-05,
      "loss": 0.8496,
      "step": 24
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.8676,
      "step": 25
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 0.9331,
      "step": 26
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.5e-05,
      "loss": 1.0395,
      "step": 27
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.666666666666667e-05,
      "loss": 0.7737,
      "step": 28
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.8333333333333334e-05,
      "loss": 0.6609,
      "step": 29
    },
    {
      "epoch": 0.04,
      "learning_rate": 5e-05,
      "loss": 0.8102,
      "step": 30
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.166666666666667e-05,
      "loss": 1.0367,
      "step": 31
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.333333333333333e-05,
      "loss": 1.1354,
      "step": 32
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.9955,
      "step": 33
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.666666666666667e-05,
      "loss": 0.763,
      "step": 34
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.833333333333334e-05,
      "loss": 0.8098,
      "step": 35
    },
    {
      "epoch": 0.04,
      "learning_rate": 6e-05,
      "loss": 0.8816,
      "step": 36
    },
    {
      "epoch": 0.05,
      "learning_rate": 6.166666666666667e-05,
      "loss": 0.9731,
      "step": 37
    },
    {
      "epoch": 0.05,
      "learning_rate": 6.333333333333333e-05,
      "loss": 0.9083,
      "step": 38
    },
    {
      "epoch": 0.05,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.0748,
      "step": 39
    },
    {
      "epoch": 0.05,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.7668,
      "step": 40
    },
    {
      "epoch": 0.05,
      "learning_rate": 6.833333333333333e-05,
      "loss": 0.9761,
      "step": 41
    },
    {
      "epoch": 0.05,
      "learning_rate": 7e-05,
      "loss": 0.8724,
      "step": 42
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.166666666666667e-05,
      "loss": 0.9374,
      "step": 43
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.333333333333333e-05,
      "loss": 0.7787,
      "step": 44
    },
    {
      "epoch": 0.06,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.8452,
      "step": 45
    },
    {
      "epoch": 0.06,
      "learning_rate": 7.666666666666667e-05,
      "loss": 0.8609,
      "step": 46
    },
    {
      "epoch": 0.06,
      "learning_rate": 7.833333333333333e-05,
      "loss": 0.8426,
      "step": 47
    },
    {
      "epoch": 0.06,
      "learning_rate": 8e-05,
      "loss": 0.7829,
      "step": 48
    },
    {
      "epoch": 0.06,
      "learning_rate": 8.166666666666667e-05,
      "loss": 0.8437,
      "step": 49
    },
    {
      "epoch": 0.06,
      "learning_rate": 8.333333333333334e-05,
      "loss": 0.6821,
      "step": 50
    },
    {
      "epoch": 0.06,
      "learning_rate": 8.5e-05,
      "loss": 0.8261,
      "step": 51
    },
    {
      "epoch": 0.06,
      "learning_rate": 8.666666666666667e-05,
      "loss": 0.9523,
      "step": 52
    },
    {
      "epoch": 0.07,
      "learning_rate": 8.833333333333333e-05,
      "loss": 0.5803,
      "step": 53
    },
    {
      "epoch": 0.07,
      "learning_rate": 9e-05,
      "loss": 0.6982,
      "step": 54
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.166666666666667e-05,
      "loss": 0.7902,
      "step": 55
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.7706,
      "step": 56
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.5e-05,
      "loss": 0.8136,
      "step": 57
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.666666666666667e-05,
      "loss": 0.8685,
      "step": 58
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.833333333333333e-05,
      "loss": 0.8771,
      "step": 59
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001,
      "loss": 0.7583,
      "step": 60
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00010166666666666667,
      "loss": 0.8556,
      "step": 61
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00010333333333333334,
      "loss": 0.8078,
      "step": 62
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.000105,
      "loss": 0.9972,
      "step": 63
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00010666666666666667,
      "loss": 0.8898,
      "step": 64
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00010833333333333333,
      "loss": 0.7768,
      "step": 65
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00011000000000000002,
      "loss": 0.7157,
      "step": 66
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00011166666666666668,
      "loss": 0.7087,
      "step": 67
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00011333333333333334,
      "loss": 0.7918,
      "step": 68
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00011499999999999999,
      "loss": 0.8944,
      "step": 69
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00011666666666666668,
      "loss": 0.834,
      "step": 70
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00011833333333333334,
      "loss": 0.779,
      "step": 71
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00012,
      "loss": 0.8211,
      "step": 72
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00012166666666666667,
      "loss": 0.6121,
      "step": 73
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00012333333333333334,
      "loss": 0.9408,
      "step": 74
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.000125,
      "loss": 0.9269,
      "step": 75
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00012666666666666666,
      "loss": 0.7972,
      "step": 76
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00012833333333333335,
      "loss": 0.8971,
      "step": 77
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013000000000000002,
      "loss": 0.8007,
      "step": 78
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013166666666666668,
      "loss": 0.7287,
      "step": 79
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013333333333333334,
      "loss": 0.956,
      "step": 80
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013500000000000003,
      "loss": 0.7603,
      "step": 81
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013666666666666666,
      "loss": 0.8792,
      "step": 82
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013833333333333333,
      "loss": 0.8505,
      "step": 83
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00014,
      "loss": 0.8082,
      "step": 84
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00014166666666666668,
      "loss": 0.7888,
      "step": 85
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00014333333333333334,
      "loss": 0.8227,
      "step": 86
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.000145,
      "loss": 0.9284,
      "step": 87
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00014666666666666666,
      "loss": 0.7486,
      "step": 88
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00014833333333333335,
      "loss": 0.9621,
      "step": 89
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00015000000000000001,
      "loss": 0.8752,
      "step": 90
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00015166666666666668,
      "loss": 0.8513,
      "step": 91
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00015333333333333334,
      "loss": 0.7876,
      "step": 92
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.000155,
      "loss": 0.897,
      "step": 93
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00015666666666666666,
      "loss": 0.7827,
      "step": 94
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00015833333333333332,
      "loss": 0.7312,
      "step": 95
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00016,
      "loss": 0.7903,
      "step": 96
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00016166666666666668,
      "loss": 1.0508,
      "step": 97
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00016333333333333334,
      "loss": 0.6758,
      "step": 98
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.000165,
      "loss": 0.9915,
      "step": 99
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001666666666666667,
      "loss": 0.8401,
      "step": 100
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00016833333333333335,
      "loss": 0.9372,
      "step": 101
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00017,
      "loss": 0.66,
      "step": 102
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00017166666666666667,
      "loss": 0.7643,
      "step": 103
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00017333333333333334,
      "loss": 0.8651,
      "step": 104
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.000175,
      "loss": 0.7401,
      "step": 105
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00017666666666666666,
      "loss": 0.859,
      "step": 106
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00017833333333333335,
      "loss": 0.9477,
      "step": 107
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00018,
      "loss": 0.9452,
      "step": 108
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00018166666666666667,
      "loss": 0.757,
      "step": 109
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00018333333333333334,
      "loss": 0.8329,
      "step": 110
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00018500000000000002,
      "loss": 0.7289,
      "step": 111
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001866666666666667,
      "loss": 0.9971,
      "step": 112
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00018833333333333335,
      "loss": 0.7959,
      "step": 113
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019,
      "loss": 0.7427,
      "step": 114
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019166666666666667,
      "loss": 0.7877,
      "step": 115
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019333333333333333,
      "loss": 0.9469,
      "step": 116
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.000195,
      "loss": 0.8896,
      "step": 117
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019666666666666666,
      "loss": 0.6943,
      "step": 118
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019833333333333335,
      "loss": 0.7861,
      "step": 119
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0002,
      "loss": 0.8211,
      "step": 120
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019999996722020187,
      "loss": 0.8396,
      "step": 121
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019999986888082893,
      "loss": 0.8346,
      "step": 122
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019999970498194573,
      "loss": 0.9672,
      "step": 123
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019999947552365961,
      "loss": 0.7467,
      "step": 124
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019999918050612108,
      "loss": 0.8402,
      "step": 125
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001999988199295235,
      "loss": 0.8485,
      "step": 126
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019999839379410332,
      "loss": 0.8158,
      "step": 127
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019999790210013988,
      "loss": 0.8045,
      "step": 128
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019999734484795555,
      "loss": 0.7998,
      "step": 129
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019999672203791565,
      "loss": 0.7494,
      "step": 130
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019999603367042848,
      "loss": 0.8717,
      "step": 131
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001999952797459453,
      "loss": 0.7429,
      "step": 132
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019999446026496053,
      "loss": 0.7994,
      "step": 133
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019999357522801123,
      "loss": 0.7339,
      "step": 134
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019999262463567773,
      "loss": 0.8113,
      "step": 135
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001999916084885832,
      "loss": 0.7465,
      "step": 136
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019999052678739383,
      "loss": 0.8394,
      "step": 137
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001999893795328188,
      "loss": 0.7644,
      "step": 138
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019998816672561022,
      "loss": 0.6748,
      "step": 139
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019998688836656323,
      "loss": 0.9333,
      "step": 140
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019998554445651586,
      "loss": 0.6719,
      "step": 141
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019998413499634925,
      "loss": 0.8254,
      "step": 142
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019998265998698737,
      "loss": 0.8694,
      "step": 143
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001999811194293973,
      "loss": 0.9538,
      "step": 144
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001999795133245889,
      "loss": 0.7601,
      "step": 145
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019997784167361527,
      "loss": 0.8085,
      "step": 146
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019997610447757222,
      "loss": 0.8888,
      "step": 147
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019997430173759875,
      "loss": 0.9201,
      "step": 148
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00019997243345487665,
      "loss": 0.7918,
      "step": 149
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001999704996306308,
      "loss": 0.716,
      "step": 150
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.000199968500266129,
      "loss": 0.8795,
      "step": 151
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00019996643536268204,
      "loss": 0.7813,
      "step": 152
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001999643049216436,
      "loss": 0.8511,
      "step": 153
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00019996210894441047,
      "loss": 0.8148,
      "step": 154
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00019995984743242226,
      "loss": 0.8995,
      "step": 155
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00019995752038716168,
      "loss": 0.7269,
      "step": 156
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019995512781015423,
      "loss": 0.7255,
      "step": 157
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019995266970296855,
      "loss": 0.8454,
      "step": 158
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019995014606721614,
      "loss": 0.848,
      "step": 159
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019994755690455152,
      "loss": 1.0063,
      "step": 160
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019994490221667205,
      "loss": 0.8999,
      "step": 161
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019994218200531822,
      "loss": 0.8054,
      "step": 162
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019993939627227335,
      "loss": 0.8107,
      "step": 163
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001999365450193638,
      "loss": 0.8921,
      "step": 164
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019993362824845875,
      "loss": 0.8313,
      "step": 165
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001999306459614705,
      "loss": 1.2375,
      "step": 166
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001999275981603542,
      "loss": 0.8284,
      "step": 167
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019992448484710797,
      "loss": 0.7206,
      "step": 168
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001999213060237729,
      "loss": 0.7933,
      "step": 169
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.000199918061692433,
      "loss": 0.8639,
      "step": 170
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019991475185521528,
      "loss": 0.8101,
      "step": 171
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019991137651428957,
      "loss": 0.8494,
      "step": 172
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001999079356718688,
      "loss": 0.858,
      "step": 173
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019990442933020877,
      "loss": 0.7337,
      "step": 174
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019990085749160822,
      "loss": 0.8705,
      "step": 175
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001998972201584088,
      "loss": 0.8827,
      "step": 176
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019989351733299513,
      "loss": 0.7868,
      "step": 177
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019988974901779483,
      "loss": 0.7252,
      "step": 178
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019988591521527833,
      "loss": 0.9468,
      "step": 179
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001998820159279591,
      "loss": 0.9483,
      "step": 180
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019987805115839345,
      "loss": 0.734,
      "step": 181
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019987402090918067,
      "loss": 0.8512,
      "step": 182
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019986992518296304,
      "loss": 0.7809,
      "step": 183
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019986576398242566,
      "loss": 0.7592,
      "step": 184
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019986153731029656,
      "loss": 0.7912,
      "step": 185
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019985724516934677,
      "loss": 0.8442,
      "step": 186
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001998528875623902,
      "loss": 0.6838,
      "step": 187
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001998484644922837,
      "loss": 0.7564,
      "step": 188
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019984397596192697,
      "loss": 0.7553,
      "step": 189
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001998394219742627,
      "loss": 0.9651,
      "step": 190
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019983480253227644,
      "loss": 0.9166,
      "step": 191
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019983011763899673,
      "loss": 0.8302,
      "step": 192
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001998253672974949,
      "loss": 0.7953,
      "step": 193
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001998205515108853,
      "loss": 0.7255,
      "step": 194
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019981567028232514,
      "loss": 0.8917,
      "step": 195
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001998107236150145,
      "loss": 0.8844,
      "step": 196
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0001998057115121964,
      "loss": 0.7281,
      "step": 197
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00019980063397715683,
      "loss": 0.7675,
      "step": 198
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0001997954910132245,
      "loss": 0.7229,
      "step": 199
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00019979028262377118,
      "loss": 0.6383,
      "step": 200
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00019978500881221142,
      "loss": 0.8268,
      "step": 201
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00019977966958200277,
      "loss": 0.9039,
      "step": 202
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00019977426493664554,
      "loss": 0.9156,
      "step": 203
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0001997687948796831,
      "loss": 0.7017,
      "step": 204
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00019976325941470146,
      "loss": 0.8159,
      "step": 205
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00019975765854532974,
      "loss": 0.9077,
      "step": 206
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00019975199227523983,
      "loss": 0.8053,
      "step": 207
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00019974626060814647,
      "loss": 0.8769,
      "step": 208
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00019974046354780738,
      "loss": 0.666,
      "step": 209
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00019973460109802305,
      "loss": 0.9434,
      "step": 210
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00019972867326263692,
      "loss": 0.889,
      "step": 211
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0001997226800455352,
      "loss": 0.7187,
      "step": 212
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00019971662145064706,
      "loss": 1.0446,
      "step": 213
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00019971049748194447,
      "loss": 0.7703,
      "step": 214
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0001997043081434423,
      "loss": 0.8324,
      "step": 215
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00019969805343919821,
      "loss": 0.776,
      "step": 216
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0001996917333733128,
      "loss": 1.1579,
      "step": 217
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00019968534794992949,
      "loss": 0.6975,
      "step": 218
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0001996788971732345,
      "loss": 0.7621,
      "step": 219
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00019967238104745696,
      "loss": 0.833,
      "step": 220
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0001996657995768688,
      "loss": 0.7199,
      "step": 221
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00019965915276578478,
      "loss": 0.9218,
      "step": 222
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0001996524406185626,
      "loss": 0.7541,
      "step": 223
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00019964566313960264,
      "loss": 0.803,
      "step": 224
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00019963882033334826,
      "loss": 0.7668,
      "step": 225
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0001996319122042855,
      "loss": 0.9887,
      "step": 226
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00019962493875694335,
      "loss": 0.7719,
      "step": 227
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00019961789999589356,
      "loss": 0.8839,
      "step": 228
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0001996107959257507,
      "loss": 0.8461,
      "step": 229
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00019960362655117218,
      "loss": 0.8146,
      "step": 230
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00019959639187685824,
      "loss": 0.6732,
      "step": 231
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00019958909190755187,
      "loss": 0.8548,
      "step": 232
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00019958172664803889,
      "loss": 0.9678,
      "step": 233
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00019957429610314797,
      "loss": 0.9546,
      "step": 234
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00019956680027775051,
      "loss": 0.8935,
      "step": 235
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0001995592391767608,
      "loss": 0.8817,
      "step": 236
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001995516128051358,
      "loss": 0.7528,
      "step": 237
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001995439211678754,
      "loss": 0.9104,
      "step": 238
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001995361642700221,
      "loss": 0.8783,
      "step": 239
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001995283421166614,
      "loss": 0.8012,
      "step": 240
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001995204547129214,
      "loss": 1.0638,
      "step": 241
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00019951250206397313,
      "loss": 0.8671,
      "step": 242
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001995044841750302,
      "loss": 0.8329,
      "step": 243
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00019949640105134918,
      "loss": 0.9014,
      "step": 244
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019948825269822934,
      "loss": 0.7816,
      "step": 245
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019948003912101273,
      "loss": 0.8477,
      "step": 246
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.000199471760325084,
      "loss": 0.7748,
      "step": 247
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019946341631587087,
      "loss": 0.819,
      "step": 248
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019945500709884353,
      "loss": 0.8127,
      "step": 249
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019944653267951504,
      "loss": 0.7812,
      "step": 250
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019943799306344125,
      "loss": 0.899,
      "step": 251
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019942938825622065,
      "loss": 0.8734,
      "step": 252
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001994207182634945,
      "loss": 0.7949,
      "step": 253
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001994119830909469,
      "loss": 0.8557,
      "step": 254
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00019940318274430449,
      "loss": 0.8166,
      "step": 255
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001993943172293368,
      "loss": 0.8648,
      "step": 256
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00019938538655185598,
      "loss": 0.7316,
      "step": 257
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00019937639071771702,
      "loss": 0.7482,
      "step": 258
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00019936732973281748,
      "loss": 0.935,
      "step": 259
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00019935820360309777,
      "loss": 0.725,
      "step": 260
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00019934901233454092,
      "loss": 0.9583,
      "step": 261
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00019933975593317262,
      "loss": 0.8431,
      "step": 262
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00019933043440506146,
      "loss": 0.8585,
      "step": 263
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00019932104775631846,
      "loss": 0.816,
      "step": 264
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00019931159599309757,
      "loss": 0.8116,
      "step": 265
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00019930207912159529,
      "loss": 0.9234,
      "step": 266
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0001992924971480508,
      "loss": 0.7882,
      "step": 267
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0001992828500787461,
      "loss": 0.7768,
      "step": 268
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019927313792000569,
      "loss": 0.8658,
      "step": 269
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019926336067819684,
      "loss": 0.7786,
      "step": 270
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.0001992535183597295,
      "loss": 1.1715,
      "step": 271
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019924361097105623,
      "loss": 0.8415,
      "step": 272
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019923363851867228,
      "loss": 0.8126,
      "step": 273
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019922360100911552,
      "loss": 1.0141,
      "step": 274
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019921349844896654,
      "loss": 0.7947,
      "step": 275
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019920333084484857,
      "loss": 0.7944,
      "step": 276
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00019919309820342737,
      "loss": 0.7804,
      "step": 277
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00019918280053141143,
      "loss": 0.7211,
      "step": 278
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0001991724378355519,
      "loss": 0.7582,
      "step": 279
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00019916201012264254,
      "loss": 0.812,
      "step": 280
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00019915151739951964,
      "loss": 0.7592,
      "step": 281
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00019914095967306223,
      "loss": 0.7329,
      "step": 282
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00019913033695019195,
      "loss": 0.8257,
      "step": 283
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00019911964923787295,
      "loss": 1.0124,
      "step": 284
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00019910889654311208,
      "loss": 0.7226,
      "step": 285
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0001990980788729588,
      "loss": 1.0185,
      "step": 286
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00019908719623450505,
      "loss": 0.7683,
      "step": 287
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0001990762486348855,
      "loss": 0.753,
      "step": 288
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00019906523608127734,
      "loss": 1.0254,
      "step": 289
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00019905415858090036,
      "loss": 0.9128,
      "step": 290
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0001990430161410169,
      "loss": 0.9133,
      "step": 291
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00019903180876893194,
      "loss": 0.846,
      "step": 292
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019902053647199294,
      "loss": 0.7402,
      "step": 293
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019900919925759002,
      "loss": 0.7582,
      "step": 294
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019899779713315575,
      "loss": 0.6967,
      "step": 295
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019898633010616542,
      "loss": 0.9939,
      "step": 296
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019897479818413666,
      "loss": 0.7255,
      "step": 297
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019896320137462983,
      "loss": 0.9029,
      "step": 298
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019895153968524766,
      "loss": 1.0026,
      "step": 299
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00019893981312363562,
      "loss": 0.8897,
      "step": 300
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0001989280216974815,
      "loss": 0.8939,
      "step": 301
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0001989161654145158,
      "loss": 0.8435,
      "step": 302
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00019890424428251137,
      "loss": 0.755,
      "step": 303
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00019889225830928365,
      "loss": 0.7969,
      "step": 304
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00019888020750269067,
      "loss": 0.744,
      "step": 305
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00019886809187063284,
      "loss": 0.7558,
      "step": 306
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00019885591142105316,
      "loss": 0.9373,
      "step": 307
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00019884366616193706,
      "loss": 0.831,
      "step": 308
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00019883135610131245,
      "loss": 0.792,
      "step": 309
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00019881898124724981,
      "loss": 0.8501,
      "step": 310
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.000198806541607862,
      "loss": 0.7597,
      "step": 311
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0001987940371913044,
      "loss": 0.9629,
      "step": 312
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0001987814680057749,
      "loss": 1.0733,
      "step": 313
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00019876883405951377,
      "loss": 0.9062,
      "step": 314
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0001987561353608038,
      "loss": 1.0391,
      "step": 315
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0001987433719179702,
      "loss": 0.8664,
      "step": 316
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00019873054373938058,
      "loss": 0.8682,
      "step": 317
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00019871765083344508,
      "loss": 0.956,
      "step": 318
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00019870469320861628,
      "loss": 0.8336,
      "step": 319
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00019869167087338907,
      "loss": 0.8174,
      "step": 320
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0001986785838363009,
      "loss": 0.8553,
      "step": 321
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00019866543210593154,
      "loss": 0.7485,
      "step": 322
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00019865221569090324,
      "loss": 0.9945,
      "step": 323
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00019863893459988062,
      "loss": 0.8085,
      "step": 324
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00019862558884157068,
      "loss": 0.6068,
      "step": 325
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0001986121784247229,
      "loss": 0.9127,
      "step": 326
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00019859870335812903,
      "loss": 0.83,
      "step": 327
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00019858516365062334,
      "loss": 0.8814,
      "step": 328
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00019857155931108235,
      "loss": 0.8979,
      "step": 329
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00019855789034842504,
      "loss": 0.7509,
      "step": 330
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00019854415677161272,
      "loss": 0.7644,
      "step": 331
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00019853035858964906,
      "loss": 0.6914,
      "step": 332
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0001985164958115801,
      "loss": 0.7919,
      "step": 333
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00019850256844649423,
      "loss": 0.7903,
      "step": 334
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00019848857650352214,
      "loss": 1.0045,
      "step": 335
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00019847451999183694,
      "loss": 0.8523,
      "step": 336
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.000198460398920654,
      "loss": 0.8218,
      "step": 337
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.000198446213299231,
      "loss": 0.6326,
      "step": 338
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00019843196313686803,
      "loss": 0.7977,
      "step": 339
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00019841764844290744,
      "loss": 0.9348,
      "step": 340
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00019840326922673383,
      "loss": 1.7291,
      "step": 341
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00019838882549777425,
      "loss": 0.8635,
      "step": 342
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00019837431726549783,
      "loss": 0.9324,
      "step": 343
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0001983597445394162,
      "loss": 2.045,
      "step": 344
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00019834510732908315,
      "loss": 1.344,
      "step": 345
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00019833040564409476,
      "loss": 0.6456,
      "step": 346
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00019831563949408945,
      "loss": 1.0397,
      "step": 347
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00019830080888874778,
      "loss": 1.5751,
      "step": 348
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00019828591383779266,
      "loss": 1.0358,
      "step": 349
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00019827095435098925,
      "loss": 0.9787,
      "step": 350
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00019825593043814492,
      "loss": 0.8297,
      "step": 351
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00019824084210910925,
      "loss": 0.8415,
      "step": 352
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00019822568937377414,
      "loss": 0.9048,
      "step": 353
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0001982104722420736,
      "loss": 1.5912,
      "step": 354
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.000198195190723984,
      "loss": 0.9067,
      "step": 355
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00019817984482952376,
      "loss": 0.8979,
      "step": 356
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019816443456875364,
      "loss": 0.8965,
      "step": 357
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019814895995177653,
      "loss": 6.3219,
      "step": 358
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019813342098873752,
      "loss": 1.2196,
      "step": 359
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0001981178176898239,
      "loss": 1.8199,
      "step": 360
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019810215006526517,
      "loss": 2.4463,
      "step": 361
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019808641812533285,
      "loss": 1.2921,
      "step": 362
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019807062188034085,
      "loss": 1.7375,
      "step": 363
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019805476134064507,
      "loss": 1.1926,
      "step": 364
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001980388365166436,
      "loss": 1.374,
      "step": 365
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00019802284741877673,
      "loss": 3.0384,
      "step": 366
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00019800679405752685,
      "loss": 1.0249,
      "step": 367
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00019799067644341844,
      "loss": 1.376,
      "step": 368
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001979744945870182,
      "loss": 2.1662,
      "step": 369
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001979582484989348,
      "loss": 2.3576,
      "step": 370
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001979419381898192,
      "loss": 2.4151,
      "step": 371
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00019792556367036432,
      "loss": 1.1511,
      "step": 372
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00019790912495130524,
      "loss": 1.0831,
      "step": 373
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00019789262204341916,
      "loss": 0.9942,
      "step": 374
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00019787605495752528,
      "loss": 1.0966,
      "step": 375
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0001978594237044849,
      "loss": 1.1319,
      "step": 376
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0001978427282952014,
      "loss": 1.2988,
      "step": 377
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00019782596874062027,
      "loss": 1.4181,
      "step": 378
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00019780914505172897,
      "loss": 2.0592,
      "step": 379
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00019779225723955707,
      "loss": 1.7524,
      "step": 380
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0001977753053151761,
      "loss": 1.8455,
      "step": 381
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00019775828928969975,
      "loss": 1.2825,
      "step": 382
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00019774120917428358,
      "loss": 1.0853,
      "step": 383
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0001977240649801253,
      "loss": 1.0713,
      "step": 384
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00019770685671846456,
      "loss": 1.0005,
      "step": 385
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00019768958440058302,
      "loss": 1.0666,
      "step": 386
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00019767224803780433,
      "loss": 0.9201,
      "step": 387
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00019765484764149415,
      "loss": 0.8887,
      "step": 388
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0001976373832230601,
      "loss": 0.7805,
      "step": 389
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0001976198547939518,
      "loss": 1.3601,
      "step": 390
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0001976022623656608,
      "loss": 0.8677,
      "step": 391
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00019758460594972068,
      "loss": 1.132,
      "step": 392
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00019756688555770685,
      "loss": 1.1691,
      "step": 393
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00019754910120123675,
      "loss": 1.5748,
      "step": 394
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0001975312528919697,
      "loss": 1.8913,
      "step": 395
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00019751334064160706,
      "loss": 0.9006,
      "step": 396
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019749536446189193,
      "loss": 1.1157,
      "step": 397
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019747732436460952,
      "loss": 0.8818,
      "step": 398
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019745922036158676,
      "loss": 0.7211,
      "step": 399
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019744105246469263,
      "loss": 0.9109,
      "step": 400
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019742282068583786,
      "loss": 0.9297,
      "step": 401
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019740452503697517,
      "loss": 1.0601,
      "step": 402
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019738616553009912,
      "loss": 0.8584,
      "step": 403
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019736774217724614,
      "loss": 0.7266,
      "step": 404
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00019734925499049447,
      "loss": 0.9935,
      "step": 405
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00019733070398196423,
      "loss": 1.0451,
      "step": 406
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0001973120891638174,
      "loss": 0.8234,
      "step": 407
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00019729341054825782,
      "loss": 1.1063,
      "step": 408
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00019727466814753109,
      "loss": 0.9603,
      "step": 409
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0001972558619739246,
      "loss": 1.0679,
      "step": 410
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00019723699203976766,
      "loss": 0.6256,
      "step": 411
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00019721805835743134,
      "loss": 0.6386,
      "step": 412
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001971990609393284,
      "loss": 0.9978,
      "step": 413
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00019717999979791356,
      "loss": 0.6842,
      "step": 414
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00019716087494568317,
      "loss": 1.0653,
      "step": 415
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00019714168639517544,
      "loss": 0.9135,
      "step": 416
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001971224341589703,
      "loss": 1.0789,
      "step": 417
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001971031182496894,
      "loss": 0.887,
      "step": 418
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00019708373867999624,
      "loss": 0.8216,
      "step": 419
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00019706429546259593,
      "loss": 0.7361,
      "step": 420
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0001970447886102354,
      "loss": 1.0434,
      "step": 421
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00019702521813570322,
      "loss": 0.7624,
      "step": 422
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00019700558405182976,
      "loss": 0.7764,
      "step": 423
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00019698588637148703,
      "loss": 0.8014,
      "step": 424
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00019696612510758876,
      "loss": 0.8821,
      "step": 425
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00019694630027309034,
      "loss": 0.6782,
      "step": 426
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00019692641188098886,
      "loss": 0.8908,
      "step": 427
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00019690645994432305,
      "loss": 0.6177,
      "step": 428
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00019688644447617335,
      "loss": 0.8054,
      "step": 429
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00019686636548966178,
      "loss": 0.7882,
      "step": 430
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00019684622299795211,
      "loss": 0.7332,
      "step": 431
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0001968260170142496,
      "loss": 0.8468,
      "step": 432
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00019680574755180126,
      "loss": 1.1906,
      "step": 433
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00019678541462389562,
      "loss": 0.8267,
      "step": 434
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00019676501824386294,
      "loss": 0.8112,
      "step": 435
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00019674455842507492,
      "loss": 0.8315,
      "step": 436
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.000196724035180945,
      "loss": 0.7515,
      "step": 437
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0001967034485249281,
      "loss": 0.8663,
      "step": 438
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0001966827984705208,
      "loss": 0.8226,
      "step": 439
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00019666208503126112,
      "loss": 0.9114,
      "step": 440
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00019664130822072876,
      "loss": 0.8126,
      "step": 441
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00019662046805254488,
      "loss": 0.8235,
      "step": 442
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00019659956454037223,
      "loss": 0.9065,
      "step": 443
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00019657859769791505,
      "loss": 0.6858,
      "step": 444
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00019655756753891916,
      "loss": 0.6228,
      "step": 445
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00019653647407717178,
      "loss": 0.7511,
      "step": 446
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00019651531732650174,
      "loss": 0.7122,
      "step": 447
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00019649409730077935,
      "loss": 0.8545,
      "step": 448
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0001964728140139163,
      "loss": 0.8005,
      "step": 449
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0001964514674798659,
      "loss": 0.6779,
      "step": 450
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00019643005771262278,
      "loss": 0.6613,
      "step": 451
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00019640858472622316,
      "loss": 1.033,
      "step": 452
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0001963870485347446,
      "loss": 0.8656,
      "step": 453
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0001963654491523062,
      "loss": 0.9862,
      "step": 454
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00019634378659306832,
      "loss": 0.7311,
      "step": 455
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00019632206087123296,
      "loss": 0.814,
      "step": 456
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00019630027200104335,
      "loss": 0.7113,
      "step": 457
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0001962784199967842,
      "loss": 0.7396,
      "step": 458
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00019625650487278162,
      "loss": 0.7611,
      "step": 459
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00019623452664340306,
      "loss": 0.8698,
      "step": 460
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00019621248532305732,
      "loss": 0.8915,
      "step": 461
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00019619038092619464,
      "loss": 0.8448,
      "step": 462
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00019616821346730659,
      "loss": 0.8052,
      "step": 463
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.000196145982960926,
      "loss": 0.7653,
      "step": 464
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0001961236894216272,
      "loss": 0.8072,
      "step": 465
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00019610133286402563,
      "loss": 0.8389,
      "step": 466
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00019607891330277827,
      "loss": 0.7022,
      "step": 467
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00019605643075258321,
      "loss": 0.7545,
      "step": 468
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00019603388522817996,
      "loss": 0.7835,
      "step": 469
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00019601127674434928,
      "loss": 0.8487,
      "step": 470
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0001959886053159132,
      "loss": 0.7551,
      "step": 471
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00019596587095773495,
      "loss": 0.9614,
      "step": 472
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00019594307368471915,
      "loss": 0.7785,
      "step": 473
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00019592021351181162,
      "loss": 0.8786,
      "step": 474
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00019589729045399934,
      "loss": 0.8092,
      "step": 475
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0001958743045263106,
      "loss": 0.7172,
      "step": 476
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00019585125574381488,
      "loss": 0.9828,
      "step": 477
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0001958281441216229,
      "loss": 0.8684,
      "step": 478
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00019580496967488647,
      "loss": 0.7016,
      "step": 479
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00019578173241879872,
      "loss": 0.9614,
      "step": 480
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00019575843236859387,
      "loss": 1.0601,
      "step": 481
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0001957350695395474,
      "loss": 0.8885,
      "step": 482
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00019571164394697584,
      "loss": 0.5665,
      "step": 483
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0001956881556062369,
      "loss": 0.7965,
      "step": 484
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00019566460453272945,
      "loss": 0.8825,
      "step": 485
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0001956409907418935,
      "loss": 0.7037,
      "step": 486
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00019561731424921016,
      "loss": 0.8967,
      "step": 487
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00019559357507020162,
      "loss": 0.8995,
      "step": 488
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00019556977322043116,
      "loss": 1.0905,
      "step": 489
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0001955459087155033,
      "loss": 0.7101,
      "step": 490
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00019552198157106335,
      "loss": 0.6981,
      "step": 491
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00019549799180279792,
      "loss": 1.1289,
      "step": 492
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00019547393942643466,
      "loss": 0.6762,
      "step": 493
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00019544982445774217,
      "loss": 0.8395,
      "step": 494
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001954256469125301,
      "loss": 0.8327,
      "step": 495
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00019540140680664913,
      "loss": 0.8213,
      "step": 496
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00019537710415599104,
      "loss": 0.8692,
      "step": 497
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00019535273897648857,
      "loss": 0.7718,
      "step": 498
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00019532831128411533,
      "loss": 0.748,
      "step": 499
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001953038210948861,
      "loss": 0.7272,
      "step": 500
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0001952792684248565,
      "loss": 0.6905,
      "step": 501
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00019525465329012324,
      "loss": 0.7664,
      "step": 502
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0001952299757068238,
      "loss": 0.8215,
      "step": 503
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00019520523569113677,
      "loss": 0.8606,
      "step": 504
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00019518043325928157,
      "loss": 0.995,
      "step": 505
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00019515556842751862,
      "loss": 0.9492,
      "step": 506
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00019513064121214914,
      "loss": 0.7856,
      "step": 507
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00019510565162951537,
      "loss": 0.8124,
      "step": 508
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00019508059969600033,
      "loss": 0.7319,
      "step": 509
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00019505548542802804,
      "loss": 0.781,
      "step": 510
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001950303088420632,
      "loss": 0.7732,
      "step": 511
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001950050699546116,
      "loss": 0.763,
      "step": 512
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00019497976878221968,
      "loss": 0.6412,
      "step": 513
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00019495440534147477,
      "loss": 0.7791,
      "step": 514
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00019492897964900512,
      "loss": 0.843,
      "step": 515
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00019490349172147963,
      "loss": 0.8018,
      "step": 516
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00019487794157560813,
      "loss": 0.8308,
      "step": 517
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00019485232922814117,
      "loss": 0.7731,
      "step": 518
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0001948266546958701,
      "loss": 0.8159,
      "step": 519
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00019480091799562704,
      "loss": 1.0027,
      "step": 520
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0001947751191442849,
      "loss": 0.7653,
      "step": 521
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00019474925815875729,
      "loss": 0.8923,
      "step": 522
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00019472333505599854,
      "loss": 0.6521,
      "step": 523
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00019469734985300371,
      "loss": 0.6805,
      "step": 524
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00019467130256680868,
      "loss": 0.7221,
      "step": 525
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.0001946451932144899,
      "loss": 0.7155,
      "step": 526
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00019461902181316454,
      "loss": 0.7742,
      "step": 527
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00019459278837999046,
      "loss": 0.6602,
      "step": 528
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00019456649293216622,
      "loss": 0.7018,
      "step": 529
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00019454013548693102,
      "loss": 0.8856,
      "step": 530
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00019451371606156464,
      "loss": 0.6427,
      "step": 531
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00019448723467338763,
      "loss": 0.8967,
      "step": 532
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00019446069133976102,
      "loss": 0.6785,
      "step": 533
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0001944340860780865,
      "loss": 0.8484,
      "step": 534
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00019440741890580643,
      "loss": 0.6799,
      "step": 535
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00019438068984040365,
      "loss": 0.8792,
      "step": 536
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00019435389889940166,
      "loss": 0.6023,
      "step": 537
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00019432704610036446,
      "loss": 0.9402,
      "step": 538
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0001943001314608967,
      "loss": 1.1932,
      "step": 539
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00019427315499864344,
      "loss": 0.7145,
      "step": 540
    },
    {
      "epoch": 0.68,
| "learning_rate": 0.00019424611673129035, |
| "loss": 0.7456, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019421901667656365, |
| "loss": 0.7625, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019419185485223, |
| "loss": 0.641, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019416463127609656, |
| "loss": 0.7488, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019413734596601104, |
| "loss": 0.8391, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019410999893986156, |
| "loss": 0.8041, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019408259021557668, |
| "loss": 0.847, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.0001940551198111255, |
| "loss": 0.6587, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019402758774451754, |
| "loss": 1.034, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019399999403380266, |
| "loss": 0.774, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019397233869707114, |
| "loss": 0.7039, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019394462175245381, |
| "loss": 0.8096, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019391684321812171, |
| "loss": 0.787, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019388900311228638, |
| "loss": 1.0114, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019386110145319963, |
| "loss": 0.74, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.0001938331382591537, |
| "loss": 0.8035, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.0001938051135484812, |
| "loss": 0.8018, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019377702733955495, |
| "loss": 0.7815, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019374887965078815, |
| "loss": 0.8881, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019372067050063438, |
| "loss": 0.7119, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019369239990758738, |
| "loss": 0.8882, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019366406789018126, |
| "loss": 0.824, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019363567446699037, |
| "loss": 1.1157, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019360721965662933, |
| "loss": 0.7087, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.000193578703477753, |
| "loss": 0.8689, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019355012594905646, |
| "loss": 0.6984, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019352148708927508, |
| "loss": 0.7447, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019349278691718427, |
| "loss": 0.7454, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019346402545159983, |
| "loss": 0.8421, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019343520271137763, |
| "loss": 0.8498, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019340631871541376, |
| "loss": 0.755, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019337737348264447, |
| "loss": 0.7672, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.0001933483670320461, |
| "loss": 0.8534, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019331929938263515, |
| "loss": 0.7932, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.0001932901705534683, |
| "loss": 0.8736, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019326098056364222, |
| "loss": 0.6475, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019323172943229387, |
| "loss": 0.8494, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019320241717860005, |
| "loss": 1.0929, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019317304382177782, |
| "loss": 0.7348, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019314360938108425, |
| "loss": 0.8822, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.0001931141138758164, |
| "loss": 0.7978, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019308455732531139, |
| "loss": 0.7134, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019305493974894642, |
| "loss": 0.8921, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019302526116613864, |
| "loss": 0.6289, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019299552159634517, |
| "loss": 0.8326, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019296572105906323, |
| "loss": 0.7289, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019293585957382987, |
| "loss": 0.9605, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019290593716022217, |
| "loss": 0.7592, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.00019287595383785715, |
| "loss": 0.8413, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.00019284590962639176, |
| "loss": 0.857, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.00019281580454552288, |
| "loss": 0.7063, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.00019278563861498723, |
| "loss": 0.7862, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.00019275541185456154, |
| "loss": 0.7532, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.0001927251242840623, |
| "loss": 0.7902, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.0001926947759233459, |
| "loss": 0.7466, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.00019266436679230865, |
| "loss": 0.8959, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019263389691088665, |
| "loss": 0.901, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.0001926033662990558, |
| "loss": 0.8807, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019257277497683188, |
| "loss": 0.8533, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019254212296427044, |
| "loss": 0.7948, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.0001925114102814668, |
| "loss": 0.8897, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019248063694855602, |
| "loss": 0.7787, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019244980298571305, |
| "loss": 0.7488, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019241890841315248, |
| "loss": 0.6055, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.0001923879532511287, |
| "loss": 0.8675, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.0001923569375199357, |
| "loss": 0.7248, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.00019232586123990738, |
| "loss": 0.8154, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.0001922947244314172, |
| "loss": 0.8757, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.00019226352711487822, |
| "loss": 0.915, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.0001922322693107434, |
| "loss": 0.8014, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.00019220095103950516, |
| "loss": 0.7581, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.0001921695723216957, |
| "loss": 0.7049, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.0001921381331778867, |
| "loss": 0.6386, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.00019210663362868955, |
| "loss": 0.7832, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.0001920750736947553, |
| "loss": 0.7197, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.00019204345339677442, |
| "loss": 0.8425, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.0001920117727554771, |
| "loss": 0.8899, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.00019198003179163306, |
| "loss": 0.7559, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.0001919482305260515, |
| "loss": 0.9371, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.00019191636897958122, |
| "loss": 0.8289, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.00019188444717311055, |
| "loss": 0.8349, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.00019185246512756727, |
| "loss": 0.8907, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.00019182042286391865, |
| "loss": 0.8293, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.00019178832040317155, |
| "loss": 0.778, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.0001917561577663721, |
| "loss": 0.818, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.0001917239349746061, |
| "loss": 0.8333, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.00019169165204899864, |
| "loss": 0.8751, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.0001916593090107143, |
| "loss": 0.7381, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.000191626905880957, |
| "loss": 1.0315, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.00019159444268097012, |
| "loss": 0.744, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.0001915619194320364, |
| "loss": 0.8938, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.00019152933615547798, |
| "loss": 1.0249, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.00019149669287265626, |
| "loss": 0.9576, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.0001914639896049721, |
| "loss": 0.8832, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.00019143122637386566, |
| "loss": 0.8184, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.0001913984032008163, |
| "loss": 0.808, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.0001913655201073428, |
| "loss": 0.8897, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.0001913325771150032, |
| "loss": 0.7782, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.00019129957424539472, |
| "loss": 0.6742, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.00019126651152015403, |
| "loss": 0.7387, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.0001912333889609568, |
| "loss": 0.6528, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.00019120020658951813, |
| "loss": 0.8312, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.00019116696442759222, |
| "loss": 0.7113, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.0001911336624969725, |
| "loss": 0.6102, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019110030081949156, |
| "loss": 0.8695, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019106687941702118, |
| "loss": 0.6868, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.0001910333983114723, |
| "loss": 0.8666, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019099985752479506, |
| "loss": 0.6739, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019096625707897856, |
| "loss": 0.6963, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019093259699605125, |
| "loss": 0.7062, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019089887729808037, |
| "loss": 0.8559, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019086509800717258, |
| "loss": 0.8813, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019083125914547337, |
| "loss": 0.6612, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019079736073516736, |
| "loss": 0.7493, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.0001907634027984782, |
| "loss": 0.7552, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019072938535766865, |
| "loss": 0.8365, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019069530843504031, |
| "loss": 0.8745, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019066117205293392, |
| "loss": 0.882, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.0001906269762337292, |
| "loss": 0.791, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.0001905927209998447, |
| "loss": 0.8376, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019055840637373806, |
| "loss": 0.7178, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019052403237790582, |
| "loss": 0.7847, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.0001904895990348834, |
| "loss": 0.7411, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.0001904551063672452, |
| "loss": 0.7904, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019042055439760444, |
| "loss": 0.8403, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.0001903859431486133, |
| "loss": 0.6887, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019035127264296268, |
| "loss": 1.0925, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019031654290338254, |
| "loss": 0.7751, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.00019028175395264147, |
| "loss": 1.0082, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.000190246905813547, |
| "loss": 0.65, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.00019021199850894545, |
| "loss": 0.7327, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.00019017703206172185, |
| "loss": 0.8718, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.0001901420064948001, |
| "loss": 0.7973, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.00019010692183114286, |
| "loss": 0.7636, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.0001900717780937514, |
| "loss": 0.8395, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.0001900365753056659, |
| "loss": 0.817, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.0001900013134899651, |
| "loss": 0.8808, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.00018996599266976656, |
| "loss": 0.6495, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.00018993061286822643, |
| "loss": 0.9393, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.00018989517410853955, |
| "loss": 0.8133, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.00018985967641393944, |
| "loss": 0.7114, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.0001898241198076983, |
| "loss": 0.6773, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.00018978850431312686, |
| "loss": 0.7994, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.00018975282995357446, |
| "loss": 0.7968, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.0001897170967524291, |
| "loss": 0.7133, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.00018968130473311732, |
| "loss": 0.9301, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.00018964545391910424, |
| "loss": 0.7954, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.00018960954433389345, |
| "loss": 0.7357, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.0001895735760010272, |
| "loss": 0.8854, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.00018953754894408616, |
| "loss": 0.8041, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.0001895014631866895, |
| "loss": 0.8201, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.00018946531875249493, |
| "loss": 0.7836, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00018942911566519857, |
| "loss": 0.7724, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00018939285394853502, |
| "loss": 0.8421, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.0001893565336262773, |
| "loss": 1.0076, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00018932015472223693, |
| "loss": 0.8518, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.0001892837172602637, |
| "loss": 0.7997, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.0001892472212642459, |
| "loss": 0.7941, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00018921066675811016, |
| "loss": 0.8069, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00018917405376582145, |
| "loss": 0.7401, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.00018913738231138313, |
| "loss": 0.6356, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.0001891006524188368, |
| "loss": 0.7705, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.0001890638641122625, |
| "loss": 0.6332, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.0001890270174157784, |
| "loss": 0.7877, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.00018899011235354115, |
| "loss": 0.7323, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.00018895314894974553, |
| "loss": 0.819, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.00018891612722862455, |
| "loss": 0.7393, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.00018887904721444953, |
| "loss": 0.5993, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00018884190893153, |
| "loss": 0.7651, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00018880471240421365, |
| "loss": 0.6474, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.0001887674576568864, |
| "loss": 0.8727, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00018873014471397224, |
| "loss": 0.7029, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00018869277359993345, |
| "loss": 0.687, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00018865534433927034, |
| "loss": 0.856, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00018861785695652142, |
| "loss": 0.8123, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00018858031147626325, |
| "loss": 0.8782, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.00018854270792311047, |
| "loss": 0.8027, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.0001885050463217159, |
| "loss": 0.8702, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.00018846732669677025, |
| "loss": 0.9548, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.00018842954907300236, |
| "loss": 0.8639, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.00018839171347517913, |
| "loss": 0.77, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.0001883538199281054, |
| "loss": 0.828, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.00018831586845662404, |
| "loss": 0.778, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.00018827785908561584, |
| "loss": 0.7944, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.00018823979183999964, |
| "loss": 0.9976, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.00018820166674473216, |
| "loss": 0.7956, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.00018816348382480805, |
| "loss": 0.7295, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.0001881252431052599, |
| "loss": 0.8993, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.00018808694461115813, |
| "loss": 0.7765, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.00018804858836761107, |
| "loss": 0.7637, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.000188010174399765, |
| "loss": 0.9371, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.00018797170273280388, |
| "loss": 0.7909, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00018793317339194963, |
| "loss": 0.8053, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.0001878945864024619, |
| "loss": 0.6514, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.0001878559417896382, |
| "loss": 0.8744, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00018781723957881372, |
| "loss": 0.7529, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00018777847979536155, |
| "loss": 0.6732, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00018773966246469237, |
| "loss": 0.8312, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00018770078761225475, |
| "loss": 0.7741, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.0001876618552635348, |
| "loss": 0.8016, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00018762286544405643, |
| "loss": 0.7435, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00018758381817938127, |
| "loss": 0.7518, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.0001875447134951085, |
| "loss": 0.8366, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.000187505551416875, |
| "loss": 0.8691, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00018746633197035527, |
| "loss": 0.7876, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.0001874270551812614, |
| "loss": 0.7981, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00018738772107534314, |
| "loss": 0.7551, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00018734832967838775, |
| "loss": 1.1028, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018730888101622006, |
| "loss": 0.9075, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018726937511470246, |
| "loss": 0.8651, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018722981199973489, |
| "loss": 0.885, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018719019169725472, |
| "loss": 0.8346, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018715051423323685, |
| "loss": 0.8147, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018711077963369375, |
| "loss": 0.8039, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018707098792467515, |
| "loss": 0.8944, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00018703113913226847, |
| "loss": 0.8617, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.00018699123328259828, |
| "loss": 0.7269, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.00018695127040182675, |
| "loss": 0.907, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.00018691125051615342, |
| "loss": 0.7895, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.00018687117365181512, |
| "loss": 0.9355, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.0001868310398350861, |
| "loss": 0.8549, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.0001867908490922779, |
| "loss": 0.7331, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.00018675060144973944, |
| "loss": 0.7482, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.0001867102969338569, |
| "loss": 0.6274, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.00018666993557105377, |
| "loss": 0.7205, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.00018662951738779076, |
| "loss": 0.6207, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.00018658904241056594, |
| "loss": 0.8382, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.00018654851066591448, |
| "loss": 0.902, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.0001865079221804088, |
| "loss": 0.8208, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.00018646727698065865, |
| "loss": 0.868, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.00018642657509331076, |
| "loss": 0.8533, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.0001863858165450492, |
| "loss": 0.7038, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.00018634500136259502, |
| "loss": 0.8563, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.0001863041295727066, |
| "loss": 0.8869, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.00018626320120217923, |
| "loss": 0.7725, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.0001862222162778454, |
| "loss": 0.7933, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.0001861811748265747, |
| "loss": 0.6886, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.00018614007687527373, |
| "loss": 0.6821, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.0001860989224508861, |
| "loss": 0.7628, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.00018605771158039253, |
| "loss": 0.7537, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.0001860164442908107, |
| "loss": 0.6721, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00018597512060919522, |
| "loss": 0.7666, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.0001859337405626378, |
| "loss": 0.7936, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00018589230417826697, |
| "loss": 0.7869, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00018585081148324832, |
| "loss": 0.9065, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00018580926250478426, |
| "loss": 0.9016, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.0001857676572701141, |
| "loss": 0.8175, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00018572599580651415, |
| "loss": 0.8704, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.0001856842781412974, |
| "loss": 0.8327, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00018564250430181387, |
| "loss": 0.7853, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00018560067431545022, |
| "loss": 0.6945, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00018555878820963013, |
| "loss": 0.7118, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.0001855168460118139, |
| "loss": 0.6509, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00018547484774949867, |
| "loss": 0.9692, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00018543279345021834, |
| "loss": 0.7238, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00018539068314154354, |
| "loss": 0.945, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.0001853485168510816, |
| "loss": 0.767, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00018530629460647657, |
| "loss": 0.7866, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00018526401643540922, |
| "loss": 0.94, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00018522168236559695, |
| "loss": 0.7814, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00018517929242479374, |
| "loss": 0.8348, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00018513684664079035, |
| "loss": 0.827, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00018509434504141397, |
| "loss": 0.7679, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00018505178765452853, |
| "loss": 0.7376, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.0001850091745080345, |
| "loss": 0.836, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.00018496650562986887, |
| "loss": 0.7615, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.00018492378104800517, |
| "loss": 0.6513, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.00018488100079045344, |
| "loss": 0.7145, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.0001848381648852603, |
| "loss": 0.8169, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.00018479527336050878, |
| "loss": 0.6889, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.0001847523262443184, |
| "loss": 0.6449, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.00018470932356484508, |
| "loss": 0.7576, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018466626535028121, |
| "loss": 0.7149, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018462315162885563, |
| "loss": 0.7683, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018457998242883344, |
| "loss": 0.8299, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018453675777851627, |
| "loss": 0.8349, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018449347770624198, |
| "loss": 0.7851, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018445014224038485, |
| "loss": 0.5875, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.0001844067514093554, |
| "loss": 0.7516, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018436330524160047, |
| "loss": 0.6599, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018431980376560324, |
| "loss": 0.7259, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018427624700988307, |
| "loss": 0.8296, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.0001842326350029956, |
| "loss": 0.7817, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.0001841889677735327, |
| "loss": 0.7002, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018414524535012244, |
| "loss": 0.8322, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.000184101467761429, |
| "loss": 0.6261, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018405763503615284, |
| "loss": 0.7882, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018401374720303056, |
| "loss": 0.7369, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018396980429083475, |
| "loss": 0.582, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018392580632837423, |
| "loss": 0.7864, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018388175334449396, |
| "loss": 0.8018, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018383764536807485, |
| "loss": 0.8805, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018379348242803385, |
| "loss": 0.7097, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.0001837492645533241, |
| "loss": 0.8331, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018370499177293464, |
| "loss": 0.9276, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.0001836606641158905, |
| "loss": 0.7617, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.0001836162816112527, |
| "loss": 0.7274, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018357184428811828, |
| "loss": 0.6609, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018352735217562013, |
| "loss": 0.7582, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018348280530292713, |
| "loss": 0.719, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.000183438203699244, |
| "loss": 0.8751, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.0001833935473938114, |
| "loss": 0.739, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018334883641590575, |
| "loss": 0.76, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018330407079483952, |
| "loss": 0.7853, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018325925055996076, |
| "loss": 0.8082, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018321437574065347, |
| "loss": 0.7367, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018316944636633743, |
| "loss": 0.7813, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.0001831244624664681, |
| "loss": 0.8694, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018307942407053674, |
| "loss": 0.8614, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.0001830343312080704, |
| "loss": 0.7012, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018298918390863175, |
| "loss": 0.9371, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018294398220181917, |
| "loss": 0.9336, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018289872611726673, |
| "loss": 0.7229, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018285341568464414, |
| "loss": 0.7194, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018280805093365672, |
| "loss": 0.5526, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.0001827626318940454, |
| "loss": 0.7177, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018271715859558677, |
| "loss": 0.7777, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018267163106809288, |
| "loss": 0.6419, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018262604934141146, |
| "loss": 0.8032, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018258041344542566, |
| "loss": 0.7169, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.0001825347234100542, |
| "loss": 1.02, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.0001824889792652513, |
| "loss": 0.8615, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018244318104100658, |
| "loss": 0.8184, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018239732876734527, |
| "loss": 0.834, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018235142247432782, |
| "loss": 0.7535, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018230546219205032, |
| "loss": 0.8242, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018225944795064404, |
| "loss": 0.7736, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018221337978027583, |
| "loss": 0.811, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.00018216725771114773, |
| "loss": 0.81, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.0001821210817734972, |
| "loss": 0.7197, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.000182074851997597, |
| "loss": 0.9024, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.00018202856841375518, |
| "loss": 0.6468, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.0001819822310523151, |
| "loss": 0.7431, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.0001819358399436553, |
| "loss": 0.6912, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.00018188939511818965, |
| "loss": 0.7594, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.00018184289660636715, |
| "loss": 0.6277, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.00018179634443867207, |
| "loss": 0.7571, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.0001817497386456238, |
| "loss": 0.8256, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.0001817030792577769, |
| "loss": 0.7068, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.0001816563663057211, |
| "loss": 0.6752, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.00018160959982008122, |
| "loss": 0.8982, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.0001815627798315172, |
| "loss": 0.8652, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.00018151590637072396, |
| "loss": 0.6783, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.00018146897946843163, |
| "loss": 0.6434, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.00018142199915540527, |
| "loss": 0.7475, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.00018137496546244498, |
| "loss": 0.7745, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.0001813278784203859, |
| "loss": 0.8309, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.000181280738060098, |
| "loss": 0.6819, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.0001812335444124864, |
| "loss": 0.6933, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.00018118629750849105, |
| "loss": 0.5936, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.0001811389973790868, |
| "loss": 0.7178, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.0001810916440552835, |
| "loss": 0.8156, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.0001810442375681257, |
| "loss": 0.9001, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.00018099677794869296, |
| "loss": 0.815, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.0001809492652280996, |
| "loss": 0.7136, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.00018090169943749476, |
| "loss": 0.7121, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.0001808540806080624, |
| "loss": 0.9789, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.0001808064087710212, |
| "loss": 0.6437, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.00018075868395762466, |
| "loss": 0.8143, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.00018071090619916093, |
| "loss": 0.7523, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018066307552695298, |
| "loss": 0.8618, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018061519197235836, |
| "loss": 0.7881, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018056725556676932, |
| "loss": 0.5721, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018051926634161282, |
| "loss": 0.8736, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018047122432835038, |
| "loss": 0.7147, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018042312955847818, |
| "loss": 0.723, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.0001803749820635269, |
| "loss": 0.5867, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018032678187506187, |
| "loss": 0.8015, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.00018027852902468294, |
| "loss": 0.7489, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.0001802302235440245, |
| "loss": 0.6513, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.0001801818654647554, |
| "loss": 0.7774, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.00018013345481857903, |
| "loss": 0.7811, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.0001800849916372332, |
| "loss": 0.7134, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.00018003647595249013, |
| "loss": 0.7153, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.0001799879077961566, |
| "loss": 0.7692, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.0001799392872000736, |
| "loss": 0.7273, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00017989061419611667, |
| "loss": 0.6219, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00017984188881619564, |
| "loss": 0.7013, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.0001797931110922546, |
| "loss": 0.7175, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00017974428105627208, |
| "loss": 0.8797, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00017969539874026087, |
| "loss": 0.7152, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00017964646417626797, |
| "loss": 0.9644, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00017959747739637472, |
| "loss": 0.8494, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00017954843843269664, |
| "loss": 0.7647, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00017949934731738347, |
| "loss": 0.6678, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00017945020408261916, |
| "loss": 0.6749, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00017940100876062182, |
| "loss": 0.655, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.0001793517613836437, |
| "loss": 0.7376, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00017930246198397115, |
| "loss": 0.7938, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.0001792531105939247, |
| "loss": 0.7795, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00017920370724585895, |
| "loss": 0.7104, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00017915425197216245, |
| "loss": 0.6705, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00017910474480525794, |
| "loss": 0.7389, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00017905518577760208, |
| "loss": 0.8265, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.0001790055749216856, |
| "loss": 0.8465, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00017895591227003315, |
| "loss": 0.6147, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00017890619785520338, |
| "loss": 0.8657, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.0001788564317097889, |
| "loss": 0.7079, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00017880661386641614, |
| "loss": 0.9004, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00017875674435774547, |
| "loss": 0.8611, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00017870682321647114, |
| "loss": 0.8298, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.0001786568504753213, |
| "loss": 0.6698, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00017860682616705785, |
| "loss": 0.85, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00017855675032447648, |
| "loss": 0.7686, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00017850662298040678, |
| "loss": 0.5775, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00017845644416771198, |
| "loss": 0.7421, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00017840621391928917, |
| "loss": 1.0828, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00017835593226806903, |
| "loss": 0.6465, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00017830559924701604, |
| "loss": 0.8314, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.0001782552148891283, |
| "loss": 0.7396, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00017820477922743762, |
| "loss": 0.9174, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00017815429229500946, |
| "loss": 0.7693, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00017810375412494276, |
| "loss": 0.7952, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00017805316475037018, |
| "loss": 0.7138, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00017800252420445788, |
| "loss": 0.6924, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00017795183252040567, |
| "loss": 0.6545, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00017790108973144678, |
| "loss": 1.0184, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00017785029587084794, |
| "loss": 0.7687, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00017779945097190942, |
| "loss": 0.6105, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00017774855506796496, |
| "loss": 0.83, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00017769760819238165, |
| "loss": 0.7243, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.0001776466103785601, |
| "loss": 0.8047, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00017759556165993428, |
| "loss": 0.8478, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.0001775444620699715, |
| "loss": 0.757, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.0001774933116421725, |
| "loss": 0.6599, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00017744211041007118, |
| "loss": 0.842, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00017739085840723498, |
| "loss": 0.7881, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.0001773395556672644, |
| "loss": 0.7875, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00017728820222379338, |
| "loss": 0.6974, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00017723679811048904, |
| "loss": 0.7585, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00017718534336105165, |
| "loss": 0.8348, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00017713383800921478, |
| "loss": 0.6044, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.0001770822820887451, |
| "loss": 0.7534, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.0001770306756334425, |
| "loss": 0.7952, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00017697901867713995, |
| "loss": 0.8525, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00017692731125370354, |
| "loss": 0.7961, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00017687555339703244, |
| "loss": 1.0254, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00017682374514105888, |
| "loss": 0.7888, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00017677188651974817, |
| "loss": 0.8766, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00017671997756709863, |
| "loss": 0.7805, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.0001766680183171415, |
| "loss": 0.5599, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.0001766160088039411, |
| "loss": 0.7275, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.00017656394906159464, |
| "loss": 0.6505, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.00017651183912423228, |
| "loss": 0.8124, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.0001764596790260171, |
| "loss": 0.8291, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.00017640746880114505, |
| "loss": 0.7428, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.0001763552084838449, |
| "loss": 0.6476, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.00017630289810837834, |
| "loss": 0.7687, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.0001762505377090398, |
| "loss": 0.8618, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.00017619812732015664, |
| "loss": 0.7814, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.0001761456669760888, |
| "loss": 0.7989, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.0001760931567112291, |
| "loss": 0.8835, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.0001760405965600031, |
| "loss": 0.8057, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.000175987986556869, |
| "loss": 0.7426, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.00017593532673631766, |
| "loss": 0.6891, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.00017588261713287267, |
| "loss": 0.7632, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00017582985778109026, |
| "loss": 0.7792, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.0001757770487155592, |
| "loss": 0.674, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.000175724189970901, |
| "loss": 0.6418, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00017567128158176953, |
| "loss": 0.7169, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00017561832358285138, |
| "loss": 0.7671, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00017556531600886554, |
| "loss": 0.7621, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00017551225889456365, |
| "loss": 0.7583, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00017545915227472965, |
| "loss": 0.8833, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00017540599618418007, |
| "loss": 0.725, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.0001753527906577638, |
| "loss": 0.7384, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00017529953573036224, |
| "loss": 1.037, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00017524623143688902, |
| "loss": 0.6814, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.0001751928778122903, |
| "loss": 0.7488, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00017513947489154443, |
| "loss": 0.8158, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00017508602270966216, |
| "loss": 0.6863, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00017503252130168657, |
| "loss": 0.6875, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.0001749789707026929, |
| "loss": 0.8221, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.0001749253709477888, |
| "loss": 0.9427, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00017487172207211396, |
| "loss": 0.7597, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00017481802411084042, |
| "loss": 0.7965, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.0001747642770991723, |
| "loss": 0.6991, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00017471048107234598, |
| "loss": 0.7611, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00017465663606562988, |
| "loss": 0.7399, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.0001746027421143246, |
| "loss": 0.6661, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.0001745487992537628, |
| "loss": 0.8229, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00017449480751930912, |
| "loss": 0.7213, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00017444076694636041, |
| "loss": 0.73, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00017438667757034546, |
| "loss": 0.9427, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00017433253942672496, |
| "loss": 0.6311, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00017427835255099172, |
| "loss": 0.8465, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00017422411697867046, |
| "loss": 0.9116, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00017416983274531775, |
| "loss": 0.8761, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00017411549988652212, |
| "loss": 0.7848, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.000174061118437904, |
| "loss": 0.7138, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.0001740066884351156, |
| "loss": 0.6889, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.0001739522099138411, |
| "loss": 0.7285, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.0001738976829097963, |
| "loss": 0.7797, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00017384310745872895, |
| "loss": 0.6561, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00017378848359641847, |
| "loss": 0.9094, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00017373381135867604, |
| "loss": 0.7241, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00017367909078134453, |
| "loss": 0.7219, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00017362432190029862, |
| "loss": 0.8374, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.0001735695047514445, |
| "loss": 0.8104, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00017351463937072004, |
| "loss": 0.9037, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00017345972579409488, |
| "loss": 0.7776, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00017340476405756998, |
| "loss": 0.7263, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00017334975419717815, |
| "loss": 0.6951, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.0001732946962489836, |
| "loss": 0.8177, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00017323959024908209, |
| "loss": 0.7882, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.0001731844362336009, |
| "loss": 0.7204, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.0001731292342386988, |
| "loss": 0.68, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00017307398430056593, |
| "loss": 0.8076, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00017301868645542401, |
| "loss": 0.8671, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00017296334073952605, |
| "loss": 0.8185, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00017290794718915643, |
| "loss": 0.6169, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.000172852505840631, |
| "loss": 0.8338, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00017279701673029687, |
| "loss": 0.7491, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00017274147989453247, |
| "loss": 0.859, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.0001726858953697475, |
| "loss": 0.8591, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00017263026319238301, |
| "loss": 0.7975, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00017257458339891118, |
| "loss": 0.8403, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00017251885602583545, |
| "loss": 0.6687, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00017246308110969053, |
| "loss": 0.7616, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00017240725868704218, |
| "loss": 0.6852, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00017235138879448733, |
| "loss": 0.8182, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.0001722954714686541, |
| "loss": 1.0798, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00017223950674620164, |
| "loss": 0.7648, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00017218349466382023, |
| "loss": 0.6719, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00017212743525823112, |
| "loss": 0.6476, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00017207132856618667, |
| "loss": 0.7717, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00017201517462447013, |
| "loss": 0.8554, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.0001719589734698959, |
| "loss": 0.8765, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00017190272513930915, |
| "loss": 0.7007, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.0001718464296695861, |
| "loss": 0.8068, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.0001717900870976338, |
| "loss": 0.67, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00017173369746039025, |
| "loss": 0.7765, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00017167726079482426, |
| "loss": 0.8964, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00017162077713793545, |
| "loss": 0.7219, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.0001715642465267543, |
| "loss": 0.7028, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00017150766899834204, |
| "loss": 0.7724, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00017145104458979074, |
| "loss": 0.7078, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.000171394373338223, |
| "loss": 0.8496, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00017133765528079239, |
| "loss": 0.9062, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00017128089045468294, |
| "loss": 0.7347, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.0001712240788971095, |
| "loss": 0.8078, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00017116722064531748, |
| "loss": 0.7295, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00017111031573658294, |
| "loss": 0.8403, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00017105336420821247, |
| "loss": 0.6432, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00017099636609754329, |
| "loss": 0.644, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.0001709393214419431, |
| "loss": 0.847, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00017088223027881023, |
| "loss": 0.7044, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.0001708250926455733, |
| "loss": 0.8025, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00017076790857969163, |
| "loss": 0.7007, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00017071067811865476, |
| "loss": 0.6679, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.0001706534012999828, |
| "loss": 0.7731, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00017059607816122618, |
| "loss": 0.753, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017053870873996572, |
| "loss": 0.7222, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017048129307381266, |
| "loss": 0.7462, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017042383120040834, |
| "loss": 0.6969, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017036632315742462, |
| "loss": 0.9344, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017030876898256354, |
| "loss": 0.7562, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017025116871355735, |
| "loss": 0.8233, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.0001701935223881686, |
| "loss": 0.6335, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017013583004418993, |
| "loss": 0.736, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00017007809171944423, |
| "loss": 0.7096, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00017002030745178455, |
| "loss": 0.7125, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00016996247727909397, |
| "loss": 0.6483, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00016990460123928575, |
| "loss": 0.8764, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00016984667937030318, |
| "loss": 0.8556, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.0001697887117101196, |
| "loss": 0.6606, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00016973069829673837, |
| "loss": 0.8142, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00016967263916819287, |
| "loss": 0.7089, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.0001696145343625464, |
| "loss": 0.7784, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00016955638391789228, |
| "loss": 0.8951, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00016949818787235366, |
| "loss": 0.792, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00016943994626408363, |
| "loss": 0.8853, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.0001693816591312652, |
| "loss": 0.9822, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00016932332651211116, |
| "loss": 0.746, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00016926494844486412, |
| "loss": 0.7391, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.0001692065249677965, |
| "loss": 0.9108, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016914805611921056, |
| "loss": 0.7977, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016908954193743816, |
| "loss": 1.0081, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016903098246084098, |
| "loss": 0.6629, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016897237772781044, |
| "loss": 0.6262, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016891372777676748, |
| "loss": 0.7589, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016885503264616283, |
| "loss": 0.7699, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016879629237447676, |
| "loss": 0.7152, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00016873750700021915, |
| "loss": 0.8177, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00016867867656192946, |
| "loss": 0.6904, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.0001686198010981767, |
| "loss": 0.6232, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00016856088064755938, |
| "loss": 0.6911, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00016850191524870546, |
| "loss": 0.6231, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.0001684429049402725, |
| "loss": 0.7527, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00016838384976094738, |
| "loss": 0.8166, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00016832474974944642, |
| "loss": 0.8718, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00016826560494451537, |
| "loss": 0.8681, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00016820641538492934, |
| "loss": 0.6555, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00016814718110949275, |
| "loss": 0.8215, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00016808790215703935, |
| "loss": 0.6504, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00016802857856643215, |
| "loss": 0.7562, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.0001679692103765635, |
| "loss": 0.6297, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00016790979762635496, |
| "loss": 0.7602, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00016785034035475725, |
| "loss": 0.7967, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00016779083860075033, |
| "loss": 0.8625, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0001677312924033433, |
| "loss": 0.7811, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00016767170180157444, |
| "loss": 0.749, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00016761206683451108, |
| "loss": 0.806, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00016755238754124965, |
| "loss": 0.8395, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0001674926639609157, |
| "loss": 0.6652, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0001674328961326637, |
| "loss": 0.8078, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00016737308409567727, |
| "loss": 0.7759, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00016731322788916892, |
| "loss": 0.8967, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.0001672533275523801, |
| "loss": 0.7249, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00016719338312458124, |
| "loss": 0.6671, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00016713339464507172, |
| "loss": 0.7989, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00016707336215317968, |
| "loss": 0.702, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.0001670132856882622, |
| "loss": 0.6556, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00016695316528970517, |
| "loss": 0.7699, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00016689300099692332, |
| "loss": 0.8325, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00016683279284936004, |
| "loss": 0.7143, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016677254088648757, |
| "loss": 0.7666, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016671224514780693, |
| "loss": 0.7173, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016665190567284764, |
| "loss": 0.6302, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016659152250116812, |
| "loss": 0.7839, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016653109567235527, |
| "loss": 0.6993, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016647062522602473, |
| "loss": 0.6919, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016641011120182065, |
| "loss": 0.7547, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00016634955363941574, |
| "loss": 0.7296, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00016628895257851135, |
| "loss": 0.8771, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.0001662283080588373, |
| "loss": 0.6203, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.0001661676201201518, |
| "loss": 0.8908, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00016610688880224178, |
| "loss": 0.716, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.0001660461141449223, |
| "loss": 0.7355, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.000165985296188037, |
| "loss": 0.747, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00016592443497145793, |
| "loss": 0.7416, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.0001658635305350855, |
| "loss": 0.729, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.0001658025829188483, |
| "loss": 0.7984, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.0001657415921627034, |
| "loss": 0.7819, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.0001656805583066361, |
| "loss": 0.8155, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00016561948139065996, |
| "loss": 0.8535, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00016555836145481674, |
| "loss": 0.7974, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.0001654971985391764, |
| "loss": 0.7551, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00016543599268383714, |
| "loss": 0.7544, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00016537474392892528, |
| "loss": 0.6385, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.0001653134523145952, |
| "loss": 0.826, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00016525211788102946, |
| "loss": 0.9611, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.0001651907406684387, |
| "loss": 0.718, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00016512932071706152, |
| "loss": 0.8885, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00016506785806716465, |
| "loss": 0.7014, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00016500635275904272, |
| "loss": 0.7309, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00016494480483301836, |
| "loss": 0.7353, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.0001648832143294422, |
| "loss": 0.9406, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00016482158128869258, |
| "loss": 1.0239, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00016475990575117605, |
| "loss": 0.7183, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00016469818775732668, |
| "loss": 0.7745, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.0001646364273476067, |
| "loss": 0.6641, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00016457462456250584, |
| "loss": 0.7575, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00016451277944254185, |
| "loss": 0.8114, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.0001644508920282601, |
| "loss": 0.7699, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00016438896236023375, |
| "loss": 0.7707, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00016432699047906363, |
| "loss": 0.737, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00016426497642537825, |
| "loss": 0.8471, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00016420292023983378, |
| "loss": 0.9261, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.000164140821963114, |
| "loss": 0.7724, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00016407868163593027, |
| "loss": 0.7028, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.0001640164992990216, |
| "loss": 0.7371, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00016395427499315442, |
| "loss": 0.6352, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00016389200875912278, |
| "loss": 0.7697, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.0001638297006377481, |
| "loss": 0.873, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.0001637673506698794, |
| "loss": 0.7766, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00016370495889639304, |
| "loss": 0.7073, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00016364252535819282, |
| "loss": 0.7323, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00016358005009620992, |
| "loss": 0.786, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00016351753315140287, |
| "loss": 0.7615, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.0001634549745647575, |
| "loss": 0.8211, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.000163392374377287, |
| "loss": 0.745, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016332973263003176, |
| "loss": 0.6718, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016326704936405953, |
| "loss": 0.9161, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016320432462046516, |
| "loss": 0.8297, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016314155844037074, |
| "loss": 0.7377, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016307875086492548, |
| "loss": 0.5759, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016301590193530584, |
| "loss": 0.795, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016295301169271531, |
| "loss": 0.7401, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00016289008017838445, |
| "loss": 0.8508, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00016282710743357096, |
| "loss": 0.6911, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00016276409349955944, |
| "loss": 0.8146, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00016270103841766167, |
| "loss": 0.6548, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.0001626379422292162, |
| "loss": 0.807, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00016257480497558873, |
| "loss": 0.7765, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.0001625116266981717, |
| "loss": 0.79, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.0001624484074383846, |
| "loss": 0.7595, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00016238514723767374, |
| "loss": 0.6465, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00016232184613751218, |
| "loss": 0.7716, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.0001622585041793999, |
| "loss": 0.9568, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00016219512140486363, |
| "loss": 0.7801, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.0001621316978554569, |
| "loss": 0.7859, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00016206823357275984, |
| "loss": 0.7454, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00016200472859837945, |
| "loss": 1.1258, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00016194118297394936, |
| "loss": 0.89, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00016187759674112973, |
| "loss": 0.8703, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00016181396994160752, |
| "loss": 0.8565, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00016175030261709615, |
| "loss": 0.8997, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.0001616865948093357, |
| "loss": 0.7286, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00016162284656009274, |
| "loss": 0.8531, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00016155905791116038, |
| "loss": 0.9182, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00016149522890435814, |
| "loss": 0.7161, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.0001614313595815321, |
| "loss": 0.7212, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00016136744998455476, |
| "loss": 0.8635, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016130350015532496, |
| "loss": 0.8798, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016123951013576794, |
| "loss": 0.7718, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016117547996783533, |
| "loss": 0.7027, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016111140969350503, |
| "loss": 0.7919, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016104729935478124, |
| "loss": 0.6695, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016098314899369446, |
| "loss": 0.7681, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016091895865230141, |
| "loss": 0.7496, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00016085472837268502, |
| "loss": 0.7029, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00016079045819695437, |
| "loss": 0.8447, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00016072614816724478, |
| "loss": 0.832, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.0001606617983257176, |
| "loss": 0.6667, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00016059740871456036, |
| "loss": 0.7167, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.0001605329793759866, |
| "loss": 0.7535, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00016046851035223593, |
| "loss": 0.8269, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00016040400168557405, |
| "loss": 0.8114, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00016033945341829248, |
| "loss": 0.7501, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00016027486559270886, |
| "loss": 0.7137, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00016021023825116672, |
| "loss": 0.7913, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00016014557143603545, |
| "loss": 0.7648, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00016008086518971037, |
| "loss": 0.7205, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00016001611955461265, |
| "loss": 0.9152, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.0001599513345731892, |
| "loss": 0.6588, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00015988651028791287, |
| "loss": 0.7564, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.0001598216467412822, |
| "loss": 0.7539, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.0001597567439758214, |
| "loss": 0.8606, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.0001596918020340805, |
| "loss": 0.6577, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00015962682095863523, |
| "loss": 0.8534, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00015956180079208682, |
| "loss": 0.8389, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00015949674157706228, |
| "loss": 0.6407, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.0001594316433562142, |
| "loss": 0.7298, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00015936650617222063, |
| "loss": 0.764, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.0001593013300677853, |
| "loss": 0.8274, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.0001592361150856374, |
| "loss": 0.6794, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.0001591708612685316, |
| "loss": 0.756, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00015910556865924802, |
| "loss": 0.9038, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00015904023730059228, |
| "loss": 0.7789, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00015897486723539533, |
| "loss": 0.7292, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00015890945850651346, |
| "loss": 0.9091, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00015884401115682847, |
| "loss": 0.818, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00015877852522924732, |
| "loss": 0.7368, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00015871300076670234, |
| "loss": 0.759, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.0001586474378121511, |
| "loss": 0.76, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.0001585818364085764, |
| "loss": 0.9963, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00015851619659898623, |
| "loss": 0.8492, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00015845051842641383, |
| "loss": 0.7923, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00015838480193391754, |
| "loss": 0.8058, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.0001583190471645808, |
| "loss": 0.7064, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00015825325416151222, |
| "loss": 0.6573, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00015818742296784535, |
| "loss": 0.7897, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00015812155362673896, |
| "loss": 0.7828, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.0001580556461813766, |
| "loss": 0.668, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.000157989700674967, |
| "loss": 0.7288, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00015792371715074376, |
| "loss": 0.7361, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.0001578576956519654, |
| "loss": 0.7025, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00015779163622191538, |
| "loss": 0.7684, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00015772553890390197, |
| "loss": 0.6602, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0001576594037412583, |
| "loss": 0.9337, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0001575932307773423, |
| "loss": 0.779, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0001575270200555367, |
| "loss": 0.8531, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.00015746077161924905, |
| "loss": 0.7833, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0001573944855119115, |
| "loss": 0.6894, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.00015732816177698098, |
| "loss": 0.8026, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.000157261800457939, |
| "loss": 0.8205, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.00015719540159829184, |
| "loss": 0.6721, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.0001571289652415703, |
| "loss": 0.753, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00015706249143132982, |
| "loss": 0.7912, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00015699598021115028, |
| "loss": 0.5783, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00015692943162463628, |
| "loss": 0.7996, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00015686284571541671, |
| "loss": 0.6549, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00015679622252714507, |
| "loss": 0.751, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00015672956210349923, |
| "loss": 0.7803, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.0001566628644881815, |
| "loss": 0.8471, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00015659612972491858, |
| "loss": 0.772, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.0001565293578574615, |
| "loss": 0.8832, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00015646254892958566, |
| "loss": 0.8665, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00015639570298509064, |
| "loss": 0.8066, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00015632882006780046, |
| "loss": 0.9439, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00015626190022156327, |
| "loss": 0.9167, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.0001561949434902514, |
| "loss": 0.8207, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00015612794991776147, |
| "loss": 0.9199, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.0001560609195480142, |
| "loss": 0.747, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00015599385242495438, |
| "loss": 0.7274, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00015592674859255096, |
| "loss": 0.8337, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00015585960809479696, |
| "loss": 0.7462, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.0001557924309757094, |
| "loss": 0.7635, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00015572521727932935, |
| "loss": 0.9531, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00015565796704972184, |
| "loss": 0.7138, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00015559068033097582, |
| "loss": 0.7285, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.0001555233571672042, |
| "loss": 0.8221, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00015545599760254382, |
| "loss": 0.6298, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00015538860168115527, |
| "loss": 0.807, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00015532116944722308, |
| "loss": 0.6649, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00015525370094495556, |
| "loss": 0.7202, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00015518619621858476, |
| "loss": 0.6853, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.0001551186553123665, |
| "loss": 0.7626, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00015505107827058036, |
| "loss": 0.6392, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.00015498346513752957, |
| "loss": 0.7727, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.000154915815957541, |
| "loss": 0.802, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0001548481307749652, |
| "loss": 0.7913, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0001547804096341763, |
| "loss": 0.7507, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.00015471265257957202, |
| "loss": 0.749, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0001546448596555736, |
| "loss": 0.8569, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.00015457703090662576, |
| "loss": 0.8356, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.00015450916637719684, |
| "loss": 0.7783, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00015444126611177848, |
| "loss": 0.9263, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00015437333015488587, |
| "loss": 0.7359, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00015430535855105753, |
| "loss": 1.0603, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00015423735134485536, |
| "loss": 0.8113, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.0001541693085808646, |
| "loss": 0.742, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00015410123030369386, |
| "loss": 0.9023, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00015403311655797492, |
| "loss": 0.685, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00015396496738836292, |
| "loss": 0.8507, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00015389678283953616, |
| "loss": 0.7255, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0001538285629561962, |
| "loss": 0.7862, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00015376030778306766, |
| "loss": 0.8411, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0001536920173648984, |
| "loss": 0.8774, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0001536236917464593, |
| "loss": 0.7173, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00015355533097254436, |
| "loss": 0.7336, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0001534869350879707, |
| "loss": 0.891, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0001534185041375783, |
| "loss": 0.6952, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00015335003816623028, |
| "loss": 0.7578, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.0001532815372188126, |
| "loss": 0.8295, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00015321300134023424, |
| "loss": 0.7273, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00015314443057542703, |
| "loss": 0.7864, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.0001530758249693457, |
| "loss": 0.7327, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00015300718456696778, |
| "loss": 0.7818, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.0001529385094132937, |
| "loss": 0.713, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00015286979955334652, |
| "loss": 0.7514, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00015280105503217225, |
| "loss": 0.6877, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00015273227589483946, |
| "loss": 0.7906, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00015266346218643947, |
| "loss": 0.8441, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00015259461395208628, |
| "loss": 0.8263, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00015252573123691654, |
| "loss": 0.7415, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00015245681408608945, |
| "loss": 0.7761, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00015238786254478682, |
| "loss": 0.9678, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.000152318876658213, |
| "loss": 0.8058, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.0001522498564715949, |
| "loss": 0.7609, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00015218080203018182, |
| "loss": 0.7379, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.0001521117133792456, |
| "loss": 0.858, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00015204259056408046, |
| "loss": 0.8597, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00015197343363000307, |
| "loss": 0.698, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.0001519042426223524, |
| "loss": 0.7371, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00015183501758648987, |
| "loss": 0.8358, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00015176575856779904, |
| "loss": 0.7387, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.0001516964656116859, |
| "loss": 0.7181, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00015162713876357858, |
| "loss": 0.7505, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00015155777806892754, |
| "loss": 1.019, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00015148838357320537, |
| "loss": 0.7116, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00015141895532190677, |
| "loss": 0.7876, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00015134949336054865, |
| "loss": 0.6581, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00015127999773467002, |
| "loss": 0.7123, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.0001512104684898319, |
| "loss": 0.6713, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00015114090567161734, |
| "loss": 0.8461, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.0001510713093256315, |
| "loss": 0.9173, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.0001510016794975015, |
| "loss": 0.6969, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00015093201623287631, |
| "loss": 0.7124, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.0001508623195774269, |
| "loss": 0.6165, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.0001507925895768461, |
| "loss": 0.8286, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00015072282627684867, |
| "loss": 0.7192, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00015065302972317108, |
| "loss": 0.9173, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00015058319996157172, |
| "loss": 0.6801, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00015051333703783068, |
| "loss": 0.6879, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.0001504434409977498, |
| "loss": 0.598, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00015037351188715265, |
| "loss": 0.7261, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.0001503035497518845, |
| "loss": 0.8939, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.0001502335546378122, |
| "loss": 0.7161, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00015016352659082428, |
| "loss": 0.7742, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00015009346565683087, |
| "loss": 0.7509, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.0001500233718817636, |
| "loss": 0.7152, |
| "step": 1413 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.0001499532453115757, |
| "loss": 0.8825, |
| "step": 1414 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00014988308599224183, |
| "loss": 0.6963, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00014981289396975817, |
| "loss": 0.8952, |
| "step": 1416 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00014974266929014234, |
| "loss": 0.7149, |
| "step": 1417 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00014967241199943332, |
| "loss": 0.6696, |
| "step": 1418 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00014960212214369155, |
| "loss": 0.5835, |
| "step": 1419 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00014953179976899878, |
| "loss": 0.7761, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.000149461444921458, |
| "loss": 0.8417, |
| "step": 1421 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00014939105764719368, |
| "loss": 0.778, |
| "step": 1422 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00014932063799235135, |
| "loss": 0.6825, |
| "step": 1423 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00014925018600309785, |
| "loss": 0.7067, |
| "step": 1424 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.0001491797017256212, |
| "loss": 0.7655, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00014910918520613073, |
| "loss": 0.8152, |
| "step": 1426 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00014903863649085668, |
| "loss": 0.7448, |
| "step": 1427 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.0001489680556260505, |
| "loss": 0.7509, |
| "step": 1428 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00014889744265798478, |
| "loss": 0.7651, |
| "step": 1429 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00014882679763295306, |
| "loss": 0.5907, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00014875612059726993, |
| "loss": 0.7993, |
| "step": 1431 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00014868541159727096, |
| "loss": 0.8609, |
| "step": 1432 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00014861467067931272, |
| "loss": 0.8653, |
| "step": 1433 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00014854389788977266, |
| "loss": 0.7315, |
| "step": 1434 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.0001484730932750491, |
| "loss": 0.714, |
| "step": 1435 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.0001484022568815613, |
| "loss": 0.7004, |
| "step": 1436 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.0001483313887557493, |
| "loss": 0.7298, |
| "step": 1437 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00014826048894407397, |
| "loss": 0.9142, |
| "step": 1438 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.0001481895574930169, |
| "loss": 0.6334, |
| "step": 1439 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00014811859444908052, |
| "loss": 0.8139, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00014804759985878786, |
| "loss": 0.6127, |
| "step": 1441 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00014797657376868273, |
| "loss": 0.9046, |
| "step": 1442 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.0001479055162253295, |
| "loss": 0.7271, |
| "step": 1443 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00014783442727531328, |
| "loss": 0.8972, |
| "step": 1444 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00014776330696523963, |
| "loss": 0.846, |
| "step": 1445 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00014769215534173475, |
| "loss": 0.9177, |
| "step": 1446 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.0001476209724514454, |
| "loss": 0.8201, |
| "step": 1447 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00014754975834103877, |
| "loss": 0.6874, |
| "step": 1448 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00014747851305720256, |
| "loss": 0.7929, |
| "step": 1449 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00014740723664664483, |
| "loss": 0.8737, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00014733592915609414, |
| "loss": 0.9325, |
| "step": 1451 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00014726459063229945, |
| "loss": 0.7913, |
| "step": 1452 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00014719322112202992, |
| "loss": 0.6657, |
| "step": 1453 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00014712182067207517, |
| "loss": 0.7492, |
| "step": 1454 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00014705038932924503, |
| "loss": 0.8277, |
| "step": 1455 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00014697892714036958, |
| "loss": 0.776, |
| "step": 1456 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00014690743415229918, |
| "loss": 0.683, |
| "step": 1457 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.0001468359104119043, |
| "loss": 0.7246, |
| "step": 1458 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.0001467643559660757, |
| "loss": 0.7263, |
| "step": 1459 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00014669277086172406, |
| "loss": 0.7353, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00014662115514578038, |
| "loss": 0.7482, |
| "step": 1461 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00014654950886519564, |
| "loss": 0.6343, |
| "step": 1462 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.0001464778320669408, |
| "loss": 0.7966, |
| "step": 1463 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00014640612479800686, |
| "loss": 0.8045, |
| "step": 1464 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00014633438710540489, |
| "loss": 0.8755, |
| "step": 1465 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00014626261903616578, |
| "loss": 0.8442, |
| "step": 1466 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.0001461908206373404, |
| "loss": 0.8017, |
| "step": 1467 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00014611899195599953, |
| "loss": 0.6298, |
| "step": 1468 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0001460471330392337, |
| "loss": 0.6288, |
| "step": 1469 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.00014597524393415335, |
| "loss": 0.922, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.00014590332468788868, |
| "loss": 0.9224, |
| "step": 1471 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.00014583137534758967, |
| "loss": 0.7782, |
| "step": 1472 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.00014575939596042604, |
| "loss": 0.6592, |
| "step": 1473 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.00014568738657358714, |
| "loss": 0.7492, |
| "step": 1474 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.00014561534723428205, |
| "loss": 0.8096, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0001455432779897395, |
| "loss": 0.677, |
| "step": 1476 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014547117888720777, |
| "loss": 0.8121, |
| "step": 1477 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014539904997395468, |
| "loss": 0.9615, |
| "step": 1478 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014532689129726777, |
| "loss": 0.7023, |
| "step": 1479 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014525470290445392, |
| "loss": 0.7383, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014518248484283948, |
| "loss": 0.5984, |
| "step": 1481 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014511023715977047, |
| "loss": 0.7832, |
| "step": 1482 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014503795990261205, |
| "loss": 0.6561, |
| "step": 1483 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00014496565311874902, |
| "loss": 0.7158, |
| "step": 1484 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00014489331685558525, |
| "loss": 0.723, |
| "step": 1485 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00014482095116054422, |
| "loss": 0.7918, |
| "step": 1486 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00014474855608106858, |
| "loss": 0.7664, |
| "step": 1487 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00014467613166462023, |
| "loss": 0.9447, |
| "step": 1488 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00014460367795868034, |
| "loss": 0.9244, |
| "step": 1489 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00014453119501074924, |
| "loss": 0.8869, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00014445868286834648, |
| "loss": 0.9234, |
| "step": 1491 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.0001443861415790107, |
| "loss": 0.7654, |
| "step": 1492 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.0001443135711902997, |
| "loss": 0.7178, |
| "step": 1493 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00014424097174979038, |
| "loss": 0.6083, |
| "step": 1494 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00014416834330507856, |
| "loss": 0.8265, |
| "step": 1495 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00014409568590377918, |
| "loss": 0.7071, |
| "step": 1496 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00014402299959352614, |
| "loss": 0.873, |
| "step": 1497 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.0001439502844219723, |
| "loss": 0.7955, |
| "step": 1498 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00014387754043678947, |
| "loss": 0.8353, |
| "step": 1499 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00014380476768566824, |
| "loss": 1.0436, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00014373196621631816, |
| "loss": 0.974, |
| "step": 1501 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00014365913607646761, |
| "loss": 0.8572, |
| "step": 1502 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.0001435862773138637, |
| "loss": 0.6742, |
| "step": 1503 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00014351338997627234, |
| "loss": 0.7399, |
| "step": 1504 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00014344047411147818, |
| "loss": 0.7241, |
| "step": 1505 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.0001433675297672846, |
| "loss": 0.6266, |
| "step": 1506 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00014329455699151354, |
| "loss": 0.769, |
| "step": 1507 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00014322155583200576, |
| "loss": 0.7495, |
| "step": 1508 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00014314852633662045, |
| "loss": 0.8028, |
| "step": 1509 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00014307546855323549, |
| "loss": 0.7671, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00014300238252974723, |
| "loss": 0.7795, |
| "step": 1511 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00014292926831407061, |
| "loss": 0.6647, |
| "step": 1512 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.000142856125954139, |
| "loss": 0.6447, |
| "step": 1513 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.0001427829554979042, |
| "loss": 0.797, |
| "step": 1514 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00014270975699333654, |
| "loss": 0.7916, |
| "step": 1515 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.0001426365304884246, |
| "loss": 0.8226, |
| "step": 1516 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.0001425632760311754, |
| "loss": 0.823, |
| "step": 1517 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.0001424899936696143, |
| "loss": 0.7964, |
| "step": 1518 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00014241668345178486, |
| "loss": 0.7626, |
| "step": 1519 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00014234334542574906, |
| "loss": 0.6746, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00014226997963958686, |
| "loss": 0.8372, |
| "step": 1521 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00014219658614139674, |
| "loss": 0.7628, |
| "step": 1522 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00014212316497929506, |
| "loss": 0.7627, |
| "step": 1523 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00014204971620141647, |
| "loss": 1.0511, |
| "step": 1524 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00014197623985591373, |
| "loss": 0.6545, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00014190273599095762, |
| "loss": 0.6587, |
| "step": 1526 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00014182920465473692, |
| "loss": 0.7417, |
| "step": 1527 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00014175564589545854, |
| "loss": 0.7774, |
| "step": 1528 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.0001416820597613473, |
| "loss": 0.7665, |
| "step": 1529 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00014160844630064595, |
| "loss": 0.9658, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.0001415348055616152, |
| "loss": 0.7743, |
| "step": 1531 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00014146113759253362, |
| "loss": 0.6876, |
| "step": 1532 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00014138744244169762, |
| "loss": 0.7549, |
| "step": 1533 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00014131372015742142, |
| "loss": 0.6643, |
| "step": 1534 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00014123997078803707, |
| "loss": 0.7086, |
| "step": 1535 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.0001411661943818944, |
| "loss": 0.7412, |
| "step": 1536 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.0001410923909873609, |
| "loss": 0.9003, |
| "step": 1537 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00014101856065282172, |
| "loss": 0.7405, |
| "step": 1538 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.0001409447034266798, |
| "loss": 0.7827, |
| "step": 1539 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00014087081935735564, |
| "loss": 0.8676, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0001407969084932873, |
| "loss": 0.7087, |
| "step": 1541 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00014072297088293042, |
| "loss": 0.869, |
| "step": 1542 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0001406490065747583, |
| "loss": 0.7207, |
| "step": 1543 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00014057501561726157, |
| "loss": 0.7011, |
| "step": 1544 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00014050099805894837, |
| "loss": 0.9816, |
| "step": 1545 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00014042695394834436, |
| "loss": 0.9845, |
| "step": 1546 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00014035288333399257, |
| "loss": 0.735, |
| "step": 1547 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0001402787862644534, |
| "loss": 0.8069, |
| "step": 1548 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00014020466278830452, |
| "loss": 0.7518, |
| "step": 1549 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00014013051295414108, |
| "loss": 0.7873, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00014005633681057536, |
| "loss": 0.616, |
| "step": 1551 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.0001399821344062369, |
| "loss": 0.7763, |
| "step": 1552 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00013990790578977257, |
| "loss": 0.8089, |
| "step": 1553 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00013983365100984633, |
| "loss": 0.7346, |
| "step": 1554 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00013975937011513932, |
| "loss": 0.7517, |
| "step": 1555 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00013968506315434974, |
| "loss": 0.7635, |
| "step": 1556 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.000139610730176193, |
| "loss": 0.827, |
| "step": 1557 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00013953637122940147, |
| "loss": 0.7624, |
| "step": 1558 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00013946198636272458, |
| "loss": 0.7902, |
| "step": 1559 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00013938757562492873, |
| "loss": 0.7368, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00013931313906479732, |
| "loss": 0.7421, |
| "step": 1561 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00013923867673113066, |
| "loss": 0.8186, |
| "step": 1562 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00013916418867274596, |
| "loss": 0.7311, |
| "step": 1563 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.0001390896749384773, |
| "loss": 0.7166, |
| "step": 1564 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00013901513557717553, |
| "loss": 0.7409, |
| "step": 1565 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.0001389405706377084, |
| "loss": 0.7116, |
| "step": 1566 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00013886598016896038, |
| "loss": 0.7832, |
| "step": 1567 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00013879136421983266, |
| "loss": 1.0161, |
| "step": 1568 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00013871672283924318, |
| "loss": 0.8046, |
| "step": 1569 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00013864205607612648, |
| "loss": 0.731, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00013856736397943386, |
| "loss": 0.7836, |
| "step": 1571 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00013849264659813312, |
| "loss": 0.8645, |
| "step": 1572 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00013841790398120868, |
| "loss": 1.0, |
| "step": 1573 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00013834313617766146, |
| "loss": 0.6616, |
| "step": 1574 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.000138268343236509, |
| "loss": 0.6907, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.0001381935252067852, |
| "loss": 0.866, |
| "step": 1576 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00013811868213754042, |
| "loss": 0.8047, |
| "step": 1577 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.0001380438140778416, |
| "loss": 0.7615, |
| "step": 1578 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00013796892107677182, |
| "loss": 0.826, |
| "step": 1579 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00013789400318343068, |
| "loss": 0.6562, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00013781906044693404, |
| "loss": 0.8871, |
| "step": 1581 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00013774409291641407, |
| "loss": 0.7366, |
| "step": 1582 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00013766910064101913, |
| "loss": 0.8775, |
| "step": 1583 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.0001375940836699139, |
| "loss": 0.6714, |
| "step": 1584 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.0001375190420522792, |
| "loss": 0.7455, |
| "step": 1585 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00013744397583731203, |
| "loss": 0.8168, |
| "step": 1586 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00013736888507422546, |
| "loss": 0.8646, |
| "step": 1587 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.0001372937698122487, |
| "loss": 0.7896, |
| "step": 1588 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.000137218630100627, |
| "loss": 0.7025, |
| "step": 1589 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00013714346598862166, |
| "loss": 0.86, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00013706827752550996, |
| "loss": 0.8737, |
| "step": 1591 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.0001369930647605852, |
| "loss": 0.8184, |
| "step": 1592 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00013691782774315653, |
| "loss": 0.8627, |
| "step": 1593 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00013684256652254906, |
| "loss": 0.7043, |
| "step": 1594 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00013676728114810367, |
| "loss": 0.8508, |
| "step": 1595 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00013669197166917723, |
| "loss": 0.6009, |
| "step": 1596 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.0001366166381351423, |
| "loss": 0.7551, |
| "step": 1597 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.0001365412805953872, |
| "loss": 0.8174, |
| "step": 1598 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00013646589909931608, |
| "loss": 0.8675, |
| "step": 1599 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00013639049369634876, |
| "loss": 0.6946, |
| "step": 1600 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.0001363150644359207, |
| "loss": 0.6405, |
| "step": 1601 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00013623961136748295, |
| "loss": 0.7259, |
| "step": 1602 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00013616413454050233, |
| "loss": 0.7548, |
| "step": 1603 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00013608863400446113, |
| "loss": 0.7419, |
| "step": 1604 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00013601310980885714, |
| "loss": 0.7651, |
| "step": 1605 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00013593756200320372, |
| "loss": 0.8546, |
| "step": 1606 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.0001358619906370298, |
| "loss": 0.7113, |
| "step": 1607 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00013578639575987958, |
| "loss": 0.7299, |
| "step": 1608 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.0001357107774213128, |
| "loss": 0.7393, |
| "step": 1609 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.0001356351356709045, |
| "loss": 0.756, |
| "step": 1610 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00013555947055824511, |
| "loss": 0.73, |
| "step": 1611 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.0001354837821329404, |
| "loss": 0.8241, |
| "step": 1612 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00013540807044461135, |
| "loss": 0.701, |
| "step": 1613 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00013533233554289433, |
| "loss": 0.7865, |
| "step": 1614 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00013525657747744072, |
| "loss": 0.6917, |
| "step": 1615 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00013518079629791724, |
| "loss": 0.709, |
| "step": 1616 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.0001351049920540058, |
| "loss": 0.7352, |
| "step": 1617 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00013502916479540326, |
| "loss": 0.8024, |
| "step": 1618 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.0001349533145718217, |
| "loss": 0.7791, |
| "step": 1619 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00013487744143298822, |
| "loss": 0.8598, |
| "step": 1620 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00013480154542864497, |
| "loss": 0.711, |
| "step": 1621 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00013472562660854902, |
| "loss": 0.7014, |
| "step": 1622 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00013464968502247245, |
| "loss": 0.8542, |
| "step": 1623 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.0001345737207202023, |
| "loss": 0.8301, |
| "step": 1624 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.0001344977337515404, |
| "loss": 0.8141, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00013442172416630355, |
| "loss": 0.7161, |
| "step": 1626 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00013434569201432325, |
| "loss": 0.6533, |
| "step": 1627 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.000134269637345446, |
| "loss": 0.732, |
| "step": 1628 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00013419356020953282, |
| "loss": 0.7168, |
| "step": 1629 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.0001341174606564596, |
| "loss": 0.6825, |
| "step": 1630 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00013404133873611696, |
| "loss": 0.7046, |
| "step": 1631 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00013396519449841005, |
| "loss": 0.8678, |
| "step": 1632 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00013388902799325874, |
| "loss": 0.9132, |
| "step": 1633 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00013381283927059752, |
| "loss": 0.7131, |
| "step": 1634 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00013373662838037537, |
| "loss": 0.7573, |
| "step": 1635 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.0001336603953725559, |
| "loss": 0.8548, |
| "step": 1636 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00013358414029711706, |
| "loss": 0.792, |
| "step": 1637 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00013350786320405144, |
| "loss": 0.7557, |
| "step": 1638 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.000133431564143366, |
| "loss": 0.7326, |
| "step": 1639 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00013335524316508208, |
| "loss": 0.776, |
| "step": 1640 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.0001332789003192354, |
| "loss": 0.833, |
| "step": 1641 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.000133202535655876, |
| "loss": 0.7576, |
| "step": 1642 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.0001331261492250683, |
| "loss": 0.764, |
| "step": 1643 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00013304974107689087, |
| "loss": 0.6934, |
| "step": 1644 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00013297331126143667, |
| "loss": 0.6613, |
| "step": 1645 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.0001328968598288127, |
| "loss": 0.8234, |
| "step": 1646 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00013282038682914025, |
| "loss": 0.6504, |
| "step": 1647 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00013274389231255466, |
| "loss": 0.7804, |
| "step": 1648 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.0001326673763292055, |
| "loss": 0.7235, |
| "step": 1649 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00013259083892925633, |
| "loss": 0.6231, |
| "step": 1650 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00013251428016288467, |
| "loss": 0.7425, |
| "step": 1651 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00013243770008028224, |
| "loss": 0.6285, |
| "step": 1652 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.0001323610987316546, |
| "loss": 0.6435, |
| "step": 1653 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00013228447616722128, |
| "loss": 0.656, |
| "step": 1654 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00013220783243721572, |
| "loss": 0.7094, |
| "step": 1655 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00013213116759188523, |
| "loss": 0.5815, |
| "step": 1656 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00013205448168149102, |
| "loss": 0.5334, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.000131977774756308, |
| "loss": 0.7491, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00013190104686662497, |
| "loss": 0.7694, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.0001318242980627444, |
| "loss": 0.8098, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.0001317475283949825, |
| "loss": 0.7439, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00013167073791366915, |
| "loss": 0.697, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00013159392666914788, |
| "loss": 0.7043, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00013151709471177588, |
| "loss": 0.604, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.0001314402420919238, |
| "loss": 0.9331, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.0001313633688599759, |
| "loss": 0.8426, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00013128647506633004, |
| "loss": 0.8403, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00013120956076139746, |
| "loss": 0.6865, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.0001311326259956028, |
| "loss": 0.807, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00013105567081938424, |
| "loss": 0.7211, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.0001309786952831933, |
| "loss": 0.6761, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00013090169943749476, |
| "loss": 0.7429, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00013082468333276682, |
| "loss": 0.6979, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00013074764701950095, |
| "loss": 0.551, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00013067059054820183, |
| "loss": 0.5818, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.0001305935139693874, |
| "loss": 0.6953, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00013051641733358868, |
| "loss": 0.8221, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00013043930069134998, |
| "loss": 0.6278, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00013036216409322864, |
| "loss": 0.941, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00013028500758979506, |
| "loss": 0.7997, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00013020783123163284, |
| "loss": 0.885, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00013013063506933837, |
| "loss": 0.8119, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00013005341915352123, |
| "loss": 0.8996, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00012997618353480377, |
| "loss": 0.8067, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00012989892826382145, |
| "loss": 0.6805, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00012982165339122246, |
| "loss": 0.7776, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.0001297443589676679, |
| "loss": 0.5873, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00012966704504383168, |
| "loss": 0.8729, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00012958971167040053, |
| "loss": 0.8561, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00012951235889807386, |
| "loss": 0.9101, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00012943498677756382, |
| "loss": 0.7356, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00012935759535959528, |
| "loss": 0.78, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00012928018469490578, |
| "loss": 0.8559, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012920275483424538, |
| "loss": 0.6114, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012912530582837682, |
| "loss": 0.6431, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012904783772807533, |
| "loss": 0.7935, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012897035058412874, |
| "loss": 0.8003, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012889284444733722, |
| "loss": 0.7724, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012881531936851355, |
| "loss": 0.8919, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012873777539848283, |
| "loss": 0.7477, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00012866021258808256, |
| "loss": 0.7821, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00012858263098816265, |
| "loss": 0.7828, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00012850503064958528, |
| "loss": 0.8093, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00012842741162322487, |
| "loss": 0.9681, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00012834977395996818, |
| "loss": 0.768, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.0001282721177107141, |
| "loss": 1.0626, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00012819444292637384, |
| "loss": 0.8518, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00012811674965787056, |
| "loss": 0.7713, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.0001280390379561398, |
| "loss": 0.6365, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.0001279613078721289, |
| "loss": 0.7617, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00012788355945679748, |
| "loss": 0.8057, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00012780579276111702, |
| "loss": 0.9079, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00012772800783607111, |
| "loss": 0.7535, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00012765020473265519, |
| "loss": 0.7586, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.0001275723835018767, |
| "loss": 1.0038, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00012749454419475487, |
| "loss": 0.7273, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.0001274166868623209, |
| "loss": 0.7104, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0001273388115556177, |
| "loss": 0.822, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0001272609183257, |
| "loss": 0.7358, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0001271830072236343, |
| "loss": 0.8069, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0001271050783004988, |
| "loss": 0.6804, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00012702713160738345, |
| "loss": 0.8252, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0001269491671953897, |
| "loss": 0.7545, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00012687118511563075, |
| "loss": 1.0889, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0001267931854192313, |
| "loss": 0.7931, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00012671516815732767, |
| "loss": 0.8303, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00012663713338106764, |
| "loss": 0.882, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.0001265590811416105, |
| "loss": 0.7592, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00012648101149012703, |
| "loss": 0.6661, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.0001264029244777993, |
| "loss": 0.7085, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.0001263248201558209, |
| "loss": 0.7846, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.0001262466985753967, |
| "loss": 0.7736, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00012616855978774281, |
| "loss": 0.8949, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00012609040384408684, |
| "loss": 0.8278, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00012601223079566743, |
| "loss": 0.7773, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.0001259340406937345, |
| "loss": 0.9449, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00012585583358954921, |
| "loss": 0.8258, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00012577760953438383, |
| "loss": 0.6397, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00012569936857952167, |
| "loss": 0.7894, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00012562111077625722, |
| "loss": 0.7743, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00012554283617589602, |
| "loss": 0.7292, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00012546454482975454, |
| "loss": 0.7304, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00012538623678916026, |
| "loss": 0.6272, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00012530791210545162, |
| "loss": 0.5835, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.000125229570829978, |
| "loss": 0.6849, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.0001251512130140996, |
| "loss": 0.7808, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00012507283870918747, |
| "loss": 0.6427, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00012499444796662353, |
| "loss": 0.7767, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.0001249160408378004, |
| "loss": 0.7942, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.0001248376173741215, |
| "loss": 0.7944, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00012475917762700092, |
| "loss": 0.7133, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.0001246807216478634, |
| "loss": 0.6308, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.0001246022494881445, |
| "loss": 0.7438, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00012452376119929007, |
| "loss": 0.7291, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00012444525683275688, |
| "loss": 0.7187, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00012436673644001197, |
| "loss": 0.7083, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00012428820007253302, |
| "loss": 0.7486, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00012420964778180814, |
| "loss": 0.6836, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00012413107961933595, |
| "loss": 0.8365, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00012405249563662537, |
| "loss": 0.7618, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00012397389588519574, |
| "loss": 0.8009, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.0001238952804165768, |
| "loss": 0.6492, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00012381664928230846, |
| "loss": 0.8065, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00012373800253394102, |
| "loss": 0.7293, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00012365934022303491, |
| "loss": 0.6439, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00012358066240116092, |
| "loss": 0.7535, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00012350196911989984, |
| "loss": 0.8839, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00012342326043084266, |
| "loss": 0.6529, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00012334453638559057, |
| "loss": 0.8266, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00012326579703575462, |
| "loss": 0.7534, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.0001231870424329561, |
| "loss": 0.7628, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00012310827262882615, |
| "loss": 0.8637, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00012302948767500596, |
| "loss": 0.7958, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.0001229506876231466, |
| "loss": 0.5572, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00012287187252490913, |
| "loss": 0.8441, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00012279304243196436, |
| "loss": 0.805, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.000122714197395993, |
| "loss": 0.6483, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00012263533746868552, |
| "loss": 0.7742, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00012255646270174217, |
| "loss": 0.6571, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00012247757314687297, |
| "loss": 0.7705, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00012239866885579753, |
| "loss": 0.8438, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.0001223197498802452, |
| "loss": 0.5883, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.000122240816271955, |
| "loss": 0.7757, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00012216186808267546, |
| "loss": 0.9212, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00012208290536416463, |
| "loss": 0.7701, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00012200392816819022, |
| "loss": 0.5605, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00012192493654652933, |
| "loss": 0.7841, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00012184593055096854, |
| "loss": 0.7709, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.0001217669102333039, |
| "loss": 0.7195, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00012168787564534078, |
| "loss": 0.584, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00012160882683889396, |
| "loss": 0.6112, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.0001215297638657875, |
| "loss": 0.8635, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00012145068677785478, |
| "loss": 0.9767, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00012137159562693838, |
| "loss": 0.6971, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.0001212924904648902, |
| "loss": 0.8747, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.0001212133713435712, |
| "loss": 0.7379, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.0001211342383148516, |
| "loss": 0.7468, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00012105509143061071, |
| "loss": 0.7411, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00012097593074273687, |
| "loss": 0.702, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00012089675630312754, |
| "loss": 0.7952, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.0001208175681636891, |
| "loss": 0.6818, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00012073836637633705, |
| "loss": 0.8502, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00012065915099299569, |
| "loss": 0.7757, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00012057992206559837, |
| "loss": 0.6375, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00012050067964608724, |
| "loss": 0.8021, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.0001204214237864133, |
| "loss": 0.7282, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00012034215453853637, |
| "loss": 0.6956, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00012026287195442503, |
| "loss": 0.9006, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00012018357608605666, |
| "loss": 0.7684, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00012010426698541728, |
| "loss": 0.659, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00012002494470450163, |
| "loss": 0.8728, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00011994560929531309, |
| "loss": 0.6111, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00011986626080986362, |
| "loss": 0.8388, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00011978689930017379, |
| "loss": 0.7989, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.0001197075248182726, |
| "loss": 0.7575, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00011962813741619777, |
| "loss": 0.7172, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00011954873714599526, |
| "loss": 0.6372, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.0001194693240597196, |
| "loss": 0.837, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00011938989820943369, |
| "loss": 0.6056, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00011931045964720881, |
| "loss": 0.7064, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00011923100842512451, |
| "loss": 0.7395, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00011915154459526875, |
| "loss": 0.6654, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00011907206820973769, |
| "loss": 0.7689, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.0001189925793206357, |
| "loss": 0.6055, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00011891307798007536, |
| "loss": 0.8137, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00011883356424017748, |
| "loss": 0.7405, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00011875403815307091, |
| "loss": 0.779, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00011867449977089265, |
| "loss": 0.9176, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00011859494914578773, |
| "loss": 0.6756, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011851538632990921, |
| "loss": 0.7424, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011843581137541819, |
| "loss": 0.7219, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011835622433448361, |
| "loss": 0.8267, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011827662525928252, |
| "loss": 0.8077, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011819701420199969, |
| "loss": 0.6856, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011811739121482777, |
| "loss": 0.8236, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011803775634996734, |
| "loss": 0.7006, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00011795810965962669, |
| "loss": 0.8708, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00011787845119602183, |
| "loss": 0.8589, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00011779878101137653, |
| "loss": 0.7354, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.0001177190991579223, |
| "loss": 0.6936, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00011763940568789817, |
| "loss": 0.7386, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00011755970065355086, |
| "loss": 0.8982, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00011747998410713475, |
| "loss": 0.8543, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00011740025610091159, |
| "loss": 0.7502, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00011732051668715081, |
| "loss": 0.6577, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00011724076591812918, |
| "loss": 0.6973, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00011716100384613104, |
| "loss": 0.8598, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00011708123052344804, |
| "loss": 0.7492, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.0001170014460023793, |
| "loss": 0.7403, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00011692165033523117, |
| "loss": 0.8144, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.0001168418435743174, |
| "loss": 0.6341, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00011676202577195901, |
| "loss": 0.8301, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00011668219698048419, |
| "loss": 0.821, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00011660235725222835, |
| "loss": 0.8346, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00011652250663953415, |
| "loss": 0.6766, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.0001164426451947513, |
| "loss": 0.6047, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00011636277297023661, |
| "loss": 0.6111, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00011628289001835404, |
| "loss": 0.6734, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00011620299639147453, |
| "loss": 0.7648, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00011612309214197599, |
| "loss": 0.853, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00011604317732224333, |
| "loss": 0.7228, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.0001159632519846684, |
| "loss": 0.806, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00011588331618164991, |
| "loss": 0.786, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00011580336996559343, |
| "loss": 0.6549, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00011572341338891144, |
| "loss": 0.661, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.0001156434465040231, |
| "loss": 0.7965, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00011556346936335438, |
| "loss": 0.6803, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00011548348201933798, |
| "loss": 0.7869, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00011540348452441327, |
| "loss": 0.8671, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011532347693102632, |
| "loss": 0.7158, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011524345929162975, |
| "loss": 0.5902, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011516343165868279, |
| "loss": 0.7299, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011508339408465127, |
| "loss": 0.7172, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011500334662200749, |
| "loss": 0.7513, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011492328932323022, |
| "loss": 0.641, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011484322224080472, |
| "loss": 0.8385, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00011476314542722266, |
| "loss": 0.8149, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011468305893498203, |
| "loss": 0.7774, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011460296281658727, |
| "loss": 0.7731, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011452285712454904, |
| "loss": 0.8845, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011444274191138431, |
| "loss": 0.635, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011436261722961628, |
| "loss": 0.9447, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011428248313177439, |
| "loss": 0.616, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011420233967039422, |
| "loss": 0.6779, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00011412218689801748, |
| "loss": 0.7719, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00011404202486719205, |
| "loss": 0.7335, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.0001139618536304718, |
| "loss": 0.75, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00011388167324041669, |
| "loss": 0.7051, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.0001138014837495926, |
| "loss": 0.8995, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00011372128521057155, |
| "loss": 0.7561, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00011364107767593126, |
| "loss": 0.8329, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00011356086119825553, |
| "loss": 0.8739, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00011348063583013398, |
| "loss": 0.716, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011340040162416197, |
| "loss": 0.8757, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011332015863294076, |
| "loss": 0.8925, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011323990690907733, |
| "loss": 0.7387, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011315964650518438, |
| "loss": 0.7511, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011307937747388034, |
| "loss": 0.7827, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011299909986778918, |
| "loss": 0.6888, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011291881373954065, |
| "loss": 0.9315, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00011283851914177, |
| "loss": 0.7057, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00011275821612711803, |
| "loss": 0.899, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00011267790474823105, |
| "loss": 0.7977, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00011259758505776092, |
| "loss": 0.8183, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00011251725710836489, |
| "loss": 0.7827, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00011243692095270564, |
| "loss": 0.6608, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.0001123565766434512, |
| "loss": 0.6641, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00011227622423327502, |
| "loss": 0.648, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00011219586377485578, |
| "loss": 0.8332, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00011211549532087749, |
| "loss": 0.9994, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00011203511892402938, |
| "loss": 0.6389, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.0001119547346370059, |
| "loss": 0.7792, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00011187434251250666, |
| "loss": 0.6804, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.0001117939426032364, |
| "loss": 0.7547, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00011171353496190498, |
| "loss": 0.7884, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00011163311964122734, |
| "loss": 0.747, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00011155269669392341, |
| "loss": 0.8856, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.0001114722661727182, |
| "loss": 0.8093, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00011139182813034157, |
| "loss": 0.7216, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00011131138261952845, |
| "loss": 0.7484, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.0001112309296930185, |
| "loss": 0.777, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00011115046940355642, |
| "loss": 0.6185, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00011107000180389162, |
| "loss": 0.7116, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00011098952694677829, |
| "loss": 0.8237, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00011090904488497549, |
| "loss": 0.6079, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00011082855567124692, |
| "loss": 0.9299, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00011074805935836096, |
| "loss": 0.8304, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00011066755599909064, |
| "loss": 0.6821, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00011058704564621371, |
| "loss": 0.6618, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.0001105065283525124, |
| "loss": 0.6658, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00011042600417077351, |
| "loss": 0.712, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00011034547315378838, |
| "loss": 0.6362, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00011026493535435282, |
| "loss": 0.6682, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00011018439082526707, |
| "loss": 0.6751, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00011010383961933581, |
| "loss": 0.7213, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00011002328178936811, |
| "loss": 0.8638, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00010994271738817734, |
| "loss": 0.7483, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00010986214646858115, |
| "loss": 0.9325, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00010978156908340158, |
| "loss": 0.7541, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00010970098528546481, |
| "loss": 0.8097, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00010962039512760128, |
| "loss": 0.7014, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010953979866264548, |
| "loss": 0.7915, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010945919594343624, |
| "loss": 0.8108, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010937858702281631, |
| "loss": 0.7255, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010929797195363259, |
| "loss": 0.7637, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010921735078873598, |
| "loss": 0.6124, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010913672358098143, |
| "loss": 0.6483, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010905609038322779, |
| "loss": 0.6482, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00010897545124833783, |
| "loss": 0.7312, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.0001088948062291783, |
| "loss": 0.7522, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.0001088141553786197, |
| "loss": 0.8263, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.0001087334987495364, |
| "loss": 0.6402, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.0001086528363948066, |
| "loss": 0.7303, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00010857216836731222, |
| "loss": 0.8417, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00010849149471993882, |
| "loss": 0.7552, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00010841081550557578, |
| "loss": 0.7288, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00010833013077711607, |
| "loss": 0.7234, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00010824944058745623, |
| "loss": 0.8175, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00010816874498949643, |
| "loss": 0.7855, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00010808804403614043, |
| "loss": 0.8037, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00010800733778029542, |
| "loss": 0.7427, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00010792662627487207, |
| "loss": 0.7333, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.0001078459095727845, |
| "loss": 0.6383, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00010776518772695034, |
| "loss": 0.7577, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00010768446079029044, |
| "loss": 0.8111, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00010760372881572905, |
| "loss": 0.7905, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00010752299185619377, |
| "loss": 0.7351, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.0001074422499646154, |
| "loss": 0.6686, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00010736150319392802, |
| "loss": 0.7804, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.0001072807515970688, |
| "loss": 0.9291, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00010719999522697828, |
| "loss": 0.6902, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00010711923413659995, |
| "loss": 0.6258, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00010703846837888045, |
| "loss": 0.7622, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010695769800676949, |
| "loss": 0.7381, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010687692307321984, |
| "loss": 0.7515, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010679614363118717, |
| "loss": 0.8314, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010671535973363018, |
| "loss": 0.9125, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010663457143351044, |
| "loss": 0.6826, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010655377878379247, |
| "loss": 0.8385, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010647298183744359, |
| "loss": 0.703, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00010639218064743392, |
| "loss": 0.7842, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00010631137526673647, |
| "loss": 0.7024, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00010623056574832687, |
| "loss": 0.6624, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.0001061497521451835, |
| "loss": 0.7331, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00010606893451028743, |
| "loss": 0.7104, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00010598811289662243, |
| "loss": 0.6324, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00010590728735717479, |
| "loss": 0.7158, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00010582645794493337, |
| "loss": 0.8078, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00010574562471288969, |
| "loss": 0.759, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010566478771403763, |
| "loss": 0.6603, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010558394700137362, |
| "loss": 0.6431, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010550310262789649, |
| "loss": 0.6772, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010542225464660749, |
| "loss": 0.8119, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010534140311051026, |
| "loss": 0.8, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010526054807261067, |
| "loss": 0.8161, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010517968958591705, |
| "loss": 0.7785, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00010509882770343983, |
| "loss": 0.7712, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010501796247819175, |
| "loss": 0.7116, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010493709396318771, |
| "loss": 0.7456, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010485622221144484, |
| "loss": 0.68, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010477534727598229, |
| "loss": 0.6633, |
| "step": 2001 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010469446920982129, |
| "loss": 0.6131, |
| "step": 2002 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010461358806598527, |
| "loss": 0.7044, |
| "step": 2003 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010453270389749957, |
| "loss": 0.6119, |
| "step": 2004 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00010445181675739144, |
| "loss": 0.7501, |
| "step": 2005 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010437092669869024, |
| "loss": 0.7921, |
| "step": 2006 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010429003377442715, |
| "loss": 0.9771, |
| "step": 2007 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010420913803763521, |
| "loss": 0.8328, |
| "step": 2008 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010412823954134938, |
| "loss": 0.8843, |
| "step": 2009 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010404733833860639, |
| "loss": 0.765, |
| "step": 2010 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010396643448244472, |
| "loss": 0.7074, |
| "step": 2011 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010388552802590462, |
| "loss": 0.8087, |
| "step": 2012 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00010380461902202801, |
| "loss": 0.7042, |
| "step": 2013 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010372370752385853, |
| "loss": 0.5895, |
| "step": 2014 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010364279358444144, |
| "loss": 0.7156, |
| "step": 2015 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010356187725682359, |
| "loss": 0.7299, |
| "step": 2016 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010348095859405338, |
| "loss": 0.7607, |
| "step": 2017 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010340003764918078, |
| "loss": 0.7795, |
| "step": 2018 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010331911447525724, |
| "loss": 0.6723, |
| "step": 2019 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010323818912533561, |
| "loss": 0.9001, |
| "step": 2020 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00010315726165247028, |
| "loss": 0.5957, |
| "step": 2021 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00010307633210971697, |
| "loss": 0.7042, |
| "step": 2022 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00010299540055013272, |
| "loss": 0.631, |
| "step": 2023 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00010291446702677599, |
| "loss": 0.5871, |
| "step": 2024 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00010283353159270643, |
| "loss": 0.8197, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.000102752594300985, |
| "loss": 0.8031, |
| "step": 2026 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00010267165520467386, |
| "loss": 0.7761, |
| "step": 2027 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00010259071435683636, |
| "loss": 0.7135, |
| "step": 2028 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00010250977181053698, |
| "loss": 0.7715, |
| "step": 2029 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00010242882761884131, |
| "loss": 0.8672, |
| "step": 2030 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00010234788183481607, |
| "loss": 0.9106, |
| "step": 2031 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.000102266934511529, |
| "loss": 0.8094, |
| "step": 2032 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00010218598570204883, |
| "loss": 0.8561, |
| "step": 2033 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00010210503545944521, |
| "loss": 0.7685, |
| "step": 2034 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00010202408383678888, |
| "loss": 0.7284, |
| "step": 2035 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00010194313088715135, |
| "loss": 0.7851, |
| "step": 2036 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00010186217666360507, |
| "loss": 0.5829, |
| "step": 2037 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00010178122121922324, |
| "loss": 0.5843, |
| "step": 2038 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00010170026460708, |
| "loss": 0.6555, |
| "step": 2039 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00010161930688025017, |
| "loss": 0.6592, |
| "step": 2040 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00010153834809180925, |
| "loss": 0.7806, |
| "step": 2041 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00010145738829483353, |
| "loss": 0.6187, |
| "step": 2042 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00010137642754239995, |
| "loss": 0.7172, |
| "step": 2043 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00010129546588758605, |
| "loss": 0.6033, |
| "step": 2044 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.0001012145033834699, |
| "loss": 0.8653, |
| "step": 2045 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00010113354008313025, |
| "loss": 0.6914, |
| "step": 2046 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.0001010525760396463, |
| "loss": 0.8284, |
| "step": 2047 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00010097161130609773, |
| "loss": 0.7278, |
| "step": 2048 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00010089064593556474, |
| "loss": 0.8752, |
| "step": 2049 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00010080967998112787, |
| "loss": 0.6988, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00010072871349586808, |
| "loss": 0.671, |
| "step": 2051 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00010064774653286661, |
| "loss": 0.7689, |
| "step": 2052 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00010056677914520518, |
| "loss": 0.9217, |
| "step": 2053 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00010048581138596563, |
| "loss": 0.8251, |
| "step": 2054 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00010040484330823006, |
| "loss": 0.5996, |
| "step": 2055 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00010032387496508089, |
| "loss": 0.8445, |
| "step": 2056 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.0001002429064096006, |
| "loss": 0.5986, |
| "step": 2057 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00010016193769487181, |
| "loss": 0.8224, |
| "step": 2058 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00010008096887397733, |
| "loss": 0.7757, |
| "step": 2059 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.0001, |
| "loss": 0.7436, |
| "step": 2060 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 9.991903112602269e-05, |
| "loss": 0.6999, |
| "step": 2061 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.98380623051282e-05, |
| "loss": 0.5878, |
| "step": 2062 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.975709359039944e-05, |
| "loss": 0.7301, |
| "step": 2063 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.967612503491914e-05, |
| "loss": 0.7549, |
| "step": 2064 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.959515669176996e-05, |
| "loss": 0.7683, |
| "step": 2065 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.95141886140344e-05, |
| "loss": 0.7435, |
| "step": 2066 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.943322085479484e-05, |
| "loss": 0.7437, |
| "step": 2067 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.935225346713341e-05, |
| "loss": 0.7077, |
| "step": 2068 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.927128650413196e-05, |
| "loss": 0.7834, |
| "step": 2069 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.919032001887215e-05, |
| "loss": 0.6674, |
| "step": 2070 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.910935406443528e-05, |
| "loss": 0.6724, |
| "step": 2071 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.902838869390229e-05, |
| "loss": 0.6665, |
| "step": 2072 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.894742396035371e-05, |
| "loss": 0.8993, |
| "step": 2073 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.886645991686976e-05, |
| "loss": 0.8569, |
| "step": 2074 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.878549661653012e-05, |
| "loss": 0.8687, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.870453411241399e-05, |
| "loss": 0.7592, |
| "step": 2076 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.862357245760006e-05, |
| "loss": 0.6714, |
| "step": 2077 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.854261170516648e-05, |
| "loss": 0.754, |
| "step": 2078 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.846165190819078e-05, |
| "loss": 0.7679, |
| "step": 2079 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.838069311974986e-05, |
| "loss": 0.6928, |
| "step": 2080 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.829973539292001e-05, |
| "loss": 0.6997, |
| "step": 2081 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.821877878077678e-05, |
| "loss": 0.8398, |
| "step": 2082 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.813782333639496e-05, |
| "loss": 0.6465, |
| "step": 2083 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.805686911284868e-05, |
| "loss": 0.4927, |
| "step": 2084 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.797591616321114e-05, |
| "loss": 0.8897, |
| "step": 2085 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.789496454055482e-05, |
| "loss": 0.6658, |
| "step": 2086 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.781401429795119e-05, |
| "loss": 0.6872, |
| "step": 2087 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.7733065488471e-05, |
| "loss": 0.783, |
| "step": 2088 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.765211816518394e-05, |
| "loss": 0.7792, |
| "step": 2089 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.757117238115871e-05, |
| "loss": 0.8266, |
| "step": 2090 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.749022818946306e-05, |
| "loss": 0.9758, |
| "step": 2091 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.740928564316368e-05, |
| "loss": 0.6612, |
| "step": 2092 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.732834479532619e-05, |
| "loss": 0.8427, |
| "step": 2093 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.724740569901503e-05, |
| "loss": 0.6651, |
| "step": 2094 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.716646840729361e-05, |
| "loss": 0.6584, |
| "step": 2095 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.708553297322406e-05, |
| "loss": 0.7825, |
| "step": 2096 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.70045994498673e-05, |
| "loss": 0.6608, |
| "step": 2097 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.692366789028307e-05, |
| "loss": 0.6443, |
| "step": 2098 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.684273834752976e-05, |
| "loss": 0.7893, |
| "step": 2099 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.676181087466444e-05, |
| "loss": 0.6082, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.668088552474281e-05, |
| "loss": 0.7754, |
| "step": 2101 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.659996235081926e-05, |
| "loss": 0.6626, |
| "step": 2102 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.651904140594667e-05, |
| "loss": 0.7934, |
| "step": 2103 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.643812274317644e-05, |
| "loss": 0.7163, |
| "step": 2104 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.635720641555858e-05, |
| "loss": 0.7113, |
| "step": 2105 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.627629247614152e-05, |
| "loss": 0.6598, |
| "step": 2106 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.619538097797205e-05, |
| "loss": 0.6361, |
| "step": 2107 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.611447197409543e-05, |
| "loss": 0.6359, |
| "step": 2108 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.603356551755533e-05, |
| "loss": 0.654, |
| "step": 2109 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.595266166139366e-05, |
| "loss": 0.8619, |
| "step": 2110 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.587176045865064e-05, |
| "loss": 0.8212, |
| "step": 2111 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.579086196236482e-05, |
| "loss": 0.8254, |
| "step": 2112 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.570996622557291e-05, |
| "loss": 0.6412, |
| "step": 2113 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.562907330130981e-05, |
| "loss": 0.636, |
| "step": 2114 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.554818324260859e-05, |
| "loss": 0.6904, |
| "step": 2115 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.54672961025005e-05, |
| "loss": 0.7011, |
| "step": 2116 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.538641193401477e-05, |
| "loss": 0.6656, |
| "step": 2117 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.530553079017872e-05, |
| "loss": 0.6085, |
| "step": 2118 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.522465272401777e-05, |
| "loss": 0.6016, |
| "step": 2119 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.514377778855521e-05, |
| "loss": 0.7361, |
| "step": 2120 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.506290603681229e-05, |
| "loss": 0.7609, |
| "step": 2121 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.498203752180826e-05, |
| "loss": 0.6107, |
| "step": 2122 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.490117229656018e-05, |
| "loss": 0.8273, |
| "step": 2123 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.482031041408296e-05, |
| "loss": 0.9795, |
| "step": 2124 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.473945192738933e-05, |
| "loss": 0.6879, |
| "step": 2125 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.465859688948977e-05, |
| "loss": 0.7663, |
| "step": 2126 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.45777453533925e-05, |
| "loss": 0.844, |
| "step": 2127 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.449689737210352e-05, |
| "loss": 0.8779, |
| "step": 2128 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.441605299862639e-05, |
| "loss": 0.7234, |
| "step": 2129 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.433521228596237e-05, |
| "loss": 0.713, |
| "step": 2130 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.42543752871103e-05, |
| "loss": 0.7439, |
| "step": 2131 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.417354205506663e-05, |
| "loss": 0.8104, |
| "step": 2132 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.409271264282524e-05, |
| "loss": 0.7732, |
| "step": 2133 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.401188710337758e-05, |
| "loss": 0.7885, |
| "step": 2134 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.393106548971256e-05, |
| "loss": 0.8051, |
| "step": 2135 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.385024785481654e-05, |
| "loss": 0.6853, |
| "step": 2136 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.376943425167314e-05, |
| "loss": 0.7504, |
| "step": 2137 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.368862473326354e-05, |
| "loss": 0.7567, |
| "step": 2138 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.360781935256607e-05, |
| "loss": 0.7205, |
| "step": 2139 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.352701816255643e-05, |
| "loss": 0.8081, |
| "step": 2140 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.344622121620753e-05, |
| "loss": 0.8056, |
| "step": 2141 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.336542856648956e-05, |
| "loss": 0.847, |
| "step": 2142 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.328464026636984e-05, |
| "loss": 0.8909, |
| "step": 2143 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.320385636881283e-05, |
| "loss": 0.7299, |
| "step": 2144 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.312307692678017e-05, |
| "loss": 0.7915, |
| "step": 2145 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.304230199323049e-05, |
| "loss": 0.8418, |
| "step": 2146 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.296153162111956e-05, |
| "loss": 0.7666, |
| "step": 2147 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.288076586340006e-05, |
| "loss": 0.6936, |
| "step": 2148 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.280000477302173e-05, |
| "loss": 0.7963, |
| "step": 2149 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.27192484029312e-05, |
| "loss": 0.7948, |
| "step": 2150 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.2638496806072e-05, |
| "loss": 0.7703, |
| "step": 2151 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.255775003538462e-05, |
| "loss": 0.8168, |
| "step": 2152 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.247700814380624e-05, |
| "loss": 0.6834, |
| "step": 2153 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.239627118427096e-05, |
| "loss": 0.7005, |
| "step": 2154 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.231553920970958e-05, |
| "loss": 0.6284, |
| "step": 2155 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.223481227304968e-05, |
| "loss": 0.7647, |
| "step": 2156 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.215409042721552e-05, |
| "loss": 0.718, |
| "step": 2157 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.207337372512796e-05, |
| "loss": 0.7383, |
| "step": 2158 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.199266221970462e-05, |
| "loss": 0.903, |
| "step": 2159 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.19119559638596e-05, |
| "loss": 0.7483, |
| "step": 2160 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.183125501050358e-05, |
| "loss": 0.7185, |
| "step": 2161 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.175055941254379e-05, |
| "loss": 0.5695, |
| "step": 2162 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.166986922288397e-05, |
| "loss": 0.7706, |
| "step": 2163 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.158918449442423e-05, |
| "loss": 0.8572, |
| "step": 2164 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.150850528006119e-05, |
| "loss": 0.5849, |
| "step": 2165 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.142783163268781e-05, |
| "loss": 0.783, |
| "step": 2166 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.13471636051934e-05, |
| "loss": 0.7248, |
| "step": 2167 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.126650125046361e-05, |
| "loss": 0.5595, |
| "step": 2168 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.118584462138031e-05, |
| "loss": 0.8597, |
| "step": 2169 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.110519377082172e-05, |
| "loss": 0.7005, |
| "step": 2170 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.102454875166218e-05, |
| "loss": 0.7486, |
| "step": 2171 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.094390961677223e-05, |
| "loss": 0.7958, |
| "step": 2172 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.086327641901859e-05, |
| "loss": 0.6535, |
| "step": 2173 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.078264921126404e-05, |
| "loss": 0.7787, |
| "step": 2174 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.070202804636745e-05, |
| "loss": 0.6842, |
| "step": 2175 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.062141297718371e-05, |
| "loss": 0.8536, |
| "step": 2176 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.054080405656379e-05, |
| "loss": 0.9343, |
| "step": 2177 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.046020133735454e-05, |
| "loss": 0.7542, |
| "step": 2178 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.037960487239876e-05, |
| "loss": 0.7554, |
| "step": 2179 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.02990147145352e-05, |
| "loss": 0.7821, |
| "step": 2180 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.021843091659843e-05, |
| "loss": 0.6783, |
| "step": 2181 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.013785353141886e-05, |
| "loss": 0.6486, |
| "step": 2182 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.005728261182268e-05, |
| "loss": 0.7589, |
| "step": 2183 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 8.997671821063191e-05, |
| "loss": 0.794, |
| "step": 2184 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 8.98961603806642e-05, |
| "loss": 0.7155, |
| "step": 2185 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 8.981560917473293e-05, |
| "loss": 1.0281, |
| "step": 2186 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 8.97350646456472e-05, |
| "loss": 0.8645, |
| "step": 2187 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 8.965452684621164e-05, |
| "loss": 0.888, |
| "step": 2188 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 8.957399582922653e-05, |
| "loss": 0.7091, |
| "step": 2189 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.949347164748762e-05, |
| "loss": 0.7683, |
| "step": 2190 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.94129543537863e-05, |
| "loss": 0.7165, |
| "step": 2191 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.933244400090937e-05, |
| "loss": 0.6682, |
| "step": 2192 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.925194064163906e-05, |
| "loss": 0.6587, |
| "step": 2193 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.91714443287531e-05, |
| "loss": 0.6632, |
| "step": 2194 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.909095511502452e-05, |
| "loss": 0.7441, |
| "step": 2195 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.901047305322172e-05, |
| "loss": 0.9829, |
| "step": 2196 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.892999819610841e-05, |
| "loss": 0.8008, |
| "step": 2197 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.88495305964436e-05, |
| "loss": 0.6156, |
| "step": 2198 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.876907030698151e-05, |
| "loss": 0.5607, |
| "step": 2199 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.868861738047158e-05, |
| "loss": 0.7847, |
| "step": 2200 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.860817186965844e-05, |
| "loss": 0.7385, |
| "step": 2201 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.852773382728183e-05, |
| "loss": 0.8593, |
| "step": 2202 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.844730330607661e-05, |
| "loss": 0.732, |
| "step": 2203 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.836688035877267e-05, |
| "loss": 0.7723, |
| "step": 2204 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.828646503809504e-05, |
| "loss": 0.8353, |
| "step": 2205 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.820605739676363e-05, |
| "loss": 0.6978, |
| "step": 2206 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.812565748749336e-05, |
| "loss": 0.6289, |
| "step": 2207 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.804526536299413e-05, |
| "loss": 0.7798, |
| "step": 2208 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.796488107597065e-05, |
| "loss": 0.6638, |
| "step": 2209 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.788450467912255e-05, |
| "loss": 0.6794, |
| "step": 2210 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.780413622514424e-05, |
| "loss": 0.74, |
| "step": 2211 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.772377576672502e-05, |
| "loss": 0.6647, |
| "step": 2212 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.764342335654883e-05, |
| "loss": 0.8224, |
| "step": 2213 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.756307904729439e-05, |
| "loss": 0.6551, |
| "step": 2214 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.748274289163514e-05, |
| "loss": 0.7631, |
| "step": 2215 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.740241494223911e-05, |
| "loss": 0.7263, |
| "step": 2216 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.732209525176898e-05, |
| "loss": 0.7453, |
| "step": 2217 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.724178387288201e-05, |
| "loss": 0.8067, |
| "step": 2218 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.716148085823004e-05, |
| "loss": 0.7468, |
| "step": 2219 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.70811862604594e-05, |
| "loss": 0.6288, |
| "step": 2220 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.700090013221085e-05, |
| "loss": 0.7844, |
| "step": 2221 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.692062252611973e-05, |
| "loss": 0.8089, |
| "step": 2222 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.684035349481567e-05, |
| "loss": 0.7792, |
| "step": 2223 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.676009309092272e-05, |
| "loss": 0.8764, |
| "step": 2224 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.667984136705928e-05, |
| "loss": 0.7154, |
| "step": 2225 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.659959837583807e-05, |
| "loss": 0.7216, |
| "step": 2226 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.651936416986607e-05, |
| "loss": 0.6194, |
| "step": 2227 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.643913880174448e-05, |
| "loss": 0.6728, |
| "step": 2228 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.635892232406877e-05, |
| "loss": 0.6986, |
| "step": 2229 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.627871478942851e-05, |
| "loss": 0.7309, |
| "step": 2230 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.619851625040743e-05, |
| "loss": 0.7372, |
| "step": 2231 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.611832675958336e-05, |
| "loss": 0.728, |
| "step": 2232 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.603814636952824e-05, |
| "loss": 0.75, |
| "step": 2233 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.595797513280799e-05, |
| "loss": 0.716, |
| "step": 2234 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.587781310198255e-05, |
| "loss": 0.7283, |
| "step": 2235 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.579766032960582e-05, |
| "loss": 0.7685, |
| "step": 2236 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.571751686822566e-05, |
| "loss": 0.6708, |
| "step": 2237 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.563738277038377e-05, |
| "loss": 0.7838, |
| "step": 2238 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.555725808861573e-05, |
| "loss": 0.8678, |
| "step": 2239 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.5477142875451e-05, |
| "loss": 0.8324, |
| "step": 2240 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.539703718341277e-05, |
| "loss": 0.8329, |
| "step": 2241 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.531694106501797e-05, |
| "loss": 0.6748, |
| "step": 2242 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.523685457277736e-05, |
| "loss": 0.7081, |
| "step": 2243 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.515677775919527e-05, |
| "loss": 0.6422, |
| "step": 2244 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.507671067676979e-05, |
| "loss": 0.5794, |
| "step": 2245 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.499665337799254e-05, |
| "loss": 0.7236, |
| "step": 2246 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.491660591534872e-05, |
| "loss": 0.7952, |
| "step": 2247 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.48365683413172e-05, |
| "loss": 0.716, |
| "step": 2248 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.475654070837026e-05, |
| "loss": 0.7204, |
| "step": 2249 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.467652306897369e-05, |
| "loss": 0.7459, |
| "step": 2250 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.459651547558671e-05, |
| "loss": 0.7864, |
| "step": 2251 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.451651798066203e-05, |
| "loss": 0.6773, |
| "step": 2252 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.443653063664563e-05, |
| "loss": 0.7429, |
| "step": 2253 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.435655349597689e-05, |
| "loss": 0.6312, |
| "step": 2254 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.427658661108857e-05, |
| "loss": 0.7869, |
| "step": 2255 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.419663003440657e-05, |
| "loss": 0.7114, |
| "step": 2256 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.411668381835012e-05, |
| "loss": 0.7666, |
| "step": 2257 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.403674801533161e-05, |
| "loss": 0.8254, |
| "step": 2258 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.395682267775668e-05, |
| "loss": 0.7892, |
| "step": 2259 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.387690785802402e-05, |
| "loss": 0.7308, |
| "step": 2260 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.379700360852548e-05, |
| "loss": 0.6002, |
| "step": 2261 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.371710998164594e-05, |
| "loss": 0.9391, |
| "step": 2262 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.36372270297634e-05, |
| "loss": 0.6838, |
| "step": 2263 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.355735480524874e-05, |
| "loss": 0.8056, |
| "step": 2264 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.347749336046586e-05, |
| "loss": 0.7218, |
| "step": 2265 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.339764274777165e-05, |
| "loss": 0.6961, |
| "step": 2266 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.331780301951583e-05, |
| "loss": 0.6629, |
| "step": 2267 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.323797422804099e-05, |
| "loss": 0.6305, |
| "step": 2268 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.31581564256826e-05, |
| "loss": 0.6236, |
| "step": 2269 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.307834966476884e-05, |
| "loss": 0.8001, |
| "step": 2270 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.299855399762073e-05, |
| "loss": 0.6721, |
| "step": 2271 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.291876947655196e-05, |
| "loss": 0.7006, |
| "step": 2272 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.283899615386898e-05, |
| "loss": 0.8546, |
| "step": 2273 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.275923408187086e-05, |
| "loss": 0.7405, |
| "step": 2274 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.267948331284923e-05, |
| "loss": 0.8515, |
| "step": 2275 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.259974389908842e-05, |
| "loss": 0.8379, |
| "step": 2276 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.252001589286529e-05, |
| "loss": 0.8405, |
| "step": 2277 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.244029934644916e-05, |
| "loss": 0.8263, |
| "step": 2278 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.236059431210186e-05, |
| "loss": 0.7541, |
| "step": 2279 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.228090084207774e-05, |
| "loss": 0.796, |
| "step": 2280 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.220121898862348e-05, |
| "loss": 0.7715, |
| "step": 2281 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.212154880397818e-05, |
| "loss": 0.714, |
| "step": 2282 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.204189034037332e-05, |
| "loss": 0.5867, |
| "step": 2283 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.196224365003267e-05, |
| "loss": 0.8508, |
| "step": 2284 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.188260878517224e-05, |
| "loss": 0.8511, |
| "step": 2285 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.180298579800035e-05, |
| "loss": 0.7545, |
| "step": 2286 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.17233747407175e-05, |
| "loss": 0.7715, |
| "step": 2287 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.16437756655164e-05, |
| "loss": 0.6578, |
| "step": 2288 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.156418862458184e-05, |
| "loss": 0.6742, |
| "step": 2289 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.14846136700908e-05, |
| "loss": 0.7058, |
| "step": 2290 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.140505085421229e-05, |
| "loss": 0.8087, |
| "step": 2291 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.132550022910737e-05, |
| "loss": 0.5936, |
| "step": 2292 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.12459618469291e-05, |
| "loss": 0.684, |
| "step": 2293 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.116643575982253e-05, |
| "loss": 0.8786, |
| "step": 2294 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.108692201992465e-05, |
| "loss": 1.0615, |
| "step": 2295 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.100742067936431e-05, |
| "loss": 0.7618, |
| "step": 2296 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.092793179026232e-05, |
| "loss": 0.8744, |
| "step": 2297 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.084845540473126e-05, |
| "loss": 0.877, |
| "step": 2298 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.076899157487551e-05, |
| "loss": 0.6661, |
| "step": 2299 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.068954035279121e-05, |
| "loss": 0.781, |
| "step": 2300 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.061010179056633e-05, |
| "loss": 0.6117, |
| "step": 2301 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.053067594028044e-05, |
| "loss": 0.6215, |
| "step": 2302 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.045126285400476e-05, |
| "loss": 0.7328, |
| "step": 2303 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.037186258380226e-05, |
| "loss": 0.8742, |
| "step": 2304 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.02924751817274e-05, |
| "loss": 0.8059, |
| "step": 2305 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.021310069982624e-05, |
| "loss": 0.7726, |
| "step": 2306 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.013373919013639e-05, |
| "loss": 0.8052, |
| "step": 2307 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.005439070468692e-05, |
| "loss": 0.7897, |
| "step": 2308 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 7.99750552954984e-05, |
| "loss": 0.7599, |
| "step": 2309 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.989573301458273e-05, |
| "loss": 0.9599, |
| "step": 2310 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.981642391394337e-05, |
| "loss": 0.695, |
| "step": 2311 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.973712804557501e-05, |
| "loss": 0.7415, |
| "step": 2312 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.965784546146366e-05, |
| "loss": 0.8911, |
| "step": 2313 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.957857621358674e-05, |
| "loss": 0.7343, |
| "step": 2314 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.949932035391278e-05, |
| "loss": 0.7423, |
| "step": 2315 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.942007793440164e-05, |
| "loss": 0.8168, |
| "step": 2316 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.934084900700432e-05, |
| "loss": 0.6335, |
| "step": 2317 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.926163362366299e-05, |
| "loss": 0.8912, |
| "step": 2318 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.918243183631093e-05, |
| "loss": 0.6991, |
| "step": 2319 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.91032436968725e-05, |
| "loss": 0.7096, |
| "step": 2320 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.902406925726315e-05, |
| "loss": 0.7495, |
| "step": 2321 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.894490856938932e-05, |
| "loss": 0.8511, |
| "step": 2322 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.886576168514841e-05, |
| "loss": 0.7776, |
| "step": 2323 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.878662865642881e-05, |
| "loss": 0.6397, |
| "step": 2324 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.870750953510984e-05, |
| "loss": 0.7288, |
| "step": 2325 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.862840437306165e-05, |
| "loss": 0.7668, |
| "step": 2326 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.854931322214525e-05, |
| "loss": 0.7353, |
| "step": 2327 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.847023613421251e-05, |
| "loss": 0.7889, |
| "step": 2328 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.839117316110608e-05, |
| "loss": 0.7718, |
| "step": 2329 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.831212435465924e-05, |
| "loss": 0.6946, |
| "step": 2330 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.82330897666961e-05, |
| "loss": 0.7522, |
| "step": 2331 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.815406944903147e-05, |
| "loss": 0.6726, |
| "step": 2332 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 7.807506345347071e-05, |
| "loss": 0.7787, |
| "step": 2333 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.799607183180982e-05, |
| "loss": 0.6878, |
| "step": 2334 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.79170946358354e-05, |
| "loss": 0.6479, |
| "step": 2335 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.78381319173246e-05, |
| "loss": 0.731, |
| "step": 2336 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.775918372804504e-05, |
| "loss": 0.7796, |
| "step": 2337 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.768025011975481e-05, |
| "loss": 0.7546, |
| "step": 2338 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.760133114420252e-05, |
| "loss": 0.6705, |
| "step": 2339 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.75224268531271e-05, |
| "loss": 0.8177, |
| "step": 2340 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 7.744353729825786e-05, |
| "loss": 0.7305, |
| "step": 2341 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.736466253131452e-05, |
| "loss": 0.7266, |
| "step": 2342 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.728580260400705e-05, |
| "loss": 0.736, |
| "step": 2343 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.72069575680357e-05, |
| "loss": 0.6585, |
| "step": 2344 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.71281274750909e-05, |
| "loss": 0.7782, |
| "step": 2345 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.704931237685342e-05, |
| "loss": 0.7217, |
| "step": 2346 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.697051232499409e-05, |
| "loss": 0.8326, |
| "step": 2347 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.689172737117389e-05, |
| "loss": 0.6534, |
| "step": 2348 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 7.681295756704395e-05, |
| "loss": 0.704, |
| "step": 2349 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.673420296424541e-05, |
| "loss": 0.8751, |
| "step": 2350 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.66554636144095e-05, |
| "loss": 0.7862, |
| "step": 2351 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.657673956915735e-05, |
| "loss": 0.5698, |
| "step": 2352 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.64980308801002e-05, |
| "loss": 0.6797, |
| "step": 2353 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.641933759883913e-05, |
| "loss": 0.7571, |
| "step": 2354 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.634065977696511e-05, |
| "loss": 0.7094, |
| "step": 2355 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.626199746605903e-05, |
| "loss": 0.8548, |
| "step": 2356 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 7.618335071769158e-05, |
| "loss": 0.7919, |
| "step": 2357 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.610471958342326e-05, |
| "loss": 0.7596, |
| "step": 2358 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.602610411480427e-05, |
| "loss": 0.7547, |
| "step": 2359 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.594750436337467e-05, |
| "loss": 0.794, |
| "step": 2360 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.586892038066408e-05, |
| "loss": 0.7217, |
| "step": 2361 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.579035221819187e-05, |
| "loss": 0.7139, |
| "step": 2362 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.571179992746702e-05, |
| "loss": 0.7993, |
| "step": 2363 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.563326355998803e-05, |
| "loss": 0.8273, |
| "step": 2364 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 7.555474316724313e-05, |
| "loss": 0.8638, |
| "step": 2365 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.547623880070993e-05, |
| "loss": 0.5596, |
| "step": 2366 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.539775051185551e-05, |
| "loss": 0.7809, |
| "step": 2367 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.531927835213656e-05, |
| "loss": 0.6814, |
| "step": 2368 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.52408223729991e-05, |
| "loss": 0.6972, |
| "step": 2369 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.516238262587851e-05, |
| "loss": 0.7944, |
| "step": 2370 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.508395916219959e-05, |
| "loss": 0.7282, |
| "step": 2371 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.500555203337647e-05, |
| "loss": 0.7314, |
| "step": 2372 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.492716129081253e-05, |
| "loss": 0.7154, |
| "step": 2373 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.48487869859004e-05, |
| "loss": 0.8379, |
| "step": 2374 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.4770429170022e-05, |
| "loss": 0.7518, |
| "step": 2375 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.469208789454838e-05, |
| "loss": 0.7781, |
| "step": 2376 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.461376321083975e-05, |
| "loss": 0.7672, |
| "step": 2377 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.453545517024547e-05, |
| "loss": 0.6594, |
| "step": 2378 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.445716382410398e-05, |
| "loss": 0.8446, |
| "step": 2379 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.437888922374276e-05, |
| "loss": 0.9074, |
| "step": 2380 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 7.430063142047831e-05, |
| "loss": 0.6465, |
| "step": 2381 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.422239046561619e-05, |
| "loss": 0.6926, |
| "step": 2382 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.41441664104508e-05, |
| "loss": 0.6466, |
| "step": 2383 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.40659593062655e-05, |
| "loss": 0.7887, |
| "step": 2384 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.398776920433258e-05, |
| "loss": 0.777, |
| "step": 2385 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.390959615591316e-05, |
| "loss": 0.6326, |
| "step": 2386 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.383144021225717e-05, |
| "loss": 0.8081, |
| "step": 2387 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.37533014246033e-05, |
| "loss": 0.9853, |
| "step": 2388 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 7.36751798441791e-05, |
| "loss": 0.7513, |
| "step": 2389 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.35970755222007e-05, |
| "loss": 0.761, |
| "step": 2390 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.351898850987299e-05, |
| "loss": 0.6888, |
| "step": 2391 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.344091885838948e-05, |
| "loss": 0.8088, |
| "step": 2392 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.336286661893237e-05, |
| "loss": 0.7203, |
| "step": 2393 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.328483184267235e-05, |
| "loss": 0.6644, |
| "step": 2394 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.320681458076871e-05, |
| "loss": 0.9015, |
| "step": 2395 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.312881488436927e-05, |
| "loss": 0.7852, |
| "step": 2396 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.305083280461032e-05, |
| "loss": 0.5865, |
| "step": 2397 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.297286839261658e-05, |
| "loss": 0.7166, |
| "step": 2398 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.289492169950118e-05, |
| "loss": 0.7402, |
| "step": 2399 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.281699277636572e-05, |
| "loss": 0.7876, |
| "step": 2400 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.273908167430004e-05, |
| "loss": 0.7534, |
| "step": 2401 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.266118844438235e-05, |
| "loss": 0.9019, |
| "step": 2402 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.258331313767914e-05, |
| "loss": 0.7311, |
| "step": 2403 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.250545580524515e-05, |
| "loss": 0.9113, |
| "step": 2404 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.242761649812335e-05, |
| "loss": 0.8763, |
| "step": 2405 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.234979526734482e-05, |
| "loss": 0.7224, |
| "step": 2406 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.227199216392892e-05, |
| "loss": 0.7691, |
| "step": 2407 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.2194207238883e-05, |
| "loss": 0.8828, |
| "step": 2408 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.211644054320254e-05, |
| "loss": 0.599, |
| "step": 2409 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.20386921278711e-05, |
| "loss": 0.7165, |
| "step": 2410 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.196096204386022e-05, |
| "loss": 0.7151, |
| "step": 2411 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.188325034212943e-05, |
| "loss": 0.6387, |
| "step": 2412 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.180555707362618e-05, |
| "loss": 0.7354, |
| "step": 2413 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.172788228928591e-05, |
| "loss": 0.7929, |
| "step": 2414 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.165022604003186e-05, |
| "loss": 0.7197, |
| "step": 2415 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.157258837677514e-05, |
| "loss": 0.8234, |
| "step": 2416 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.149496935041476e-05, |
| "loss": 0.7235, |
| "step": 2417 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.141736901183736e-05, |
| "loss": 0.6741, |
| "step": 2418 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.133978741191745e-05, |
| "loss": 0.7936, |
| "step": 2419 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.126222460151719e-05, |
| "loss": 0.7044, |
| "step": 2420 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.118468063148646e-05, |
| "loss": 0.6982, |
| "step": 2421 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.110715555266281e-05, |
| "loss": 0.6448, |
| "step": 2422 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.102964941587129e-05, |
| "loss": 0.6316, |
| "step": 2423 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.095216227192467e-05, |
| "loss": 0.7873, |
| "step": 2424 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.08746941716232e-05, |
| "loss": 0.787, |
| "step": 2425 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.079724516575466e-05, |
| "loss": 0.7776, |
| "step": 2426 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.071981530509424e-05, |
| "loss": 0.7189, |
| "step": 2427 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.064240464040473e-05, |
| "loss": 0.8225, |
| "step": 2428 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.056501322243622e-05, |
| "loss": 0.6398, |
| "step": 2429 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.048764110192618e-05, |
| "loss": 0.6382, |
| "step": 2430 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.04102883295995e-05, |
| "loss": 0.7683, |
| "step": 2431 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.033295495616834e-05, |
| "loss": 0.6836, |
| "step": 2432 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.025564103233213e-05, |
| "loss": 0.7196, |
| "step": 2433 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.017834660877756e-05, |
| "loss": 0.7546, |
| "step": 2434 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.010107173617857e-05, |
| "loss": 0.7087, |
| "step": 2435 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.002381646519625e-05, |
| "loss": 0.6198, |
| "step": 2436 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 6.994658084647881e-05, |
| "loss": 0.7183, |
| "step": 2437 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.986936493066165e-05, |
| "loss": 0.8834, |
| "step": 2438 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.97921687683672e-05, |
| "loss": 0.7635, |
| "step": 2439 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.971499241020495e-05, |
| "loss": 0.7624, |
| "step": 2440 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.963783590677138e-05, |
| "loss": 0.6364, |
| "step": 2441 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.956069930865004e-05, |
| "loss": 0.7203, |
| "step": 2442 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.948358266641135e-05, |
| "loss": 0.7349, |
| "step": 2443 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.940648603061263e-05, |
| "loss": 0.6215, |
| "step": 2444 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 6.932940945179818e-05, |
| "loss": 0.6638, |
| "step": 2445 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.925235298049906e-05, |
| "loss": 0.5951, |
| "step": 2446 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.917531666723321e-05, |
| "loss": 0.7193, |
| "step": 2447 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.909830056250527e-05, |
| "loss": 0.6822, |
| "step": 2448 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.902130471680675e-05, |
| "loss": 0.7825, |
| "step": 2449 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.894432918061579e-05, |
| "loss": 0.6985, |
| "step": 2450 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.886737400439721e-05, |
| "loss": 0.5583, |
| "step": 2451 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.879043923860257e-05, |
| "loss": 0.7553, |
| "step": 2452 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 6.871352493366997e-05, |
| "loss": 0.77, |
| "step": 2453 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.863663114002411e-05, |
| "loss": 0.672, |
| "step": 2454 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.855975790807623e-05, |
| "loss": 0.7333, |
| "step": 2455 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.848290528822416e-05, |
| "loss": 0.7179, |
| "step": 2456 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.840607333085214e-05, |
| "loss": 0.7309, |
| "step": 2457 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.832926208633086e-05, |
| "loss": 0.7289, |
| "step": 2458 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.825247160501753e-05, |
| "loss": 0.865, |
| "step": 2459 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.817570193725564e-05, |
| "loss": 0.7329, |
| "step": 2460 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 6.809895313337508e-05, |
| "loss": 0.725, |
| "step": 2461 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.802222524369202e-05, |
| "loss": 0.6374, |
| "step": 2462 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.794551831850902e-05, |
| "loss": 0.7045, |
| "step": 2463 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.786883240811479e-05, |
| "loss": 0.5344, |
| "step": 2464 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.77921675627843e-05, |
| "loss": 0.9198, |
| "step": 2465 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.771552383277875e-05, |
| "loss": 0.6753, |
| "step": 2466 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.763890126834543e-05, |
| "loss": 0.737, |
| "step": 2467 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.756229991971779e-05, |
| "loss": 0.7448, |
| "step": 2468 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 6.748571983711534e-05, |
| "loss": 0.6173, |
| "step": 2469 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.740916107074372e-05, |
| "loss": 0.7897, |
| "step": 2470 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.733262367079453e-05, |
| "loss": 0.6522, |
| "step": 2471 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.725610768744534e-05, |
| "loss": 0.5612, |
| "step": 2472 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.717961317085979e-05, |
| "loss": 0.5554, |
| "step": 2473 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.710314017118734e-05, |
| "loss": 0.8011, |
| "step": 2474 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.702668873856338e-05, |
| "loss": 0.6443, |
| "step": 2475 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.695025892310914e-05, |
| "loss": 0.765, |
| "step": 2476 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 6.687385077493174e-05, |
| "loss": 0.6826, |
| "step": 2477 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.679746434412404e-05, |
| "loss": 0.7919, |
| "step": 2478 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.672109968076465e-05, |
| "loss": 0.516, |
| "step": 2479 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.664475683491796e-05, |
| "loss": 0.8268, |
| "step": 2480 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.656843585663403e-05, |
| "loss": 0.5922, |
| "step": 2481 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.649213679594859e-05, |
| "loss": 0.8128, |
| "step": 2482 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.641585970288297e-05, |
| "loss": 0.8269, |
| "step": 2483 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.633960462744416e-05, |
| "loss": 0.7101, |
| "step": 2484 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 6.626337161962461e-05, |
| "loss": 0.7049, |
| "step": 2485 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.618716072940248e-05, |
| "loss": 0.6792, |
| "step": 2486 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.611097200674125e-05, |
| "loss": 0.7382, |
| "step": 2487 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.603480550158995e-05, |
| "loss": 0.893, |
| "step": 2488 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.595866126388305e-05, |
| "loss": 0.6765, |
| "step": 2489 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.588253934354039e-05, |
| "loss": 0.7335, |
| "step": 2490 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.580643979046717e-05, |
| "loss": 0.8172, |
| "step": 2491 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.5730362654554e-05, |
| "loss": 0.7475, |
| "step": 2492 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 6.565430798567673e-05, |
| "loss": 0.8856, |
| "step": 2493 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.557827583369647e-05, |
| "loss": 0.7962, |
| "step": 2494 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.550226624845961e-05, |
| "loss": 0.6308, |
| "step": 2495 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.542627927979771e-05, |
| "loss": 0.632, |
| "step": 2496 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.535031497752754e-05, |
| "loss": 0.7097, |
| "step": 2497 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.527437339145098e-05, |
| "loss": 0.7664, |
| "step": 2498 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.519845457135504e-05, |
| "loss": 0.7715, |
| "step": 2499 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.512255856701177e-05, |
| "loss": 0.6746, |
| "step": 2500 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 6.504668542817831e-05, |
| "loss": 0.7355, |
| "step": 2501 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.497083520459674e-05, |
| "loss": 0.7095, |
| "step": 2502 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.489500794599421e-05, |
| "loss": 0.6612, |
| "step": 2503 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.481920370208274e-05, |
| "loss": 0.671, |
| "step": 2504 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.474342252255927e-05, |
| "loss": 0.7809, |
| "step": 2505 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.466766445710568e-05, |
| "loss": 0.6668, |
| "step": 2506 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.459192955538863e-05, |
| "loss": 0.6836, |
| "step": 2507 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.451621786705962e-05, |
| "loss": 0.6211, |
| "step": 2508 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 6.444052944175488e-05, |
| "loss": 0.6149, |
| "step": 2509 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.43648643290955e-05, |
| "loss": 0.7946, |
| "step": 2510 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.428922257868722e-05, |
| "loss": 0.938, |
| "step": 2511 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.42136042401204e-05, |
| "loss": 0.5667, |
| "step": 2512 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.413800936297019e-05, |
| "loss": 0.7271, |
| "step": 2513 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.406243799679626e-05, |
| "loss": 0.8119, |
| "step": 2514 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.398689019114289e-05, |
| "loss": 0.7309, |
| "step": 2515 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.39113659955389e-05, |
| "loss": 0.816, |
| "step": 2516 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 6.383586545949768e-05, |
| "loss": 0.8262, |
| "step": 2517 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.376038863251705e-05, |
| "loss": 0.7314, |
| "step": 2518 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.368493556407933e-05, |
| "loss": 0.7167, |
| "step": 2519 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.360950630365126e-05, |
| "loss": 0.8018, |
| "step": 2520 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.353410090068392e-05, |
| "loss": 0.8891, |
| "step": 2521 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.345871940461283e-05, |
| "loss": 0.843, |
| "step": 2522 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.338336186485774e-05, |
| "loss": 0.7473, |
| "step": 2523 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.330802833082279e-05, |
| "loss": 0.7173, |
| "step": 2524 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 6.323271885189635e-05, |
| "loss": 0.8303, |
| "step": 2525 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.315743347745097e-05, |
| "loss": 0.6188, |
| "step": 2526 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.308217225684348e-05, |
| "loss": 0.7432, |
| "step": 2527 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.300693523941482e-05, |
| "loss": 0.6928, |
| "step": 2528 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.293172247449005e-05, |
| "loss": 0.7576, |
| "step": 2529 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.285653401137837e-05, |
| "loss": 0.7265, |
| "step": 2530 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.278136989937305e-05, |
| "loss": 0.7186, |
| "step": 2531 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.270623018775135e-05, |
| "loss": 0.6506, |
| "step": 2532 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 6.263111492577456e-05, |
| "loss": 0.7082, |
| "step": 2533 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.255602416268799e-05, |
| "loss": 0.7348, |
| "step": 2534 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.248095794772079e-05, |
| "loss": 0.7679, |
| "step": 2535 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.24059163300861e-05, |
| "loss": 0.5934, |
| "step": 2536 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.233089935898088e-05, |
| "loss": 0.7287, |
| "step": 2537 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.225590708358596e-05, |
| "loss": 0.6793, |
| "step": 2538 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.218093955306598e-05, |
| "loss": 0.6902, |
| "step": 2539 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.210599681656933e-05, |
| "loss": 0.746, |
| "step": 2540 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 6.203107892322819e-05, |
| "loss": 0.7777, |
| "step": 2541 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.195618592215843e-05, |
| "loss": 0.691, |
| "step": 2542 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.188131786245959e-05, |
| "loss": 0.6089, |
| "step": 2543 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.180647479321485e-05, |
| "loss": 0.6872, |
| "step": 2544 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.173165676349103e-05, |
| "loss": 0.7273, |
| "step": 2545 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.165686382233855e-05, |
| "loss": 0.6593, |
| "step": 2546 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.158209601879134e-05, |
| "loss": 0.7259, |
| "step": 2547 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.15073534018669e-05, |
| "loss": 0.76, |
| "step": 2548 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 6.143263602056615e-05, |
| "loss": 0.7319, |
| "step": 2549 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.135794392387353e-05, |
| "loss": 0.6582, |
| "step": 2550 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.128327716075683e-05, |
| "loss": 0.7743, |
| "step": 2551 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.120863578016735e-05, |
| "loss": 0.6363, |
| "step": 2552 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.113401983103966e-05, |
| "loss": 0.8139, |
| "step": 2553 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.105942936229161e-05, |
| "loss": 0.6334, |
| "step": 2554 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.0984864422824496e-05, |
| "loss": 0.6589, |
| "step": 2555 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.091032506152274e-05, |
| "loss": 0.6473, |
| "step": 2556 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 6.083581132725407e-05, |
| "loss": 0.7228, |
| "step": 2557 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.076132326886934e-05, |
| "loss": 0.8345, |
| "step": 2558 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.0686860935202694e-05, |
| "loss": 0.7443, |
| "step": 2559 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.061242437507131e-05, |
| "loss": 0.772, |
| "step": 2560 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.053801363727545e-05, |
| "loss": 0.7736, |
| "step": 2561 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.046362877059857e-05, |
| "loss": 0.8095, |
| "step": 2562 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.0389269823807036e-05, |
| "loss": 0.7914, |
| "step": 2563 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.031493684565029e-05, |
| "loss": 0.7269, |
| "step": 2564 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 6.024062988486072e-05, |
| "loss": 0.7921, |
| "step": 2565 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 6.016634899015369e-05, |
| "loss": 0.7535, |
| "step": 2566 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 6.009209421022744e-05, |
| "loss": 0.8988, |
| "step": 2567 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 6.00178655937631e-05, |
| "loss": 0.7246, |
| "step": 2568 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 5.994366318942468e-05, |
| "loss": 0.732, |
| "step": 2569 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 5.986948704585895e-05, |
| "loss": 0.7149, |
| "step": 2570 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 5.97953372116955e-05, |
| "loss": 0.8137, |
| "step": 2571 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 5.972121373554664e-05, |
| "loss": 0.7464, |
| "step": 2572 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 5.9647116666007455e-05, |
| "loss": 0.805, |
| "step": 2573 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.957304605165567e-05, |
| "loss": 0.6894, |
| "step": 2574 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.949900194105167e-05, |
| "loss": 0.789, |
| "step": 2575 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.942498438273849e-05, |
| "loss": 0.5962, |
| "step": 2576 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.935099342524175e-05, |
| "loss": 0.7421, |
| "step": 2577 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.9277029117069604e-05, |
| "loss": 0.827, |
| "step": 2578 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.920309150671275e-05, |
| "loss": 0.6649, |
| "step": 2579 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.9129180642644414e-05, |
| "loss": 0.5851, |
| "step": 2580 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 5.9055296573320226e-05, |
| "loss": 0.7707, |
| "step": 2581 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.8981439347178304e-05, |
| "loss": 0.7082, |
| "step": 2582 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.890760901263915e-05, |
| "loss": 0.6365, |
| "step": 2583 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.883380561810563e-05, |
| "loss": 0.6476, |
| "step": 2584 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.8760029211962954e-05, |
| "loss": 0.6256, |
| "step": 2585 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.8686279842578615e-05, |
| "loss": 0.7587, |
| "step": 2586 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.861255755830244e-05, |
| "loss": 0.6907, |
| "step": 2587 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.8538862407466425e-05, |
| "loss": 0.7745, |
| "step": 2588 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 5.8465194438384816e-05, |
| "loss": 0.8004, |
| "step": 2589 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.839155369935407e-05, |
| "loss": 0.7186, |
| "step": 2590 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.8317940238652714e-05, |
| "loss": 0.6458, |
| "step": 2591 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.82443541045415e-05, |
| "loss": 0.8524, |
| "step": 2592 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.8170795345263106e-05, |
| "loss": 0.7729, |
| "step": 2593 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.809726400904242e-05, |
| "loss": 0.8253, |
| "step": 2594 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.802376014408632e-05, |
| "loss": 0.8208, |
| "step": 2595 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.795028379858355e-05, |
| "loss": 0.6729, |
| "step": 2596 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 5.7876835020705e-05, |
| "loss": 0.7047, |
| "step": 2597 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.780341385860333e-05, |
| "loss": 0.6933, |
| "step": 2598 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.773002036041315e-05, |
| "loss": 0.6589, |
| "step": 2599 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.765665457425102e-05, |
| "loss": 0.7539, |
| "step": 2600 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.758331654821516e-05, |
| "loss": 0.8596, |
| "step": 2601 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.751000633038572e-05, |
| "loss": 0.5354, |
| "step": 2602 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.743672396882464e-05, |
| "loss": 0.7826, |
| "step": 2603 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.736346951157544e-05, |
| "loss": 0.6813, |
| "step": 2604 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 5.729024300666349e-05, |
| "loss": 0.7611, |
| "step": 2605 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.7217044502095806e-05, |
| "loss": 0.6351, |
| "step": 2606 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.714387404586101e-05, |
| "loss": 0.7804, |
| "step": 2607 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.707073168592942e-05, |
| "loss": 0.6416, |
| "step": 2608 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.699761747025278e-05, |
| "loss": 0.7768, |
| "step": 2609 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.6924531446764504e-05, |
| "loss": 0.6593, |
| "step": 2610 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.6851473663379564e-05, |
| "loss": 0.7075, |
| "step": 2611 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.677844416799424e-05, |
| "loss": 0.8571, |
| "step": 2612 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 5.670544300848643e-05, |
| "loss": 0.6766, |
| "step": 2613 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.6632470232715426e-05, |
| "loss": 0.7602, |
| "step": 2614 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.6559525888521815e-05, |
| "loss": 0.9211, |
| "step": 2615 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.648661002372768e-05, |
| "loss": 0.7262, |
| "step": 2616 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.641372268613631e-05, |
| "loss": 0.6234, |
| "step": 2617 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.6340863923532396e-05, |
| "loss": 0.7539, |
| "step": 2618 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.626803378368185e-05, |
| "loss": 0.7176, |
| "step": 2619 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.6195232314331766e-05, |
| "loss": 0.7247, |
| "step": 2620 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 5.612245956321053e-05, |
| "loss": 0.712, |
| "step": 2621 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.60497155780277e-05, |
| "loss": 0.6421, |
| "step": 2622 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.597700040647385e-05, |
| "loss": 0.7797, |
| "step": 2623 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.590431409622081e-05, |
| "loss": 0.8409, |
| "step": 2624 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.5831656694921465e-05, |
| "loss": 0.7742, |
| "step": 2625 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.575902825020962e-05, |
| "loss": 0.7277, |
| "step": 2626 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.568642880970026e-05, |
| "loss": 0.7137, |
| "step": 2627 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.56138584209893e-05, |
| "loss": 0.7527, |
| "step": 2628 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 5.554131713165353e-05, |
| "loss": 0.6276, |
| "step": 2629 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.5468804989250786e-05, |
| "loss": 0.7864, |
| "step": 2630 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.539632204131968e-05, |
| "loss": 0.7586, |
| "step": 2631 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.532386833537977e-05, |
| "loss": 0.6876, |
| "step": 2632 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.525144391893144e-05, |
| "loss": 0.7628, |
| "step": 2633 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.517904883945577e-05, |
| "loss": 0.7147, |
| "step": 2634 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.510668314441474e-05, |
| "loss": 0.8237, |
| "step": 2635 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.503434688125104e-05, |
| "loss": 0.7778, |
| "step": 2636 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 5.496204009738795e-05, |
| "loss": 0.7622, |
| "step": 2637 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.488976284022953e-05, |
| "loss": 0.6646, |
| "step": 2638 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.4817515157160515e-05, |
| "loss": 0.6707, |
| "step": 2639 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.474529709554612e-05, |
| "loss": 0.6026, |
| "step": 2640 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.467310870273224e-05, |
| "loss": 0.6737, |
| "step": 2641 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.4600950026045326e-05, |
| "loss": 0.7882, |
| "step": 2642 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.452882111279227e-05, |
| "loss": 0.7879, |
| "step": 2643 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.445672201026054e-05, |
| "loss": 0.6573, |
| "step": 2644 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 5.438465276571796e-05, |
| "loss": 0.812, |
| "step": 2645 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.431261342641286e-05, |
| "loss": 0.6547, |
| "step": 2646 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.4240604039573984e-05, |
| "loss": 0.6894, |
| "step": 2647 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.416862465241033e-05, |
| "loss": 0.6796, |
| "step": 2648 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.409667531211131e-05, |
| "loss": 0.6921, |
| "step": 2649 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.402475606584669e-05, |
| "loss": 0.7982, |
| "step": 2650 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.395286696076632e-05, |
| "loss": 0.7225, |
| "step": 2651 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.388100804400049e-05, |
| "loss": 0.7148, |
| "step": 2652 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 5.380917936265961e-05, |
| "loss": 0.8328, |
| "step": 2653 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.373738096383423e-05, |
| "loss": 0.7578, |
| "step": 2654 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.366561289459512e-05, |
| "loss": 0.7143, |
| "step": 2655 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.3593875201993174e-05, |
| "loss": 0.8177, |
| "step": 2656 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.3522167933059243e-05, |
| "loss": 0.6649, |
| "step": 2657 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.3450491134804414e-05, |
| "loss": 0.893, |
| "step": 2658 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.337884485421962e-05, |
| "loss": 0.6458, |
| "step": 2659 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.3307229138275936e-05, |
| "loss": 0.6816, |
| "step": 2660 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 5.323564403392436e-05, |
| "loss": 0.7116, |
| "step": 2661 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.31640895880957e-05, |
| "loss": 0.7358, |
| "step": 2662 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.3092565847700834e-05, |
| "loss": 0.7647, |
| "step": 2663 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.302107285963045e-05, |
| "loss": 0.7122, |
| "step": 2664 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.2949610670755e-05, |
| "loss": 0.7058, |
| "step": 2665 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.287817932792485e-05, |
| "loss": 0.6237, |
| "step": 2666 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.280677887797012e-05, |
| "loss": 0.741, |
| "step": 2667 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.273540936770058e-05, |
| "loss": 0.879, |
| "step": 2668 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 5.266407084390586e-05, |
| "loss": 0.7692, |
| "step": 2669 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.259276335335521e-05, |
| "loss": 0.6391, |
| "step": 2670 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.252148694279748e-05, |
| "loss": 0.7541, |
| "step": 2671 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.245024165896126e-05, |
| "loss": 0.54, |
| "step": 2672 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.237902754855462e-05, |
| "loss": 0.7436, |
| "step": 2673 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.230784465826524e-05, |
| "loss": 0.6506, |
| "step": 2674 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.223669303476041e-05, |
| "loss": 0.7735, |
| "step": 2675 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.2165572724686754e-05, |
| "loss": 0.8176, |
| "step": 2676 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 5.2094483774670497e-05, |
| "loss": 0.747, |
| "step": 2677 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.202342623131731e-05, |
| "loss": 0.7075, |
| "step": 2678 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.1952400141212164e-05, |
| "loss": 0.7082, |
| "step": 2679 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.1881405550919493e-05, |
| "loss": 0.7959, |
| "step": 2680 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.181044250698312e-05, |
| "loss": 0.6419, |
| "step": 2681 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.1739511055926047e-05, |
| "loss": 0.7703, |
| "step": 2682 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.1668611244250684e-05, |
| "loss": 0.7224, |
| "step": 2683 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.1597743118438726e-05, |
| "loss": 0.6709, |
| "step": 2684 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 5.152690672495091e-05, |
| "loss": 0.5835, |
| "step": 2685 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.1456102110227375e-05, |
| "loss": 0.6547, |
| "step": 2686 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.1385329320687294e-05, |
| "loss": 0.6885, |
| "step": 2687 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.1314588402729044e-05, |
| "loss": 0.7383, |
| "step": 2688 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.124387940273011e-05, |
| "loss": 0.6867, |
| "step": 2689 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.117320236704697e-05, |
| "loss": 0.7023, |
| "step": 2690 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.1102557342015224e-05, |
| "loss": 0.7567, |
| "step": 2691 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.103194437394952e-05, |
| "loss": 0.6996, |
| "step": 2692 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 5.096136350914335e-05, |
| "loss": 0.8404, |
| "step": 2693 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.089081479386928e-05, |
| "loss": 0.7904, |
| "step": 2694 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.08202982743788e-05, |
| "loss": 0.7218, |
| "step": 2695 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.074981399690218e-05, |
| "loss": 0.6913, |
| "step": 2696 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.067936200764869e-05, |
| "loss": 0.7725, |
| "step": 2697 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.0608942352806364e-05, |
| "loss": 0.7411, |
| "step": 2698 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.0538555078542005e-05, |
| "loss": 0.7427, |
| "step": 2699 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.0468200231001286e-05, |
| "loss": 0.7478, |
| "step": 2700 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 5.039787785630847e-05, |
| "loss": 0.8113, |
| "step": 2701 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 5.0327588000566695e-05, |
| "loss": 0.6636, |
| "step": 2702 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 5.025733070985772e-05, |
| "loss": 0.8221, |
| "step": 2703 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 5.018710603024187e-05, |
| "loss": 0.6825, |
| "step": 2704 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 5.01169140077582e-05, |
| "loss": 0.619, |
| "step": 2705 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 5.004675468842436e-05, |
| "loss": 0.7122, |
| "step": 2706 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 4.997662811823642e-05, |
| "loss": 0.745, |
| "step": 2707 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 4.9906534343169144e-05, |
| "loss": 0.8198, |
| "step": 2708 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 4.9836473409175754e-05, |
| "loss": 0.8252, |
| "step": 2709 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.976644536218783e-05, |
| "loss": 0.664, |
| "step": 2710 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.9696450248115524e-05, |
| "loss": 0.7003, |
| "step": 2711 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.962648811284738e-05, |
| "loss": 0.7166, |
| "step": 2712 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.9556559002250235e-05, |
| "loss": 0.8048, |
| "step": 2713 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.948666296216938e-05, |
| "loss": 0.643, |
| "step": 2714 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.9416800038428324e-05, |
| "loss": 0.6391, |
| "step": 2715 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.934697027682894e-05, |
| "loss": 0.821, |
| "step": 2716 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 4.927717372315139e-05, |
| "loss": 0.8238, |
| "step": 2717 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.920741042315392e-05, |
| "loss": 0.5706, |
| "step": 2718 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.913768042257313e-05, |
| "loss": 0.6465, |
| "step": 2719 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.9067983767123736e-05, |
| "loss": 0.66, |
| "step": 2720 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.899832050249853e-05, |
| "loss": 0.7169, |
| "step": 2721 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.89286906743685e-05, |
| "loss": 0.6601, |
| "step": 2722 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.885909432838269e-05, |
| "loss": 0.7123, |
| "step": 2723 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.8789531510168163e-05, |
| "loss": 0.6422, |
| "step": 2724 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 4.8720002265330015e-05, |
| "loss": 0.6611, |
| "step": 2725 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.865050663945139e-05, |
| "loss": 0.7804, |
| "step": 2726 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.858104467809324e-05, |
| "loss": 0.8916, |
| "step": 2727 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.851161642679466e-05, |
| "loss": 0.7832, |
| "step": 2728 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.8442221931072466e-05, |
| "loss": 0.6926, |
| "step": 2729 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.837286123642141e-05, |
| "loss": 0.8913, |
| "step": 2730 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.830353438831413e-05, |
| "loss": 0.6641, |
| "step": 2731 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.8234241432200965e-05, |
| "loss": 0.6908, |
| "step": 2732 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 4.8164982413510177e-05, |
| "loss": 0.7409, |
| "step": 2733 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.809575737764759e-05, |
| "loss": 0.7816, |
| "step": 2734 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.8026566369996926e-05, |
| "loss": 0.8089, |
| "step": 2735 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.795740943591955e-05, |
| "loss": 0.6895, |
| "step": 2736 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.7888286620754406e-05, |
| "loss": 0.7576, |
| "step": 2737 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.7819197969818175e-05, |
| "loss": 0.7121, |
| "step": 2738 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.7750143528405126e-05, |
| "loss": 0.7826, |
| "step": 2739 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.768112334178699e-05, |
| "loss": 0.8603, |
| "step": 2740 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 4.761213745521317e-05, |
| "loss": 0.7321, |
| "step": 2741 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.754318591391057e-05, |
| "loss": 0.8115, |
| "step": 2742 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.7474268763083464e-05, |
| "loss": 0.7361, |
| "step": 2743 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.74053860479137e-05, |
| "loss": 0.7155, |
| "step": 2744 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.733653781356055e-05, |
| "loss": 0.6928, |
| "step": 2745 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.726772410516055e-05, |
| "loss": 0.749, |
| "step": 2746 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.7198944967827774e-05, |
| "loss": 0.8049, |
| "step": 2747 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.7130200446653475e-05, |
| "loss": 0.6278, |
| "step": 2748 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 4.7061490586706305e-05, |
| "loss": 0.821, |
| "step": 2749 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.699281543303222e-05, |
| "loss": 0.7185, |
| "step": 2750 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.6924175030654304e-05, |
| "loss": 0.707, |
| "step": 2751 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.6855569424572955e-05, |
| "loss": 0.6567, |
| "step": 2752 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.6786998659765766e-05, |
| "loss": 0.8059, |
| "step": 2753 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.67184627811874e-05, |
| "loss": 0.6674, |
| "step": 2754 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.6649961833769715e-05, |
| "loss": 0.7591, |
| "step": 2755 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.65814958624217e-05, |
| "loss": 0.66, |
| "step": 2756 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 4.6513064912029304e-05, |
| "loss": 0.7361, |
| "step": 2757 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.644466902745561e-05, |
| "loss": 0.8713, |
| "step": 2758 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.6376308253540726e-05, |
| "loss": 0.684, |
| "step": 2759 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.630798263510162e-05, |
| "loss": 0.7005, |
| "step": 2760 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.623969221693236e-05, |
| "loss": 0.6121, |
| "step": 2761 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.617143704380381e-05, |
| "loss": 0.6308, |
| "step": 2762 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.610321716046382e-05, |
| "loss": 0.7153, |
| "step": 2763 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.6035032611637094e-05, |
| "loss": 0.7845, |
| "step": 2764 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 4.596688344202509e-05, |
| "loss": 0.8432, |
| "step": 2765 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.5898769696306155e-05, |
| "loss": 0.6058, |
| "step": 2766 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.583069141913543e-05, |
| "loss": 0.7352, |
| "step": 2767 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.5762648655144666e-05, |
| "loss": 0.6885, |
| "step": 2768 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.5694641448942485e-05, |
| "loss": 0.7257, |
| "step": 2769 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.562666984511416e-05, |
| "loss": 0.836, |
| "step": 2770 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.555873388822153e-05, |
| "loss": 0.7716, |
| "step": 2771 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.549083362280317e-05, |
| "loss": 0.7716, |
| "step": 2772 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 4.542296909337426e-05, |
| "loss": 0.7863, |
| "step": 2773 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.535514034442644e-05, |
| "loss": 0.8777, |
| "step": 2774 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.528734742042803e-05, |
| "loss": 0.7374, |
| "step": 2775 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.5219590365823714e-05, |
| "loss": 0.6106, |
| "step": 2776 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.5151869225034806e-05, |
| "loss": 0.6947, |
| "step": 2777 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.508418404245903e-05, |
| "loss": 0.6859, |
| "step": 2778 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.501653486247046e-05, |
| "loss": 0.8493, |
| "step": 2779 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.494892172941965e-05, |
| "loss": 0.6054, |
| "step": 2780 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 4.488134468763353e-05, |
| "loss": 0.8118, |
| "step": 2781 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.481380378141527e-05, |
| "loss": 0.7298, |
| "step": 2782 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.474629905504446e-05, |
| "loss": 0.7301, |
| "step": 2783 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.467883055277695e-05, |
| "loss": 0.6953, |
| "step": 2784 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.461139831884474e-05, |
| "loss": 0.7508, |
| "step": 2785 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.454400239745619e-05, |
| "loss": 0.6568, |
| "step": 2786 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.44766428327958e-05, |
| "loss": 0.7498, |
| "step": 2787 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.440931966902418e-05, |
| "loss": 0.7896, |
| "step": 2788 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 4.43420329502782e-05, |
| "loss": 0.7187, |
| "step": 2789 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.427478272067066e-05, |
| "loss": 0.6244, |
| "step": 2790 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.4207569024290596e-05, |
| "loss": 0.7448, |
| "step": 2791 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.414039190520308e-05, |
| "loss": 0.8247, |
| "step": 2792 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.407325140744907e-05, |
| "loss": 0.7668, |
| "step": 2793 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.400614757504564e-05, |
| "loss": 0.6496, |
| "step": 2794 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.393908045198585e-05, |
| "loss": 0.8899, |
| "step": 2795 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.387205008223854e-05, |
| "loss": 0.7862, |
| "step": 2796 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 4.3805056509748586e-05, |
| "loss": 0.517, |
| "step": 2797 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.373809977843676e-05, |
| "loss": 0.6701, |
| "step": 2798 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.367117993219955e-05, |
| "loss": 0.7365, |
| "step": 2799 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.360429701490934e-05, |
| "loss": 0.7009, |
| "step": 2800 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.353745107041438e-05, |
| "loss": 0.6843, |
| "step": 2801 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.34706421425385e-05, |
| "loss": 0.6483, |
| "step": 2802 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.340387027508145e-05, |
| "loss": 0.9249, |
| "step": 2803 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.333713551181852e-05, |
| "loss": 0.7288, |
| "step": 2804 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 4.327043789650078e-05, |
| "loss": 0.6811, |
| "step": 2805 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.320377747285497e-05, |
| "loss": 0.7997, |
| "step": 2806 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.31371542845833e-05, |
| "loss": 0.6522, |
| "step": 2807 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.307056837536373e-05, |
| "loss": 0.7622, |
| "step": 2808 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.300401978884973e-05, |
| "loss": 0.7135, |
| "step": 2809 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.2937508568670194e-05, |
| "loss": 0.6894, |
| "step": 2810 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.2871034758429686e-05, |
| "loss": 0.6637, |
| "step": 2811 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.2804598401708175e-05, |
| "loss": 0.8291, |
| "step": 2812 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 4.2738199542061005e-05, |
| "loss": 0.7039, |
| "step": 2813 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.2671838223019036e-05, |
| "loss": 0.6073, |
| "step": 2814 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.2605514488088515e-05, |
| "loss": 0.7236, |
| "step": 2815 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.253922838075095e-05, |
| "loss": 0.7725, |
| "step": 2816 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.247297994446331e-05, |
| "loss": 0.7011, |
| "step": 2817 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.240676922265774e-05, |
| "loss": 0.7098, |
| "step": 2818 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.234059625874174e-05, |
| "loss": 0.7017, |
| "step": 2819 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.227446109609809e-05, |
| "loss": 0.6856, |
| "step": 2820 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 4.220836377808465e-05, |
| "loss": 0.8549, |
| "step": 2821 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.21423043480346e-05, |
| "loss": 0.7382, |
| "step": 2822 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.207628284925628e-05, |
| "loss": 0.7248, |
| "step": 2823 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.2010299325033034e-05, |
| "loss": 0.8919, |
| "step": 2824 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.1944353818623424e-05, |
| "loss": 0.7865, |
| "step": 2825 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.18784463732611e-05, |
| "loss": 0.7372, |
| "step": 2826 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.181257703215466e-05, |
| "loss": 0.761, |
| "step": 2827 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.17467458384878e-05, |
| "loss": 0.6281, |
| "step": 2828 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 4.1680952835419216e-05, |
| "loss": 0.759, |
| "step": 2829 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.161519806608247e-05, |
| "loss": 0.6285, |
| "step": 2830 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.15494815735862e-05, |
| "loss": 0.848, |
| "step": 2831 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.1483803401013796e-05, |
| "loss": 0.6014, |
| "step": 2832 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.141816359142364e-05, |
| "loss": 0.8041, |
| "step": 2833 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.1352562187848954e-05, |
| "loss": 0.821, |
| "step": 2834 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.12869992332977e-05, |
| "loss": 0.795, |
| "step": 2835 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.12214747707527e-05, |
| "loss": 0.7797, |
| "step": 2836 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 4.1155988843171565e-05, |
| "loss": 0.689, |
| "step": 2837 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.109054149348655e-05, |
| "loss": 0.6424, |
| "step": 2838 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.102513276460471e-05, |
| "loss": 0.6614, |
| "step": 2839 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.0959762699407766e-05, |
| "loss": 0.6848, |
| "step": 2840 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.0894431340751994e-05, |
| "loss": 0.6655, |
| "step": 2841 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.0829138731468416e-05, |
| "loss": 0.7573, |
| "step": 2842 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.0763884914362635e-05, |
| "loss": 0.7774, |
| "step": 2843 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.0698669932214727e-05, |
| "loss": 0.7641, |
| "step": 2844 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 4.0633493827779425e-05, |
| "loss": 0.776, |
| "step": 2845 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.0568356643785856e-05, |
| "loss": 0.835, |
| "step": 2846 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.050325842293773e-05, |
| "loss": 0.6695, |
| "step": 2847 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.043819920791322e-05, |
| "loss": 0.6048, |
| "step": 2848 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.037317904136477e-05, |
| "loss": 0.7548, |
| "step": 2849 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.030819796591949e-05, |
| "loss": 0.8898, |
| "step": 2850 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.02432560241786e-05, |
| "loss": 0.6424, |
| "step": 2851 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.0178353258717804e-05, |
| "loss": 0.6338, |
| "step": 2852 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 4.0113489712087125e-05, |
| "loss": 0.7473, |
| "step": 2853 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 4.0048665426810794e-05, |
| "loss": 0.6693, |
| "step": 2854 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.9983880445387366e-05, |
| "loss": 0.6598, |
| "step": 2855 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.991913481028965e-05, |
| "loss": 0.7745, |
| "step": 2856 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.9854428563964564e-05, |
| "loss": 0.7472, |
| "step": 2857 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.978976174883329e-05, |
| "loss": 0.8453, |
| "step": 2858 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.9725134407291154e-05, |
| "loss": 0.6937, |
| "step": 2859 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.966054658170754e-05, |
| "loss": 0.6364, |
| "step": 2860 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.959599831442596e-05, |
| "loss": 0.7933, |
| "step": 2861 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.953148964776408e-05, |
| "loss": 0.822, |
| "step": 2862 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.946702062401341e-05, |
| "loss": 0.7253, |
| "step": 2863 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.940259128543967e-05, |
| "loss": 0.6483, |
| "step": 2864 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.9338201674282406e-05, |
| "loss": 0.6983, |
| "step": 2865 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.9273851832755214e-05, |
| "loss": 0.7912, |
| "step": 2866 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.9209541803045635e-05, |
| "loss": 0.7821, |
| "step": 2867 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.9145271627314986e-05, |
| "loss": 0.8756, |
| "step": 2868 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.9081041347698574e-05, |
| "loss": 0.675, |
| "step": 2869 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.9016851006305545e-05, |
| "loss": 0.756, |
| "step": 2870 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.8952700645218766e-05, |
| "loss": 0.7474, |
| "step": 2871 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.8888590306494974e-05, |
| "loss": 0.6701, |
| "step": 2872 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.882452003216469e-05, |
| "loss": 0.7634, |
| "step": 2873 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.8760489864232066e-05, |
| "loss": 0.7713, |
| "step": 2874 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.869649984467504e-05, |
| "loss": 0.7232, |
| "step": 2875 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.8632550015445256e-05, |
| "loss": 0.7406, |
| "step": 2876 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.856864041846789e-05, |
| "loss": 0.6416, |
| "step": 2877 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.8504771095641904e-05, |
| "loss": 0.6791, |
| "step": 2878 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.844094208883966e-05, |
| "loss": 0.683, |
| "step": 2879 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.8377153439907266e-05, |
| "loss": 0.7268, |
| "step": 2880 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.831340519066433e-05, |
| "loss": 0.7651, |
| "step": 2881 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.824969738290386e-05, |
| "loss": 0.7041, |
| "step": 2882 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.81860300583925e-05, |
| "loss": 0.8141, |
| "step": 2883 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.81224032588703e-05, |
| "loss": 0.6316, |
| "step": 2884 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.8058817026050677e-05, |
| "loss": 0.7407, |
| "step": 2885 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.799527140162055e-05, |
| "loss": 0.6551, |
| "step": 2886 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.793176642724019e-05, |
| "loss": 0.6148, |
| "step": 2887 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.786830214454315e-05, |
| "loss": 0.7614, |
| "step": 2888 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.780487859513636e-05, |
| "loss": 0.6965, |
| "step": 2889 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.774149582060012e-05, |
| "loss": 0.6854, |
| "step": 2890 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.767815386248784e-05, |
| "loss": 0.6633, |
| "step": 2891 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.7614852762326305e-05, |
| "loss": 0.8191, |
| "step": 2892 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.7551592561615403e-05, |
| "loss": 0.7205, |
| "step": 2893 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.7488373301828296e-05, |
| "loss": 0.684, |
| "step": 2894 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.742519502441132e-05, |
| "loss": 0.7654, |
| "step": 2895 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.736205777078381e-05, |
| "loss": 0.7517, |
| "step": 2896 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.729896158233835e-05, |
| "loss": 0.7461, |
| "step": 2897 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.7235906500440574e-05, |
| "loss": 0.8077, |
| "step": 2898 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.717289256642906e-05, |
| "loss": 0.7304, |
| "step": 2899 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.710991982161555e-05, |
| "loss": 0.7112, |
| "step": 2900 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.704698830728472e-05, |
| "loss": 0.6429, |
| "step": 2901 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.698409806469417e-05, |
| "loss": 0.7241, |
| "step": 2902 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.6921249135074524e-05, |
| "loss": 0.6498, |
| "step": 2903 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.6858441559629306e-05, |
| "loss": 0.9153, |
| "step": 2904 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.679567537953485e-05, |
| "loss": 0.8049, |
| "step": 2905 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.673295063594049e-05, |
| "loss": 0.6003, |
| "step": 2906 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.667026736996823e-05, |
| "loss": 0.7328, |
| "step": 2907 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.6607625622713e-05, |
| "loss": 0.7377, |
| "step": 2908 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.654502543524253e-05, |
| "loss": 0.6705, |
| "step": 2909 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.648246684859716e-05, |
| "loss": 0.6651, |
| "step": 2910 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.6419949903790085e-05, |
| "loss": 0.6744, |
| "step": 2911 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.63574746418072e-05, |
| "loss": 0.7428, |
| "step": 2912 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.629504110360697e-05, |
| "loss": 0.7946, |
| "step": 2913 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.6232649330120605e-05, |
| "loss": 0.7558, |
| "step": 2914 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.617029936225193e-05, |
| "loss": 0.7047, |
| "step": 2915 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.610799124087725e-05, |
| "loss": 0.7149, |
| "step": 2916 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.604572500684558e-05, |
| "loss": 0.678, |
| "step": 2917 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.5983500700978425e-05, |
| "loss": 0.584, |
| "step": 2918 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.592131836406972e-05, |
| "loss": 0.7768, |
| "step": 2919 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.585917803688603e-05, |
| "loss": 0.6653, |
| "step": 2920 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.579707976016624e-05, |
| "loss": 0.8123, |
| "step": 2921 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.573502357462176e-05, |
| "loss": 0.7705, |
| "step": 2922 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.567300952093641e-05, |
| "loss": 0.6264, |
| "step": 2923 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.5611037639766265e-05, |
| "loss": 0.6712, |
| "step": 2924 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.55491079717399e-05, |
| "loss": 0.6838, |
| "step": 2925 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.5487220557458176e-05, |
| "loss": 0.6565, |
| "step": 2926 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.542537543749417e-05, |
| "loss": 0.8221, |
| "step": 2927 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.5363572652393326e-05, |
| "loss": 0.6631, |
| "step": 2928 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.530181224267333e-05, |
| "loss": 0.6951, |
| "step": 2929 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.5240094248824e-05, |
| "loss": 0.5913, |
| "step": 2930 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.5178418711307414e-05, |
| "loss": 0.5944, |
| "step": 2931 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.511678567055786e-05, |
| "loss": 0.7174, |
| "step": 2932 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.5055195166981645e-05, |
| "loss": 0.7476, |
| "step": 2933 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.4993647240957304e-05, |
| "loss": 0.7939, |
| "step": 2934 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.493214193283536e-05, |
| "loss": 0.816, |
| "step": 2935 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.487067928293848e-05, |
| "loss": 0.5908, |
| "step": 2936 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.480925933156134e-05, |
| "loss": 0.707, |
| "step": 2937 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.4747882118970565e-05, |
| "loss": 0.8311, |
| "step": 2938 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.468654768540483e-05, |
| "loss": 0.8066, |
| "step": 2939 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.4625256071074773e-05, |
| "loss": 0.6111, |
| "step": 2940 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.456400731616288e-05, |
| "loss": 0.7178, |
| "step": 2941 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.4502801460823607e-05, |
| "loss": 0.5499, |
| "step": 2942 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.444163854518331e-05, |
| "loss": 0.869, |
| "step": 2943 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.4380518609340076e-05, |
| "loss": 0.6707, |
| "step": 2944 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.4319441693363906e-05, |
| "loss": 0.8913, |
| "step": 2945 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.425840783729664e-05, |
| "loss": 0.6768, |
| "step": 2946 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.419741708115174e-05, |
| "loss": 0.6123, |
| "step": 2947 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.4136469464914575e-05, |
| "loss": 0.6966, |
| "step": 2948 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.407556502854209e-05, |
| "loss": 0.6916, |
| "step": 2949 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.4014703811963025e-05, |
| "loss": 0.8758, |
| "step": 2950 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.3953885855077774e-05, |
| "loss": 0.7017, |
| "step": 2951 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.389311119775828e-05, |
| "loss": 0.7668, |
| "step": 2952 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.38323798798482e-05, |
| "loss": 0.6312, |
| "step": 2953 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.377169194116275e-05, |
| "loss": 0.6305, |
| "step": 2954 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.3711047421488675e-05, |
| "loss": 0.6499, |
| "step": 2955 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.3650446360584275e-05, |
| "loss": 0.6119, |
| "step": 2956 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.3589888798179416e-05, |
| "loss": 0.713, |
| "step": 2957 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.35293747739753e-05, |
| "loss": 0.7515, |
| "step": 2958 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.3468904327644736e-05, |
| "loss": 0.6239, |
| "step": 2959 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.340847749883191e-05, |
| "loss": 0.8624, |
| "step": 2960 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.334809432715238e-05, |
| "loss": 0.6415, |
| "step": 2961 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.3287754852193144e-05, |
| "loss": 0.8464, |
| "step": 2962 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.322745911351245e-05, |
| "loss": 0.633, |
| "step": 2963 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.316720715064e-05, |
| "loss": 0.7774, |
| "step": 2964 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.3106999003076746e-05, |
| "loss": 0.678, |
| "step": 2965 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.304683471029485e-05, |
| "loss": 0.6667, |
| "step": 2966 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.2986714311737824e-05, |
| "loss": 0.6331, |
| "step": 2967 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.292663784682036e-05, |
| "loss": 0.7183, |
| "step": 2968 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.2866605354928315e-05, |
| "loss": 0.6625, |
| "step": 2969 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.280661687541876e-05, |
| "loss": 0.7132, |
| "step": 2970 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.274667244761991e-05, |
| "loss": 0.6766, |
| "step": 2971 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.268677211083109e-05, |
| "loss": 0.6175, |
| "step": 2972 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.2626915904322744e-05, |
| "loss": 0.74, |
| "step": 2973 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.256710386733629e-05, |
| "loss": 0.614, |
| "step": 2974 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.2507336039084314e-05, |
| "loss": 0.721, |
| "step": 2975 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.2447612458750365e-05, |
| "loss": 0.6411, |
| "step": 2976 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.238793316548894e-05, |
| "loss": 0.6661, |
| "step": 2977 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.232829819842555e-05, |
| "loss": 0.7069, |
| "step": 2978 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.2268707596656714e-05, |
| "loss": 0.7285, |
| "step": 2979 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.2209161399249674e-05, |
| "loss": 0.7317, |
| "step": 2980 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.214965964524278e-05, |
| "loss": 0.7298, |
| "step": 2981 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.209020237364505e-05, |
| "loss": 0.8565, |
| "step": 2982 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.2030789623436485e-05, |
| "loss": 0.7006, |
| "step": 2983 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.197142143356787e-05, |
| "loss": 0.785, |
| "step": 2984 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.191209784296068e-05, |
| "loss": 0.6926, |
| "step": 2985 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.185281889050725e-05, |
| "loss": 0.7075, |
| "step": 2986 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.179358461507068e-05, |
| "loss": 0.6426, |
| "step": 2987 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.173439505548462e-05, |
| "loss": 0.7944, |
| "step": 2988 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.167525025055357e-05, |
| "loss": 0.6844, |
| "step": 2989 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.161615023905265e-05, |
| "loss": 0.7558, |
| "step": 2990 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.15570950597275e-05, |
| "loss": 0.6388, |
| "step": 2991 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.149808475129452e-05, |
| "loss": 0.659, |
| "step": 2992 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.143911935244065e-05, |
| "loss": 0.686, |
| "step": 2993 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.138019890182331e-05, |
| "loss": 0.6085, |
| "step": 2994 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.132132343807056e-05, |
| "loss": 0.674, |
| "step": 2995 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.126249299978086e-05, |
| "loss": 0.6892, |
| "step": 2996 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.120370762552325e-05, |
| "loss": 0.7351, |
| "step": 2997 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.1144967353837196e-05, |
| "loss": 0.7213, |
| "step": 2998 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.108627222323253e-05, |
| "loss": 0.6966, |
| "step": 2999 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.102762227218957e-05, |
| "loss": 0.6944, |
| "step": 3000 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.096901753915903e-05, |
| "loss": 0.7204, |
| "step": 3001 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.091045806256187e-05, |
| "loss": 0.6358, |
| "step": 3002 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.085194388078946e-05, |
| "loss": 0.6018, |
| "step": 3003 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.079347503220351e-05, |
| "loss": 0.621, |
| "step": 3004 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.073505155513591e-05, |
| "loss": 0.5979, |
| "step": 3005 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.067667348788885e-05, |
| "loss": 0.7458, |
| "step": 3006 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.061834086873482e-05, |
| "loss": 0.7776, |
| "step": 3007 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.056005373591637e-05, |
| "loss": 0.7395, |
| "step": 3008 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.050181212764639e-05, |
| "loss": 0.6737, |
| "step": 3009 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.044361608210775e-05, |
| "loss": 0.6894, |
| "step": 3010 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.0385465637453604e-05, |
| "loss": 1.0309, |
| "step": 3011 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.032736083180716e-05, |
| "loss": 0.6945, |
| "step": 3012 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.026930170326164e-05, |
| "loss": 0.7493, |
| "step": 3013 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 3.02112882898804e-05, |
| "loss": 0.8388, |
| "step": 3014 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 3.0153320629696846e-05, |
| "loss": 0.7042, |
| "step": 3015 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 3.0095398760714267e-05, |
| "loss": 0.7494, |
| "step": 3016 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 3.003752272090603e-05, |
| "loss": 0.6994, |
| "step": 3017 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 2.9979692548215477e-05, |
| "loss": 0.6553, |
| "step": 3018 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 2.9921908280555778e-05, |
| "loss": 0.7313, |
| "step": 3019 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 2.9864169955810084e-05, |
| "loss": 0.7419, |
| "step": 3020 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 2.980647761183144e-05, |
| "loss": 0.7731, |
| "step": 3021 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.9748831286442657e-05, |
| "loss": 0.7237, |
| "step": 3022 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.9691231017436494e-05, |
| "loss": 0.6521, |
| "step": 3023 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.9633676842575387e-05, |
| "loss": 0.7137, |
| "step": 3024 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.9576168799591664e-05, |
| "loss": 0.6998, |
| "step": 3025 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.951870692618739e-05, |
| "loss": 0.8611, |
| "step": 3026 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.9461291260034264e-05, |
| "loss": 0.7364, |
| "step": 3027 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.940392183877382e-05, |
| "loss": 0.652, |
| "step": 3028 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.9346598700017226e-05, |
| "loss": 0.7365, |
| "step": 3029 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.9289321881345254e-05, |
| "loss": 0.6576, |
| "step": 3030 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.9232091420308383e-05, |
| "loss": 0.5657, |
| "step": 3031 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.9174907354426696e-05, |
| "loss": 0.7131, |
| "step": 3032 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.911776972118978e-05, |
| "loss": 0.7075, |
| "step": 3033 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.9060678558056874e-05, |
| "loss": 0.6957, |
| "step": 3034 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.900363390245674e-05, |
| "loss": 0.7524, |
| "step": 3035 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.8946635791787545e-05, |
| "loss": 0.7197, |
| "step": 3036 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.888968426341707e-05, |
| "loss": 0.7454, |
| "step": 3037 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8832779354682536e-05, |
| "loss": 0.7464, |
| "step": 3038 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8775921102890513e-05, |
| "loss": 0.8166, |
| "step": 3039 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8719109545317103e-05, |
| "loss": 0.6796, |
| "step": 3040 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8662344719207658e-05, |
| "loss": 0.7903, |
| "step": 3041 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8605626661776996e-05, |
| "loss": 0.6623, |
| "step": 3042 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.854895541020931e-05, |
| "loss": 0.6707, |
| "step": 3043 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8492331001657945e-05, |
| "loss": 0.7224, |
| "step": 3044 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8435753473245698e-05, |
| "loss": 0.639, |
| "step": 3045 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.8379222862064568e-05, |
| "loss": 0.7693, |
| "step": 3046 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.8322739205175763e-05, |
| "loss": 0.7514, |
| "step": 3047 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.8266302539609745e-05, |
| "loss": 0.7614, |
| "step": 3048 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.820991290236622e-05, |
| "loss": 0.6995, |
| "step": 3049 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.8153570330413925e-05, |
| "loss": 0.6454, |
| "step": 3050 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.8097274860690857e-05, |
| "loss": 0.7605, |
| "step": 3051 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.804102653010414e-05, |
| "loss": 0.7318, |
| "step": 3052 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.7984825375529877e-05, |
| "loss": 0.6778, |
| "step": 3053 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.7928671433813392e-05, |
| "loss": 0.5937, |
| "step": 3054 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.7872564741768913e-05, |
| "loss": 0.7342, |
| "step": 3055 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.7816505336179798e-05, |
| "loss": 0.6962, |
| "step": 3056 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.7760493253798393e-05, |
| "loss": 0.5959, |
| "step": 3057 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.770452853134593e-05, |
| "loss": 0.8126, |
| "step": 3058 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.7648611205512685e-05, |
| "loss": 0.7277, |
| "step": 3059 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.759274131295787e-05, |
| "loss": 0.6525, |
| "step": 3060 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.7536918890309493e-05, |
| "loss": 0.6761, |
| "step": 3061 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.7481143974164547e-05, |
| "loss": 0.7845, |
| "step": 3062 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.742541660108886e-05, |
| "loss": 0.7773, |
| "step": 3063 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.736973680761702e-05, |
| "loss": 0.6165, |
| "step": 3064 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.73141046302525e-05, |
| "loss": 0.6223, |
| "step": 3065 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.7258520105467567e-05, |
| "loss": 0.7605, |
| "step": 3066 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.720298326970315e-05, |
| "loss": 0.5629, |
| "step": 3067 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.7147494159369036e-05, |
| "loss": 0.7498, |
| "step": 3068 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.7092052810843593e-05, |
| "loss": 0.6361, |
| "step": 3069 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.7036659260473974e-05, |
| "loss": 0.7289, |
| "step": 3070 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.6981313544576016e-05, |
| "loss": 0.8828, |
| "step": 3071 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.6926015699434072e-05, |
| "loss": 0.6026, |
| "step": 3072 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.687076576130122e-05, |
| "loss": 0.7732, |
| "step": 3073 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.681556376639912e-05, |
| "loss": 0.6778, |
| "step": 3074 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.6760409750917927e-05, |
| "loss": 0.6975, |
| "step": 3075 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.6705303751016408e-05, |
| "loss": 0.6264, |
| "step": 3076 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.6650245802821884e-05, |
| "loss": 0.6789, |
| "step": 3077 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.659523594243004e-05, |
| "loss": 0.702, |
| "step": 3078 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.6540274205905168e-05, |
| "loss": 0.9042, |
| "step": 3079 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.6485360629279987e-05, |
| "loss": 0.6892, |
| "step": 3080 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.6430495248555544e-05, |
| "loss": 0.6651, |
| "step": 3081 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.6375678099701428e-05, |
| "loss": 0.7098, |
| "step": 3082 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.6320909218655486e-05, |
| "loss": 0.7337, |
| "step": 3083 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.6266188641323996e-05, |
| "loss": 0.5448, |
| "step": 3084 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.6211516403581582e-05, |
| "loss": 0.7427, |
| "step": 3085 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.6156892541271084e-05, |
| "loss": 0.7691, |
| "step": 3086 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.610231709020372e-05, |
| "loss": 0.8486, |
| "step": 3087 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.6047790086158952e-05, |
| "loss": 0.686, |
| "step": 3088 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.5993311564884417e-05, |
| "loss": 0.7776, |
| "step": 3089 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.593888156209603e-05, |
| "loss": 0.6824, |
| "step": 3090 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.5884500113477894e-05, |
| "loss": 0.637, |
| "step": 3091 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.5830167254682257e-05, |
| "loss": 0.6734, |
| "step": 3092 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.5775883021329562e-05, |
| "loss": 0.687, |
| "step": 3093 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.572164744900827e-05, |
| "loss": 0.6944, |
| "step": 3094 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.5667460573275028e-05, |
| "loss": 0.782, |
| "step": 3095 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.5613322429654574e-05, |
| "loss": 0.6375, |
| "step": 3096 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.5559233053639575e-05, |
| "loss": 0.7817, |
| "step": 3097 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.5505192480690865e-05, |
| "loss": 0.7291, |
| "step": 3098 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.5451200746237237e-05, |
| "loss": 0.5891, |
| "step": 3099 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.5397257885675397e-05, |
| "loss": 0.7397, |
| "step": 3100 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.5343363934370122e-05, |
| "loss": 0.6579, |
| "step": 3101 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.5289518927654023e-05, |
| "loss": 0.8571, |
| "step": 3102 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.5235722900827684e-05, |
| "loss": 0.5451, |
| "step": 3103 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.5181975889159615e-05, |
| "loss": 0.9301, |
| "step": 3104 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.5128277927886055e-05, |
| "loss": 0.6174, |
| "step": 3105 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.5074629052211217e-05, |
| "loss": 0.6359, |
| "step": 3106 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.5021029297307107e-05, |
| "loss": 0.7649, |
| "step": 3107 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.496747869831345e-05, |
| "loss": 0.7556, |
| "step": 3108 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.4913977290337842e-05, |
| "loss": 0.8617, |
| "step": 3109 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.48605251084556e-05, |
| "loss": 0.7836, |
| "step": 3110 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.480712218770972e-05, |
| "loss": 0.6031, |
| "step": 3111 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.475376856311097e-05, |
| "loss": 0.7999, |
| "step": 3112 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.470046426963778e-05, |
| "loss": 0.8265, |
| "step": 3113 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.464720934223619e-05, |
| "loss": 0.7921, |
| "step": 3114 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.4594003815819966e-05, |
| "loss": 0.7095, |
| "step": 3115 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.4540847725270378e-05, |
| "loss": 0.7534, |
| "step": 3116 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.448774110543638e-05, |
| "loss": 0.7081, |
| "step": 3117 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.4434683991134477e-05, |
| "loss": 0.7413, |
| "step": 3118 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.4381676417148648e-05, |
| "loss": 0.6881, |
| "step": 3119 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.432871841823047e-05, |
| "loss": 0.655, |
| "step": 3120 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.4275810029099024e-05, |
| "loss": 0.7231, |
| "step": 3121 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.4222951284440776e-05, |
| "loss": 0.7015, |
| "step": 3122 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.4170142218909742e-05, |
| "loss": 0.6438, |
| "step": 3123 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.411738286712735e-05, |
| "loss": 0.6888, |
| "step": 3124 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.406467326368237e-05, |
| "loss": 0.7553, |
| "step": 3125 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.401201344313102e-05, |
| "loss": 0.5768, |
| "step": 3126 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.3959403439996907e-05, |
| "loss": 0.7068, |
| "step": 3127 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.3906843288770886e-05, |
| "loss": 0.6629, |
| "step": 3128 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.3854333023911225e-05, |
| "loss": 0.7573, |
| "step": 3129 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.3801872679843385e-05, |
| "loss": 0.7663, |
| "step": 3130 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.374946229096019e-05, |
| "loss": 0.6664, |
| "step": 3131 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.3697101891621697e-05, |
| "loss": 0.729, |
| "step": 3132 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.364479151615513e-05, |
| "loss": 0.6841, |
| "step": 3133 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.3592531198854974e-05, |
| "loss": 0.7572, |
| "step": 3134 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.3540320973982922e-05, |
| "loss": 0.721, |
| "step": 3135 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.3488160875767717e-05, |
| "loss": 0.7077, |
| "step": 3136 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.3436050938405363e-05, |
| "loss": 0.6842, |
| "step": 3137 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.338399119605892e-05, |
| "loss": 0.7126, |
| "step": 3138 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.3331981682858505e-05, |
| "loss": 0.7343, |
| "step": 3139 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.3280022432901383e-05, |
| "loss": 0.702, |
| "step": 3140 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.3228113480251846e-05, |
| "loss": 0.776, |
| "step": 3141 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.3176254858941127e-05, |
| "loss": 0.7751, |
| "step": 3142 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.3124446602967597e-05, |
| "loss": 0.6545, |
| "step": 3143 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.307268874629649e-05, |
| "loss": 0.7138, |
| "step": 3144 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.302098132286006e-05, |
| "loss": 0.706, |
| "step": 3145 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.2969324366557522e-05, |
| "loss": 0.6488, |
| "step": 3146 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.291771791125491e-05, |
| "loss": 0.7156, |
| "step": 3147 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.2866161990785228e-05, |
| "loss": 0.724, |
| "step": 3148 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.2814656638948384e-05, |
| "loss": 0.6404, |
| "step": 3149 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.2763201889510987e-05, |
| "loss": 0.7384, |
| "step": 3150 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.2711797776206624e-05, |
| "loss": 0.7625, |
| "step": 3151 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.266044433273562e-05, |
| "loss": 0.7898, |
| "step": 3152 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.260914159276506e-05, |
| "loss": 0.7434, |
| "step": 3153 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.2557889589928815e-05, |
| "loss": 0.6486, |
| "step": 3154 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.2506688357827544e-05, |
| "loss": 0.6276, |
| "step": 3155 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.245553793002849e-05, |
| "loss": 0.7543, |
| "step": 3156 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.2404438340065736e-05, |
| "loss": 0.7806, |
| "step": 3157 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.2353389621439903e-05, |
| "loss": 0.6806, |
| "step": 3158 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.230239180761836e-05, |
| "loss": 0.6416, |
| "step": 3159 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.2251444932035094e-05, |
| "loss": 0.6174, |
| "step": 3160 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.22005490280906e-05, |
| "loss": 0.6585, |
| "step": 3161 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.2149704129152084e-05, |
| "loss": 0.7731, |
| "step": 3162 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.2098910268553263e-05, |
| "loss": 0.7195, |
| "step": 3163 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.204816747959434e-05, |
| "loss": 0.691, |
| "step": 3164 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.199747579554211e-05, |
| "loss": 0.6855, |
| "step": 3165 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.194683524962986e-05, |
| "loss": 0.6972, |
| "step": 3166 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.1896245875057265e-05, |
| "loss": 0.6637, |
| "step": 3167 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.184570770499056e-05, |
| "loss": 0.7567, |
| "step": 3168 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.1795220772562386e-05, |
| "loss": 0.7841, |
| "step": 3169 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.174478511087171e-05, |
| "loss": 0.6571, |
| "step": 3170 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.1694400752984013e-05, |
| "loss": 0.7274, |
| "step": 3171 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.1644067731931007e-05, |
| "loss": 0.6374, |
| "step": 3172 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.1593786080710864e-05, |
| "loss": 0.6825, |
| "step": 3173 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.1543555832288054e-05, |
| "loss": 0.6977, |
| "step": 3174 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.149337701959325e-05, |
| "loss": 0.6492, |
| "step": 3175 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.1443249675523536e-05, |
| "loss": 0.6597, |
| "step": 3176 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.1393173832942205e-05, |
| "loss": 0.6028, |
| "step": 3177 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.134314952467873e-05, |
| "loss": 0.7685, |
| "step": 3178 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.1293176783528867e-05, |
| "loss": 0.7196, |
| "step": 3179 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.1243255642254578e-05, |
| "loss": 0.6479, |
| "step": 3180 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.1193386133583904e-05, |
| "loss": 0.7614, |
| "step": 3181 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.1143568290211114e-05, |
| "loss": 0.6816, |
| "step": 3182 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.1093802144796627e-05, |
| "loss": 0.7594, |
| "step": 3183 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.1044087729966856e-05, |
| "loss": 0.6665, |
| "step": 3184 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.099442507831444e-05, |
| "loss": 0.6588, |
| "step": 3185 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.0944814222397944e-05, |
| "loss": 0.7978, |
| "step": 3186 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.089525519474209e-05, |
| "loss": 0.7243, |
| "step": 3187 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.0845748027837586e-05, |
| "loss": 0.7428, |
| "step": 3188 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.0796292754141088e-05, |
| "loss": 0.7041, |
| "step": 3189 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.074688940607529e-05, |
| "loss": 0.6377, |
| "step": 3190 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.0697538016028862e-05, |
| "loss": 0.6814, |
| "step": 3191 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.0648238616356332e-05, |
| "loss": 0.7488, |
| "step": 3192 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.0598991239378185e-05, |
| "loss": 0.7016, |
| "step": 3193 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.0549795917380864e-05, |
| "loss": 0.7111, |
| "step": 3194 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.050065268261655e-05, |
| "loss": 0.8881, |
| "step": 3195 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.045156156730338e-05, |
| "loss": 0.7649, |
| "step": 3196 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.0402522603625308e-05, |
| "loss": 0.7176, |
| "step": 3197 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.035353582373205e-05, |
| "loss": 0.7165, |
| "step": 3198 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.0304601259739175e-05, |
| "loss": 0.6536, |
| "step": 3199 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.025571894372794e-05, |
| "loss": 0.7246, |
| "step": 3200 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.0206888907745415e-05, |
| "loss": 0.7693, |
| "step": 3201 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.0158111183804407e-05, |
| "loss": 0.7736, |
| "step": 3202 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.010938580388334e-05, |
| "loss": 0.7311, |
| "step": 3203 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.0060712799926408e-05, |
| "loss": 0.662, |
| "step": 3204 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.001209220384346e-05, |
| "loss": 0.6213, |
| "step": 3205 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 1.9963524047509897e-05, |
| "loss": 0.7469, |
| "step": 3206 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.991500836276685e-05, |
| "loss": 0.5494, |
| "step": 3207 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.9866545181421013e-05, |
| "loss": 0.6462, |
| "step": 3208 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.9818134535244624e-05, |
| "loss": 0.704, |
| "step": 3209 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.976977645597552e-05, |
| "loss": 0.729, |
| "step": 3210 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.9721470975317092e-05, |
| "loss": 0.6594, |
| "step": 3211 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.967321812493813e-05, |
| "loss": 0.701, |
| "step": 3212 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.9625017936473124e-05, |
| "loss": 0.6732, |
| "step": 3213 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 1.9576870441521833e-05, |
| "loss": 0.6238, |
| "step": 3214 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9528775671649592e-05, |
| "loss": 0.7944, |
| "step": 3215 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9480733658387175e-05, |
| "loss": 0.6794, |
| "step": 3216 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9432744433230666e-05, |
| "loss": 0.7424, |
| "step": 3217 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9384808027641666e-05, |
| "loss": 0.6073, |
| "step": 3218 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9336924473047037e-05, |
| "loss": 0.6668, |
| "step": 3219 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9289093800839066e-05, |
| "loss": 0.6922, |
| "step": 3220 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9241316042375366e-05, |
| "loss": 0.7422, |
| "step": 3221 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 1.9193591228978814e-05, |
| "loss": 0.68, |
| "step": 3222 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.9145919391937616e-05, |
| "loss": 0.6408, |
| "step": 3223 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.9098300562505266e-05, |
| "loss": 0.7263, |
| "step": 3224 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.9050734771900413e-05, |
| "loss": 0.6976, |
| "step": 3225 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.9003222051307047e-05, |
| "loss": 0.576, |
| "step": 3226 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.8955762431874312e-05, |
| "loss": 0.758, |
| "step": 3227 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.8908355944716517e-05, |
| "loss": 0.6367, |
| "step": 3228 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.8861002620913172e-05, |
| "loss": 0.6995, |
| "step": 3229 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 1.8813702491508955e-05, |
| "loss": 0.7699, |
| "step": 3230 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.87664555875136e-05, |
| "loss": 0.7118, |
| "step": 3231 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.871926193990202e-05, |
| "loss": 0.6419, |
| "step": 3232 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.8672121579614144e-05, |
| "loss": 0.6712, |
| "step": 3233 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.8625034537555018e-05, |
| "loss": 0.6847, |
| "step": 3234 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.8578000844594747e-05, |
| "loss": 0.5491, |
| "step": 3235 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.8531020531568378e-05, |
| "loss": 0.6546, |
| "step": 3236 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.8484093629276032e-05, |
| "loss": 0.5877, |
| "step": 3237 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 1.8437220168482837e-05, |
| "loss": 0.7664, |
| "step": 3238 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.8390400179918786e-05, |
| "loss": 0.6371, |
| "step": 3239 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.8343633694278895e-05, |
| "loss": 0.6142, |
| "step": 3240 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.8296920742223122e-05, |
| "loss": 0.6295, |
| "step": 3241 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.825026135437622e-05, |
| "loss": 0.6407, |
| "step": 3242 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.8203655561327938e-05, |
| "loss": 0.6669, |
| "step": 3243 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.8157103393632868e-05, |
| "loss": 0.7123, |
| "step": 3244 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.8110604881810355e-05, |
| "loss": 0.5909, |
| "step": 3245 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 1.8064160056344716e-05, |
| "loss": 0.6804, |
| "step": 3246 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.801776894768492e-05, |
| "loss": 0.7808, |
| "step": 3247 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.7971431586244815e-05, |
| "loss": 0.6845, |
| "step": 3248 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.7925148002403026e-05, |
| "loss": 0.6425, |
| "step": 3249 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.7878918226502816e-05, |
| "loss": 0.7069, |
| "step": 3250 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.783274228885229e-05, |
| "loss": 0.6969, |
| "step": 3251 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.7786620219724204e-05, |
| "loss": 0.6531, |
| "step": 3252 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.7740552049355962e-05, |
| "loss": 0.6585, |
| "step": 3253 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 1.7694537807949708e-05, |
| "loss": 0.7001, |
| "step": 3254 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.7648577525672194e-05, |
| "loss": 0.7631, |
| "step": 3255 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.7602671232654754e-05, |
| "loss": 0.6829, |
| "step": 3256 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.7556818958993403e-05, |
| "loss": 0.6099, |
| "step": 3257 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.751102073474873e-05, |
| "loss": 0.709, |
| "step": 3258 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.746527658994581e-05, |
| "loss": 0.8643, |
| "step": 3259 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.741958655457436e-05, |
| "loss": 0.7701, |
| "step": 3260 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.7373950658588557e-05, |
| "loss": 0.6955, |
| "step": 3261 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 1.7328368931907113e-05, |
| "loss": 0.6581, |
| "step": 3262 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.7282841404413264e-05, |
| "loss": 0.7879, |
| "step": 3263 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.723736810595461e-05, |
| "loss": 0.6529, |
| "step": 3264 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.7191949066343305e-05, |
| "loss": 0.7637, |
| "step": 3265 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.7146584315355885e-05, |
| "loss": 0.6176, |
| "step": 3266 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.710127388273327e-05, |
| "loss": 0.6326, |
| "step": 3267 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.7056017798180824e-05, |
| "loss": 0.6485, |
| "step": 3268 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.7010816091368265e-05, |
| "loss": 0.8045, |
| "step": 3269 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 1.69656687919296e-05, |
| "loss": 0.6509, |
| "step": 3270 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.6920575929463256e-05, |
| "loss": 0.7548, |
| "step": 3271 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.6875537533531948e-05, |
| "loss": 0.7417, |
| "step": 3272 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.6830553633662606e-05, |
| "loss": 0.6007, |
| "step": 3273 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.6785624259346557e-05, |
| "loss": 0.6032, |
| "step": 3274 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.6740749440039262e-05, |
| "loss": 0.7323, |
| "step": 3275 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.6695929205160487e-05, |
| "loss": 0.6936, |
| "step": 3276 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.665116358409424e-05, |
| "loss": 0.7821, |
| "step": 3277 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 1.660645260618864e-05, |
| "loss": 0.6713, |
| "step": 3278 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.6561796300756006e-05, |
| "loss": 0.7688, |
| "step": 3279 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.65171946970729e-05, |
| "loss": 0.7459, |
| "step": 3280 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.6472647824379882e-05, |
| "loss": 0.6855, |
| "step": 3281 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.6428155711881722e-05, |
| "loss": 0.6401, |
| "step": 3282 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.6383718388747316e-05, |
| "loss": 0.6522, |
| "step": 3283 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.6339335884109518e-05, |
| "loss": 0.5665, |
| "step": 3284 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.6295008227065367e-05, |
| "loss": 0.7228, |
| "step": 3285 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 1.6250735446675912e-05, |
| "loss": 0.7127, |
| "step": 3286 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.620651757196615e-05, |
| "loss": 0.6596, |
| "step": 3287 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.6162354631925204e-05, |
| "loss": 0.7576, |
| "step": 3288 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.6118246655506065e-05, |
| "loss": 0.7588, |
| "step": 3289 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.607419367162577e-05, |
| "loss": 0.7071, |
| "step": 3290 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.6030195709165295e-05, |
| "loss": 0.7232, |
| "step": 3291 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.598625279696948e-05, |
| "loss": 0.6292, |
| "step": 3292 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.5942364963847156e-05, |
| "loss": 0.7072, |
| "step": 3293 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 1.589853223857103e-05, |
| "loss": 0.5998, |
| "step": 3294 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.5854754649877603e-05, |
| "loss": 0.6431, |
| "step": 3295 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.5811032226467305e-05, |
| "loss": 0.6734, |
| "step": 3296 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.576736499700442e-05, |
| "loss": 0.646, |
| "step": 3297 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.5723752990116947e-05, |
| "loss": 0.7127, |
| "step": 3298 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.5680196234396783e-05, |
| "loss": 0.7218, |
| "step": 3299 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.563669475839956e-05, |
| "loss": 0.6639, |
| "step": 3300 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.559324859064464e-05, |
| "loss": 0.7533, |
| "step": 3301 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 1.5549857759615194e-05, |
| "loss": 0.7024, |
| "step": 3302 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.550652229375804e-05, |
| "loss": 0.7989, |
| "step": 3303 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.5463242221483743e-05, |
| "loss": 0.7309, |
| "step": 3304 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.542001757116658e-05, |
| "loss": 0.7007, |
| "step": 3305 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.5376848371144402e-05, |
| "loss": 0.738, |
| "step": 3306 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.533373464971879e-05, |
| "loss": 0.7312, |
| "step": 3307 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.529067643515495e-05, |
| "loss": 0.6555, |
| "step": 3308 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.5247673755681624e-05, |
| "loss": 0.8253, |
| "step": 3309 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 1.5204726639491218e-05, |
| "loss": 0.6797, |
| "step": 3310 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.5161835114739708e-05, |
| "loss": 0.694, |
| "step": 3311 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.5118999209546559e-05, |
| "loss": 0.6936, |
| "step": 3312 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.5076218951994858e-05, |
| "loss": 0.6164, |
| "step": 3313 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.5033494370131163e-05, |
| "loss": 0.6596, |
| "step": 3314 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.4990825491965522e-05, |
| "loss": 0.7225, |
| "step": 3315 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.4948212345471491e-05, |
| "loss": 0.7005, |
| "step": 3316 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.4905654958586068e-05, |
| "loss": 0.714, |
| "step": 3317 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 1.4863153359209692e-05, |
| "loss": 0.7454, |
| "step": 3318 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.4820707575206294e-05, |
| "loss": 0.6974, |
| "step": 3319 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.4778317634403083e-05, |
| "loss": 0.8554, |
| "step": 3320 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.4735983564590783e-05, |
| "loss": 0.7199, |
| "step": 3321 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.4693705393523449e-05, |
| "loss": 0.6265, |
| "step": 3322 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.4651483148918432e-05, |
| "loss": 0.7087, |
| "step": 3323 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.460931685845649e-05, |
| "loss": 0.699, |
| "step": 3324 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.4567206549781698e-05, |
| "loss": 0.705, |
| "step": 3325 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 1.4525152250501361e-05, |
| "loss": 0.731, |
| "step": 3326 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.4483153988186116e-05, |
| "loss": 0.7884, |
| "step": 3327 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.444121179036989e-05, |
| "loss": 0.6553, |
| "step": 3328 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.4399325684549781e-05, |
| "loss": 0.7723, |
| "step": 3329 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.4357495698186186e-05, |
| "loss": 0.6328, |
| "step": 3330 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.4315721858702613e-05, |
| "loss": 0.5567, |
| "step": 3331 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.427400419348588e-05, |
| "loss": 0.6091, |
| "step": 3332 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.4232342729885916e-05, |
| "loss": 0.6114, |
| "step": 3333 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 1.4190737495215745e-05, |
| "loss": 0.7248, |
| "step": 3334 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.41491885167517e-05, |
| "loss": 0.5502, |
| "step": 3335 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.4107695821733025e-05, |
| "loss": 0.6711, |
| "step": 3336 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.4066259437362205e-05, |
| "loss": 0.681, |
| "step": 3337 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.402487939080479e-05, |
| "loss": 0.8116, |
| "step": 3338 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.3983555709189333e-05, |
| "loss": 0.7097, |
| "step": 3339 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.3942288419607475e-05, |
| "loss": 0.7368, |
| "step": 3340 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.3901077549113906e-05, |
| "loss": 0.5746, |
| "step": 3341 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 1.3859923124726281e-05, |
| "loss": 0.7134, |
| "step": 3342 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.3818825173425287e-05, |
| "loss": 0.6099, |
| "step": 3343 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.3777783722154603e-05, |
| "loss": 0.7361, |
| "step": 3344 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.3736798797820782e-05, |
| "loss": 0.7597, |
| "step": 3345 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.369587042729341e-05, |
| "loss": 0.6406, |
| "step": 3346 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.3654998637404981e-05, |
| "loss": 0.7219, |
| "step": 3347 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.3614183454950824e-05, |
| "loss": 0.7869, |
| "step": 3348 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.3573424906689258e-05, |
| "loss": 0.7048, |
| "step": 3349 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 1.3532723019341375e-05, |
| "loss": 0.8082, |
| "step": 3350 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3492077819591187e-05, |
| "loss": 0.6617, |
| "step": 3351 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3451489334085554e-05, |
| "loss": 0.5602, |
| "step": 3352 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3410957589434082e-05, |
| "loss": 0.7496, |
| "step": 3353 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3370482612209223e-05, |
| "loss": 0.791, |
| "step": 3354 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3330064428946254e-05, |
| "loss": 0.8307, |
| "step": 3355 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3289703066143111e-05, |
| "loss": 0.7203, |
| "step": 3356 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3249398550260561e-05, |
| "loss": 0.734, |
| "step": 3357 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 1.3209150907722123e-05, |
| "loss": 0.7205, |
| "step": 3358 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.3168960164913924e-05, |
| "loss": 0.6422, |
| "step": 3359 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.3128826348184887e-05, |
| "loss": 0.595, |
| "step": 3360 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.3088749483846596e-05, |
| "loss": 0.5669, |
| "step": 3361 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.3048729598173248e-05, |
| "loss": 0.7438, |
| "step": 3362 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.3008766717401743e-05, |
| "loss": 0.72, |
| "step": 3363 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.2968860867731569e-05, |
| "loss": 0.8365, |
| "step": 3364 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.2929012075324831e-05, |
| "loss": 0.7655, |
| "step": 3365 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 1.2889220366306276e-05, |
| "loss": 0.7364, |
| "step": 3366 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2849485766763147e-05, |
| "loss": 0.596, |
| "step": 3367 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2809808302745297e-05, |
| "loss": 0.6937, |
| "step": 3368 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2770188000265137e-05, |
| "loss": 0.695, |
| "step": 3369 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2730624885297537e-05, |
| "loss": 0.6485, |
| "step": 3370 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2691118983779937e-05, |
| "loss": 0.6978, |
| "step": 3371 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2651670321612263e-05, |
| "loss": 0.7086, |
| "step": 3372 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2612278924656862e-05, |
| "loss": 0.7564, |
| "step": 3373 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 1.2572944818738586e-05, |
| "loss": 0.6358, |
| "step": 3374 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.2533668029644751e-05, |
| "loss": 0.6349, |
| "step": 3375 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.2494448583125018e-05, |
| "loss": 0.7699, |
| "step": 3376 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.2455286504891528e-05, |
| "loss": 0.7081, |
| "step": 3377 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.2416181820618744e-05, |
| "loss": 0.6856, |
| "step": 3378 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.237713455594357e-05, |
| "loss": 0.6728, |
| "step": 3379 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.233814473646524e-05, |
| "loss": 0.6956, |
| "step": 3380 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.2299212387745295e-05, |
| "loss": 0.5776, |
| "step": 3381 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 1.2260337535307631e-05, |
| "loss": 0.621, |
| "step": 3382 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.222152020463848e-05, |
| "loss": 0.7125, |
| "step": 3383 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.218276042118629e-05, |
| "loss": 0.8168, |
| "step": 3384 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.214405821036182e-05, |
| "loss": 0.7213, |
| "step": 3385 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.2105413597538107e-05, |
| "loss": 0.5724, |
| "step": 3386 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.2066826608050374e-05, |
| "loss": 0.78, |
| "step": 3387 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.202829726719611e-05, |
| "loss": 0.617, |
| "step": 3388 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.1989825600235016e-05, |
| "loss": 0.6821, |
| "step": 3389 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 1.195141163238892e-05, |
| "loss": 0.6974, |
| "step": 3390 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.1913055388841909e-05, |
| "loss": 0.6424, |
| "step": 3391 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.1874756894740135e-05, |
| "loss": 0.6534, |
| "step": 3392 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.1836516175191948e-05, |
| "loss": 0.7664, |
| "step": 3393 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.1798333255267857e-05, |
| "loss": 0.7151, |
| "step": 3394 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.1760208160000363e-05, |
| "loss": 0.6757, |
| "step": 3395 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.172214091438416e-05, |
| "loss": 0.6363, |
| "step": 3396 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.1684131543375986e-05, |
| "loss": 0.7267, |
| "step": 3397 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 1.1646180071894607e-05, |
| "loss": 0.7248, |
| "step": 3398 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.1608286524820878e-05, |
| "loss": 0.6677, |
| "step": 3399 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.1570450926997655e-05, |
| "loss": 0.7026, |
| "step": 3400 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.1532673303229769e-05, |
| "loss": 0.7734, |
| "step": 3401 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.1494953678284103e-05, |
| "loss": 0.5277, |
| "step": 3402 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.1457292076889525e-05, |
| "loss": 0.6706, |
| "step": 3403 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.141968852373676e-05, |
| "loss": 0.6472, |
| "step": 3404 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.13821430434786e-05, |
| "loss": 0.7212, |
| "step": 3405 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 1.1344655660729675e-05, |
| "loss": 0.7058, |
| "step": 3406 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.1307226400066573e-05, |
| "loss": 0.7681, |
| "step": 3407 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.1269855286027797e-05, |
| "loss": 0.593, |
| "step": 3408 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.123254234311364e-05, |
| "loss": 0.6433, |
| "step": 3409 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.1195287595786352e-05, |
| "loss": 0.6224, |
| "step": 3410 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.1158091068470011e-05, |
| "loss": 0.6322, |
| "step": 3411 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.1120952785550476e-05, |
| "loss": 0.787, |
| "step": 3412 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.1083872771375459e-05, |
| "loss": 0.6334, |
| "step": 3413 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 1.1046851050254502e-05, |
| "loss": 0.6295, |
| "step": 3414 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.1009887646458861e-05, |
| "loss": 0.6515, |
| "step": 3415 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.0972982584221592e-05, |
| "loss": 0.6799, |
| "step": 3416 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.0936135887737542e-05, |
| "loss": 0.6979, |
| "step": 3417 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.0899347581163221e-05, |
| "loss": 0.7052, |
| "step": 3418 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.0862617688616917e-05, |
| "loss": 0.6454, |
| "step": 3419 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.0825946234178574e-05, |
| "loss": 0.6757, |
| "step": 3420 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.0789333241889854e-05, |
| "loss": 0.7214, |
| "step": 3421 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 1.075277873575412e-05, |
| "loss": 0.634, |
| "step": 3422 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.0716282739736316e-05, |
| "loss": 0.5729, |
| "step": 3423 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.067984527776309e-05, |
| "loss": 0.663, |
| "step": 3424 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.0643466373722711e-05, |
| "loss": 0.6547, |
| "step": 3425 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.0607146051465012e-05, |
| "loss": 0.6794, |
| "step": 3426 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.0570884334801457e-05, |
| "loss": 0.6314, |
| "step": 3427 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.0534681247505106e-05, |
| "loss": 0.7596, |
| "step": 3428 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.049853681331051e-05, |
| "loss": 0.6964, |
| "step": 3429 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 1.0462451055913847e-05, |
| "loss": 0.785, |
| "step": 3430 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.0426423998972811e-05, |
| "loss": 0.7619, |
| "step": 3431 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.0390455666106547e-05, |
| "loss": 0.6658, |
| "step": 3432 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.03545460808958e-05, |
| "loss": 0.7013, |
| "step": 3433 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.0318695266882693e-05, |
| "loss": 0.8046, |
| "step": 3434 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.0282903247570908e-05, |
| "loss": 0.6932, |
| "step": 3435 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.024717004642557e-05, |
| "loss": 0.6654, |
| "step": 3436 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.0211495686873174e-05, |
| "loss": 0.618, |
| "step": 3437 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 1.0175880192301713e-05, |
| "loss": 0.7176, |
| "step": 3438 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 1.0140323586060563e-05, |
| "loss": 0.6271, |
| "step": 3439 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 1.010482589146048e-05, |
| "loss": 0.6717, |
| "step": 3440 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 1.0069387131773612e-05, |
| "loss": 0.7172, |
| "step": 3441 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 1.0034007330233486e-05, |
| "loss": 0.6903, |
| "step": 3442 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 9.998686510034916e-06, |
| "loss": 0.6868, |
| "step": 3443 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 9.963424694334122e-06, |
| "loss": 0.7248, |
| "step": 3444 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 9.928221906248614e-06, |
| "loss": 0.5985, |
| "step": 3445 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 9.893078168857173e-06, |
| "loss": 0.6184, |
| "step": 3446 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.857993505199913e-06, |
| "loss": 0.698, |
| "step": 3447 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.822967938278171e-06, |
| "loss": 0.5248, |
| "step": 3448 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.788001491054577e-06, |
| "loss": 0.6832, |
| "step": 3449 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.753094186453026e-06, |
| "loss": 0.7532, |
| "step": 3450 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.71824604735856e-06, |
| "loss": 0.6092, |
| "step": 3451 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.683457096617488e-06, |
| "loss": 0.679, |
| "step": 3452 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.648727357037335e-06, |
| "loss": 0.6772, |
| "step": 3453 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 9.614056851386744e-06, |
| "loss": 0.6717, |
| "step": 3454 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.579445602395576e-06, |
| "loss": 0.6338, |
| "step": 3455 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.544893632754814e-06, |
| "loss": 0.742, |
| "step": 3456 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.510400965116595e-06, |
| "loss": 0.6677, |
| "step": 3457 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.475967622094205e-06, |
| "loss": 0.6977, |
| "step": 3458 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.441593626261947e-06, |
| "loss": 0.7612, |
| "step": 3459 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.407279000155312e-06, |
| "loss": 0.8282, |
| "step": 3460 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.373023766270827e-06, |
| "loss": 0.7906, |
| "step": 3461 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 9.338827947066076e-06, |
| "loss": 0.7513, |
| "step": 3462 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.30469156495969e-06, |
| "loss": 0.8023, |
| "step": 3463 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.270614642331376e-06, |
| "loss": 0.6327, |
| "step": 3464 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.23659720152179e-06, |
| "loss": 0.6193, |
| "step": 3465 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.202639264832668e-06, |
| "loss": 0.6558, |
| "step": 3466 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.168740854526648e-06, |
| "loss": 0.6934, |
| "step": 3467 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.134901992827427e-06, |
| "loss": 0.7386, |
| "step": 3468 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.101122701919629e-06, |
| "loss": 0.6215, |
| "step": 3469 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 9.067403003948782e-06, |
| "loss": 0.7285, |
| "step": 3470 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 9.033742921021415e-06, |
| "loss": 0.71, |
| "step": 3471 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 9.000142475204964e-06, |
| "loss": 0.7302, |
| "step": 3472 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 8.966601688527686e-06, |
| "loss": 0.6238, |
| "step": 3473 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 8.933120582978827e-06, |
| "loss": 0.602, |
| "step": 3474 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 8.89969918050847e-06, |
| "loss": 0.6694, |
| "step": 3475 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 8.866337503027522e-06, |
| "loss": 0.6838, |
| "step": 3476 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 8.833035572407777e-06, |
| "loss": 0.7906, |
| "step": 3477 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 8.79979341048187e-06, |
| "loss": 0.6009, |
| "step": 3478 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.766611039043193e-06, |
| "loss": 0.5806, |
| "step": 3479 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.733488479845997e-06, |
| "loss": 0.8257, |
| "step": 3480 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.700425754605268e-06, |
| "loss": 0.6316, |
| "step": 3481 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.667422884996823e-06, |
| "loss": 0.6162, |
| "step": 3482 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.634479892657221e-06, |
| "loss": 0.6765, |
| "step": 3483 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.60159679918372e-06, |
| "loss": 0.7833, |
| "step": 3484 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.568773626134364e-06, |
| "loss": 0.765, |
| "step": 3485 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 8.536010395027904e-06, |
| "loss": 0.7669, |
| "step": 3486 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.503307127343751e-06, |
| "loss": 0.682, |
| "step": 3487 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.470663844522052e-06, |
| "loss": 0.6625, |
| "step": 3488 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.438080567963625e-06, |
| "loss": 0.715, |
| "step": 3489 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.405557319029912e-06, |
| "loss": 0.7023, |
| "step": 3490 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.373094119043023e-06, |
| "loss": 0.6978, |
| "step": 3491 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.340690989285726e-06, |
| "loss": 0.6732, |
| "step": 3492 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.308347951001361e-06, |
| "loss": 0.6345, |
| "step": 3493 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 8.276065025393908e-06, |
| "loss": 0.5816, |
| "step": 3494 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.243842233627896e-06, |
| "loss": 0.6632, |
| "step": 3495 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.21167959682848e-06, |
| "loss": 0.589, |
| "step": 3496 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.179577136081361e-06, |
| "loss": 0.5358, |
| "step": 3497 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.14753487243276e-06, |
| "loss": 0.6908, |
| "step": 3498 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.115552826889461e-06, |
| "loss": 0.7534, |
| "step": 3499 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.083631020418791e-06, |
| "loss": 0.672, |
| "step": 3500 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.051769473948512e-06, |
| "loss": 0.5886, |
| "step": 3501 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 8.019968208366958e-06, |
| "loss": 0.732, |
| "step": 3502 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.988227244522916e-06, |
| "loss": 0.727, |
| "step": 3503 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.956546603225601e-06, |
| "loss": 0.6834, |
| "step": 3504 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.924926305244728e-06, |
| "loss": 0.6765, |
| "step": 3505 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.893366371310462e-06, |
| "loss": 0.6227, |
| "step": 3506 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.86186682211334e-06, |
| "loss": 0.6952, |
| "step": 3507 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.830427678304353e-06, |
| "loss": 0.7067, |
| "step": 3508 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.799048960494848e-06, |
| "loss": 0.7066, |
| "step": 3509 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.767730689256614e-06, |
| "loss": 0.7127, |
| "step": 3510 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.736472885121792e-06, |
| "loss": 0.5671, |
| "step": 3511 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.705275568582848e-06, |
| "loss": 0.6785, |
| "step": 3512 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.674138760092619e-06, |
| "loss": 0.6866, |
| "step": 3513 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.6430624800643e-06, |
| "loss": 0.6382, |
| "step": 3514 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.612046748871327e-06, |
| "loss": 0.6426, |
| "step": 3515 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.581091586847522e-06, |
| "loss": 0.644, |
| "step": 3516 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.5501970142869634e-06, |
| "loss": 0.6609, |
| "step": 3517 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.519363051443995e-06, |
| "loss": 0.6774, |
| "step": 3518 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.488589718533234e-06, |
| "loss": 0.7174, |
| "step": 3519 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.457877035729588e-06, |
| "loss": 0.6395, |
| "step": 3520 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.427225023168127e-06, |
| "loss": 0.7698, |
| "step": 3521 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.3966337009442e-06, |
| "loss": 0.6063, |
| "step": 3522 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.366103089113374e-06, |
| "loss": 0.5887, |
| "step": 3523 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.335633207691361e-06, |
| "loss": 0.6803, |
| "step": 3524 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.305224076654127e-06, |
| "loss": 0.6974, |
| "step": 3525 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 7.274875715937746e-06, |
| "loss": 0.6216, |
| "step": 3526 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.24458814543848e-06, |
| "loss": 0.7976, |
| "step": 3527 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.21436138501278e-06, |
| "loss": 0.736, |
| "step": 3528 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.184195454477138e-06, |
| "loss": 0.6963, |
| "step": 3529 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.154090373608235e-06, |
| "loss": 0.6817, |
| "step": 3530 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.124046162142861e-06, |
| "loss": 0.7671, |
| "step": 3531 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.094062839777837e-06, |
| "loss": 0.5841, |
| "step": 3532 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.064140426170141e-06, |
| "loss": 0.6577, |
| "step": 3533 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.03427894093679e-06, |
| "loss": 0.5979, |
| "step": 3534 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 7.004478403654835e-06, |
| "loss": 0.7418, |
| "step": 3535 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 6.974738833861383e-06, |
| "loss": 0.6404, |
| "step": 3536 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 6.945060251053603e-06, |
| "loss": 0.6472, |
| "step": 3537 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 6.915442674688632e-06, |
| "loss": 0.6831, |
| "step": 3538 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 6.885886124183649e-06, |
| "loss": 0.5881, |
| "step": 3539 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 6.856390618915775e-06, |
| "loss": 0.655, |
| "step": 3540 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 6.826956178222177e-06, |
| "loss": 0.7027, |
| "step": 3541 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 6.7975828213999725e-06, |
| "loss": 0.6806, |
| "step": 3542 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.768270567706158e-06, |
| "loss": 0.6387, |
| "step": 3543 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.739019436357774e-06, |
| "loss": 0.7274, |
| "step": 3544 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.709829446531734e-06, |
| "loss": 0.6556, |
| "step": 3545 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.680700617364877e-06, |
| "loss": 0.7245, |
| "step": 3546 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.651632967953925e-06, |
| "loss": 0.7239, |
| "step": 3547 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.622626517355557e-06, |
| "loss": 0.714, |
| "step": 3548 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.5936812845862395e-06, |
| "loss": 0.7507, |
| "step": 3549 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 6.564797288622371e-06, |
| "loss": 0.8123, |
| "step": 3550 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.535974548400192e-06, |
| "loss": 0.6595, |
| "step": 3551 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.507213082815744e-06, |
| "loss": 0.6654, |
| "step": 3552 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.478512910724966e-06, |
| "loss": 0.6483, |
| "step": 3553 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.449874050943549e-06, |
| "loss": 0.7007, |
| "step": 3554 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.4212965222470115e-06, |
| "loss": 0.6482, |
| "step": 3555 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.392780343370686e-06, |
| "loss": 0.6774, |
| "step": 3556 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.3643255330096515e-06, |
| "loss": 0.627, |
| "step": 3557 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 6.335932109818754e-06, |
| "loss": 0.6942, |
| "step": 3558 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.307600092412647e-06, |
| "loss": 0.7169, |
| "step": 3559 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.2793294993656494e-06, |
| "loss": 0.7734, |
| "step": 3560 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.251120349211847e-06, |
| "loss": 0.6412, |
| "step": 3561 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.222972660445081e-06, |
| "loss": 0.826, |
| "step": 3562 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.194886451518822e-06, |
| "loss": 0.8978, |
| "step": 3563 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.166861740846297e-06, |
| "loss": 0.6954, |
| "step": 3564 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.138898546800398e-06, |
| "loss": 0.7732, |
| "step": 3565 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 6.110996887713661e-06, |
| "loss": 0.6706, |
| "step": 3566 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 6.083156781878318e-06, |
| "loss": 0.7175, |
| "step": 3567 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 6.055378247546218e-06, |
| "loss": 0.5223, |
| "step": 3568 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 6.027661302928867e-06, |
| "loss": 0.6253, |
| "step": 3569 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 6.000005966197387e-06, |
| "loss": 0.568, |
| "step": 3570 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 5.972412255482473e-06, |
| "loss": 0.7352, |
| "step": 3571 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 5.9448801888744795e-06, |
| "loss": 0.6638, |
| "step": 3572 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 5.917409784423334e-06, |
| "loss": 0.7207, |
| "step": 3573 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 5.8900010601384835e-06, |
| "loss": 0.6272, |
| "step": 3574 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.86265403398899e-06, |
| "loss": 0.793, |
| "step": 3575 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.835368723903456e-06, |
| "loss": 0.7517, |
| "step": 3576 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.808145147770016e-06, |
| "loss": 0.6889, |
| "step": 3577 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.780983323436373e-06, |
| "loss": 0.7372, |
| "step": 3578 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.7538832687096586e-06, |
| "loss": 0.6594, |
| "step": 3579 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.726845001356573e-06, |
| "loss": 0.5621, |
| "step": 3580 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.699868539103326e-06, |
| "loss": 0.6722, |
| "step": 3581 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 5.672953899635525e-06, |
| "loss": 0.8223, |
| "step": 3582 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.646101100598344e-06, |
| "loss": 0.6327, |
| "step": 3583 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.6193101595963585e-06, |
| "loss": 0.7267, |
| "step": 3584 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.592581094193583e-06, |
| "loss": 0.7317, |
| "step": 3585 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.565913921913513e-06, |
| "loss": 0.6914, |
| "step": 3586 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.5393086602390066e-06, |
| "loss": 0.689, |
| "step": 3587 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.512765326612379e-06, |
| "loss": 0.5784, |
| "step": 3588 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.486283938435355e-06, |
| "loss": 0.7296, |
| "step": 3589 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 5.45986451306899e-06, |
| "loss": 0.7332, |
| "step": 3590 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.433507067833776e-06, |
| "loss": 0.7716, |
| "step": 3591 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.407211620009544e-06, |
| "loss": 0.7337, |
| "step": 3592 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.380978186835484e-06, |
| "loss": 0.7159, |
| "step": 3593 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.354806785510113e-06, |
| "loss": 0.6871, |
| "step": 3594 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.328697433191321e-06, |
| "loss": 0.82, |
| "step": 3595 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.30265014699628e-06, |
| "loss": 0.6564, |
| "step": 3596 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.276664944001475e-06, |
| "loss": 0.6183, |
| "step": 3597 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 5.250741841242734e-06, |
| "loss": 0.7437, |
| "step": 3598 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.224880855715098e-06, |
| "loss": 0.7949, |
| "step": 3599 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.199082004372957e-06, |
| "loss": 0.6514, |
| "step": 3600 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.173345304129917e-06, |
| "loss": 0.7038, |
| "step": 3601 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.147670771858848e-06, |
| "loss": 0.7311, |
| "step": 3602 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.1220584243918955e-06, |
| "loss": 0.6651, |
| "step": 3603 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.096508278520384e-06, |
| "loss": 0.7588, |
| "step": 3604 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.0710203509948924e-06, |
| "loss": 0.652, |
| "step": 3605 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 5.045594658525232e-06, |
| "loss": 0.6412, |
| "step": 3606 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 5.020231217780336e-06, |
| "loss": 0.7101, |
| "step": 3607 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 4.994930045388413e-06, |
| "loss": 0.7717, |
| "step": 3608 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 4.969691157936796e-06, |
| "loss": 0.7117, |
| "step": 3609 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 4.944514571971981e-06, |
| "loss": 0.5564, |
| "step": 3610 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 4.919400303999655e-06, |
| "loss": 0.7413, |
| "step": 3611 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 4.8943483704846475e-06, |
| "loss": 0.7776, |
| "step": 3612 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 4.869358787850864e-06, |
| "loss": 0.6269, |
| "step": 3613 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 4.8444315724814115e-06, |
| "loss": 0.5717, |
| "step": 3614 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.819566740718439e-06, |
| "loss": 0.6604, |
| "step": 3615 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.794764308863242e-06, |
| "loss": 0.7864, |
| "step": 3616 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.770024293176223e-06, |
| "loss": 0.763, |
| "step": 3617 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.745346709876786e-06, |
| "loss": 0.7784, |
| "step": 3618 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.7207315751434885e-06, |
| "loss": 0.7537, |
| "step": 3619 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.6961789051139124e-06, |
| "loss": 0.6485, |
| "step": 3620 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.671688715884681e-06, |
| "loss": 0.6689, |
| "step": 3621 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 4.647261023511451e-06, |
| "loss": 0.7189, |
| "step": 3622 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.6228958440089565e-06, |
| "loss": 0.6563, |
| "step": 3623 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.5985931933508754e-06, |
| "loss": 0.5758, |
| "step": 3624 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.574353087469929e-06, |
| "loss": 0.6609, |
| "step": 3625 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.550175542257862e-06, |
| "loss": 0.5894, |
| "step": 3626 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.526060573565338e-06, |
| "loss": 0.6835, |
| "step": 3627 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.502008197202068e-06, |
| "loss": 0.7067, |
| "step": 3628 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.47801842893667e-06, |
| "loss": 0.7825, |
| "step": 3629 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 4.454091284496731e-06, |
| "loss": 0.6789, |
| "step": 3630 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.430226779568835e-06, |
| "loss": 0.6477, |
| "step": 3631 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.406424929798403e-06, |
| "loss": 0.6441, |
| "step": 3632 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.38268575078985e-06, |
| "loss": 0.7222, |
| "step": 3633 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.3590092581065055e-06, |
| "loss": 0.709, |
| "step": 3634 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.335395467270553e-06, |
| "loss": 0.7128, |
| "step": 3635 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.311844393763109e-06, |
| "loss": 0.6408, |
| "step": 3636 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.28835605302419e-06, |
| "loss": 0.6736, |
| "step": 3637 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 4.26493046045261e-06, |
| "loss": 0.7049, |
| "step": 3638 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.241567631406118e-06, |
| "loss": 0.6938, |
| "step": 3639 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.2182675812012965e-06, |
| "loss": 0.5635, |
| "step": 3640 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.195030325113547e-06, |
| "loss": 0.6491, |
| "step": 3641 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.17185587837714e-06, |
| "loss": 0.7619, |
| "step": 3642 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.148744256185122e-06, |
| "loss": 0.6213, |
| "step": 3643 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.125695473689406e-06, |
| "loss": 0.6675, |
| "step": 3644 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.102709546000671e-06, |
| "loss": 0.6653, |
| "step": 3645 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 4.0797864881883975e-06, |
| "loss": 0.7361, |
| "step": 3646 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 4.0569263152808405e-06, |
| "loss": 0.6684, |
| "step": 3647 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 4.034129042265066e-06, |
| "loss": 0.6394, |
| "step": 3648 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 4.011394684086845e-06, |
| "loss": 0.5542, |
| "step": 3649 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.988723255650728e-06, |
| "loss": 0.7499, |
| "step": 3650 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.966114771820051e-06, |
| "loss": 0.6807, |
| "step": 3651 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.943569247416801e-06, |
| "loss": 0.6941, |
| "step": 3652 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.921086697221743e-06, |
| "loss": 0.717, |
| "step": 3653 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.898667135974376e-06, |
| "loss": 0.7237, |
| "step": 3654 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.876310578372832e-06, |
| "loss": 0.6331, |
| "step": 3655 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.854017039074009e-06, |
| "loss": 0.619, |
| "step": 3656 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.831786532693449e-06, |
| "loss": 0.6666, |
| "step": 3657 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.8096190738053818e-06, |
| "loss": 0.8119, |
| "step": 3658 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.787514676942716e-06, |
| "loss": 0.738, |
| "step": 3659 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.7654733565969826e-06, |
| "loss": 0.8414, |
| "step": 3660 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.7434951272183994e-06, |
| "loss": 0.6529, |
| "step": 3661 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.7215800032158075e-06, |
| "loss": 0.6005, |
| "step": 3662 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.699727998956659e-06, |
| "loss": 0.7136, |
| "step": 3663 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.6779391287670494e-06, |
| "loss": 0.7898, |
| "step": 3664 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.6562134069316854e-06, |
| "loss": 0.6859, |
| "step": 3665 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.6345508476938297e-06, |
| "loss": 0.6669, |
| "step": 3666 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.6129514652553986e-06, |
| "loss": 0.6928, |
| "step": 3667 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.591415273776855e-06, |
| "loss": 0.7278, |
| "step": 3668 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.569942287377226e-06, |
| "loss": 0.8252, |
| "step": 3669 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.548532520134129e-06, |
| "loss": 0.6575, |
| "step": 3670 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.527185986083703e-06, |
| "loss": 0.7622, |
| "step": 3671 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.5059026992206647e-06, |
| "loss": 0.6192, |
| "step": 3672 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.484682673498263e-06, |
| "loss": 0.7076, |
| "step": 3673 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.4635259228282256e-06, |
| "loss": 0.6673, |
| "step": 3674 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.442432461080858e-06, |
| "loss": 0.6555, |
| "step": 3675 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.421402302084953e-06, |
| "loss": 0.6705, |
| "step": 3676 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.400435459627782e-06, |
| "loss": 0.7799, |
| "step": 3677 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.379531947455128e-06, |
| "loss": 0.7502, |
| "step": 3678 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.3586917792712615e-06, |
| "loss": 0.6775, |
| "step": 3679 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.3379149687388867e-06, |
| "loss": 0.7785, |
| "step": 3680 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.317201529479208e-06, |
| "loss": 0.6853, |
| "step": 3681 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.2965514750718963e-06, |
| "loss": 0.6687, |
| "step": 3682 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.2759648190549996e-06, |
| "loss": 0.5891, |
| "step": 3683 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.2554415749250888e-06, |
| "loss": 0.7912, |
| "step": 3684 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.23498175613709e-06, |
| "loss": 0.7934, |
| "step": 3685 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.214585376104384e-06, |
| "loss": 0.6664, |
| "step": 3686 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.1942524481987757e-06, |
| "loss": 0.7818, |
| "step": 3687 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.1739829857504234e-06, |
| "loss": 0.5997, |
| "step": 3688 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.1537770020479196e-06, |
| "loss": 0.6982, |
| "step": 3689 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.1336345103382346e-06, |
| "loss": 0.6711, |
| "step": 3690 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.1135555238266722e-06, |
| "loss": 0.6664, |
| "step": 3691 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.093540055676958e-06, |
| "loss": 0.7396, |
| "step": 3692 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.0735881190111617e-06, |
| "loss": 0.7169, |
| "step": 3693 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.053699726909676e-06, |
| "loss": 0.657, |
| "step": 3694 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 3.0338748924112483e-06, |
| "loss": 0.6877, |
| "step": 3695 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 3.014113628512982e-06, |
| "loss": 0.652, |
| "step": 3696 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.994415948170237e-06, |
| "loss": 0.6903, |
| "step": 3697 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.9747818642967827e-06, |
| "loss": 0.639, |
| "step": 3698 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.955211389764623e-06, |
| "loss": 0.6287, |
| "step": 3699 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.9357045374040825e-06, |
| "loss": 0.649, |
| "step": 3700 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.916261320003777e-06, |
| "loss": 0.802, |
| "step": 3701 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.8968817503105983e-06, |
| "loss": 0.6164, |
| "step": 3702 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.8775658410297277e-06, |
| "loss": 0.6589, |
| "step": 3703 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.8583136048245697e-06, |
| "loss": 0.6475, |
| "step": 3704 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.839125054316838e-06, |
| "loss": 0.6043, |
| "step": 3705 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.820000202086459e-06, |
| "loss": 0.7104, |
| "step": 3706 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.8009390606716035e-06, |
| "loss": 0.7408, |
| "step": 3707 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.781941642568686e-06, |
| "loss": 0.6798, |
| "step": 3708 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.7630079602323442e-06, |
| "loss": 0.6394, |
| "step": 3709 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.7441380260754048e-06, |
| "loss": 0.6268, |
| "step": 3710 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.7253318524689262e-06, |
| "loss": 0.76, |
| "step": 3711 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.706589451742181e-06, |
| "loss": 0.7018, |
| "step": 3712 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.6879108361825835e-06, |
| "loss": 0.6948, |
| "step": 3713 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.6692960180357716e-06, |
| "loss": 0.6427, |
| "step": 3714 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.6507450095055618e-06, |
| "loss": 0.7047, |
| "step": 3715 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.632257822753881e-06, |
| "loss": 0.6712, |
| "step": 3716 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.613834469900889e-06, |
| "loss": 0.896, |
| "step": 3717 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.5954749630248353e-06, |
| "loss": 0.6832, |
| "step": 3718 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.577179314162148e-06, |
| "loss": 0.7184, |
| "step": 3719 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.5589475353073988e-06, |
| "loss": 0.7956, |
| "step": 3720 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.5407796384132487e-06, |
| "loss": 0.7651, |
| "step": 3721 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.522675635390492e-06, |
| "loss": 0.746, |
| "step": 3722 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.5046355381080578e-06, |
| "loss": 0.6629, |
| "step": 3723 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.486659358392951e-06, |
| "loss": 0.7833, |
| "step": 3724 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.4687471080302894e-06, |
| "loss": 0.7059, |
| "step": 3725 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.450898798763268e-06, |
| "loss": 0.7012, |
| "step": 3726 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.4331144422931608e-06, |
| "loss": 0.75, |
| "step": 3727 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.415394050279318e-06, |
| "loss": 0.6988, |
| "step": 3728 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.397737634339181e-06, |
| "loss": 0.7264, |
| "step": 3729 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.380145206048201e-06, |
| "loss": 0.7137, |
| "step": 3730 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.362616776939919e-06, |
| "loss": 0.6452, |
| "step": 3731 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.3451523585058754e-06, |
| "loss": 0.6698, |
| "step": 3732 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.32775196219569e-06, |
| "loss": 0.6319, |
| "step": 3733 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.310415599417004e-06, |
| "loss": 0.6905, |
| "step": 3734 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.2931432815354594e-06, |
| "loss": 0.6685, |
| "step": 3735 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.2759350198746976e-06, |
| "loss": 0.7455, |
| "step": 3736 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.258790825716417e-06, |
| "loss": 0.6826, |
| "step": 3737 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.24171071030026e-06, |
| "loss": 0.68, |
| "step": 3738 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.2246946848238804e-06, |
| "loss": 0.6061, |
| "step": 3739 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.2077427604429433e-06, |
| "loss": 0.6656, |
| "step": 3740 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.190854948271026e-06, |
| "loss": 0.5905, |
| "step": 3741 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.1740312593797273e-06, |
| "loss": 0.7855, |
| "step": 3742 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.1572717047986025e-06, |
| "loss": 0.7919, |
| "step": 3743 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.1405762955151176e-06, |
| "loss": 0.721, |
| "step": 3744 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.1239450424747508e-06, |
| "loss": 0.5418, |
| "step": 3745 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.107377956580847e-06, |
| "loss": 0.7518, |
| "step": 3746 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.0908750486947405e-06, |
| "loss": 0.765, |
| "step": 3747 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.074436329635687e-06, |
| "loss": 0.7374, |
| "step": 3748 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.0580618101808115e-06, |
| "loss": 0.8086, |
| "step": 3749 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.041751501065203e-06, |
| "loss": 0.7438, |
| "step": 3750 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.025505412981843e-06, |
| "loss": 0.6736, |
| "step": 3751 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.009323556581566e-06, |
| "loss": 0.5604, |
| "step": 3752 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.993205942473164e-06, |
| "loss": 0.6589, |
| "step": 3753 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.977152581223274e-06, |
| "loss": 0.6693, |
| "step": 3754 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.9611634833564096e-06, |
| "loss": 0.666, |
| "step": 3755 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.945238659354953e-06, |
| "loss": 0.6172, |
| "step": 3756 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.9293781196591755e-06, |
| "loss": 0.6339, |
| "step": 3757 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.9135818746671586e-06, |
| "loss": 0.6054, |
| "step": 3758 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.8978499347348743e-06, |
| "loss": 0.7465, |
| "step": 3759 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.882182310176095e-06, |
| "loss": 0.7345, |
| "step": 3760 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.8665790112624814e-06, |
| "loss": 0.701, |
| "step": 3761 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.8510400482234847e-06, |
| "loss": 0.7386, |
| "step": 3762 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.8355654312463666e-06, |
| "loss": 0.6527, |
| "step": 3763 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.8201551704762453e-06, |
| "loss": 0.6804, |
| "step": 3764 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.8048092760160285e-06, |
| "loss": 0.6119, |
| "step": 3765 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.7895277579264014e-06, |
| "loss": 0.617, |
| "step": 3766 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.7743106262258835e-06, |
| "loss": 0.6905, |
| "step": 3767 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.7591578908907724e-06, |
| "loss": 0.6577, |
| "step": 3768 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.7440695618551106e-06, |
| "loss": 0.7777, |
| "step": 3769 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.729045649010752e-06, |
| "loss": 0.5481, |
| "step": 3770 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.7140861622073511e-06, |
| "loss": 0.6004, |
| "step": 3771 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.6991911112522407e-06, |
| "loss": 0.8025, |
| "step": 3772 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.684360505910576e-06, |
| "loss": 0.7217, |
| "step": 3773 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.6695943559052462e-06, |
| "loss": 0.661, |
| "step": 3774 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.6548926709168633e-06, |
| "loss": 0.5793, |
| "step": 3775 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.6402554605838172e-06, |
| "loss": 0.6152, |
| "step": 3776 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.6256827345021764e-06, |
| "loss": 0.6295, |
| "step": 3777 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.6111745022257874e-06, |
| "loss": 0.7411, |
| "step": 3778 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.596730773266175e-06, |
| "loss": 0.6035, |
| "step": 3779 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.5823515570925763e-06, |
| "loss": 0.7556, |
| "step": 3780 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.568036863131972e-06, |
| "loss": 0.601, |
| "step": 3781 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.553786700769011e-06, |
| "loss": 0.6925, |
| "step": 3782 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.5396010793460314e-06, |
| "loss": 0.7466, |
| "step": 3783 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.5254800081630826e-06, |
| "loss": 0.6275, |
| "step": 3784 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.5114234964778707e-06, |
| "loss": 0.6086, |
| "step": 3785 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.4974315535058015e-06, |
| "loss": 0.7127, |
| "step": 3786 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.4835041884199263e-06, |
| "loss": 0.5898, |
| "step": 3787 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.4696414103509636e-06, |
| "loss": 0.6461, |
| "step": 3788 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.455843228387299e-06, |
| "loss": 0.7701, |
| "step": 3789 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.4421096515749855e-06, |
| "loss": 0.7585, |
| "step": 3790 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.4284406889176649e-06, |
| "loss": 0.7133, |
| "step": 3791 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.4148363493766802e-06, |
| "loss": 0.6809, |
| "step": 3792 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.401296641870975e-06, |
| "loss": 0.7032, |
| "step": 3793 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.3878215752771262e-06, |
| "loss": 0.6774, |
| "step": 3794 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.3744111584293228e-06, |
| "loss": 0.6861, |
| "step": 3795 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.361065400119399e-06, |
| "loss": 0.8521, |
| "step": 3796 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.3477843090967667e-06, |
| "loss": 0.742, |
| "step": 3797 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.3345678940684613e-06, |
| "loss": 0.5918, |
| "step": 3798 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.3214161636991184e-06, |
| "loss": 0.7435, |
| "step": 3799 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.30832912661093e-06, |
| "loss": 0.6431, |
| "step": 3800 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.2953067913837435e-06, |
| "loss": 0.763, |
| "step": 3801 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.2823491665549193e-06, |
| "loss": 0.741, |
| "step": 3802 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.2694562606194392e-06, |
| "loss": 0.6664, |
| "step": 3803 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.2566280820298426e-06, |
| "loss": 0.7967, |
| "step": 3804 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.2438646391962129e-06, |
| "loss": 0.6042, |
| "step": 3805 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.231165940486234e-06, |
| "loss": 0.6135, |
| "step": 3806 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.2185319942251027e-06, |
| "loss": 0.7164, |
| "step": 3807 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.2059628086956044e-06, |
| "loss": 0.5975, |
| "step": 3808 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.193458392138025e-06, |
| "loss": 0.7094, |
| "step": 3809 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.1810187527502182e-06, |
| "loss": 0.7137, |
| "step": 3810 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.1686438986875715e-06, |
| "loss": 0.8664, |
| "step": 3811 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.1563338380629618e-06, |
| "loss": 0.615, |
| "step": 3812 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.1440885789468558e-06, |
| "loss": 0.7204, |
| "step": 3813 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.1319081293671541e-06, |
| "loss": 0.6456, |
| "step": 3814 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.1197924973093464e-06, |
| "loss": 0.7285, |
| "step": 3815 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.1077416907163574e-06, |
| "loss": 0.7233, |
| "step": 3816 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.0957557174886778e-06, |
| "loss": 0.6879, |
| "step": 3817 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.0838345854842446e-06, |
| "loss": 0.7347, |
| "step": 3818 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.0719783025185058e-06, |
| "loss": 0.636, |
| "step": 3819 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.0601868763643996e-06, |
| "loss": 0.6901, |
| "step": 3820 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.0484603147523309e-06, |
| "loss": 0.6211, |
| "step": 3821 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.0367986253701944e-06, |
| "loss": 0.6789, |
| "step": 3822 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.0252018158633526e-06, |
| "loss": 0.7161, |
| "step": 3823 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.0136698938346011e-06, |
| "loss": 0.6204, |
| "step": 3824 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.0022028668442375e-06, |
| "loss": 0.7776, |
| "step": 3825 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 9.90800742410003e-07, |
| "loss": 0.5942, |
| "step": 3826 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 9.794635280070742e-07, |
| "loss": 0.7151, |
| "step": 3827 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 9.68191231068083e-07, |
| "loss": 0.6033, |
| "step": 3828 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 9.569838589831181e-07, |
| "loss": 0.5889, |
| "step": 3829 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 9.458414190996689e-07, |
| "loss": 0.6318, |
| "step": 3830 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 9.347639187226809e-07, |
| "loss": 0.6779, |
| "step": 3831 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 9.237513651145225e-07, |
| "loss": 0.6256, |
| "step": 3832 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 9.128037654949628e-07, |
| "loss": 0.6866, |
| "step": 3833 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 9.019211270412275e-07, |
| "loss": 0.6584, |
| "step": 3834 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.911034568879207e-07, |
| "loss": 0.6675, |
| "step": 3835 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.803507621270579e-07, |
| "loss": 0.7179, |
| "step": 3836 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.696630498080671e-07, |
| "loss": 0.5699, |
| "step": 3837 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.590403269377655e-07, |
| "loss": 0.7092, |
| "step": 3838 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.484826004803714e-07, |
| "loss": 0.6439, |
| "step": 3839 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.379898773574924e-07, |
| "loss": 0.7267, |
| "step": 3840 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.275621644481035e-07, |
| "loss": 0.7105, |
| "step": 3841 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.171994685885698e-07, |
| "loss": 0.8542, |
| "step": 3842 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.069017965726566e-07, |
| "loss": 0.7392, |
| "step": 3843 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 7.966691551514527e-07, |
| "loss": 0.5934, |
| "step": 3844 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 7.865015510334472e-07, |
| "loss": 0.7733, |
| "step": 3845 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 7.763989908844749e-07, |
| "loss": 0.7374, |
| "step": 3846 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.663614813277265e-07, |
| "loss": 0.7646, |
| "step": 3847 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.563890289437825e-07, |
| "loss": 0.6627, |
| "step": 3848 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.464816402705022e-07, |
| "loss": 0.8193, |
| "step": 3849 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.366393218031564e-07, |
| "loss": 0.7067, |
| "step": 3850 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.268620799943171e-07, |
| "loss": 0.6517, |
| "step": 3851 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.171499212539123e-07, |
| "loss": 0.7392, |
| "step": 3852 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.075028519491933e-07, |
| "loss": 0.7555, |
| "step": 3853 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 6.979208784047453e-07, |
| "loss": 0.6058, |
| "step": 3854 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.884040069024434e-07, |
| "loss": 0.5249, |
| "step": 3855 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.78952243681541e-07, |
| "loss": 0.6706, |
| "step": 3856 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.695655949385705e-07, |
| "loss": 0.6055, |
| "step": 3857 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.602440668273758e-07, |
| "loss": 0.722, |
| "step": 3858 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.509876654591018e-07, |
| "loss": 0.6995, |
| "step": 3859 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.41796396902239e-07, |
| "loss": 0.6126, |
| "step": 3860 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.326702671825113e-07, |
| "loss": 0.6928, |
| "step": 3861 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.236092822829887e-07, |
| "loss": 0.7388, |
| "step": 3862 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.146134481440191e-07, |
| "loss": 0.6398, |
| "step": 3863 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.056827706632185e-07, |
| "loss": 0.5682, |
| "step": 3864 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 5.968172556955365e-07, |
| "loss": 0.6497, |
| "step": 3865 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 5.880169090531351e-07, |
| "loss": 0.6825, |
| "step": 3866 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 5.792817365054992e-07, |
| "loss": 0.6574, |
| "step": 3867 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 5.706117437793701e-07, |
| "loss": 0.7725, |
| "step": 3868 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 5.620069365587677e-07, |
| "loss": 0.741, |
| "step": 3869 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 5.534673204849572e-07, |
| "loss": 0.587, |
| "step": 3870 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 5.449929011564936e-07, |
| "loss": 0.6444, |
| "step": 3871 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 5.365836841291438e-07, |
| "loss": 0.5979, |
| "step": 3872 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 5.282396749159868e-07, |
| "loss": 0.6551, |
| "step": 3873 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 5.199608789873134e-07, |
| "loss": 0.8386, |
| "step": 3874 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 5.117473017706486e-07, |
| "loss": 0.6598, |
| "step": 3875 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 5.035989486508075e-07, |
| "loss": 0.6402, |
| "step": 3876 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 4.955158249697945e-07, |
| "loss": 0.6729, |
| "step": 3877 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 4.874979360268928e-07, |
| "loss": 0.7487, |
| "step": 3878 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.795452870785866e-07, |
| "loss": 0.5709, |
| "step": 3879 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.7165788333860536e-07, |
| "loss": 0.8205, |
| "step": 3880 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.638357299778906e-07, |
| "loss": 0.6635, |
| "step": 3881 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.56078832124629e-07, |
| "loss": 0.6603, |
| "step": 3882 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.483871948641971e-07, |
| "loss": 0.5385, |
| "step": 3883 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.4076082323920576e-07, |
| "loss": 0.7146, |
| "step": 3884 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.331997222494777e-07, |
| "loss": 0.8316, |
| "step": 3885 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 4.257038968520366e-07, |
| "loss": 0.6555, |
| "step": 3886 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 4.1827335196111815e-07, |
| "loss": 0.765, |
| "step": 3887 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 4.1090809244814785e-07, |
| "loss": 0.864, |
| "step": 3888 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 4.0360812314177433e-07, |
| "loss": 0.743, |
| "step": 3889 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 3.963734488278248e-07, |
| "loss": 0.6622, |
| "step": 3890 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 3.892040742493164e-07, |
| "loss": 0.8674, |
| "step": 3891 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 3.82100004106456e-07, |
| "loss": 0.6649, |
| "step": 3892 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 3.7506124305666246e-07, |
| "loss": 0.649, |
| "step": 3893 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 3.6808779571451126e-07, |
| "loss": 0.7281, |
| "step": 3894 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.611796666517564e-07, |
| "loss": 0.6875, |
| "step": 3895 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.543368603973529e-07, |
| "loss": 0.8206, |
| "step": 3896 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.475593814374123e-07, |
| "loss": 0.6412, |
| "step": 3897 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.4084723421521356e-07, |
| "loss": 0.6884, |
| "step": 3898 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.342004231312257e-07, |
| "loss": 0.6955, |
| "step": 3899 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.2761895254306287e-07, |
| "loss": 0.6637, |
| "step": 3900 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.2110282676551806e-07, |
| "loss": 0.7734, |
| "step": 3901 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 3.146520500705297e-07, |
| "loss": 0.6223, |
| "step": 3902 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 3.0826662668720364e-07, |
| "loss": 0.5382, |
| "step": 3903 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 3.019465608018024e-07, |
| "loss": 0.6362, |
| "step": 3904 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.956918565577338e-07, |
| "loss": 0.7063, |
| "step": 3905 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.8950251805553997e-07, |
| "loss": 0.5882, |
| "step": 3906 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.833785493529528e-07, |
| "loss": 0.6206, |
| "step": 3907 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.773199544648164e-07, |
| "loss": 0.67, |
| "step": 3908 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.713267373630979e-07, |
| "loss": 0.8264, |
| "step": 3909 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.6539890197695427e-07, |
| "loss": 0.7322, |
| "step": 3910 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.5953645219263245e-07, |
| "loss": 0.5665, |
| "step": 3911 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.537393918535358e-07, |
| "loss": 0.6743, |
| "step": 3912 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.4800772476020197e-07, |
| "loss": 0.6483, |
| "step": 3913 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.423414546702807e-07, |
| "loss": 0.5782, |
| "step": 3914 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.3674058529855602e-07, |
| "loss": 0.7205, |
| "step": 3915 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.312051203169352e-07, |
| "loss": 0.665, |
| "step": 3916 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.257350633544486e-07, |
| "loss": 0.6761, |
| "step": 3917 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.2033041799723875e-07, |
| "loss": 0.51, |
| "step": 3918 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.149911877885713e-07, |
| "loss": 0.8748, |
| "step": 3919 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.0971737622883515e-07, |
| "loss": 0.6093, |
| "step": 3920 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.04508986775509e-07, |
| "loss": 0.6656, |
| "step": 3921 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.9936602284318373e-07, |
| "loss": 0.81, |
| "step": 3922 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.9428848780358444e-07, |
| "loss": 0.725, |
| "step": 3923 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.8927638498551502e-07, |
| "loss": 0.7288, |
| "step": 3924 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.843297176748804e-07, |
| "loss": 0.6541, |
| "step": 3925 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.7944848911470857e-07, |
| "loss": 0.711, |
| "step": 3926 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.7463270250510645e-07, |
| "loss": 0.6442, |
| "step": 3927 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.6988236100329292e-07, |
| "loss": 0.6213, |
| "step": 3928 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.651974677235546e-07, |
| "loss": 0.5859, |
| "step": 3929 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.605780257373124e-07, |
| "loss": 0.8115, |
| "step": 3930 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.5602403807303266e-07, |
| "loss": 0.6165, |
| "step": 3931 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.5153550771630498e-07, |
| "loss": 0.7601, |
| "step": 3932 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.4711243760978653e-07, |
| "loss": 0.6696, |
| "step": 3933 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.427548306532134e-07, |
| "loss": 0.7484, |
| "step": 3934 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.3846268970344466e-07, |
| "loss": 0.7123, |
| "step": 3935 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.3423601757436287e-07, |
| "loss": 0.6541, |
| "step": 3936 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.3007481703696256e-07, |
| "loss": 0.6967, |
| "step": 3937 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.2597909081931702e-07, |
| "loss": 0.5944, |
| "step": 3938 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.219488416065673e-07, |
| "loss": 0.66, |
| "step": 3939 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.179840720409331e-07, |
| "loss": 0.6786, |
| "step": 3940 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.1408478472167971e-07, |
| "loss": 0.5755, |
| "step": 3941 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.1025098220518448e-07, |
| "loss": 0.7488, |
| "step": 3942 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 1.064826670048702e-07, |
| "loss": 0.7736, |
| "step": 3943 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 1.0277984159122733e-07, |
| "loss": 0.7101, |
| "step": 3944 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 9.914250839180294e-08, |
| "loss": 0.6806, |
| "step": 3945 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 9.557066979123397e-08, |
| "loss": 0.7184, |
| "step": 3946 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 9.20643281312028e-08, |
| "loss": 0.612, |
| "step": 3947 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 8.862348571043733e-08, |
| "loss": 0.7672, |
| "step": 3948 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 8.524814478474419e-08, |
| "loss": 0.6917, |
| "step": 3949 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 8.193830756699772e-08, |
| "loss": 0.7284, |
| "step": 3950 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 7.869397622710661e-08, |
| "loss": 0.7737, |
| "step": 3951 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 7.551515289203615e-08, |
| "loss": 0.6833, |
| "step": 3952 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 7.240183964580816e-08, |
| "loss": 0.6684, |
| "step": 3953 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.935403852950107e-08, |
| "loss": 0.6261, |
| "step": 3954 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.637175154124986e-08, |
| "loss": 0.6541, |
| "step": 3955 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.34549806362239e-08, |
| "loss": 0.657, |
| "step": 3956 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.060372772663802e-08, |
| "loss": 0.5731, |
| "step": 3957 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 5.7817994681774735e-08, |
| "loss": 0.7464, |
| "step": 3958 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 5.50977833279509e-08, |
| "loss": 0.8479, |
| "step": 3959 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 5.2443095448506674e-08, |
| "loss": 0.6177, |
| "step": 3960 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 4.985393278386097e-08, |
| "loss": 0.7827, |
| "step": 3961 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 4.7330297031467075e-08, |
| "loss": 0.6983, |
| "step": 3962 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 4.487218984577934e-08, |
| "loss": 0.7159, |
| "step": 3963 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 4.247961283835311e-08, |
| "loss": 0.6281, |
| "step": 3964 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 4.015256757774477e-08, |
| "loss": 0.72, |
| "step": 3965 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 3.789105558954509e-08, |
| "loss": 0.6323, |
| "step": 3966 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 3.56950783564014e-08, |
| "loss": 0.6879, |
| "step": 3967 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 3.356463731798432e-08, |
| "loss": 0.6421, |
| "step": 3968 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 3.1499733870998807e-08, |
| "loss": 0.7646, |
| "step": 3969 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.9500369369195312e-08, |
| "loss": 0.7669, |
| "step": 3970 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.7566545123347553e-08, |
| "loss": 0.707, |
| "step": 3971 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.5698262401263605e-08, |
| "loss": 0.6269, |
| "step": 3972 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.389552242777482e-08, |
| "loss": 0.7364, |
| "step": 3973 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.215832638474691e-08, |
| "loss": 0.7645, |
| "step": 3974 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 2.0486675411102163e-08, |
| "loss": 0.7415, |
| "step": 3975 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.888057060274173e-08, |
| "loss": 0.7344, |
| "step": 3976 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.734001301263444e-08, |
| "loss": 0.7766, |
| "step": 3977 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.5865003650761266e-08, |
| "loss": 0.7109, |
| "step": 3978 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.445554348413758e-08, |
| "loss": 0.664, |
| "step": 3979 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.3111633436779791e-08, |
| "loss": 0.669, |
| "step": 3980 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.18332743897831e-08, |
| "loss": 0.6344, |
| "step": 3981 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.0620467181210459e-08, |
| "loss": 0.7265, |
| "step": 3982 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 9.473212606170289e-09, |
| "loss": 0.6268, |
| "step": 3983 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 8.391511416816489e-09, |
| "loss": 0.6244, |
| "step": 3984 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 7.375364322292911e-09, |
| "loss": 0.7479, |
| "step": 3985 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 6.42477198878888e-09, |
| "loss": 0.684, |
| "step": 3986 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 5.539735039505889e-09, |
| "loss": 0.6448, |
| "step": 3987 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 4.720254054679796e-09, |
| "loss": 0.858, |
| "step": 3988 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 3.96632957153642e-09, |
| "loss": 0.7484, |
| "step": 3989 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 3.2779620843692572e-09, |
| "loss": 0.6821, |
| "step": 3990 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.6551520444617616e-09, |
| "loss": 0.7201, |
| "step": 3991 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.0978998601206556e-09, |
| "loss": 0.6425, |
| "step": 3992 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 1.6062058966870297e-09, |
| "loss": 0.7212, |
| "step": 3993 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 1.1800704765030368e-09, |
| "loss": 0.7377, |
| "step": 3994 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 8.194938789451989e-10, |
| "loss": 0.6774, |
| "step": 3995 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 5.244763404133046e-10, |
| "loss": 0.6424, |
| "step": 3996 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.950180543082048e-10, |
| "loss": 0.7422, |
| "step": 3997 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 1.311191710651194e-10, |
| "loss": 0.6551, |
| "step": 3998 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 3.277979814253485e-11, |
| "loss": 0.7711, |
| "step": 3999 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 0.0, |
| "loss": 0.6624, |
| "step": 4000 |
| }, |
| { |
| "epoch": 5.0, |
| "step": 4000, |
| "total_flos": 56180610244608.0, |
| "train_loss": 0.7672133717834949, |
| "train_runtime": 10232.3403, |
| "train_samples_per_second": 6.256, |
| "train_steps_per_second": 0.391 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 4000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 50000, |
| "total_flos": 56180610244608.0, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |