{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8530603540200469,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.2818,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.2952,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.2194,
      "step": 3
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.3376,
      "step": 4
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.5966,
      "step": 5
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.029,
      "step": 6
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.3407,
      "step": 7
    },
    {
      "epoch": 0.0,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.2348,
      "step": 8
    },
    {
      "epoch": 0.0,
      "learning_rate": 3.6e-06,
      "loss": 1.1039,
      "step": 9
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.2841,
      "step": 10
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.4e-06,
      "loss": 1.2649,
      "step": 11
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.8619,
      "step": 12
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.2e-06,
      "loss": 1.3957,
      "step": 13
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.600000000000001e-06,
      "loss": 1.1898,
      "step": 14
    },
    {
      "epoch": 0.01,
      "learning_rate": 6e-06,
      "loss": 1.2992,
      "step": 15
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 1.4607,
      "step": 16
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.800000000000001e-06,
      "loss": 1.4102,
      "step": 17
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.2e-06,
      "loss": 1.0972,
      "step": 18
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.6e-06,
      "loss": 1.3559,
      "step": 19
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.3601,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.400000000000001e-06,
      "loss": 1.2526,
      "step": 21
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.8e-06,
      "loss": 1.3315,
      "step": 22
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.2e-06,
      "loss": 1.25,
      "step": 23
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.9863,
      "step": 24
    },
    {
      "epoch": 0.01,
      "learning_rate": 1e-05,
      "loss": 1.347,
      "step": 25
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.04e-05,
      "loss": 1.3124,
      "step": 26
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.08e-05,
      "loss": 1.2464,
      "step": 27
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 1.2455,
      "step": 28
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.16e-05,
      "loss": 1.4268,
      "step": 29
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.2e-05,
      "loss": 1.0534,
      "step": 30
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.24e-05,
      "loss": 1.303,
      "step": 31
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 1.1321,
      "step": 32
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.32e-05,
      "loss": 1.1769,
      "step": 33
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 1.3679,
      "step": 34
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.312,
      "step": 35
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.44e-05,
      "loss": 1.0369,
      "step": 36
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.48e-05,
      "loss": 1.0389,
      "step": 37
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.52e-05,
      "loss": 0.8703,
      "step": 38
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.56e-05,
      "loss": 1.1453,
      "step": 39
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.1817,
      "step": 40
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6400000000000002e-05,
      "loss": 0.9743,
      "step": 41
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 1.1202,
      "step": 42
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.7199999999999998e-05,
      "loss": 1.2292,
      "step": 43
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.76e-05,
      "loss": 1.1399,
      "step": 44
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.8e-05,
      "loss": 0.9743,
      "step": 45
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.84e-05,
      "loss": 1.2158,
      "step": 46
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.88e-05,
      "loss": 1.3657,
      "step": 47
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 1.3059,
      "step": 48
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 1.1884,
      "step": 49
    },
    {
      "epoch": 0.02,
      "learning_rate": 2e-05,
      "loss": 1.0469,
      "step": 50
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.04e-05,
      "loss": 1.2228,
      "step": 51
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.08e-05,
      "loss": 0.9304,
      "step": 52
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.12e-05,
      "loss": 1.201,
      "step": 53
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.16e-05,
      "loss": 1.0362,
      "step": 54
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 0.7752,
      "step": 55
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.2400000000000002e-05,
      "loss": 1.2473,
      "step": 56
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.2800000000000002e-05,
      "loss": 1.0335,
      "step": 57
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.32e-05,
      "loss": 1.3914,
      "step": 58
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.36e-05,
      "loss": 1.0664,
      "step": 59
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.4e-05,
      "loss": 0.9654,
      "step": 60
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.44e-05,
      "loss": 1.1017,
      "step": 61
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.48e-05,
      "loss": 1.4812,
      "step": 62
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.5200000000000003e-05,
      "loss": 1.1304,
      "step": 63
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.5600000000000002e-05,
      "loss": 1.2299,
      "step": 64
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 1.0958,
      "step": 65
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.64e-05,
      "loss": 1.2071,
      "step": 66
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.6800000000000004e-05,
      "loss": 1.1293,
      "step": 67
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.7200000000000004e-05,
      "loss": 0.9291,
      "step": 68
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.7600000000000003e-05,
      "loss": 1.1702,
      "step": 69
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.1316,
      "step": 70
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.84e-05,
      "loss": 1.2628,
      "step": 71
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.88e-05,
      "loss": 1.1668,
      "step": 72
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.9199999999999998e-05,
      "loss": 1.3412,
      "step": 73
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.96e-05,
      "loss": 1.1782,
      "step": 74
    },
    {
      "epoch": 0.03,
      "learning_rate": 3e-05,
      "loss": 1.0287,
      "step": 75
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.04e-05,
      "loss": 1.2885,
      "step": 76
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.08e-05,
      "loss": 1.159,
      "step": 77
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.12e-05,
      "loss": 1.3344,
      "step": 78
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.16e-05,
      "loss": 1.0936,
      "step": 79
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.1493,
      "step": 80
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.24e-05,
      "loss": 1.3783,
      "step": 81
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.2800000000000004e-05,
      "loss": 1.223,
      "step": 82
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.32e-05,
      "loss": 1.2664,
      "step": 83
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.3600000000000004e-05,
      "loss": 1.3258,
      "step": 84
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.1702,
      "step": 85
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.4399999999999996e-05,
      "loss": 1.1075,
      "step": 86
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.48e-05,
      "loss": 1.3055,
      "step": 87
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.52e-05,
      "loss": 1.2142,
      "step": 88
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.56e-05,
      "loss": 0.8471,
      "step": 89
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.6e-05,
      "loss": 1.3957,
      "step": 90
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.6400000000000004e-05,
      "loss": 1.1564,
      "step": 91
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.68e-05,
      "loss": 1.3207,
      "step": 92
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.72e-05,
      "loss": 1.1267,
      "step": 93
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.76e-05,
      "loss": 1.137,
      "step": 94
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.8e-05,
      "loss": 0.8849,
      "step": 95
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.8400000000000005e-05,
      "loss": 1.1582,
      "step": 96
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.88e-05,
      "loss": 1.1917,
      "step": 97
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.9200000000000004e-05,
      "loss": 1.3419,
      "step": 98
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.960000000000001e-05,
      "loss": 1.1813,
      "step": 99
    },
    {
      "epoch": 0.04,
      "learning_rate": 4e-05,
      "loss": 1.2728,
      "step": 100
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.0400000000000006e-05,
      "loss": 0.8103,
      "step": 101
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.08e-05,
      "loss": 1.3143,
      "step": 102
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.12e-05,
      "loss": 1.1302,
      "step": 103
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.16e-05,
      "loss": 1.1283,
      "step": 104
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.2e-05,
      "loss": 1.5791,
      "step": 105
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.24e-05,
      "loss": 1.1763,
      "step": 106
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.2800000000000004e-05,
      "loss": 1.0718,
      "step": 107
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.32e-05,
      "loss": 1.0385,
      "step": 108
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.36e-05,
      "loss": 1.4999,
      "step": 109
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 0.6334,
      "step": 110
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.44e-05,
      "loss": 1.0041,
      "step": 111
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.4800000000000005e-05,
      "loss": 1.0896,
      "step": 112
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.52e-05,
      "loss": 1.0309,
      "step": 113
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.5600000000000004e-05,
      "loss": 1.1714,
      "step": 114
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.287,
      "step": 115
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.64e-05,
      "loss": 1.1876,
      "step": 116
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.6800000000000006e-05,
      "loss": 1.1546,
      "step": 117
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.72e-05,
      "loss": 1.2149,
      "step": 118
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.76e-05,
      "loss": 0.9551,
      "step": 119
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.8e-05,
      "loss": 1.239,
      "step": 120
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.8400000000000004e-05,
      "loss": 1.2245,
      "step": 121
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.88e-05,
      "loss": 0.8828,
      "step": 122
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.92e-05,
      "loss": 1.0366,
      "step": 123
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.96e-05,
      "loss": 1.2768,
      "step": 124
    },
    {
      "epoch": 0.05,
      "learning_rate": 5e-05,
      "loss": 1.0276,
      "step": 125
    },
    {
      "epoch": 0.05,
      "learning_rate": 5.0400000000000005e-05,
      "loss": 1.0862,
      "step": 126
    },
    {
      "epoch": 0.05,
      "learning_rate": 5.08e-05,
      "loss": 1.14,
      "step": 127
    },
    {
      "epoch": 0.05,
      "learning_rate": 5.1200000000000004e-05,
      "loss": 1.2404,
      "step": 128
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.16e-05,
      "loss": 1.3187,
      "step": 129
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.2262,
      "step": 130
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.2400000000000007e-05,
      "loss": 1.2137,
      "step": 131
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.28e-05,
      "loss": 1.1765,
      "step": 132
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.3200000000000006e-05,
      "loss": 1.3582,
      "step": 133
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.360000000000001e-05,
      "loss": 1.1968,
      "step": 134
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.1668,
      "step": 135
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.440000000000001e-05,
      "loss": 1.1832,
      "step": 136
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.4800000000000004e-05,
      "loss": 1.13,
      "step": 137
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.520000000000001e-05,
      "loss": 1.0691,
      "step": 138
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.560000000000001e-05,
      "loss": 1.1464,
      "step": 139
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.207,
      "step": 140
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.6399999999999995e-05,
      "loss": 1.0166,
      "step": 141
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.68e-05,
      "loss": 1.0261,
      "step": 142
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.72e-05,
      "loss": 1.0902,
      "step": 143
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.76e-05,
      "loss": 1.1066,
      "step": 144
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.8e-05,
      "loss": 1.1081,
      "step": 145
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.8399999999999997e-05,
      "loss": 1.1955,
      "step": 146
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.88e-05,
      "loss": 1.1053,
      "step": 147
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.92e-05,
      "loss": 1.3855,
      "step": 148
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.96e-05,
      "loss": 1.2256,
      "step": 149
    },
    {
      "epoch": 0.06,
      "learning_rate": 6e-05,
      "loss": 1.0127,
      "step": 150
    },
    {
      "epoch": 0.06,
      "learning_rate": 6.04e-05,
      "loss": 1.0112,
      "step": 151
    },
    {
      "epoch": 0.06,
      "learning_rate": 6.08e-05,
      "loss": 1.4484,
      "step": 152
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.12e-05,
      "loss": 1.2744,
      "step": 153
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.16e-05,
      "loss": 1.0334,
      "step": 154
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.2e-05,
      "loss": 1.133,
      "step": 155
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.24e-05,
      "loss": 1.3,
      "step": 156
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.280000000000001e-05,
      "loss": 0.896,
      "step": 157
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.32e-05,
      "loss": 1.1135,
      "step": 158
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.36e-05,
      "loss": 1.2471,
      "step": 159
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.229,
      "step": 160
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.440000000000001e-05,
      "loss": 1.1667,
      "step": 161
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.48e-05,
      "loss": 0.8144,
      "step": 162
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.52e-05,
      "loss": 1.0516,
      "step": 163
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.560000000000001e-05,
      "loss": 1.3033,
      "step": 164
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.6e-05,
      "loss": 1.2643,
      "step": 165
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.64e-05,
      "loss": 1.307,
      "step": 166
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.680000000000001e-05,
      "loss": 0.9852,
      "step": 167
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.720000000000001e-05,
      "loss": 1.1063,
      "step": 168
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.76e-05,
      "loss": 1.1459,
      "step": 169
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.0577,
      "step": 170
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.840000000000001e-05,
      "loss": 1.0419,
      "step": 171
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.879999999999999e-05,
      "loss": 1.4201,
      "step": 172
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.92e-05,
      "loss": 1.2597,
      "step": 173
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.96e-05,
      "loss": 1.0115,
      "step": 174
    },
    {
      "epoch": 0.07,
      "learning_rate": 7e-05,
      "loss": 0.9554,
      "step": 175
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.04e-05,
      "loss": 1.1427,
      "step": 176
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.08e-05,
      "loss": 1.4519,
      "step": 177
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.12e-05,
      "loss": 1.1573,
      "step": 178
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.16e-05,
      "loss": 1.103,
      "step": 179
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.2e-05,
      "loss": 1.2778,
      "step": 180
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.24e-05,
      "loss": 1.0664,
      "step": 181
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.280000000000001e-05,
      "loss": 1.2292,
      "step": 182
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.32e-05,
      "loss": 1.3982,
      "step": 183
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.36e-05,
      "loss": 1.3592,
      "step": 184
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.4e-05,
      "loss": 1.0844,
      "step": 185
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.44e-05,
      "loss": 1.2156,
      "step": 186
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.48e-05,
      "loss": 1.0587,
      "step": 187
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.52e-05,
      "loss": 1.032,
      "step": 188
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.560000000000001e-05,
      "loss": 1.1892,
      "step": 189
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.6e-05,
      "loss": 1.1206,
      "step": 190
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.64e-05,
      "loss": 1.2045,
      "step": 191
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.680000000000001e-05,
      "loss": 1.2164,
      "step": 192
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.72e-05,
      "loss": 1.2035,
      "step": 193
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.76e-05,
      "loss": 1.0796,
      "step": 194
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.37,
      "step": 195
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.840000000000001e-05,
      "loss": 1.2888,
      "step": 196
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.88e-05,
      "loss": 1.1497,
      "step": 197
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.920000000000001e-05,
      "loss": 1.0978,
      "step": 198
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.960000000000001e-05,
      "loss": 1.2033,
      "step": 199
    },
    {
      "epoch": 0.09,
      "learning_rate": 8e-05,
      "loss": 1.5056,
      "step": 200
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.04e-05,
      "loss": 0.9095,
      "step": 201
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.080000000000001e-05,
      "loss": 1.1337,
      "step": 202
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.120000000000001e-05,
      "loss": 1.0801,
      "step": 203
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.16e-05,
      "loss": 0.855,
      "step": 204
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.2e-05,
      "loss": 0.9456,
      "step": 205
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.24e-05,
      "loss": 1.3695,
      "step": 206
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.28e-05,
      "loss": 1.1569,
      "step": 207
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.32e-05,
      "loss": 1.2633,
      "step": 208
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.36e-05,
      "loss": 1.1395,
      "step": 209
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.4e-05,
      "loss": 1.1561,
      "step": 210
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.44e-05,
      "loss": 1.3821,
      "step": 211
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.48e-05,
      "loss": 1.0131,
      "step": 212
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.52e-05,
      "loss": 1.1297,
      "step": 213
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.560000000000001e-05,
      "loss": 1.0271,
      "step": 214
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.6e-05,
      "loss": 1.1185,
      "step": 215
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.64e-05,
      "loss": 1.0648,
      "step": 216
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.680000000000001e-05,
      "loss": 0.8329,
      "step": 217
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.72e-05,
      "loss": 1.3636,
      "step": 218
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.76e-05,
      "loss": 1.2372,
      "step": 219
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.2318,
      "step": 220
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.840000000000001e-05,
      "loss": 1.0359,
      "step": 221
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.88e-05,
      "loss": 1.0338,
      "step": 222
    },
    {
      "epoch": 0.1,
      "learning_rate": 8.92e-05,
      "loss": 1.3378,
      "step": 223
    },
    {
      "epoch": 0.1,
      "learning_rate": 8.960000000000001e-05,
      "loss": 1.0016,
      "step": 224
    },
    {
      "epoch": 0.1,
      "learning_rate": 9e-05,
      "loss": 1.0013,
      "step": 225
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.04e-05,
      "loss": 1.2705,
      "step": 226
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.080000000000001e-05,
      "loss": 1.1285,
      "step": 227
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.120000000000001e-05,
      "loss": 0.8645,
      "step": 228
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.16e-05,
      "loss": 1.016,
      "step": 229
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.1599,
      "step": 230
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.240000000000001e-05,
      "loss": 1.2197,
      "step": 231
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.28e-05,
      "loss": 1.2227,
      "step": 232
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.320000000000002e-05,
      "loss": 0.9526,
      "step": 233
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.360000000000001e-05,
      "loss": 0.8953,
      "step": 234
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.4e-05,
      "loss": 1.0218,
      "step": 235
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.44e-05,
      "loss": 1.0923,
      "step": 236
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.48e-05,
      "loss": 1.067,
      "step": 237
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.52e-05,
      "loss": 1.1224,
      "step": 238
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.56e-05,
      "loss": 0.8466,
      "step": 239
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.6e-05,
      "loss": 1.0339,
      "step": 240
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.64e-05,
      "loss": 0.9805,
      "step": 241
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.680000000000001e-05,
      "loss": 1.3529,
      "step": 242
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.72e-05,
      "loss": 1.218,
      "step": 243
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.76e-05,
      "loss": 1.0436,
      "step": 244
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.8e-05,
      "loss": 0.9048,
      "step": 245
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.84e-05,
      "loss": 1.2208,
      "step": 246
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.88e-05,
      "loss": 1.0762,
      "step": 247
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.92e-05,
      "loss": 0.9162,
      "step": 248
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.960000000000001e-05,
      "loss": 0.9227,
      "step": 249
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001,
      "loss": 1.3004,
      "step": 250
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001004,
      "loss": 0.9945,
      "step": 251
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010080000000000001,
      "loss": 0.9809,
      "step": 252
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010120000000000001,
      "loss": 1.0726,
      "step": 253
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001016,
      "loss": 1.1647,
      "step": 254
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010200000000000001,
      "loss": 1.172,
      "step": 255
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010240000000000001,
      "loss": 1.0538,
      "step": 256
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001028,
      "loss": 0.9363,
      "step": 257
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001032,
      "loss": 0.9594,
      "step": 258
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010360000000000001,
      "loss": 1.3335,
      "step": 259
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010400000000000001,
      "loss": 0.8316,
      "step": 260
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001044,
      "loss": 1.2226,
      "step": 261
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010480000000000001,
      "loss": 1.131,
      "step": 262
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010520000000000001,
      "loss": 1.2415,
      "step": 263
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001056,
      "loss": 1.0098,
      "step": 264
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010600000000000002,
      "loss": 1.1222,
      "step": 265
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010640000000000001,
      "loss": 1.2765,
      "step": 266
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010680000000000001,
      "loss": 1.3166,
      "step": 267
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010720000000000002,
      "loss": 1.0835,
      "step": 268
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00010760000000000001,
      "loss": 0.9762,
      "step": 269
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00010800000000000001,
      "loss": 1.2131,
      "step": 270
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00010840000000000002,
      "loss": 1.3393,
      "step": 271
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00010880000000000002,
      "loss": 1.0986,
      "step": 272
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00010920000000000001,
      "loss": 1.1828,
      "step": 273
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00010960000000000001,
      "loss": 1.0518,
      "step": 274
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011000000000000002,
      "loss": 1.339,
      "step": 275
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011040000000000001,
      "loss": 0.891,
      "step": 276
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011080000000000001,
      "loss": 1.3192,
      "step": 277
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011120000000000002,
      "loss": 1.1529,
      "step": 278
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011160000000000002,
      "loss": 0.9901,
      "step": 279
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011200000000000001,
      "loss": 0.9134,
      "step": 280
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011240000000000002,
      "loss": 1.2848,
      "step": 281
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011279999999999999,
      "loss": 0.9486,
      "step": 282
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001132,
      "loss": 1.1006,
      "step": 283
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001136,
      "loss": 1.0789,
      "step": 284
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011399999999999999,
      "loss": 1.1266,
      "step": 285
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001144,
      "loss": 1.1636,
      "step": 286
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001148,
      "loss": 1.2079,
      "step": 287
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001152,
      "loss": 1.3292,
      "step": 288
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011559999999999999,
      "loss": 1.0871,
      "step": 289
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.000116,
      "loss": 0.8548,
      "step": 290
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001164,
      "loss": 1.1602,
      "step": 291
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00011679999999999999,
      "loss": 1.1705,
      "step": 292
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001172,
      "loss": 1.2637,
      "step": 293
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001176,
      "loss": 1.2081,
      "step": 294
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.000118,
      "loss": 1.1403,
      "step": 295
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001184,
      "loss": 1.0839,
      "step": 296
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001188,
      "loss": 1.0259,
      "step": 297
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001192,
      "loss": 1.1124,
      "step": 298
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00011960000000000001,
      "loss": 1.2362,
      "step": 299
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00012,
      "loss": 1.0951,
      "step": 300
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001204,
      "loss": 0.8936,
      "step": 301
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001208,
      "loss": 1.2883,
      "step": 302
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001212,
      "loss": 1.0128,
      "step": 303
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001216,
      "loss": 1.2779,
      "step": 304
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.000122,
      "loss": 1.422,
      "step": 305
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001224,
      "loss": 1.1744,
      "step": 306
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001228,
      "loss": 1.218,
      "step": 307
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001232,
      "loss": 1.2358,
      "step": 308
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001236,
      "loss": 1.0479,
      "step": 309
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.000124,
      "loss": 1.2465,
      "step": 310
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00012440000000000002,
      "loss": 1.0339,
      "step": 311
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001248,
      "loss": 0.9013,
      "step": 312
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001252,
      "loss": 1.4156,
      "step": 313
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00012560000000000002,
      "loss": 1.3632,
      "step": 314
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.000126,
      "loss": 1.1134,
      "step": 315
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001264,
      "loss": 0.9918,
      "step": 316
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00012680000000000002,
      "loss": 0.9781,
      "step": 317
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001272,
      "loss": 1.2625,
      "step": 318
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001276,
      "loss": 1.1926,
      "step": 319
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00012800000000000002,
      "loss": 0.9746,
      "step": 320
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001284,
      "loss": 1.351,
      "step": 321
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00012880000000000001,
      "loss": 0.9687,
      "step": 322
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00012920000000000002,
      "loss": 1.1816,
      "step": 323
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001296,
      "loss": 1.2361,
      "step": 324
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00013000000000000002,
      "loss": 0.9113,
      "step": 325
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001304,
      "loss": 1.1801,
      "step": 326
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001308,
      "loss": 1.2085,
      "step": 327
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00013120000000000002,
      "loss": 1.0212,
      "step": 328
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001316,
      "loss": 1.0453,
      "step": 329
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.000132,
      "loss": 1.0107,
      "step": 330
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00013240000000000002,
      "loss": 0.9697,
      "step": 331
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001328,
      "loss": 1.162,
      "step": 332
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001332,
      "loss": 1.2125,
      "step": 333
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00013360000000000002,
      "loss": 1.309,
      "step": 334
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.000134,
      "loss": 1.099,
      "step": 335
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00013440000000000001,
      "loss": 1.355,
      "step": 336
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00013480000000000002,
      "loss": 0.6282,
      "step": 337
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001352,
      "loss": 1.3519,
      "step": 338
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00013560000000000002,
      "loss": 0.8672,
      "step": 339
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00013600000000000003,
      "loss": 0.9799,
      "step": 340
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001364,
      "loss": 1.2927,
      "step": 341
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00013680000000000002,
      "loss": 0.9295,
      "step": 342
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00013720000000000003,
      "loss": 1.0341,
      "step": 343
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00013759999999999998,
      "loss": 1.0038,
      "step": 344
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.000138,
      "loss": 1.3566,
      "step": 345
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001384,
      "loss": 1.3394,
      "step": 346
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00013879999999999999,
      "loss": 1.0362,
      "step": 347
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001392,
      "loss": 0.9735,
      "step": 348
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001396,
      "loss": 0.9184,
      "step": 349
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00014,
      "loss": 1.1407,
      "step": 350
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001404,
      "loss": 1.1831,
      "step": 351
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001408,
      "loss": 1.1407,
      "step": 352
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001412,
      "loss": 1.4475,
      "step": 353
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001416,
      "loss": 1.3113,
      "step": 354
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.000142,
      "loss": 1.1889,
      "step": 355
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001424,
      "loss": 1.0505,
      "step": 356
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001428,
      "loss": 1.0357,
      "step": 357
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001432,
      "loss": 1.3135,
      "step": 358
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001436,
      "loss": 1.0995,
      "step": 359
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.000144,
      "loss": 0.9231,
      "step": 360
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001444,
      "loss": 1.0539,
      "step": 361
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001448,
      "loss": 1.176,
      "step": 362
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001452,
      "loss": 1.0333,
      "step": 363
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00014560000000000002,
      "loss": 1.0417,
      "step": 364
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.000146,
      "loss": 1.2966,
      "step": 365
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001464,
      "loss": 1.1125,
      "step": 366
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00014680000000000002,
      "loss": 1.272,
      "step": 367
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001472,
      "loss": 1.0856,
      "step": 368
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001476,
      "loss": 1.231,
      "step": 369
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.000148,
      "loss": 1.0417,
      "step": 370
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001484,
      "loss": 1.0135,
      "step": 371
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001488,
      "loss": 1.0095,
      "step": 372
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001492,
      "loss": 1.5798,
      "step": 373
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001496,
      "loss": 1.2443,
      "step": 374
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00015000000000000001,
      "loss": 1.099,
      "step": 375
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001504,
      "loss": 1.5508,
      "step": 376
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001508,
      "loss": 1.2389,
      "step": 377
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00015120000000000002,
      "loss": 1.0767,
      "step": 378
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001516,
      "loss": 1.2713,
      "step": 379
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.000152,
      "loss": 1.1228,
      "step": 380
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00015240000000000002,
      "loss": 0.8391,
      "step": 381
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001528,
      "loss": 1.0183,
      "step": 382
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001532,
      "loss": 1.437,
      "step": 383
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00015360000000000002,
      "loss": 1.3913,
      "step": 384
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.000154,
      "loss": 0.9054,
      "step": 385
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001544,
      "loss": 1.3779,
      "step": 386
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015480000000000002,
      "loss": 1.0458,
      "step": 387
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001552,
      "loss": 0.9702,
      "step": 388
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015560000000000001,
      "loss": 0.9826,
      "step": 389
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015600000000000002,
      "loss": 1.1765,
      "step": 390
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001564,
      "loss": 1.068,
      "step": 391
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015680000000000002,
      "loss": 1.0562,
      "step": 392
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015720000000000003,
      "loss": 1.3064,
      "step": 393
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001576,
      "loss": 1.003,
      "step": 394
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015800000000000002,
      "loss": 1.0544,
      "step": 395
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015840000000000003,
      "loss": 0.8975,
      "step": 396
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001588,
      "loss": 0.9229,
      "step": 397
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00015920000000000002,
      "loss": 0.8955,
      "step": 398
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001596,
      "loss": 1.1768,
      "step": 399
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00016,
      "loss": 0.9362,
      "step": 400
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00016040000000000002,
      "loss": 1.2776,
      "step": 401
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001608,
      "loss": 1.0466,
      "step": 402
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00016120000000000002,
      "loss": 1.0684,
      "step": 403
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00016160000000000002,
      "loss": 1.1952,
      "step": 404
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.000162,
      "loss": 0.9252,
      "step": 405
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00016240000000000002,
      "loss": 0.8359,
      "step": 406
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001628,
      "loss": 1.1491,
      "step": 407
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001632,
      "loss": 1.2934,
      "step": 408
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001636,
      "loss": 0.9825,
      "step": 409
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.000164,
      "loss": 0.8535,
      "step": 410
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001644,
      "loss": 1.0623,
      "step": 411
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001648,
      "loss": 1.289,
      "step": 412
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001652,
      "loss": 1.1956,
      "step": 413
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001656,
      "loss": 1.1805,
      "step": 414
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.000166,
      "loss": 1.3408,
      "step": 415
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001664,
      "loss": 0.875,
      "step": 416
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001668,
      "loss": 1.0994,
      "step": 417
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001672,
      "loss": 1.1424,
      "step": 418
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001676,
      "loss": 1.092,
      "step": 419
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.000168,
      "loss": 1.1907,
      "step": 420
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001684,
      "loss": 0.87,
      "step": 421
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001688,
      "loss": 1.0553,
      "step": 422
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001692,
      "loss": 0.9633,
      "step": 423
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001696,
      "loss": 1.3221,
      "step": 424
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00017,
      "loss": 0.9208,
      "step": 425
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001704,
      "loss": 1.2193,
      "step": 426
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001708,
      "loss": 0.9998,
      "step": 427
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00017120000000000001,
      "loss": 1.1529,
      "step": 428
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001716,
      "loss": 1.3577,
      "step": 429
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.000172,
      "loss": 1.2927,
      "step": 430
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00017240000000000002,
      "loss": 1.0202,
      "step": 431
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001728,
      "loss": 0.9609,
      "step": 432
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001732,
      "loss": 0.9294,
      "step": 433
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00017360000000000002,
      "loss": 1.3357,
      "step": 434
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.000174,
      "loss": 1.0865,
      "step": 435
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001744,
      "loss": 1.182,
      "step": 436
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00017480000000000002,
      "loss": 1.2475,
      "step": 437
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001752,
      "loss": 1.0237,
      "step": 438
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001756,
      "loss": 0.8495,
      "step": 439
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00017600000000000002,
      "loss": 1.1944,
      "step": 440
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001764,
      "loss": 1.1077,
      "step": 441
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00017680000000000001,
      "loss": 1.0118,
      "step": 442
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001772,
      "loss": 1.0466,
      "step": 443
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001776,
      "loss": 1.2098,
      "step": 444
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00017800000000000002,
      "loss": 1.412,
      "step": 445
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001784,
      "loss": 1.1311,
      "step": 446
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001788,
      "loss": 1.2495,
      "step": 447
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00017920000000000002,
      "loss": 1.1256,
      "step": 448
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001796,
      "loss": 1.1823,
      "step": 449
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018,
      "loss": 1.0601,
      "step": 450
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018040000000000002,
      "loss": 0.8973,
      "step": 451
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001808,
      "loss": 1.4697,
      "step": 452
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001812,
      "loss": 1.1578,
      "step": 453
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018160000000000002,
      "loss": 1.1062,
      "step": 454
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.000182,
      "loss": 1.3828,
      "step": 455
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018240000000000002,
      "loss": 1.4718,
      "step": 456
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018280000000000003,
      "loss": 1.0712,
      "step": 457
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001832,
      "loss": 1.0828,
      "step": 458
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018360000000000002,
      "loss": 1.2653,
      "step": 459
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018400000000000003,
      "loss": 1.3578,
      "step": 460
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001844,
      "loss": 1.2627,
      "step": 461
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018480000000000002,
      "loss": 1.05,
      "step": 462
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018520000000000003,
      "loss": 0.8436,
      "step": 463
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001856,
      "loss": 1.0601,
      "step": 464
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018600000000000002,
      "loss": 1.2333,
      "step": 465
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018640000000000003,
      "loss": 1.0088,
      "step": 466
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018680000000000001,
      "loss": 1.0088,
      "step": 467
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018720000000000002,
      "loss": 1.153,
      "step": 468
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001876,
      "loss": 1.3097,
      "step": 469
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.000188,
      "loss": 1.2258,
      "step": 470
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001884,
      "loss": 1.4277,
      "step": 471
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001888,
      "loss": 1.0696,
      "step": 472
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001892,
      "loss": 1.1787,
      "step": 473
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001896,
      "loss": 1.3921,
      "step": 474
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019,
      "loss": 1.0147,
      "step": 475
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001904,
      "loss": 1.4159,
      "step": 476
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001908,
      "loss": 1.1812,
      "step": 477
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001912,
      "loss": 1.0883,
      "step": 478
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001916,
      "loss": 0.9791,
      "step": 479
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.000192,
      "loss": 0.8734,
      "step": 480
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019240000000000001,
      "loss": 1.1211,
      "step": 481
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001928,
      "loss": 0.9795,
      "step": 482
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001932,
      "loss": 1.1376,
      "step": 483
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019360000000000002,
      "loss": 0.9738,
      "step": 484
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.000194,
      "loss": 1.1821,
      "step": 485
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001944,
      "loss": 1.2128,
      "step": 486
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001948,
      "loss": 1.1318,
      "step": 487
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001952,
      "loss": 1.3241,
      "step": 488
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001956,
      "loss": 1.2116,
      "step": 489
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.000196,
      "loss": 1.1755,
      "step": 490
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001964,
      "loss": 0.9479,
      "step": 491
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001968,
      "loss": 1.1903,
      "step": 492
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001972,
      "loss": 1.1141,
      "step": 493
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001976,
      "loss": 1.3308,
      "step": 494
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019800000000000002,
      "loss": 1.0702,
      "step": 495
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001984,
      "loss": 1.0281,
      "step": 496
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001988,
      "loss": 1.2001,
      "step": 497
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019920000000000002,
      "loss": 0.9977,
      "step": 498
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001996,
      "loss": 1.1479,
      "step": 499
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0002,
      "loss": 0.8147,
      "step": 500
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019999985487313587,
      "loss": 1.2913,
      "step": 501
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001999994194929647,
      "loss": 1.1063,
      "step": 502
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001999986938607502,
      "loss": 1.3171,
      "step": 503
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00019999767797859854,
      "loss": 1.0168,
      "step": 504
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019999637184945834,
      "loss": 0.9533,
      "step": 505
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019999477547712072,
      "loss": 1.0006,
      "step": 506
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001999928888662192,
      "loss": 1.4544,
      "step": 507
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019999071202222974,
      "loss": 1.1641,
      "step": 508
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019998824495147074,
      "loss": 1.1363,
      "step": 509
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019998548766110287,
      "loss": 0.9164,
      "step": 510
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019998244015912938,
      "loss": 0.965,
      "step": 511
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001999791024543957,
      "loss": 1.15,
      "step": 512
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019997547455658963,
      "loss": 0.9623,
      "step": 513
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001999715564762413,
      "loss": 1.0395,
      "step": 514
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019996734822472312,
      "loss": 1.2779,
      "step": 515
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019996284981424963,
      "loss": 1.0833,
      "step": 516
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019995806125787765,
      "loss": 1.3124,
      "step": 517
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019995298256950619,
      "loss": 1.0696,
      "step": 518
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019994761376387627,
      "loss": 0.9668,
      "step": 519
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019994195485657112,
      "loss": 1.2401,
      "step": 520
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019993600586401587,
      "loss": 1.3796,
      "step": 521
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001999297668034777,
      "loss": 1.2028,
      "step": 522
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019992323769306577,
      "loss": 0.8962,
      "step": 523
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019991641855173097,
      "loss": 0.9164,
      "step": 524
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019990930939926618,
      "loss": 1.2593,
      "step": 525
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019990191025630595,
      "loss": 1.4855,
      "step": 526
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001998942211443266,
      "loss": 1.1999,
      "step": 527
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019988624208564608,
      "loss": 0.8947,
      "step": 528
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019987797310342385,
      "loss": 1.1353,
      "step": 529
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019986941422166097,
      "loss": 1.0174,
      "step": 530
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001998605654651999,
      "loss": 1.0731,
      "step": 531
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019985142685972446,
      "loss": 1.3482,
      "step": 532
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019984199843175985,
      "loss": 1.1752,
      "step": 533
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019983228020867242,
      "loss": 1.1188,
      "step": 534
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019982227221866964,
      "loss": 0.9917,
      "step": 535
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001998119744908001,
      "loss": 1.1577,
      "step": 536
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019980138705495338,
      "loss": 1.1481,
      "step": 537
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019979050994185983,
      "loss": 0.8178,
      "step": 538
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001997793431830907,
      "loss": 1.1895,
      "step": 539
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019976788681105796,
      "loss": 1.0866,
      "step": 540
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001997561408590141,
      "loss": 1.1982,
      "step": 541
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019974410536105223,
      "loss": 1.0071,
      "step": 542
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019973178035210583,
      "loss": 1.3597,
      "step": 543
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019971916586794867,
      "loss": 1.0774,
      "step": 544
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019970626194519477,
      "loss": 1.0055,
      "step": 545
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019969306862129823,
      "loss": 1.1297,
      "step": 546
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019967958593455323,
      "loss": 1.2226,
      "step": 547
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019966581392409373,
      "loss": 1.1444,
      "step": 548
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019965175262989346,
      "loss": 0.9697,
      "step": 549
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019963740209276592,
      "loss": 1.3132,
      "step": 550
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019962276235436404,
      "loss": 1.1001,
      "step": 551
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019960783345718023,
      "loss": 1.0889,
      "step": 552
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019959261544454614,
      "loss": 1.2967,
      "step": 553
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019957710836063263,
      "loss": 0.9959,
      "step": 554
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001995613122504496,
      "loss": 1.208,
      "step": 555
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019954522715984584,
      "loss": 0.9111,
      "step": 556
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019952885313550897,
      "loss": 1.1781,
      "step": 557
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019951219022496512,
      "loss": 1.0607,
      "step": 558
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019949523847657906,
      "loss": 1.0173,
      "step": 559
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001994779979395539,
      "loss": 0.8664,
      "step": 560
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019946046866393087,
      "loss": 1.1037,
      "step": 561
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019944265070058942,
      "loss": 1.3897,
      "step": 562
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019942454410124677,
      "loss": 1.2395,
      "step": 563
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019940614891845809,
      "loss": 1.0461,
      "step": 564
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.000199387465205616,
      "loss": 0.9533,
      "step": 565
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019936849301695076,
      "loss": 0.9421,
      "step": 566
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019934923240752975,
      "loss": 1.2513,
      "step": 567
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019932968343325773,
      "loss": 0.8376,
      "step": 568
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019930984615087623,
      "loss": 1.1108,
      "step": 569
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019928972061796375,
      "loss": 1.054,
      "step": 570
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019926930689293535,
      "loss": 1.0191,
      "step": 571
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019924860503504267,
      "loss": 1.1286,
      "step": 572
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019922761510437364,
      "loss": 1.3716,
      "step": 573
    },
    {
      "epoch": 0.24,
| "learning_rate": 0.00019920633716185226, |
| "loss": 1.111, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019918477126923862, |
| "loss": 1.0931, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019916291748912847, |
| "loss": 1.0406, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019914077588495323, |
| "loss": 1.008, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019911834652097977, |
| "loss": 1.0435, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019909562946231012, |
| "loss": 1.1845, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.0001990726247748814, |
| "loss": 1.3775, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019904933252546555, |
| "loss": 1.2698, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019902575278166927, |
| "loss": 1.0962, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019900188561193356, |
| "loss": 1.1448, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019897773108553378, |
| "loss": 1.3193, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019895328927257936, |
| "loss": 1.0702, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.0001989285602440136, |
| "loss": 1.2, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.0001989035440716134, |
| "loss": 1.1682, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019887824082798913, |
| "loss": 1.2194, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019885265058658443, |
| "loss": 1.2411, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019882677342167586, |
| "loss": 1.1677, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019880060940837292, |
| "loss": 1.1, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019877415862261762, |
| "loss": 1.0011, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019874742114118433, |
| "loss": 1.1166, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019872039704167964, |
| "loss": 1.2969, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019869308640254194, |
| "loss": 1.1585, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019866548930304144, |
| "loss": 1.1265, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.0001986376058232797, |
| "loss": 1.0731, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019860943604418956, |
| "loss": 1.0223, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019858098004753487, |
| "loss": 0.9901, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019855223791591023, |
| "loss": 1.1621, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019852320973274073, |
| "loss": 1.3315, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001984938955822818, |
| "loss": 1.2482, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001984642955496188, |
| "loss": 1.1693, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019843440972066697, |
| "loss": 1.1547, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.000198404238182171, |
| "loss": 0.9196, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019837378102170492, |
| "loss": 0.9227, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001983430383276718, |
| "loss": 1.1163, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019831201018930342, |
| "loss": 1.1671, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001982806966966601, |
| "loss": 0.8524, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001982490979406305, |
| "loss": 1.2517, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019821721401293106, |
| "loss": 0.9096, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019818504500610618, |
| "loss": 1.2354, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001981525910135276, |
| "loss": 0.8784, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019811985212939416, |
| "loss": 1.1312, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019808682844873175, |
| "loss": 1.0033, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019805352006739283, |
| "loss": 1.06, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019801992708205619, |
| "loss": 1.2113, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019798604959022677, |
| "loss": 1.0335, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001979518876902352, |
| "loss": 1.1529, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001979174414812377, |
| "loss": 0.7925, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001978827110632157, |
| "loss": 1.162, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019784769653697547, |
| "loss": 1.236, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.000197812398004148, |
| "loss": 1.1424, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019777681556718864, |
| "loss": 1.0721, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.0001977409493293767, |
| "loss": 1.1005, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019770479939481528, |
| "loss": 0.7105, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.0001976683658684309, |
| "loss": 0.8946, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019763164885597324, |
| "loss": 1.0184, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019759464846401482, |
| "loss": 1.3275, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019755736479995064, |
| "loss": 1.2415, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019751979797199792, |
| "loss": 1.0086, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019748194808919575, |
| "loss": 1.1779, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.0001974438152614049, |
| "loss": 1.1134, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019740539959930725, |
| "loss": 1.1024, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019736670121440572, |
| "loss": 1.2317, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019732772021902384, |
| "loss": 1.1601, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019728845672630537, |
| "loss": 1.0954, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.0001972489108502141, |
| "loss": 0.8213, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019720908270553334, |
| "loss": 1.3188, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.0001971689724078658, |
| "loss": 1.1258, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019712858007363314, |
| "loss": 0.907, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.0001970879058200756, |
| "loss": 1.1687, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019704694976525173, |
| "loss": 0.8651, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019700571202803797, |
| "loss": 1.356, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001969641927281284, |
| "loss": 1.1797, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019692239198603438, |
| "loss": 1.4008, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019688030992308405, |
| "loss": 1.0366, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001968379466614222, |
| "loss": 1.2611, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001967953023240098, |
| "loss": 0.8693, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019675237703462364, |
| "loss": 1.1452, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001967091709178559, |
| "loss": 0.8372, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.000196665684099114, |
| "loss": 0.9887, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019662191670462008, |
| "loss": 1.0321, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019657786886141052, |
| "loss": 1.0705, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019653354069733595, |
| "loss": 1.2854, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019648893234106045, |
| "loss": 1.48, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019644404392206148, |
| "loss": 1.1699, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019639887557062932, |
| "loss": 1.1834, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001963534274178668, |
| "loss": 1.2088, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019630769959568886, |
| "loss": 1.1226, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019626169223682224, |
| "loss": 1.0411, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.000196215405474805, |
| "loss": 0.8895, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019616883944398623, |
| "loss": 1.2787, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019612199427952552, |
| "loss": 1.267, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001960748701173927, |
| "loss": 1.1263, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019602746709436746, |
| "loss": 0.9675, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001959797853480388, |
| "loss": 1.0972, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019593182501680476, |
| "loss": 1.2443, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019588358623987202, |
| "loss": 1.2117, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019583506915725541, |
| "loss": 1.0921, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.0001957862739097776, |
| "loss": 1.3039, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019573720063906856, |
| "loss": 0.8299, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019568784948756533, |
| "loss": 1.0927, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019563822059851145, |
| "loss": 0.9748, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019558831411595665, |
| "loss": 1.1485, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.0001955381301847563, |
| "loss": 1.1562, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019548766895057123, |
| "loss": 1.1285, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019543693055986693, |
| "loss": 1.2285, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019538591515991355, |
| "loss": 1.1749, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019533462289878517, |
| "loss": 0.8369, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.0001952830539253595, |
| "loss": 1.2296, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019523120838931733, |
| "loss": 1.0707, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019517908644114243, |
| "loss": 1.0608, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019512668823212055, |
| "loss": 1.2011, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019507401391433948, |
| "loss": 1.2707, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019502106364068842, |
| "loss": 0.8953, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.0001949678375648575, |
| "loss": 1.2922, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019491433584133735, |
| "loss": 1.124, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.0001948605586254188, |
| "loss": 1.3535, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019480650607319217, |
| "loss": 1.0431, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.000194752178341547, |
| "loss": 0.9603, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001946975755881716, |
| "loss": 1.167, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019464269797155246, |
| "loss": 1.1835, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001945875456509739, |
| "loss": 0.8525, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019453211878651762, |
| "loss": 1.0272, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019447641753906215, |
| "loss": 1.2859, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001944204420702824, |
| "loss": 1.2566, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019436419254264932, |
| "loss": 0.9733, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019430766911942922, |
| "loss": 1.0347, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019425087196468346, |
| "loss": 1.123, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001941938012432679, |
| "loss": 1.3722, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001941364571208324, |
| "loss": 1.2421, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019407883976382047, |
| "loss": 1.1164, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019402094933946857, |
| "loss": 1.2285, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019396278601580588, |
| "loss": 1.0078, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019390434996165357, |
| "loss": 1.1597, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019384564134662454, |
| "loss": 1.0244, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001937866603411226, |
| "loss": 1.2346, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019372740711634244, |
| "loss": 1.2062, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001936678818442687, |
| "loss": 0.9959, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019360808469767572, |
| "loss": 1.0192, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019354801585012697, |
| "loss": 1.1645, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019348767547597444, |
| "loss": 0.7205, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001934270637503584, |
| "loss": 1.0883, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019336618084920656, |
| "loss": 1.1822, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.0001933050269492339, |
| "loss": 1.1024, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019324360222794184, |
| "loss": 0.8622, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019318190686361792, |
| "loss": 1.0641, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019311994103533527, |
| "loss": 1.2941, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.000193057704922952, |
| "loss": 1.2076, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019299519870711075, |
| "loss": 0.9718, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.0001929324225692381, |
| "loss": 1.2232, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.0001928693766915442, |
| "loss": 1.0738, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019280606125702203, |
| "loss": 1.1229, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019274247644944699, |
| "loss": 1.1557, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019267862245337636, |
| "loss": 1.2187, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019261449945414874, |
| "loss": 1.2721, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019255010763788351, |
| "loss": 0.9863, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019248544719148038, |
| "loss": 1.1638, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019242051830261866, |
| "loss": 1.1812, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019235532115975684, |
| "loss": 1.4268, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019228985595213215, |
| "loss": 1.0501, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.0001922241228697597, |
| "loss": 0.908, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019215812210343226, |
| "loss": 1.2303, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019209185384471954, |
| "loss": 1.3575, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019202531828596756, |
| "loss": 1.245, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019195851562029832, |
| "loss": 1.2686, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.000191891446041609, |
| "loss": 1.2543, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019182410974457158, |
| "loss": 1.0797, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019175650692463222, |
| "loss": 1.281, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019168863777801055, |
| "loss": 0.817, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001916205025016993, |
| "loss": 1.2101, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019155210129346371, |
| "loss": 1.139, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019148343435184079, |
| "loss": 1.1273, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001914145018761389, |
| "loss": 1.0817, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019134530406643711, |
| "loss": 1.2232, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001912758411235847, |
| "loss": 1.4168, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001912061132492004, |
| "loss": 1.0433, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.000191136120645672, |
| "loss": 1.1638, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001910658635161556, |
| "loss": 1.0237, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001909953420645752, |
| "loss": 0.9135, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019092455649562186, |
| "loss": 1.2252, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019085350701475337, |
| "loss": 1.4455, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019078219382819353, |
| "loss": 0.811, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001907106171429315, |
| "loss": 0.9931, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019063877716672125, |
| "loss": 0.8521, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019056667410808102, |
| "loss": 0.8681, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001904943081762926, |
| "loss": 0.9413, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019042167958140084, |
| "loss": 1.1446, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019034878853421293, |
| "loss": 0.8896, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019027563524629784, |
| "loss": 0.7721, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00019020221992998573, |
| "loss": 1.0006, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00019012854279836728, |
| "loss": 1.0832, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00019005460406529311, |
| "loss": 1.0336, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018998040394537318, |
| "loss": 0.9962, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018990594265397607, |
| "loss": 1.2947, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018983122040722847, |
| "loss": 1.3027, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.0001897562374220145, |
| "loss": 1.2781, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018968099391597504, |
| "loss": 1.0292, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018960549010750718, |
| "loss": 1.2331, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018952972621576354, |
| "loss": 1.1173, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018945370246065165, |
| "loss": 1.1384, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018937741906283328, |
| "loss": 1.0338, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018930087624372387, |
| "loss": 1.3098, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018922407422549176, |
| "loss": 1.0469, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.0001891470132310577, |
| "loss": 1.1092, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018906969348409409, |
| "loss": 1.2412, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.0001889921152090244, |
| "loss": 1.3915, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018891427863102243, |
| "loss": 1.1104, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018883618397601176, |
| "loss": 0.9741, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.000188757831470665, |
| "loss": 1.1488, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018867922134240334, |
| "loss": 1.0913, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.0001886003538193955, |
| "loss": 1.3236, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00018852122913055742, |
| "loss": 1.1033, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.0001884418475055515, |
| "loss": 1.1394, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018836220917478583, |
| "loss": 1.1857, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018828231436941365, |
| "loss": 0.8392, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018820216332133258, |
| "loss": 1.08, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018812175626318407, |
| "loss": 1.0738, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018804109342835258, |
| "loss": 1.094, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018796017505096503, |
| "loss": 0.9702, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018787900136588994, |
| "loss": 1.3268, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018779757260873706, |
| "loss": 1.14, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018771588901585635, |
| "loss": 0.9881, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018763395082433746, |
| "loss": 1.155, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.0001875517582720091, |
| "loss": 1.1759, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018746931159743815, |
| "loss": 1.0663, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.0001873866110399292, |
| "loss": 1.0542, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.0001873036568395237, |
| "loss": 1.5055, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.0001872204492369993, |
| "loss": 1.0873, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018713698847386918, |
| "loss": 1.5043, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.0001870532747923813, |
| "loss": 0.9765, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018696930843551776, |
| "loss": 1.0198, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018688508964699404, |
| "loss": 0.8786, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018680061867125828, |
| "loss": 1.1144, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.0001867158957534907, |
| "loss": 1.2783, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018663092113960263, |
| "loss": 1.2231, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00018654569507623616, |
| "loss": 0.8776, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018646021781076305, |
| "loss": 1.1812, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001863744895912843, |
| "loss": 0.9862, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001862885106666292, |
| "loss": 1.1873, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018620228128635484, |
| "loss": 0.939, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018611580170074517, |
| "loss": 1.0042, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018602907216081044, |
| "loss": 1.4149, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018594209291828638, |
| "loss": 1.3869, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018585486422563344, |
| "loss": 1.3678, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001857673863360362, |
| "loss": 0.967, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018567965950340247, |
| "loss": 1.3628, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018559168398236264, |
| "loss": 1.0053, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018550346002826898, |
| "loss": 1.3316, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018541498789719478, |
| "loss": 1.0663, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018532626784593373, |
| "loss": 1.1287, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.000185237300131999, |
| "loss": 1.1678, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001851480850136228, |
| "loss": 1.005, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018505862274975526, |
| "loss": 1.1574, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018496891360006402, |
| "loss": 0.8346, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018487895782493317, |
| "loss": 1.1672, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001847887556854627, |
| "loss": 0.8411, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001846983074434677, |
| "loss": 1.2761, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018460761336147754, |
| "loss": 1.1325, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001845166737027352, |
| "loss": 1.3925, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00018442548873119643, |
| "loss": 0.9941, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018433405871152902, |
| "loss": 0.7731, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018424238390911198, |
| "loss": 0.7904, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018415046459003485, |
| "loss": 1.0654, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.0001840583010210969, |
| "loss": 1.1044, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.0001839658934698063, |
| "loss": 1.2158, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018387324220437944, |
| "loss": 1.1146, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018378034749374006, |
| "loss": 1.1444, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018368720960751852, |
| "loss": 1.0979, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.000183593828816051, |
| "loss": 1.116, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.0001835002053903787, |
| "loss": 1.2895, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018340633960224717, |
| "loss": 1.2178, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018331223172410535, |
| "loss": 0.8649, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018321788202910481, |
| "loss": 0.8912, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.0001831232907910991, |
| "loss": 0.836, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.0001830284582846428, |
| "loss": 0.8429, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018293338478499082, |
| "loss": 1.2104, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018283807056809748, |
| "loss": 1.3689, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.0001827425159106159, |
| "loss": 0.8456, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018264672108989703, |
| "loss": 0.9731, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018255068638398892, |
| "loss": 1.2742, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018245441207163582, |
| "loss": 1.1818, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018235789843227756, |
| "loss": 1.304, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00018226114574604863, |
| "loss": 0.8456, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.0001821641542937772, |
| "loss": 1.2534, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018206692435698463, |
| "loss": 1.3066, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018196945621788447, |
| "loss": 1.0395, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018187175015938153, |
| "loss": 1.219, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.0001817738064650714, |
| "loss": 1.1649, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018167562541923925, |
| "loss": 1.3486, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018157720730685922, |
| "loss": 1.4469, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018147855241359352, |
| "loss": 1.3363, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018137966102579176, |
| "loss": 1.1218, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018128053343048977, |
| "loss": 1.0041, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018118116991540917, |
| "loss": 1.0692, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.0001810815707689562, |
| "loss": 0.9318, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018098173628022118, |
| "loss": 1.013, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018088166673897732, |
| "loss": 0.9541, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018078136243568032, |
| "loss": 0.9689, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018068082366146706, |
| "loss": 1.1873, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018058005070815513, |
| "loss": 1.0434, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018047904386824177, |
| "loss": 1.0941, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018037780343490312, |
| "loss": 0.8938, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.0001802763297019933, |
| "loss": 1.3376, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018017462296404356, |
| "loss": 1.1218, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00018007268351626155, |
| "loss": 1.196, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00017997051165453035, |
| "loss": 1.3447, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.0001798681076754075, |
| "loss": 1.2547, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017976547187612444, |
| "loss": 1.0444, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001796626045545854, |
| "loss": 1.0356, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017955950600936658, |
| "loss": 1.2839, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001794561765397154, |
| "loss": 1.188, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017935261644554942, |
| "loss": 1.0599, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017924882602745576, |
| "loss": 0.9569, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017914480558668992, |
| "loss": 1.1798, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017904055542517514, |
| "loss": 1.3728, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017893607584550138, |
| "loss": 1.1367, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017883136715092455, |
| "loss": 1.1238, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001787264296453655, |
| "loss": 0.9529, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017862126363340925, |
| "loss": 0.7173, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001785158694203041, |
| "loss": 1.2545, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001784102473119607, |
| "loss": 1.0635, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001783043976149511, |
| "loss": 1.0547, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.000178198320636508, |
| "loss": 1.0106, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017809201668452385, |
| "loss": 0.9702, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017798548606754976, |
| "loss": 1.0135, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017787872909479487, |
| "loss": 1.1534, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017777174607612524, |
| "loss": 0.9367, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017766453732206305, |
| "loss": 0.8934, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017755710314378576, |
| "loss": 1.3794, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.00017744944385312506, |
| "loss": 1.1701, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017734155976256605, |
| "loss": 0.9438, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001772334511852463, |
| "loss": 1.2407, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017712511843495502, |
| "loss": 1.2564, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017701656182613203, |
| "loss": 0.9424, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017690778167386698, |
| "loss": 1.0412, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001767987782938983, |
| "loss": 1.1434, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017668955200261235, |
| "loss": 0.949, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017658010311704249, |
| "loss": 1.0636, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017647043195486822, |
| "loss": 1.1329, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017636053883441418, |
| "loss": 1.0195, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017625042407464923, |
| "loss": 1.133, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001761400879951856, |
| "loss": 1.2364, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017602953091627786, |
| "loss": 0.873, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017591875315882202, |
| "loss": 1.0126, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001758077550443547, |
| "loss": 1.2012, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017569653689505202, |
| "loss": 1.2604, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017558509903372883, |
| "loss": 1.0632, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001754734417838377, |
| "loss": 1.0401, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017536156546946793, |
| "loss": 1.1298, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001752494704153447, |
| "loss": 1.2415, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017513715694682805, |
| "loss": 1.1665, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017502462538991205, |
| "loss": 1.4334, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017491187607122374, |
| "loss": 1.0355, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00017479890931802224, |
| "loss": 0.8777, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017468572545819772, |
| "loss": 1.1524, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017457232482027054, |
| "loss": 1.1197, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017445870773339034, |
| "loss": 1.2017, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017434487452733493, |
| "loss": 1.2522, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.0001742308255325094, |
| "loss": 0.961, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017411656107994524, |
| "loss": 1.2319, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017400208150129926, |
| "loss": 1.2012, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017388738712885275, |
| "loss": 0.9532, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017377247829551036, |
| "loss": 1.1876, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017365735533479929, |
| "loss": 1.0737, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017354201858086818, |
| "loss": 1.2444, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017342646836848632, |
| "loss": 1.1259, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017331070503304248, |
| "loss": 1.114, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017319472891054403, |
| "loss": 1.143, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017307854033761605, |
| "loss": 0.9559, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017296213965150016, |
| "loss": 1.0642, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.0001728455271900537, |
| "loss": 1.088, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.0001727287032917487, |
| "loss": 0.95, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017261166829567087, |
| "loss": 0.8994, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017249442254151865, |
| "loss": 1.0113, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017237696636960217, |
| "loss": 1.0822, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00017225930012084245, |
| "loss": 1.1655, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.0001721414241367701, |
| "loss": 0.9005, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017202333875952452, |
| "loss": 1.0135, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017190504433185297, |
| "loss": 1.1891, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017178654119710945, |
| "loss": 0.9717, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017166782969925371, |
| "loss": 0.955, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017154891018285028, |
| "loss": 1.254, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001714297829930675, |
| "loss": 0.9329, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001713104484756765, |
| "loss": 1.1396, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017119090697705012, |
| "loss": 1.269, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001710711588441621, |
| "loss": 1.1882, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001709512044245858, |
| "loss": 1.0593, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017083104406649335, |
| "loss": 0.9855, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017071067811865476, |
| "loss": 1.2925, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017059010693043665, |
| "loss": 1.1894, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017046933085180136, |
| "loss": 0.9217, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017034835023330597, |
| "loss": 1.1415, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017022716542610127, |
| "loss": 1.0924, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00017010577678193067, |
| "loss": 1.2832, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00016998418465312922, |
| "loss": 0.8234, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001698623893926226, |
| "loss": 1.0568, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001697403913539261, |
| "loss": 1.3396, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00016961819089114362, |
| "loss": 1.249, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00016949578835896646, |
| "loss": 1.3629, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00016937318411267268, |
| "loss": 1.3083, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.0001692503785081255, |
| "loss": 1.0385, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016912737190177292, |
| "loss": 1.393, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016900416465064611, |
| "loss": 1.2389, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016888075711235872, |
| "loss": 1.0213, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016875714964510578, |
| "loss": 1.2462, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016863334260766255, |
| "loss": 1.2161, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016850933635938352, |
| "loss": 1.191, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016838513126020153, |
| "loss": 1.0732, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016826072767062646, |
| "loss": 0.7325, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016813612595174436, |
| "loss": 1.1928, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016801132646521638, |
| "loss": 1.2015, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016788632957327772, |
| "loss": 1.2422, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016776113563873648, |
| "loss": 0.8048, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.0001676357450249727, |
| "loss": 1.041, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016751015809593736, |
| "loss": 1.1517, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016738437521615117, |
| "loss": 0.9754, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016725839675070365, |
| "loss": 0.8443, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.000167132223065252, |
| "loss": 1.3241, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016700585452602, |
| "loss": 1.0755, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016687929149979708, |
| "loss": 1.1233, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016675253435393715, |
| "loss": 1.1966, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016662558345635753, |
| "loss": 1.5185, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016649843917553795, |
| "loss": 1.3418, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00016637110188051944, |
| "loss": 1.0663, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001662435719409032, |
| "loss": 1.2032, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016611584972684967, |
| "loss": 1.2022, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001659879356090773, |
| "loss": 1.0756, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016585982995886162, |
| "loss": 1.0288, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016573153314803407, |
| "loss": 1.2889, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001656030455489809, |
| "loss": 1.191, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001654743675346422, |
| "loss": 1.3033, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016534549947851062, |
| "loss": 1.0373, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016521644175463053, |
| "loss": 1.0724, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016508719473759683, |
| "loss": 0.8046, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016495775880255378, |
| "loss": 1.0738, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016482813432519398, |
| "loss": 1.0024, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016469832168175734, |
| "loss": 0.88, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016456832124902987, |
| "loss": 0.9519, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016443813340434273, |
| "loss": 0.8474, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016430775852557095, |
| "loss": 1.1408, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016417719699113244, |
| "loss": 0.902, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016404644917998698, |
| "loss": 1.1096, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016391551547163499, |
| "loss": 0.9255, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016378439624611634, |
| "loss": 1.1499, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016365309188400956, |
| "loss": 0.9418, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001635216027664304, |
| "loss": 1.0127, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00016338992927503098, |
| "loss": 0.8935, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016325807179199848, |
| "loss": 1.2696, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016312603070005416, |
| "loss": 1.1599, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016299380638245222, |
| "loss": 1.2544, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016286139922297866, |
| "loss": 1.1131, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016272880960595024, |
| "loss": 0.9249, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016259603791621323, |
| "loss": 1.1268, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016246308453914243, |
| "loss": 1.0301, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016232994986063991, |
| "loss": 1.2389, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016219663426713412, |
| "loss": 1.0197, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016206313814557852, |
| "loss": 1.0893, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016192946188345054, |
| "loss": 1.0444, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016179560586875057, |
| "loss": 1.1122, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016166157049000065, |
| "loss": 1.3133, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016152735613624345, |
| "loss": 1.1633, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016139296319704117, |
| "loss": 1.1313, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016125839206247434, |
| "loss": 1.1299, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016112364312314065, |
| "loss": 0.9512, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016098871677015393, |
| "loss": 1.0947, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016085361339514295, |
| "loss": 1.1431, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016071833339025032, |
| "loss": 0.9699, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016058287714813124, |
| "loss": 1.0614, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.0001604472450619526, |
| "loss": 1.309, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.0001603114375253915, |
| "loss": 0.9228, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00016017545493263443, |
| "loss": 1.3301, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00016003929767837588, |
| "loss": 1.2985, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015990296615781737, |
| "loss": 1.1141, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.0001597664607666663, |
| "loss": 1.0228, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015962978190113452, |
| "loss": 0.9883, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015949292995793768, |
| "loss": 0.8701, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015935590533429353, |
| "loss": 1.2218, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015921870842792123, |
| "loss": 1.1565, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015908133963703986, |
| "loss": 0.7912, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015894379936036752, |
| "loss": 1.2339, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015880608799711992, |
| "loss": 1.0096, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015866820594700944, |
| "loss": 1.1429, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015853015361024392, |
| "loss": 0.8691, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015839193138752537, |
| "loss": 1.2343, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015825353968004896, |
| "loss": 1.2665, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015811497888950176, |
| "loss": 1.0757, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015797624941806166, |
| "loss": 1.1863, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015783735166839612, |
| "loss": 0.888, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.000157698286043661, |
| "loss": 1.1358, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015755905294749948, |
| "loss": 0.9062, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015741965278404086, |
| "loss": 1.0632, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015728008595789926, |
| "loss": 1.2665, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015714035287417262, |
| "loss": 0.8746, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00015700045393844136, |
| "loss": 1.2927, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015686038955676747, |
| "loss": 1.2767, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015672016013569296, |
| "loss": 1.4846, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015657976608223897, |
| "loss": 1.0077, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015643920780390448, |
| "loss": 1.1175, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015629848570866513, |
| "loss": 1.201, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015615760020497204, |
| "loss": 1.0542, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015601655170175067, |
| "loss": 1.2379, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.0001558753406083995, |
| "loss": 1.1583, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015573396733478906, |
| "loss": 1.0846, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015559243229126048, |
| "loss": 1.0296, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015545073588862457, |
| "loss": 1.1944, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015530887853816038, |
| "loss": 1.1358, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015516686065161417, |
| "loss": 1.048, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015502468264119812, |
| "loss": 1.2631, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015488234491958927, |
| "loss": 1.0805, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.0001547398478999281, |
| "loss": 1.1348, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015459719199581757, |
| "loss": 1.1023, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015445437762132174, |
| "loss": 1.2496, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015431140519096462, |
| "loss": 0.8552, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.0001541682751197291, |
| "loss": 1.0058, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015402498782305548, |
| "loss": 1.2308, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.0001538815437168405, |
| "loss": 1.0761, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00015373794321743605, |
| "loss": 1.1198, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.0001535941867416479, |
| "loss": 1.1144, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015345027470673456, |
| "loss": 1.0435, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015330620753040616, |
| "loss": 0.8795, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.000153161985630823, |
| "loss": 0.9888, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.0001530176094265945, |
| "loss": 0.7235, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015287307933677802, |
| "loss": 1.0661, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.0001527283957808775, |
| "loss": 1.0423, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015258355917884236, |
| "loss": 1.1524, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015243856995106623, |
| "loss": 0.8811, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.0001522934285183858, |
| "loss": 0.905, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015214813530207944, |
| "loss": 1.1506, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015200269072386614, |
| "loss": 0.9856, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015185709520590417, |
| "loss": 1.1651, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015171134917079003, |
| "loss": 1.2696, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015156545304155698, |
| "loss": 1.0785, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.000151419407241674, |
| "loss": 1.2224, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.0001512732121950444, |
| "loss": 1.0825, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015112686832600484, |
| "loss": 1.2364, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015098037605932376, |
| "loss": 0.9494, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015083373582020052, |
| "loss": 1.2798, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015068694803426383, |
| "loss": 0.694, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.0001505400131275707, |
| "loss": 1.0668, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00015039293152660526, |
| "loss": 1.2511, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.0001502457036582772, |
| "loss": 0.8553, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00015009832994992102, |
| "loss": 1.2976, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014995081082929437, |
| "loss": 1.1229, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014980314672457696, |
| "loss": 0.9596, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.0001496553380643694, |
| "loss": 1.0995, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014950738527769178, |
| "loss": 1.1079, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014935928879398266, |
| "loss": 0.7743, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014921104904309755, |
| "loss": 1.1665, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014906266645530787, |
| "loss": 1.1426, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014891414146129961, |
| "loss": 1.0954, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014876547449217212, |
| "loss": 1.0341, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.0001486166659794368, |
| "loss": 1.1052, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014846771635501594, |
| "loss": 1.1636, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.0001483186260512413, |
| "loss": 1.1472, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014816939550085312, |
| "loss": 1.3896, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014802002513699858, |
| "loss": 1.1601, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.0001478705153932308, |
| "loss": 0.8679, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014772086670350736, |
| "loss": 1.0565, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014757107950218908, |
| "loss": 1.2605, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014742115422403904, |
| "loss": 1.1957, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014727109130422084, |
| "loss": 0.8931, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014712089117829776, |
| "loss": 1.5221, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014697055428223118, |
| "loss": 1.1861, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014682008105237966, |
| "loss": 1.2775, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00014666947192549726, |
| "loss": 1.1791, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014651872733873267, |
| "loss": 1.2382, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014636784772962763, |
| "loss": 1.1493, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014621683353611582, |
| "loss": 1.0587, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014606568519652158, |
| "loss": 1.0858, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014591440314955863, |
| "loss": 1.3016, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014576298783432868, |
| "loss": 1.407, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.0001456114396903204, |
| "loss": 1.0804, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014545975915740788, |
| "loss": 1.2282, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014530794667584956, |
| "loss": 1.0006, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014515600268628678, |
| "loss": 1.265, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014500392762974268, |
| "loss": 1.1789, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014485172194762078, |
| "loss": 0.7728, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014469938608170372, |
| "loss": 0.9393, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014454692047415207, |
| "loss": 1.1277, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014439432556750286, |
| "loss": 1.1549, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.0001442416018046686, |
| "loss": 1.1743, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.0001440887496289356, |
| "loss": 0.9329, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014393576948396308, |
| "loss": 1.1267, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.0001437826618137816, |
| "loss": 1.1975, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014362942706279186, |
| "loss": 1.0554, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014347606567576343, |
| "loss": 1.3932, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00014332257809783347, |
| "loss": 1.0105, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.0001431689647745054, |
| "loss": 1.1407, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014301522615164765, |
| "loss": 1.0894, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014286136267549224, |
| "loss": 0.9996, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014270737479263367, |
| "loss": 1.0836, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014255326295002754, |
| "loss": 1.2934, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014239902759498915, |
| "loss": 1.184, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014224466917519245, |
| "loss": 1.0338, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014209018813866845, |
| "loss": 0.8427, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014193558493380415, |
| "loss": 1.0827, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014178086000934113, |
| "loss": 1.2715, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014162601381437418, |
| "loss": 1.2018, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014147104679835024, |
| "loss": 1.396, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001413159594110668, |
| "loss": 1.2225, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014116075210267082, |
| "loss": 1.1727, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014100542532365724, |
| "loss": 0.9727, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001408499795248679, |
| "loss": 1.1066, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014069441515748992, |
| "loss": 1.2836, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014053873267305482, |
| "loss": 1.2468, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001403829325234367, |
| "loss": 1.0988, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001402270151608514, |
| "loss": 1.27, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00014007098103785477, |
| "loss": 1.3094, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001399148306073417, |
| "loss": 0.7418, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001397585643225447, |
| "loss": 1.2017, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00013960218263703243, |
| "loss": 1.1626, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.0001394456860047086, |
| "loss": 1.2494, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013928907487981048, |
| "loss": 1.2566, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013913234971690773, |
| "loss": 1.0832, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013897551097090096, |
| "loss": 1.0665, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.0001388185590970205, |
| "loss": 1.1717, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013866149455082495, |
| "loss": 1.2979, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.0001385043177882001, |
| "loss": 1.1943, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.0001383470292653573, |
| "loss": 1.3128, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013818962943883237, |
| "loss": 1.0029, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013803211876548422, |
| "loss": 1.0726, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013787449770249336, |
| "loss": 1.4438, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.0001377167667073609, |
| "loss": 0.8845, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013755892623790688, |
| "loss": 1.1164, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013740097675226913, |
| "loss": 1.0985, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013724291870890196, |
| "loss": 1.1328, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013708475256657472, |
| "loss": 0.8094, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013692647878437052, |
| "loss": 1.1899, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013676809782168495, |
| "loss": 1.0743, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013660961013822463, |
| "loss": 0.9542, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013645101619400596, |
| "loss": 1.2481, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.0001362923164493538, |
| "loss": 1.29, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013613351136490008, |
| "loss": 1.2064, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013597460140158247, |
| "loss": 1.162, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00013581558702064305, |
| "loss": 1.0602, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013565646868362701, |
| "loss": 1.2206, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013549724685238122, |
| "loss": 1.0215, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013533792198905305, |
| "loss": 0.9311, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013517849455608878, |
| "loss": 1.0644, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013501896501623252, |
| "loss": 1.236, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.0001348593338325247, |
| "loss": 1.0976, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013469960146830073, |
| "loss": 1.0747, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013453976838718987, |
| "loss": 1.1195, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.0001343798350531135, |
| "loss": 1.2661, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013421980193028412, |
| "loss": 1.3589, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013405966948320383, |
| "loss": 0.9807, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.000133899438176663, |
| "loss": 1.1368, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013373910847573904, |
| "loss": 1.1415, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.0001335786808457948, |
| "loss": 1.027, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013341815575247754, |
| "loss": 1.3233, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.0001332575336617173, |
| "loss": 1.0318, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013309681503972565, |
| "loss": 1.1125, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013293600035299438, |
| "loss": 1.1152, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013277509006829417, |
| "loss": 0.8569, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013261408465267308, |
| "loss": 1.2112, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013245298457345537, |
| "loss": 1.0152, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00013229179029823997, |
| "loss": 1.0359, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.0001321305022948993, |
| "loss": 1.2439, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.0001319691210315778, |
| "loss": 1.2524, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013180764697669065, |
| "loss": 0.7901, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013164608059892225, |
| "loss": 1.1626, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013148442236722506, |
| "loss": 1.1642, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013132267275081812, |
| "loss": 1.1882, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013116083221918575, |
| "loss": 0.9921, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013099890124207607, |
| "loss": 1.0634, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013083688028949983, |
| "loss": 1.0575, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013067476983172886, |
| "loss": 0.7774, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013051257033929483, |
| "loss": 1.1497, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013035028228298775, |
| "loss": 1.2153, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00013018790613385483, |
| "loss": 0.9838, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.0001300254423631989, |
| "loss": 1.0565, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00012986289144257705, |
| "loss": 1.3074, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.0001297002538437994, |
| "loss": 1.1343, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.0001295375300389277, |
| "loss": 1.0721, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00012937472050027378, |
| "loss": 1.1295, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00012921182570039847, |
| "loss": 1.0443, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00012904884611210994, |
| "loss": 0.7274, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.0001288857822084626, |
| "loss": 1.0291, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.0001287226344627554, |
| "loss": 1.2278, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00012855940334853086, |
| "loss": 1.2838, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.0001283960893395733, |
| "loss": 0.9653, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00012823269290990777, |
| "loss": 0.903, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012806921453379846, |
| "loss": 1.1787, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012790565468574747, |
| "loss": 1.1956, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012774201384049336, |
| "loss": 1.2115, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012757829247300975, |
| "loss": 1.2478, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012741449105850406, |
| "loss": 1.0137, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.000127250610072416, |
| "loss": 0.9397, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012708664999041625, |
| "loss": 1.1563, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.000126922611288405, |
| "loss": 1.2709, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012675849444251073, |
| "loss": 1.1755, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.0001265942999290887, |
| "loss": 1.2614, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012643002822471967, |
| "loss": 1.1363, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.0001262656798062083, |
| "loss": 0.9511, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012610125515058208, |
| "loss": 1.2596, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012593675473508958, |
| "loss": 0.8632, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012577217903719952, |
| "loss": 1.1799, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012560752853459895, |
| "loss": 1.1116, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012544280370519205, |
| "loss": 1.1188, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012527800502709887, |
| "loss": 1.0137, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012511313297865363, |
| "loss": 1.0204, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012494818803840367, |
| "loss": 1.2992, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012478317068510776, |
| "loss": 1.1653, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012461808139773494, |
| "loss": 0.9263, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00012445292065546305, |
| "loss": 1.0401, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012428768893767728, |
| "loss": 1.0902, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012412238672396888, |
| "loss": 1.2091, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012395701449413363, |
| "loss": 0.9188, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012379157272817066, |
| "loss": 0.8956, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.0001236260619062808, |
| "loss": 1.227, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012346048250886545, |
| "loss": 0.9034, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012329483501652492, |
| "loss": 0.9926, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012312911991005726, |
| "loss": 1.2983, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012296333767045678, |
| "loss": 0.765, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.0001227974887789125, |
| "loss": 1.1676, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012263157371680716, |
| "loss": 0.9539, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.0001224655929657153, |
| "loss": 1.227, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.0001222995470074023, |
| "loss": 1.0307, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.0001221334363238227, |
| "loss": 1.0954, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.000121967261397119, |
| "loss": 1.1854, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012180102270962005, |
| "loss": 1.155, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012163472074383994, |
| "loss": 0.7804, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012146835598247625, |
| "loss": 1.1191, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012130192890840893, |
| "loss": 1.0878, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012113544000469874, |
| "loss": 1.0806, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012096888975458595, |
| "loss": 1.0866, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012080227864148887, |
| "loss": 0.8719, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012063560714900249, |
| "loss": 1.1325, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00012046887576089699, |
| "loss": 1.0915, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00012030208496111646, |
| "loss": 1.053, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.0001201352352337774, |
| "loss": 1.0972, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011996832706316739, |
| "loss": 1.1301, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011980136093374359, |
| "loss": 1.2222, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011963433733013145, |
| "loss": 1.0343, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011946725673712318, |
| "loss": 1.2289, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011930011963967645, |
| "loss": 0.8809, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.0001191329265229129, |
| "loss": 1.0456, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011896567787211677, |
| "loss": 0.982, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011879837417273354, |
| "loss": 1.4084, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011863101591036842, |
| "loss": 1.2476, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011846360357078499, |
| "loss": 0.9267, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011829613763990384, |
| "loss": 1.3634, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011812861860380107, |
| "loss": 1.0939, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011796104694870686, |
| "loss": 1.21, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.0001177934231610043, |
| "loss": 1.2187, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011762574772722761, |
| "loss": 1.4067, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011745802113406101, |
| "loss": 1.1249, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.0001172902438683372, |
| "loss": 1.0712, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011712241641703591, |
| "loss": 1.0296, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.0001169545392672826, |
| "loss": 1.3177, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011678661290634696, |
| "loss": 1.0462, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00011661863782164153, |
| "loss": 0.9731, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011645061450072022, |
| "loss": 1.2532, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011628254343127698, |
| "loss": 1.0928, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011611442510114437, |
| "loss": 1.2552, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011594625999829213, |
| "loss": 1.1636, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011577804861082573, |
| "loss": 1.2689, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011560979142698498, |
| "loss": 0.982, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011544148893514264, |
| "loss": 1.016, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011527314162380298, |
| "loss": 1.1534, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.0001151047499816003, |
| "loss": 1.157, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011493631449729767, |
| "loss": 1.093, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011476783565978536, |
| "loss": 0.8906, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011459931395807943, |
| "loss": 0.9753, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011443074988132045, |
| "loss": 1.1915, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011426214391877193, |
| "loss": 0.9808, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011409349655981893, |
| "loss": 1.1079, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011392480829396674, |
| "loss": 1.2501, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011375607961083931, |
| "loss": 0.9037, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011358731100017797, |
| "loss": 1.312, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011341850295183986, |
| "loss": 0.8147, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011324965595579666, |
| "loss": 0.9961, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011308077050213304, |
| "loss": 0.7977, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011291184708104538, |
| "loss": 1.2475, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011274288618284016, |
| "loss": 1.0157, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00011257388829793274, |
| "loss": 0.7946, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011240485391684572, |
| "loss": 1.4047, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.0001122357835302077, |
| "loss": 1.0975, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011206667762875182, |
| "loss": 0.9613, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011189753670331424, |
| "loss": 1.0939, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011172836124483282, |
| "loss": 1.4617, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011155915174434561, |
| "loss": 0.9166, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.0001113899086929895, |
| "loss": 1.0673, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011122063258199877, |
| "loss": 1.3222, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011105132390270363, |
| "loss": 1.0569, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011088198314652884, |
| "loss": 1.4603, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011071261080499227, |
| "loss": 1.2668, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011054320736970343, |
| "loss": 1.0583, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011037377333236215, |
| "loss": 1.0224, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011020430918475694, |
| "loss": 1.0674, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00011003481541876392, |
| "loss": 1.1125, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00010986529252634503, |
| "loss": 1.2389, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00010969574099954676, |
| "loss": 1.1339, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00010952616133049877, |
| "loss": 1.1167, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00010935655401141236, |
| "loss": 1.2501, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00010918691953457906, |
| "loss": 1.0136, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00010901725839236931, |
| "loss": 1.2071, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00010884757107723091, |
| "loss": 1.1893, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.0001086778580816876, |
| "loss": 1.2503, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010850811989833766, |
| "loss": 0.8854, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010833835701985252, |
| "loss": 1.0585, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010816856993897522, |
| "loss": 1.0614, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010799875914851913, |
| "loss": 0.9775, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010782892514136643, |
| "loss": 0.8394, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.0001076590684104666, |
| "loss": 1.0794, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010748918944883517, |
| "loss": 1.2122, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010731928874955212, |
| "loss": 1.2122, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010714936680576062, |
| "loss": 1.1869, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010697942411066542, |
| "loss": 1.2394, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.0001068094611575315, |
| "loss": 1.1746, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010663947843968272, |
| "loss": 1.1461, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010646947645050023, |
| "loss": 1.3711, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.0001062994556834211, |
| "loss": 1.3087, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010612941663193703, |
| "loss": 1.0273, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010595935978959266, |
| "loss": 1.418, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010578928564998431, |
| "loss": 1.1015, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010561919470675854, |
| "loss": 1.353, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010544908745361064, |
| "loss": 1.0851, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.0001052789643842832, |
| "loss": 1.2279, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010510882599256486, |
| "loss": 1.0083, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.0001049386727722886, |
| "loss": 1.1582, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00010476850521733048, |
| "loss": 1.1348, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010459832382160819, |
| "loss": 1.1228, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010442812907907959, |
| "loss": 0.9685, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.0001042579214837412, |
| "loss": 0.8164, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010408770152962699, |
| "loss": 1.1839, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.0001039174697108067, |
| "loss": 0.7855, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010374722652138449, |
| "loss": 1.0066, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.0001035769724554976, |
| "loss": 1.2129, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.0001034067080073148, |
| "loss": 1.1711, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.000103236433671035, |
| "loss": 1.1091, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010306614994088582, |
| "loss": 0.866, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.0001028958573111221, |
| "loss": 1.2267, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010272555627602461, |
| "loss": 1.095, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010255524732989837, |
| "loss": 1.1093, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010238493096707149, |
| "loss": 0.9889, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010221460768189357, |
| "loss": 0.8531, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010204427796873431, |
| "loss": 1.3532, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010187394232198199, |
| "loss": 1.0877, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010170360123604223, |
| "loss": 1.0861, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010153325520533639, |
| "loss": 1.1346, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010136290472430013, |
| "loss": 1.0357, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010119255028738208, |
| "loss": 1.2121, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010102219238904238, |
| "loss": 1.002, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00010085183152375117, |
| "loss": 1.0372, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.0001006814681859872, |
| "loss": 1.0595, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.00010051110287023639, |
| "loss": 0.9421, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.00010034073607099046, |
| "loss": 0.6247, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.00010017036828274538, |
| "loss": 0.9493, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.0001, |
| "loss": 0.9664, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.982963171725465e-05, |
| "loss": 1.2865, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.965926392900956e-05, |
| "loss": 0.7547, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.948889712976363e-05, |
| "loss": 1.2287, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.931853181401284e-05, |
| "loss": 1.0907, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.914816847624887e-05, |
| "loss": 1.179, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.897780761095761e-05, |
| "loss": 1.1764, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.880744971261791e-05, |
| "loss": 0.9159, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.863709527569988e-05, |
| "loss": 0.8849, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.846674479466364e-05, |
| "loss": 1.0282, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.829639876395778e-05, |
| "loss": 1.1118, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.812605767801803e-05, |
| "loss": 0.9433, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.795572203126573e-05, |
| "loss": 0.9086, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.778539231810645e-05, |
| "loss": 0.9833, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.761506903292854e-05, |
| "loss": 0.9916, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.744475267010167e-05, |
| "loss": 1.1747, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.727444372397545e-05, |
| "loss": 1.195, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.710414268887794e-05, |
| "loss": 0.9873, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.693385005911418e-05, |
| "loss": 0.8866, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.676356632896499e-05, |
| "loss": 1.1247, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.659329199268521e-05, |
| "loss": 0.7404, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.642302754450241e-05, |
| "loss": 1.0853, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.625277347861553e-05, |
| "loss": 1.0899, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.608253028919334e-05, |
| "loss": 1.0111, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.591229847037304e-05, |
| "loss": 1.1721, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.574207851625882e-05, |
| "loss": 1.2101, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.557187092092046e-05, |
| "loss": 1.1141, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.540167617839185e-05, |
| "loss": 1.0819, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.523149478266956e-05, |
| "loss": 1.0057, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.50613272277114e-05, |
| "loss": 0.7989, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.489117400743515e-05, |
| "loss": 1.263, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.47210356157168e-05, |
| "loss": 1.073, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.455091254638939e-05, |
| "loss": 1.2279, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.438080529324147e-05, |
| "loss": 1.293, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.42107143500157e-05, |
| "loss": 1.0419, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.404064021040737e-05, |
| "loss": 0.9361, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.387058336806298e-05, |
| "loss": 1.0399, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.370054431657891e-05, |
| "loss": 0.9662, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.353052354949982e-05, |
| "loss": 1.1323, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.336052156031733e-05, |
| "loss": 0.9891, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.319053884246854e-05, |
| "loss": 1.0306, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.302057588933459e-05, |
| "loss": 1.1106, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.285063319423939e-05, |
| "loss": 1.3911, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 9.268071125044789e-05, |
| "loss": 1.1049, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.251081055116485e-05, |
| "loss": 1.1446, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.234093158953342e-05, |
| "loss": 1.1112, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.21710748586336e-05, |
| "loss": 1.1878, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.200124085148089e-05, |
| "loss": 1.1817, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.183143006102482e-05, |
| "loss": 1.1264, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.166164298014753e-05, |
| "loss": 0.7426, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.149188010166239e-05, |
| "loss": 1.1711, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.132214191831246e-05, |
| "loss": 1.1819, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.115242892276909e-05, |
| "loss": 0.9723, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.098274160763067e-05, |
| "loss": 1.1953, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.081308046542095e-05, |
| "loss": 0.9595, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.064344598858766e-05, |
| "loss": 1.0287, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.047383866950124e-05, |
| "loss": 1.1355, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.030425900045325e-05, |
| "loss": 1.145, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.013470747365499e-05, |
| "loss": 1.241, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.996518458123609e-05, |
| "loss": 1.2091, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.979569081524308e-05, |
| "loss": 1.1651, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.962622666763792e-05, |
| "loss": 1.1544, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.945679263029661e-05, |
| "loss": 1.05, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.928738919500778e-05, |
| "loss": 1.0607, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.911801685347116e-05, |
| "loss": 1.2142, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.894867609729637e-05, |
| "loss": 1.0566, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 8.877936741800124e-05, |
| "loss": 1.1484, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.86100913070105e-05, |
| "loss": 1.0945, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.844084825565441e-05, |
| "loss": 1.0325, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.827163875516719e-05, |
| "loss": 1.1401, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.810246329668577e-05, |
| "loss": 1.0555, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.793332237124821e-05, |
| "loss": 0.8981, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.776421646979232e-05, |
| "loss": 0.98, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.759514608315433e-05, |
| "loss": 1.1285, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.742611170206731e-05, |
| "loss": 1.081, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.725711381715982e-05, |
| "loss": 0.9129, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.708815291895463e-05, |
| "loss": 1.0526, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.691922949786694e-05, |
| "loss": 0.9587, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.675034404420336e-05, |
| "loss": 1.0827, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.658149704816016e-05, |
| "loss": 1.4554, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.641268899982205e-05, |
| "loss": 0.8975, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.62439203891607e-05, |
| "loss": 1.1196, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.607519170603328e-05, |
| "loss": 1.1912, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.590650344018109e-05, |
| "loss": 1.0219, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.57378560812281e-05, |
| "loss": 1.1012, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.556925011867959e-05, |
| "loss": 0.9395, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.540068604192061e-05, |
| "loss": 1.3183, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.523216434021465e-05, |
| "loss": 1.2039, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.506368550270233e-05, |
| "loss": 0.9193, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.489525001839971e-05, |
| "loss": 1.1518, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 8.472685837619705e-05, |
| "loss": 0.9257, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.455851106485737e-05, |
| "loss": 0.8687, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.439020857301503e-05, |
| "loss": 1.2404, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.422195138917429e-05, |
| "loss": 1.2448, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.405374000170789e-05, |
| "loss": 1.1107, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.388557489885564e-05, |
| "loss": 1.1859, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.371745656872307e-05, |
| "loss": 1.146, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.354938549927985e-05, |
| "loss": 1.2778, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.338136217835848e-05, |
| "loss": 0.991, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.321338709365303e-05, |
| "loss": 1.0912, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.30454607327174e-05, |
| "loss": 1.1444, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.28775835829641e-05, |
| "loss": 1.0618, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.270975613166281e-05, |
| "loss": 1.2061, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.2541978865939e-05, |
| "loss": 1.0856, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.23742522727724e-05, |
| "loss": 0.9508, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.220657683899572e-05, |
| "loss": 1.3039, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.203895305129315e-05, |
| "loss": 1.0446, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.187138139619898e-05, |
| "loss": 1.1663, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.17038623600962e-05, |
| "loss": 1.2062, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.153639642921504e-05, |
| "loss": 1.3417, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.136898408963159e-05, |
| "loss": 1.2049, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.120162582726647e-05, |
| "loss": 1.4107, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.103432212788323e-05, |
| "loss": 1.3767, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 8.086707347708712e-05, |
| "loss": 1.2324, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 8.069988036032356e-05, |
| "loss": 1.2088, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 8.053274326287683e-05, |
| "loss": 1.0909, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 8.036566266986857e-05, |
| "loss": 1.1629, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 8.019863906625644e-05, |
| "loss": 1.1966, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 8.003167293683265e-05, |
| "loss": 1.1442, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.986476476622265e-05, |
| "loss": 1.4479, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.969791503888359e-05, |
| "loss": 1.1156, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.953112423910306e-05, |
| "loss": 1.1783, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.936439285099752e-05, |
| "loss": 1.1635, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.919772135851114e-05, |
| "loss": 1.1025, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.903111024541406e-05, |
| "loss": 1.0834, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.886455999530128e-05, |
| "loss": 1.209, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.86980710915911e-05, |
| "loss": 1.1955, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.853164401752377e-05, |
| "loss": 0.9492, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.836527925616008e-05, |
| "loss": 0.8436, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.819897729037996e-05, |
| "loss": 1.2784, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.803273860288103e-05, |
| "loss": 1.1449, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.786656367617732e-05, |
| "loss": 1.2376, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.770045299259774e-05, |
| "loss": 1.1101, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.753440703428469e-05, |
| "loss": 0.9723, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.736842628319285e-05, |
| "loss": 1.1402, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.720251122108749e-05, |
| "loss": 0.953, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.703666232954325e-05, |
| "loss": 1.2305, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 7.687088008994274e-05, |
| "loss": 1.286, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.67051649834751e-05, |
| "loss": 1.1356, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.653951749113459e-05, |
| "loss": 0.9518, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.637393809371921e-05, |
| "loss": 0.9689, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.620842727182938e-05, |
| "loss": 1.0175, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.60429855058664e-05, |
| "loss": 1.1571, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.587761327603117e-05, |
| "loss": 0.9125, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.571231106232273e-05, |
| "loss": 0.9396, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.554707934453695e-05, |
| "loss": 0.8306, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.538191860226507e-05, |
| "loss": 1.1028, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.521682931489226e-05, |
| "loss": 1.2743, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.505181196159636e-05, |
| "loss": 1.1956, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.488686702134639e-05, |
| "loss": 1.1668, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.472199497290115e-05, |
| "loss": 1.3385, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.455719629480795e-05, |
| "loss": 1.1544, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.439247146540109e-05, |
| "loss": 0.9162, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.422782096280051e-05, |
| "loss": 1.4428, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.406324526491044e-05, |
| "loss": 1.0323, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.389874484941797e-05, |
| "loss": 1.1348, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.37343201937917e-05, |
| "loss": 1.0564, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.356997177528034e-05, |
| "loss": 1.0826, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.340570007091128e-05, |
| "loss": 1.185, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.324150555748928e-05, |
| "loss": 1.1517, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 7.307738871159504e-05, |
| "loss": 1.2076, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.291335000958379e-05, |
| "loss": 1.1973, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.274938992758403e-05, |
| "loss": 1.4511, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.258550894149596e-05, |
| "loss": 1.1513, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.242170752699027e-05, |
| "loss": 1.324, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.22579861595067e-05, |
| "loss": 1.0467, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.209434531425257e-05, |
| "loss": 0.9784, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.193078546620157e-05, |
| "loss": 1.4247, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.176730709009225e-05, |
| "loss": 1.1479, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.16039106604267e-05, |
| "loss": 1.1339, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.144059665146916e-05, |
| "loss": 1.2366, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.12773655372446e-05, |
| "loss": 1.0941, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.111421779153745e-05, |
| "loss": 1.105, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.095115388789007e-05, |
| "loss": 1.1587, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.078817429960157e-05, |
| "loss": 1.1151, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.062527949972624e-05, |
| "loss": 1.0282, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.046246996107235e-05, |
| "loss": 1.1533, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.029974615620061e-05, |
| "loss": 1.1223, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 7.013710855742299e-05, |
| "loss": 1.0735, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 6.997455763680112e-05, |
| "loss": 1.3472, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 6.981209386614516e-05, |
| "loss": 1.2571, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 6.964971771701226e-05, |
| "loss": 1.013, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 6.94874296607052e-05, |
| "loss": 1.037, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 6.932523016827117e-05, |
| "loss": 1.1483, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.91631197105002e-05, |
| "loss": 0.8704, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.900109875792396e-05, |
| "loss": 1.3501, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.883916778081429e-05, |
| "loss": 1.3063, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.867732724918187e-05, |
| "loss": 1.2175, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.851557763277495e-05, |
| "loss": 0.9658, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.835391940107776e-05, |
| "loss": 1.0277, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.819235302330937e-05, |
| "loss": 0.9042, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.803087896842217e-05, |
| "loss": 1.3162, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.786949770510071e-05, |
| "loss": 1.1425, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.770820970176005e-05, |
| "loss": 1.1996, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.754701542654467e-05, |
| "loss": 1.2822, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.738591534732694e-05, |
| "loss": 1.1266, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.722490993170586e-05, |
| "loss": 0.9503, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.706399964700565e-05, |
| "loss": 1.2892, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.690318496027438e-05, |
| "loss": 0.9244, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.674246633828273e-05, |
| "loss": 0.8672, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.658184424752247e-05, |
| "loss": 0.8661, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.64213191542052e-05, |
| "loss": 1.121, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.626089152426097e-05, |
| "loss": 1.1323, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.610056182333699e-05, |
| "loss": 1.1832, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.594033051679619e-05, |
| "loss": 1.189, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.57801980697159e-05, |
| "loss": 1.0141, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.562016494688652e-05, |
| "loss": 1.2057, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 6.546023161281016e-05, |
| "loss": 1.046, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.530039853169927e-05, |
| "loss": 0.9373, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.514066616747534e-05, |
| "loss": 1.1819, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.49810349837675e-05, |
| "loss": 1.2527, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.482150544391126e-05, |
| "loss": 1.272, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.4662078010947e-05, |
| "loss": 0.9768, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.45027531476188e-05, |
| "loss": 1.4034, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.434353131637301e-05, |
| "loss": 0.9545, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.418441297935695e-05, |
| "loss": 1.0651, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.402539859841754e-05, |
| "loss": 0.7663, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.386648863509994e-05, |
| "loss": 1.1843, |
| "step": 1639 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.370768355064621e-05, |
| "loss": 1.1884, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.354898380599405e-05, |
| "loss": 0.9571, |
| "step": 1641 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.339038986177539e-05, |
| "loss": 1.1781, |
| "step": 1642 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.323190217831508e-05, |
| "loss": 0.9545, |
| "step": 1643 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.307352121562949e-05, |
| "loss": 0.9911, |
| "step": 1644 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.291524743342531e-05, |
| "loss": 0.9503, |
| "step": 1645 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.275708129109808e-05, |
| "loss": 1.1799, |
| "step": 1646 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.259902324773088e-05, |
| "loss": 0.8671, |
| "step": 1647 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.244107376209315e-05, |
| "loss": 1.3534, |
| "step": 1648 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.228323329263913e-05, |
| "loss": 0.9702, |
| "step": 1649 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.212550229750664e-05, |
| "loss": 0.8386, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.196788123451582e-05, |
| "loss": 1.2596, |
| "step": 1651 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 6.181037056116764e-05, |
| "loss": 1.3026, |
| "step": 1652 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.16529707346427e-05, |
| "loss": 0.8332, |
| "step": 1653 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.149568221179993e-05, |
| "loss": 0.9971, |
| "step": 1654 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.133850544917507e-05, |
| "loss": 1.0905, |
| "step": 1655 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.118144090297955e-05, |
| "loss": 1.1752, |
| "step": 1656 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.102448902909907e-05, |
| "loss": 0.9426, |
| "step": 1657 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.086765028309229e-05, |
| "loss": 1.1999, |
| "step": 1658 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.071092512018952e-05, |
| "loss": 1.037, |
| "step": 1659 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.0554313995291414e-05, |
| "loss": 1.1105, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.039781736296757e-05, |
| "loss": 1.0812, |
| "step": 1661 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.02414356774553e-05, |
| "loss": 0.9912, |
| "step": 1662 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 6.008516939265831e-05, |
| "loss": 1.2236, |
| "step": 1663 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.992901896214526e-05, |
| "loss": 1.3107, |
| "step": 1664 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.977298483914864e-05, |
| "loss": 1.0889, |
| "step": 1665 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.9617067476563306e-05, |
| "loss": 1.2452, |
| "step": 1666 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.94612673269452e-05, |
| "loss": 0.9633, |
| "step": 1667 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.9305584842510076e-05, |
| "loss": 1.1219, |
| "step": 1668 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.9150020475132164e-05, |
| "loss": 0.773, |
| "step": 1669 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.899457467634277e-05, |
| "loss": 1.135, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.883924789732922e-05, |
| "loss": 1.2301, |
| "step": 1671 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.868404058893322e-05, |
| "loss": 1.0682, |
| "step": 1672 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.8528953201649786e-05, |
| "loss": 0.8917, |
| "step": 1673 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.837398618562584e-05, |
| "loss": 1.2149, |
| "step": 1674 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.821913999065889e-05, |
| "loss": 0.8351, |
| "step": 1675 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 5.8064415066195876e-05, |
| "loss": 1.2609, |
| "step": 1676 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.7909811861331564e-05, |
| "loss": 1.1192, |
| "step": 1677 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.77553308248076e-05, |
| "loss": 1.1267, |
| "step": 1678 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.760097240501088e-05, |
| "loss": 1.1993, |
| "step": 1679 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.744673704997253e-05, |
| "loss": 1.2612, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.729262520736637e-05, |
| "loss": 1.2178, |
| "step": 1681 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.713863732450779e-05, |
| "loss": 1.0791, |
| "step": 1682 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.698477384835237e-05, |
| "loss": 1.3996, |
| "step": 1683 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.68310352254946e-05, |
| "loss": 1.2123, |
| "step": 1684 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.667742190216653e-05, |
| "loss": 0.9385, |
| "step": 1685 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.652393432423659e-05, |
| "loss": 0.806, |
| "step": 1686 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.6370572937208165e-05, |
| "loss": 1.1147, |
| "step": 1687 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.621733818621844e-05, |
| "loss": 0.9738, |
| "step": 1688 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.6064230516036934e-05, |
| "loss": 1.1287, |
| "step": 1689 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.591125037106444e-05, |
| "loss": 1.1948, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.575839819533145e-05, |
| "loss": 1.1239, |
| "step": 1691 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.560567443249718e-05, |
| "loss": 0.9468, |
| "step": 1692 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.545307952584795e-05, |
| "loss": 1.1759, |
| "step": 1693 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.5300613918296295e-05, |
| "loss": 1.2031, |
| "step": 1694 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.514827805237921e-05, |
| "loss": 1.1553, |
| "step": 1695 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.499607237025732e-05, |
| "loss": 0.9802, |
| "step": 1696 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.484399731371321e-05, |
| "loss": 1.0814, |
| "step": 1697 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.4692053324150485e-05, |
| "loss": 1.0263, |
| "step": 1698 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 5.4540240842592136e-05, |
| "loss": 1.1081, |
| "step": 1699 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.438856030967965e-05, |
| "loss": 0.925, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.423701216567134e-05, |
| "loss": 1.053, |
| "step": 1701 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.408559685044143e-05, |
| "loss": 0.9137, |
| "step": 1702 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.393431480347846e-05, |
| "loss": 0.8246, |
| "step": 1703 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.378316646388424e-05, |
| "loss": 0.7287, |
| "step": 1704 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.363215227037237e-05, |
| "loss": 1.1275, |
| "step": 1705 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.348127266126735e-05, |
| "loss": 1.0573, |
| "step": 1706 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.333052807450273e-05, |
| "loss": 1.1948, |
| "step": 1707 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.317991894762038e-05, |
| "loss": 1.063, |
| "step": 1708 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.302944571776882e-05, |
| "loss": 0.9115, |
| "step": 1709 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.287910882170231e-05, |
| "loss": 1.1256, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.272890869577918e-05, |
| "loss": 1.0871, |
| "step": 1711 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.257884577596102e-05, |
| "loss": 1.1788, |
| "step": 1712 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.2428920497810916e-05, |
| "loss": 1.0992, |
| "step": 1713 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.227913329649271e-05, |
| "loss": 1.0316, |
| "step": 1714 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.212948460676923e-05, |
| "loss": 0.8978, |
| "step": 1715 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.1979974863001425e-05, |
| "loss": 1.0852, |
| "step": 1716 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.1830604499146896e-05, |
| "loss": 1.0023, |
| "step": 1717 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.168137394875874e-05, |
| "loss": 0.9693, |
| "step": 1718 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.15322836449841e-05, |
| "loss": 1.3758, |
| "step": 1719 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.1383334020563235e-05, |
| "loss": 1.0946, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.123452550782789e-05, |
| "loss": 1.12, |
| "step": 1721 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.1085858538700415e-05, |
| "loss": 1.1558, |
| "step": 1722 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 5.093733354469215e-05, |
| "loss": 1.2351, |
| "step": 1723 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 5.078895095690249e-05, |
| "loss": 0.882, |
| "step": 1724 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 5.064071120601737e-05, |
| "loss": 1.257, |
| "step": 1725 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 5.0492614722308264e-05, |
| "loss": 1.0271, |
| "step": 1726 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 5.0344661935630655e-05, |
| "loss": 0.978, |
| "step": 1727 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 5.0196853275423075e-05, |
| "loss": 1.1042, |
| "step": 1728 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 5.0049189170705645e-05, |
| "loss": 0.796, |
| "step": 1729 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.9901670050078995e-05, |
| "loss": 1.0308, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.975429634172279e-05, |
| "loss": 1.0194, |
| "step": 1731 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.960706847339479e-05, |
| "loss": 1.0636, |
| "step": 1732 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.94599868724293e-05, |
| "loss": 1.0391, |
| "step": 1733 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.931305196573621e-05, |
| "loss": 1.1939, |
| "step": 1734 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.9166264179799506e-05, |
| "loss": 1.2227, |
| "step": 1735 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.901962394067627e-05, |
| "loss": 0.9324, |
| "step": 1736 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.887313167399521e-05, |
| "loss": 0.9211, |
| "step": 1737 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.8726787804955644e-05, |
| "loss": 1.1125, |
| "step": 1738 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.8580592758326004e-05, |
| "loss": 1.0388, |
| "step": 1739 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.8434546958443026e-05, |
| "loss": 0.9686, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.828865082920996e-05, |
| "loss": 0.9743, |
| "step": 1741 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.8142904794095835e-05, |
| "loss": 0.9522, |
| "step": 1742 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.7997309276133886e-05, |
| "loss": 1.4966, |
| "step": 1743 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.78518646979206e-05, |
| "loss": 0.9853, |
| "step": 1744 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.770657148161423e-05, |
| "loss": 1.2263, |
| "step": 1745 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 4.7561430048933795e-05, |
| "loss": 0.8788, |
| "step": 1746 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.741644082115767e-05, |
| "loss": 1.3248, |
| "step": 1747 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.727160421912256e-05, |
| "loss": 0.8355, |
| "step": 1748 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.712692066322202e-05, |
| "loss": 0.7953, |
| "step": 1749 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.698239057340555e-05, |
| "loss": 0.9706, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.6838014369177006e-05, |
| "loss": 1.2941, |
| "step": 1751 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.6693792469593855e-05, |
| "loss": 1.1932, |
| "step": 1752 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.654972529326542e-05, |
| "loss": 1.2877, |
| "step": 1753 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.6405813258352135e-05, |
| "loss": 1.2409, |
| "step": 1754 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.626205678256398e-05, |
| "loss": 1.1917, |
| "step": 1755 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.611845628315954e-05, |
| "loss": 1.3359, |
| "step": 1756 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.5975012176944545e-05, |
| "loss": 1.2, |
| "step": 1757 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.5831724880270956e-05, |
| "loss": 1.1246, |
| "step": 1758 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.5688594809035387e-05, |
| "loss": 1.196, |
| "step": 1759 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.5545622378678324e-05, |
| "loss": 1.1785, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.540280800418246e-05, |
| "loss": 1.22, |
| "step": 1761 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.526015210007191e-05, |
| "loss": 0.9871, |
| "step": 1762 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.511765508041073e-05, |
| "loss": 1.209, |
| "step": 1763 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.4975317358801885e-05, |
| "loss": 1.393, |
| "step": 1764 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.483313934838585e-05, |
| "loss": 1.1583, |
| "step": 1765 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.4691121461839646e-05, |
| "loss": 0.8574, |
| "step": 1766 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.4549264111375446e-05, |
| "loss": 1.1112, |
| "step": 1767 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.440756770873954e-05, |
| "loss": 1.1323, |
| "step": 1768 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.426603266521096e-05, |
| "loss": 1.0765, |
| "step": 1769 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 4.4124659391600546e-05, |
| "loss": 1.0914, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.3983448298249375e-05, |
| "loss": 1.0969, |
| "step": 1771 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.3842399795028014e-05, |
| "loss": 1.1876, |
| "step": 1772 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.370151429133491e-05, |
| "loss": 1.0217, |
| "step": 1773 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.3560792196095543e-05, |
| "loss": 1.2798, |
| "step": 1774 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.3420233917761044e-05, |
| "loss": 1.0224, |
| "step": 1775 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.327983986430708e-05, |
| "loss": 0.9387, |
| "step": 1776 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.3139610443232545e-05, |
| "loss": 1.4628, |
| "step": 1777 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.299954606155866e-05, |
| "loss": 1.1904, |
| "step": 1778 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.2859647125827416e-05, |
| "loss": 1.2621, |
| "step": 1779 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.2719914042100786e-05, |
| "loss": 0.9116, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.258034721595917e-05, |
| "loss": 1.1286, |
| "step": 1781 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.2440947052500556e-05, |
| "loss": 0.9446, |
| "step": 1782 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.2301713956339036e-05, |
| "loss": 1.0702, |
| "step": 1783 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.216264833160396e-05, |
| "loss": 1.2026, |
| "step": 1784 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.202375058193839e-05, |
| "loss": 1.0291, |
| "step": 1785 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.188502111049827e-05, |
| "loss": 1.0129, |
| "step": 1786 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.1746460319951064e-05, |
| "loss": 1.2207, |
| "step": 1787 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.160806861247467e-05, |
| "loss": 1.2272, |
| "step": 1788 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.14698463897561e-05, |
| "loss": 1.2714, |
| "step": 1789 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.133179405299058e-05, |
| "loss": 1.1545, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.119391200288011e-05, |
| "loss": 1.283, |
| "step": 1791 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.105620063963253e-05, |
| "loss": 0.8939, |
| "step": 1792 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 4.0918660362960146e-05, |
| "loss": 1.1717, |
| "step": 1793 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 4.0781291572078806e-05, |
| "loss": 0.8863, |
| "step": 1794 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 4.064409466570648e-05, |
| "loss": 0.8534, |
| "step": 1795 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 4.050707004206234e-05, |
| "loss": 1.1289, |
| "step": 1796 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 4.0370218098865466e-05, |
| "loss": 1.3018, |
| "step": 1797 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 4.023353923333375e-05, |
| "loss": 1.2786, |
| "step": 1798 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 4.009703384218263e-05, |
| "loss": 1.0018, |
| "step": 1799 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.996070232162417e-05, |
| "loss": 1.1475, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.9824545067365616e-05, |
| "loss": 1.3934, |
| "step": 1801 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.968856247460854e-05, |
| "loss": 1.2567, |
| "step": 1802 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.955275493804742e-05, |
| "loss": 1.3297, |
| "step": 1803 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.941712285186878e-05, |
| "loss": 1.0854, |
| "step": 1804 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.928166660974971e-05, |
| "loss": 1.0247, |
| "step": 1805 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.914638660485709e-05, |
| "loss": 1.5121, |
| "step": 1806 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.901128322984611e-05, |
| "loss": 0.8363, |
| "step": 1807 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.8876356876859375e-05, |
| "loss": 0.9047, |
| "step": 1808 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.874160793752566e-05, |
| "loss": 0.9844, |
| "step": 1809 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.860703680295883e-05, |
| "loss": 0.9935, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.847264386375654e-05, |
| "loss": 1.1391, |
| "step": 1811 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.8338429509999376e-05, |
| "loss": 1.5679, |
| "step": 1812 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.8204394131249436e-05, |
| "loss": 1.4066, |
| "step": 1813 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.807053811654948e-05, |
| "loss": 1.3245, |
| "step": 1814 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.79368618544215e-05, |
| "loss": 0.9457, |
| "step": 1815 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 3.780336573286591e-05, |
| "loss": 1.0076, |
| "step": 1816 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.76700501393601e-05, |
| "loss": 1.0894, |
| "step": 1817 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.75369154608576e-05, |
| "loss": 1.0674, |
| "step": 1818 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.7403962083786804e-05, |
| "loss": 1.0465, |
| "step": 1819 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.727119039404977e-05, |
| "loss": 1.0933, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.713860077702133e-05, |
| "loss": 0.9516, |
| "step": 1821 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.70061936175478e-05, |
| "loss": 1.1437, |
| "step": 1822 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.6873969299945854e-05, |
| "loss": 0.9869, |
| "step": 1823 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.674192820800156e-05, |
| "loss": 0.7317, |
| "step": 1824 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.661007072496904e-05, |
| "loss": 1.2995, |
| "step": 1825 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.6478397233569625e-05, |
| "loss": 1.2068, |
| "step": 1826 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.634690811599046e-05, |
| "loss": 0.8555, |
| "step": 1827 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.62156037538837e-05, |
| "loss": 0.7751, |
| "step": 1828 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.608448452836506e-05, |
| "loss": 1.1114, |
| "step": 1829 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.5953550820013014e-05, |
| "loss": 1.2707, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.582280300886758e-05, |
| "loss": 1.0908, |
| "step": 1831 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.569224147442908e-05, |
| "loss": 0.9854, |
| "step": 1832 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.556186659565727e-05, |
| "loss": 0.8007, |
| "step": 1833 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.543167875097013e-05, |
| "loss": 0.8837, |
| "step": 1834 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.530167831824266e-05, |
| "loss": 0.8803, |
| "step": 1835 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.517186567480605e-05, |
| "loss": 1.088, |
| "step": 1836 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.5042241197446247e-05, |
| "loss": 1.2759, |
| "step": 1837 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.49128052624032e-05, |
| "loss": 1.1598, |
| "step": 1838 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.478355824536948e-05, |
| "loss": 1.1513, |
| "step": 1839 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 3.46545005214894e-05, |
| "loss": 1.1851, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.452563246535785e-05, |
| "loss": 1.0903, |
| "step": 1841 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.43969544510191e-05, |
| "loss": 0.9046, |
| "step": 1842 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.426846685196592e-05, |
| "loss": 0.9698, |
| "step": 1843 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.4140170041138385e-05, |
| "loss": 1.2183, |
| "step": 1844 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.401206439092271e-05, |
| "loss": 1.1019, |
| "step": 1845 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.388415027315037e-05, |
| "loss": 1.2591, |
| "step": 1846 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.375642805909682e-05, |
| "loss": 1.0328, |
| "step": 1847 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.362889811948061e-05, |
| "loss": 1.1625, |
| "step": 1848 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.350156082446206e-05, |
| "loss": 1.0486, |
| "step": 1849 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.3374416543642504e-05, |
| "loss": 1.3249, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.324746564606288e-05, |
| "loss": 0.958, |
| "step": 1851 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.3120708500202925e-05, |
| "loss": 1.1532, |
| "step": 1852 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.299414547398003e-05, |
| "loss": 1.2116, |
| "step": 1853 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.286777693474803e-05, |
| "loss": 1.2181, |
| "step": 1854 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.274160324929635e-05, |
| "loss": 0.9852, |
| "step": 1855 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.2615624783848855e-05, |
| "loss": 1.0954, |
| "step": 1856 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.248984190406266e-05, |
| "loss": 1.2369, |
| "step": 1857 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.236425497502733e-05, |
| "loss": 0.9774, |
| "step": 1858 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.223886436126355e-05, |
| "loss": 1.0015, |
| "step": 1859 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.211367042672232e-05, |
| "loss": 1.2828, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.198867353478363e-05, |
| "loss": 1.2887, |
| "step": 1861 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.186387404825565e-05, |
| "loss": 0.9122, |
| "step": 1862 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 3.173927232937358e-05, |
| "loss": 1.2245, |
| "step": 1863 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.1614868739798495e-05, |
| "loss": 1.1962, |
| "step": 1864 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.1490663640616505e-05, |
| "loss": 1.265, |
| "step": 1865 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.1366657392337484e-05, |
| "loss": 1.015, |
| "step": 1866 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.1242850354894217e-05, |
| "loss": 1.0354, |
| "step": 1867 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.1119242887641286e-05, |
| "loss": 1.3527, |
| "step": 1868 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.099583534935391e-05, |
| "loss": 1.0231, |
| "step": 1869 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.087262809822712e-05, |
| "loss": 0.9154, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.0749621491874504e-05, |
| "loss": 1.1116, |
| "step": 1871 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.062681588732738e-05, |
| "loss": 0.9846, |
| "step": 1872 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.0504211641033544e-05, |
| "loss": 1.2489, |
| "step": 1873 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.0381809108856398e-05, |
| "loss": 0.8906, |
| "step": 1874 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.0259608646073912e-05, |
| "loss": 1.06, |
| "step": 1875 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.0137610607377408e-05, |
| "loss": 1.231, |
| "step": 1876 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.0015815346870813e-05, |
| "loss": 1.0565, |
| "step": 1877 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.989422321806935e-05, |
| "loss": 1.157, |
| "step": 1878 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.9772834573898724e-05, |
| "loss": 1.0414, |
| "step": 1879 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.9651649766694034e-05, |
| "loss": 0.9937, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.9530669148198653e-05, |
| "loss": 1.0488, |
| "step": 1881 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.940989306956339e-05, |
| "loss": 1.1514, |
| "step": 1882 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.9289321881345254e-05, |
| "loss": 1.2338, |
| "step": 1883 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.9168955933506648e-05, |
| "loss": 1.123, |
| "step": 1884 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.9048795575414243e-05, |
| "loss": 1.3733, |
| "step": 1885 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.8928841155837917e-05, |
| "loss": 1.0975, |
| "step": 1886 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.8809093022949886e-05, |
| "loss": 1.3217, |
| "step": 1887 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.868955152432352e-05, |
| "loss": 1.1087, |
| "step": 1888 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.8570217006932497e-05, |
| "loss": 0.9647, |
| "step": 1889 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.8451089817149746e-05, |
| "loss": 1.2487, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.833217030074631e-05, |
| "loss": 1.1715, |
| "step": 1891 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.821345880289058e-05, |
| "loss": 1.0353, |
| "step": 1892 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.8094955668147037e-05, |
| "loss": 1.2783, |
| "step": 1893 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.79766612404755e-05, |
| "loss": 1.1498, |
| "step": 1894 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.785857586322995e-05, |
| "loss": 1.2549, |
| "step": 1895 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.7740699879157574e-05, |
| "loss": 0.8585, |
| "step": 1896 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.7623033630397844e-05, |
| "loss": 1.1649, |
| "step": 1897 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.750557745848139e-05, |
| "loss": 1.1877, |
| "step": 1898 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.7388331704329175e-05, |
| "loss": 1.1039, |
| "step": 1899 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.7271296708251325e-05, |
| "loss": 1.1846, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.7154472809946307e-05, |
| "loss": 1.1342, |
| "step": 1901 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.7037860348499866e-05, |
| "loss": 1.1426, |
| "step": 1902 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.6921459662383964e-05, |
| "loss": 0.9268, |
| "step": 1903 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.6805271089455986e-05, |
| "loss": 1.3325, |
| "step": 1904 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.6689294966957545e-05, |
| "loss": 0.9903, |
| "step": 1905 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.657353163151368e-05, |
| "loss": 1.1748, |
| "step": 1906 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.645798141913183e-05, |
| "loss": 1.0888, |
| "step": 1907 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.6342644665200735e-05, |
| "loss": 0.9025, |
| "step": 1908 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.6227521704489666e-05, |
| "loss": 1.1034, |
| "step": 1909 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.6112612871147268e-05, |
| "loss": 1.1873, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.5997918498700758e-05, |
| "loss": 1.1197, |
| "step": 1911 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.588343892005478e-05, |
| "loss": 1.1852, |
| "step": 1912 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.576917446749061e-05, |
| "loss": 1.1167, |
| "step": 1913 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.565512547266511e-05, |
| "loss": 1.0624, |
| "step": 1914 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.554129226660966e-05, |
| "loss": 1.038, |
| "step": 1915 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.542767517972945e-05, |
| "loss": 1.0595, |
| "step": 1916 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.531427454180231e-05, |
| "loss": 1.0322, |
| "step": 1917 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.5201090681977767e-05, |
| "loss": 1.0181, |
| "step": 1918 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.5088123928776262e-05, |
| "loss": 1.1169, |
| "step": 1919 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.4975374610087952e-05, |
| "loss": 1.1515, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.486284305317198e-05, |
| "loss": 0.8953, |
| "step": 1921 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.4750529584655336e-05, |
| "loss": 1.3818, |
| "step": 1922 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.4638434530532118e-05, |
| "loss": 1.1907, |
| "step": 1923 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.4526558216162322e-05, |
| "loss": 1.0083, |
| "step": 1924 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.441490096627116e-05, |
| "loss": 1.0517, |
| "step": 1925 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.4303463104948e-05, |
| "loss": 1.0965, |
| "step": 1926 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.4192244955645315e-05, |
| "loss": 1.1296, |
| "step": 1927 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.4081246841177973e-05, |
| "loss": 1.0281, |
| "step": 1928 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.397046908372216e-05, |
| "loss": 1.3342, |
| "step": 1929 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.3859912004814387e-05, |
| "loss": 1.2383, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.3749575925350774e-05, |
| "loss": 1.2479, |
| "step": 1931 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.363946116558583e-05, |
| "loss": 1.2529, |
| "step": 1932 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.35295680451318e-05, |
| "loss": 1.1776, |
| "step": 1933 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.3419896882957527e-05, |
| "loss": 1.1951, |
| "step": 1934 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.3310447997387663e-05, |
| "loss": 0.9308, |
| "step": 1935 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.3201221706101716e-05, |
| "loss": 1.062, |
| "step": 1936 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.309221832613301e-05, |
| "loss": 1.0434, |
| "step": 1937 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2983438173867934e-05, |
| "loss": 1.1451, |
| "step": 1938 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.287488156504499e-05, |
| "loss": 0.7659, |
| "step": 1939 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2766548814753695e-05, |
| "loss": 0.9682, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2658440237433987e-05, |
| "loss": 1.2541, |
| "step": 1941 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2550556146874958e-05, |
| "loss": 0.9931, |
| "step": 1942 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2442896856214256e-05, |
| "loss": 1.1053, |
| "step": 1943 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2335462677936957e-05, |
| "loss": 1.051, |
| "step": 1944 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2228253923874807e-05, |
| "loss": 1.2562, |
| "step": 1945 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.212127090520516e-05, |
| "loss": 1.104, |
| "step": 1946 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.2014513932450243e-05, |
| "loss": 1.2474, |
| "step": 1947 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1907983315476176e-05, |
| "loss": 1.1571, |
| "step": 1948 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.180167936349199e-05, |
| "loss": 0.9069, |
| "step": 1949 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1695602385048908e-05, |
| "loss": 1.2766, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1589752688039323e-05, |
| "loss": 1.1813, |
| "step": 1951 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1484130579695883e-05, |
| "loss": 0.591, |
| "step": 1952 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1378736366590758e-05, |
| "loss": 0.8918, |
| "step": 1953 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1273570354634508e-05, |
| "loss": 1.0874, |
| "step": 1954 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1168632849075476e-05, |
| "loss": 0.86, |
| "step": 1955 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.1063924154498626e-05, |
| "loss": 1.1623, |
| "step": 1956 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.095944457482488e-05, |
| "loss": 1.1211, |
| "step": 1957 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.0855194413310096e-05, |
| "loss": 1.0461, |
| "step": 1958 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.0751173972544258e-05, |
| "loss": 1.1152, |
| "step": 1959 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.064738355445057e-05, |
| "loss": 1.2621, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.0543823460284626e-05, |
| "loss": 0.8185, |
| "step": 1961 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.0440493990633414e-05, |
| "loss": 0.8736, |
| "step": 1962 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.033739544541462e-05, |
| "loss": 0.8936, |
| "step": 1963 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.023452812387555e-05, |
| "loss": 1.2223, |
| "step": 1964 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.0131892324592506e-05, |
| "loss": 1.2886, |
| "step": 1965 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.002948834546966e-05, |
| "loss": 1.3925, |
| "step": 1966 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.9927316483738445e-05, |
| "loss": 1.1146, |
| "step": 1967 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.982537703595644e-05, |
| "loss": 0.939, |
| "step": 1968 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.9723670298006737e-05, |
| "loss": 0.8487, |
| "step": 1969 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.9622196565096895e-05, |
| "loss": 1.1439, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.952095613175823e-05, |
| "loss": 1.0104, |
| "step": 1971 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.9419949291844864e-05, |
| "loss": 1.1069, |
| "step": 1972 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.931917633853295e-05, |
| "loss": 1.364, |
| "step": 1973 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.9218637564319696e-05, |
| "loss": 1.3842, |
| "step": 1974 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.911833326102268e-05, |
| "loss": 1.1586, |
| "step": 1975 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.901826371977885e-05, |
| "loss": 0.8457, |
| "step": 1976 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.891842923104381e-05, |
| "loss": 0.9269, |
| "step": 1977 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.8818830084590845e-05, |
| "loss": 1.0507, |
| "step": 1978 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.8719466569510245e-05, |
| "loss": 1.333, |
| "step": 1979 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.8620338974208262e-05, |
| "loss": 1.118, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.852144758640647e-05, |
| "loss": 1.2172, |
| "step": 1981 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.842279269314079e-05, |
| "loss": 1.007, |
| "step": 1982 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.8324374580760774e-05, |
| "loss": 1.1292, |
| "step": 1983 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.8226193534928604e-05, |
| "loss": 1.0928, |
| "step": 1984 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.8128249840618473e-05, |
| "loss": 0.8664, |
| "step": 1985 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.803054378211555e-05, |
| "loss": 1.0319, |
| "step": 1986 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.7933075643015383e-05, |
| "loss": 1.2775, |
| "step": 1987 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.783584570622281e-05, |
| "loss": 1.1305, |
| "step": 1988 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.773885425395141e-05, |
| "loss": 1.1049, |
| "step": 1989 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.7642101567722425e-05, |
| "loss": 0.8339, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.7545587928364195e-05, |
| "loss": 0.9583, |
| "step": 1991 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.744931361601111e-05, |
| "loss": 1.3531, |
| "step": 1992 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.7353278910102965e-05, |
| "loss": 1.1924, |
| "step": 1993 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.725748408938408e-05, |
| "loss": 1.2029, |
| "step": 1994 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.7161929431902525e-05, |
| "loss": 0.9831, |
| "step": 1995 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.7066615215009195e-05, |
| "loss": 1.3325, |
| "step": 1996 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6971541715357207e-05, |
| "loss": 0.8494, |
| "step": 1997 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.687670920890091e-05, |
| "loss": 1.0865, |
| "step": 1998 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6782117970895205e-05, |
| "loss": 1.1821, |
| "step": 1999 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6687768275894666e-05, |
| "loss": 0.8278, |
| "step": 2000 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 2344, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 100, |
| "total_flos": 1.3522706142068736e+19, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |