{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9987389659520807,
  "global_step": 594,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 0.0, "loss": 4.8042, "step": 1 },
    { "epoch": 0.0, "learning_rate": 2e-08, "loss": 4.9828, "step": 2 },
    { "epoch": 0.01, "learning_rate": 4e-08, "loss": 4.8735, "step": 3 },
    { "epoch": 0.01, "learning_rate": 6.000000000000001e-08, "loss": 4.8238, "step": 4 },
    { "epoch": 0.01, "learning_rate": 8e-08, "loss": 4.7747, "step": 5 },
    { "epoch": 0.01, "learning_rate": 1.0000000000000001e-07, "loss": 4.7918, "step": 6 },
    { "epoch": 0.01, "learning_rate": 1.2000000000000002e-07, "loss": 4.8798, "step": 7 },
    { "epoch": 0.01, "learning_rate": 1.4e-07, "loss": 4.7786, "step": 8 },
    { "epoch": 0.02, "learning_rate": 1.6e-07, "loss": 4.9028, "step": 9 },
    { "epoch": 0.02, "learning_rate": 1.8e-07, "loss": 4.8233, "step": 10 },
    { "epoch": 0.02, "learning_rate": 2.0000000000000002e-07, "loss": 4.8176, "step": 11 },
    { "epoch": 0.02, "learning_rate": 2.2e-07, "loss": 4.8817, "step": 12 },
    { "epoch": 0.02, "learning_rate": 2.4000000000000003e-07, "loss": 4.8089, "step": 13 },
    { "epoch": 0.02, "learning_rate": 2.6e-07, "loss": 4.7724, "step": 14 },
    { "epoch": 0.03, "learning_rate": 2.8e-07, "loss": 4.662, "step": 15 },
    { "epoch": 0.03, "learning_rate": 3.0000000000000004e-07, "loss": 4.8532, "step": 16 },
    { "epoch": 0.03, "learning_rate": 3.2e-07, "loss": 4.8698, "step": 17 },
    { "epoch": 0.03, "learning_rate": 3.4000000000000003e-07, "loss": 4.9494, "step": 18 },
    { "epoch": 0.03, "learning_rate": 3.6e-07, "loss": 4.7211, "step": 19 },
    { "epoch": 0.03, "learning_rate": 3.8e-07, "loss": 4.9558, "step": 20 },
    { "epoch": 0.04, "learning_rate": 4.0000000000000003e-07, "loss": 4.9076, "step": 21 },
    { "epoch": 0.04, "learning_rate": 4.2000000000000006e-07, "loss": 4.8373, "step": 22 },
    { "epoch": 0.04, "learning_rate": 4.4e-07, "loss": 4.8605, "step": 23 },
    { "epoch": 0.04, "learning_rate": 4.6000000000000004e-07, "loss": 4.9537, "step": 24 },
    { "epoch": 0.04, "learning_rate": 4.800000000000001e-07, "loss": 4.7198, "step": 25 },
    { "epoch": 0.04, "learning_rate": 5.000000000000001e-07, "loss": 5.1327, "step": 26 },
    { "epoch": 0.05, "learning_rate": 5.2e-07, "loss": 4.8064, "step": 27 },
    { "epoch": 0.05, "learning_rate": 5.4e-07, "loss": 4.7093, "step": 28 },
    { "epoch": 0.05, "learning_rate": 5.4e-07, "loss": 4.7345, "step": 29 },
    { "epoch": 0.05, "learning_rate": 5.6e-07, "loss": 4.8727, "step": 30 },
    { "epoch": 0.05, "learning_rate": 5.800000000000001e-07, "loss": 4.8892, "step": 31 },
    { "epoch": 0.05, "learning_rate": 6.000000000000001e-07, "loss": 4.7728, "step": 32 },
    { "epoch": 0.06, "learning_rate": 6.200000000000001e-07, "loss": 4.78, "step": 33 },
    { "epoch": 0.06, "learning_rate": 6.4e-07, "loss": 4.6903, "step": 34 },
    { "epoch": 0.06, "learning_rate": 6.6e-07, "loss": 4.7677, "step": 35 },
    { "epoch": 0.06, "learning_rate": 6.800000000000001e-07, "loss": 4.9281, "step": 36 },
    { "epoch": 0.06, "learning_rate": 7.000000000000001e-07, "loss": 4.7253, "step": 37 },
    { "epoch": 0.06, "learning_rate": 7.2e-07, "loss": 4.8761, "step": 38 },
    { "epoch": 0.07, "learning_rate": 7.4e-07, "loss": 5.0624, "step": 39 },
    { "epoch": 0.07, "learning_rate": 7.6e-07, "loss": 4.6726, "step": 40 },
    { "epoch": 0.07, "learning_rate": 7.8e-07, "loss": 4.7354, "step": 41 },
    { "epoch": 0.07, "learning_rate": 8.000000000000001e-07, "loss": 4.987, "step": 42 },
    { "epoch": 0.07, "learning_rate": 8.200000000000001e-07, "loss": 4.739, "step": 43 },
    { "epoch": 0.07, "learning_rate": 8.400000000000001e-07, "loss": 4.8959, "step": 44 },
    { "epoch": 0.08, "learning_rate": 8.6e-07, "loss": 5.0197, "step": 45 },
    { "epoch": 0.08, "learning_rate": 8.8e-07, "loss": 5.1426, "step": 46 },
    { "epoch": 0.08, "learning_rate": 9.000000000000001e-07, "loss": 5.0508, "step": 47 },
    { "epoch": 0.08, "learning_rate": 9.200000000000001e-07, "loss": 5.4743, "step": 48 },
    { "epoch": 0.08, "learning_rate": 9.400000000000001e-07, "loss": 5.5167, "step": 49 },
    { "epoch": 0.08, "learning_rate": 9.400000000000001e-07, "loss": 5.3789, "step": 50 },
    { "epoch": 0.09, "learning_rate": 9.600000000000001e-07, "loss": 4.5449, "step": 51 },
    { "epoch": 0.09, "learning_rate": 9.800000000000001e-07, "loss": 4.5448, "step": 52 },
    { "epoch": 0.09, "learning_rate": 1.0000000000000002e-06, "loss": 4.609, "step": 53 },
    { "epoch": 0.09, "learning_rate": 1.02e-06, "loss": 4.6805, "step": 54 },
    { "epoch": 0.09, "learning_rate": 1.04e-06, "loss": 4.5197, "step": 55 },
    { "epoch": 0.09, "learning_rate": 1.06e-06, "loss": 4.7056, "step": 56 },
    { "epoch": 0.1, "learning_rate": 1.08e-06, "loss": 4.5713, "step": 57 },
    { "epoch": 0.1, "learning_rate": 1.1e-06, "loss": 4.8102, "step": 58 },
    { "epoch": 0.1, "learning_rate": 1.12e-06, "loss": 4.6042, "step": 59 },
    { "epoch": 0.1, "learning_rate": 1.14e-06, "loss": 4.3496, "step": 60 },
    { "epoch": 0.1, "learning_rate": 1.1600000000000001e-06, "loss": 4.6199, "step": 61 },
    { "epoch": 0.1, "learning_rate": 1.1800000000000001e-06, "loss": 4.675, "step": 62 },
    { "epoch": 0.11, "learning_rate": 1.2000000000000002e-06, "loss": 4.5535, "step": 63 },
    { "epoch": 0.11, "learning_rate": 1.2200000000000002e-06, "loss": 4.4975, "step": 64 },
    { "epoch": 0.11, "learning_rate": 1.2400000000000002e-06, "loss": 4.5554, "step": 65 },
    { "epoch": 0.11, "learning_rate": 1.26e-06, "loss": 4.4926, "step": 66 },
    { "epoch": 0.11, "learning_rate": 1.28e-06, "loss": 4.4971, "step": 67 },
    { "epoch": 0.11, "learning_rate": 1.3e-06, "loss": 4.4337, "step": 68 },
    { "epoch": 0.12, "learning_rate": 1.32e-06, "loss": 4.5496, "step": 69 },
    { "epoch": 0.12, "learning_rate": 1.34e-06, "loss": 4.5592, "step": 70 },
    { "epoch": 0.12, "learning_rate": 1.3600000000000001e-06, "loss": 4.6406, "step": 71 },
    { "epoch": 0.12, "learning_rate": 1.3800000000000001e-06, "loss": 4.5955, "step": 72 },
    { "epoch": 0.12, "learning_rate": 1.4000000000000001e-06, "loss": 4.3985, "step": 73 },
    { "epoch": 0.12, "learning_rate": 1.42e-06, "loss": 4.582, "step": 74 },
    { "epoch": 0.13, "learning_rate": 1.44e-06, "loss": 4.5618, "step": 75 },
    { "epoch": 0.13, "learning_rate": 1.46e-06, "loss": 4.561, "step": 76 },
    { "epoch": 0.13, "learning_rate": 1.48e-06, "loss": 4.5283, "step": 77 },
    { "epoch": 0.13, "learning_rate": 1.5e-06, "loss": 4.5355, "step": 78 },
    { "epoch": 0.13, "learning_rate": 1.52e-06, "loss": 4.412, "step": 79 },
    { "epoch": 0.13, "learning_rate": 1.54e-06, "loss": 4.5652, "step": 80 },
    { "epoch": 0.14, "learning_rate": 1.56e-06, "loss": 4.5097, "step": 81 },
    { "epoch": 0.14, "learning_rate": 1.5800000000000001e-06, "loss": 4.5247, "step": 82 },
    { "epoch": 0.14, "learning_rate": 1.6000000000000001e-06, "loss": 4.5395, "step": 83 },
    { "epoch": 0.14, "learning_rate": 1.6200000000000002e-06, "loss": 4.5561, "step": 84 },
    { "epoch": 0.14, "learning_rate": 1.6400000000000002e-06, "loss": 4.4865, "step": 85 },
    { "epoch": 0.14, "learning_rate": 1.6600000000000002e-06, "loss": 4.3575, "step": 86 },
    { "epoch": 0.15, "learning_rate": 1.6800000000000002e-06, "loss": 4.6712, "step": 87 },
    { "epoch": 0.15, "learning_rate": 1.7000000000000002e-06, "loss": 4.5597, "step": 88 },
    { "epoch": 0.15, "learning_rate": 1.72e-06, "loss": 4.5239, "step": 89 },
    { "epoch": 0.15, "learning_rate": 1.74e-06, "loss": 4.568, "step": 90 },
    { "epoch": 0.15, "learning_rate": 1.76e-06, "loss": 4.5041, "step": 91 },
    { "epoch": 0.15, "learning_rate": 1.7800000000000001e-06, "loss": 4.6208, "step": 92 },
    { "epoch": 0.16, "learning_rate": 1.8000000000000001e-06, "loss": 4.4879, "step": 93 },
    { "epoch": 0.16, "learning_rate": 1.8200000000000002e-06, "loss": 4.7273, "step": 94 },
    { "epoch": 0.16, "learning_rate": 1.8400000000000002e-06, "loss": 4.6565, "step": 95 },
    { "epoch": 0.16, "learning_rate": 1.8600000000000002e-06, "loss": 4.694, "step": 96 },
    { "epoch": 0.16, "learning_rate": 1.8800000000000002e-06, "loss": 4.7673, "step": 97 },
    { "epoch": 0.16, "learning_rate": 1.9000000000000002e-06, "loss": 4.5242, "step": 98 },
    { "epoch": 0.17, "learning_rate": 1.9200000000000003e-06, "loss": 4.7109, "step": 99 },
    { "epoch": 0.17, "learning_rate": 1.94e-06, "loss": 4.4853, "step": 100 },
    { "epoch": 0.17, "learning_rate": 1.9600000000000003e-06, "loss": 4.5437, "step": 101 },
    { "epoch": 0.17, "learning_rate": 1.98e-06, "loss": 4.3413, "step": 102 },
    { "epoch": 0.17, "learning_rate": 2.0000000000000003e-06, "loss": 4.3155, "step": 103 },
    { "epoch": 0.17, "learning_rate": 2.02e-06, "loss": 4.2955, "step": 104 },
    { "epoch": 0.18, "learning_rate": 2.04e-06, "loss": 4.2886, "step": 105 },
    { "epoch": 0.18, "learning_rate": 2.06e-06, "loss": 4.325, "step": 106 },
    { "epoch": 0.18, "learning_rate": 2.08e-06, "loss": 4.4709, "step": 107 },
    { "epoch": 0.18, "learning_rate": 2.1000000000000002e-06, "loss": 4.4975, "step": 108 },
    { "epoch": 0.18, "learning_rate": 2.12e-06, "loss": 4.5738, "step": 109 },
    { "epoch": 0.18, "learning_rate": 2.1400000000000003e-06, "loss": 4.3586, "step": 110 },
    { "epoch": 0.19, "learning_rate": 2.16e-06, "loss": 4.3516, "step": 111 },
    { "epoch": 0.19, "learning_rate": 2.1800000000000003e-06, "loss": 4.4207, "step": 112 },
    { "epoch": 0.19, "learning_rate": 2.2e-06, "loss": 4.4198, "step": 113 },
    { "epoch": 0.19, "learning_rate": 2.2200000000000003e-06, "loss": 4.441, "step": 114 },
    { "epoch": 0.19, "learning_rate": 2.24e-06, "loss": 4.3794, "step": 115 },
    { "epoch": 0.2, "learning_rate": 2.2600000000000004e-06, "loss": 4.3902, "step": 116 },
    { "epoch": 0.2, "learning_rate": 2.28e-06, "loss": 4.4838, "step": 117 },
    { "epoch": 0.2, "learning_rate": 2.3000000000000004e-06, "loss": 4.2504, "step": 118 },
    { "epoch": 0.2, "learning_rate": 2.3200000000000002e-06, "loss": 4.228, "step": 119 },
    { "epoch": 0.2, "learning_rate": 2.3400000000000005e-06, "loss": 4.2372, "step": 120 },
    { "epoch": 0.2, "learning_rate": 2.3600000000000003e-06, "loss": 4.4673, "step": 121 },
    { "epoch": 0.21, "learning_rate": 2.38e-06, "loss": 4.553, "step": 122 },
    { "epoch": 0.21, "learning_rate": 2.4000000000000003e-06, "loss": 4.315, "step": 123 },
    { "epoch": 0.21, "learning_rate": 2.42e-06, "loss": 4.3808, "step": 124 },
    { "epoch": 0.21, "learning_rate": 2.4400000000000004e-06, "loss": 4.3912, "step": 125 },
    { "epoch": 0.21, "learning_rate": 2.46e-06, "loss": 4.2075, "step": 126 },
    { "epoch": 0.21, "learning_rate": 2.4800000000000004e-06, "loss": 4.266, "step": 127 },
    { "epoch": 0.22, "learning_rate": 2.5e-06, "loss": 4.2219, "step": 128 },
    { "epoch": 0.22, "learning_rate": 2.52e-06, "loss": 4.3792, "step": 129 },
    { "epoch": 0.22, "learning_rate": 2.5400000000000002e-06, "loss": 4.4051, "step": 130 },
    { "epoch": 0.22, "learning_rate": 2.56e-06, "loss": 4.3441, "step": 131 },
    { "epoch": 0.22, "learning_rate": 2.5800000000000003e-06, "loss": 4.2955, "step": 132 },
    { "epoch": 0.22, "learning_rate": 2.6e-06, "loss": 4.3969, "step": 133 },
    { "epoch": 0.23, "learning_rate": 2.6200000000000003e-06, "loss": 4.402, "step": 134 },
    { "epoch": 0.23, "learning_rate": 2.64e-06, "loss": 4.4528, "step": 135 },
    { "epoch": 0.23, "learning_rate": 2.6600000000000004e-06, "loss": 4.3123, "step": 136 },
    { "epoch": 0.23, "learning_rate": 2.68e-06, "loss": 4.3315, "step": 137 },
    { "epoch": 0.23, "learning_rate": 2.7000000000000004e-06, "loss": 4.329, "step": 138 },
    { "epoch": 0.23, "learning_rate": 2.7200000000000002e-06, "loss": 4.3091, "step": 139 },
    { "epoch": 0.24, "learning_rate": 2.7400000000000004e-06, "loss": 4.5347, "step": 140 },
    { "epoch": 0.24, "learning_rate": 2.7600000000000003e-06, "loss": 4.453, "step": 141 },
    { "epoch": 0.24, "learning_rate": 2.7800000000000005e-06, "loss": 4.4934, "step": 142 },
    { "epoch": 0.24, "learning_rate": 2.8000000000000003e-06, "loss": 4.5347, "step": 143 },
    { "epoch": 0.24, "learning_rate": 2.82e-06, "loss": 4.3281, "step": 144 },
    { "epoch": 0.24, "learning_rate": 2.84e-06, "loss": 4.4223, "step": 145 },
    { "epoch": 0.25, "learning_rate": 2.86e-06, "loss": 4.7379, "step": 146 },
    { "epoch": 0.25, "learning_rate": 2.88e-06, "loss": 4.7251, "step": 147 },
    { "epoch": 0.25, "learning_rate": 2.9e-06, "loss": 4.7122, "step": 148 },
    { "epoch": 0.25, "learning_rate": 2.92e-06, "loss": 4.6427, "step": 149 },
    { "epoch": 0.25, "learning_rate": 2.9400000000000002e-06, "loss": 4.7168, "step": 150 },
    { "epoch": 0.25, "learning_rate": 2.96e-06, "loss": 4.3009, "step": 151 },
    { "epoch": 0.26, "learning_rate": 2.9800000000000003e-06, "loss": 4.2353, "step": 152 },
    { "epoch": 0.26, "learning_rate": 3e-06, "loss": 4.2273, "step": 153 },
    { "epoch": 0.26, "learning_rate": 3.0200000000000003e-06, "loss": 4.1507, "step": 154 },
    { "epoch": 0.26, "learning_rate": 3.04e-06, "loss": 4.1878, "step": 155 },
    { "epoch": 0.26, "learning_rate": 3.0600000000000003e-06, "loss": 4.1888, "step": 156 },
    { "epoch": 0.26, "learning_rate": 3.08e-06, "loss": 4.3586, "step": 157 },
    { "epoch": 0.27, "learning_rate": 3.1000000000000004e-06, "loss": 4.1796, "step": 158 },
    { "epoch": 0.27, "learning_rate": 3.12e-06, "loss": 4.1439, "step": 159 },
    { "epoch": 0.27, "learning_rate": 3.1400000000000004e-06, "loss": 4.2612, "step": 160 },
    { "epoch": 0.27, "learning_rate": 3.1600000000000002e-06, "loss": 4.3082, "step": 161 },
    { "epoch": 0.27, "learning_rate": 3.1800000000000005e-06, "loss": 4.1563, "step": 162 },
    { "epoch": 0.27, "learning_rate": 3.2000000000000003e-06, "loss": 4.2179, "step": 163 },
    { "epoch": 0.28, "learning_rate": 3.2200000000000005e-06, "loss": 4.2514, "step": 164 },
    { "epoch": 0.28, "learning_rate": 3.2400000000000003e-06, "loss": 4.2873, "step": 165 },
    { "epoch": 0.28, "learning_rate": 3.2600000000000006e-06, "loss": 4.3925, "step": 166 },
    { "epoch": 0.28, "learning_rate": 3.2800000000000004e-06, "loss": 4.2303, "step": 167 },
    { "epoch": 0.28, "learning_rate": 3.3000000000000006e-06, "loss": 4.4668, "step": 168 },
    { "epoch": 0.28, "learning_rate": 3.3200000000000004e-06, "loss": 4.2762, "step": 169 },
    { "epoch": 0.29, "learning_rate": 3.3400000000000006e-06, "loss": 4.3279, "step": 170 },
    { "epoch": 0.29, "learning_rate": 3.3600000000000004e-06, "loss": 4.1924, "step": 171 },
    { "epoch": 0.29, "learning_rate": 3.3800000000000007e-06, "loss": 4.1893, "step": 172 },
    { "epoch": 0.29, "learning_rate": 3.4000000000000005e-06, "loss": 4.2197, "step": 173 },
    { "epoch": 0.29, "learning_rate": 3.4200000000000007e-06, "loss": 4.1982, "step": 174 },
    { "epoch": 0.29, "learning_rate": 3.44e-06, "loss": 4.3419, "step": 175 },
    { "epoch": 0.3, "learning_rate": 3.46e-06, "loss": 4.3705, "step": 176 },
    { "epoch": 0.3, "learning_rate": 3.48e-06, "loss": 4.3055, "step": 177 },
    { "epoch": 0.3, "learning_rate": 3.5e-06, "loss": 4.1993, "step": 178 },
    { "epoch": 0.3, "learning_rate": 3.52e-06, "loss": 4.3358, "step": 179 },
    { "epoch": 0.3, "learning_rate": 3.54e-06, "loss": 4.1873, "step": 180 },
    { "epoch": 0.3, "learning_rate": 3.5600000000000002e-06, "loss": 4.2644, "step": 181 },
    { "epoch": 0.31, "learning_rate": 3.58e-06, "loss": 4.2642, "step": 182 },
    { "epoch": 0.31, "learning_rate": 3.6000000000000003e-06, "loss": 4.2311, "step": 183 },
    { "epoch": 0.31, "learning_rate": 3.62e-06, "loss": 4.1317, "step": 184 },
    { "epoch": 0.31, "learning_rate": 3.6400000000000003e-06, "loss": 4.4816, "step": 185 },
    { "epoch": 0.31, "learning_rate": 3.66e-06, "loss": 4.268, "step": 186 },
    { "epoch": 0.31, "learning_rate": 3.6800000000000003e-06, "loss": 4.3489, "step": 187 },
    { "epoch": 0.32, "learning_rate": 3.7e-06, "loss": 4.3619, "step": 188 },
    { "epoch": 0.32, "learning_rate": 3.7200000000000004e-06, "loss": 4.5345, "step": 189 },
    { "epoch": 0.32, "learning_rate": 3.74e-06, "loss": 4.2482, "step": 190 },
    { "epoch": 0.32, "learning_rate": 3.7600000000000004e-06, "loss": 4.5152, "step": 191 },
    { "epoch": 0.32, "learning_rate": 3.7800000000000002e-06, "loss": 4.4431, "step": 192 },
    { "epoch": 0.32, "learning_rate": 3.8000000000000005e-06, "loss": 4.3379, "step": 193 },
    { "epoch": 0.33, "learning_rate": 3.820000000000001e-06, "loss": 4.4033, "step": 194 },
    { "epoch": 0.33, "learning_rate": 3.8400000000000005e-06, "loss": 4.3606, "step": 195 },
    { "epoch": 0.33, "learning_rate": 3.86e-06, "loss": 4.2561, "step": 196 },
    { "epoch": 0.33, "learning_rate": 3.88e-06, "loss": 4.6605, "step": 197 },
    { "epoch": 0.33, "learning_rate": 3.900000000000001e-06, "loss": 4.6509, "step": 198 },
    { "epoch": 0.33, "learning_rate": 3.920000000000001e-06, "loss": 4.5575, "step": 199 },
    { "epoch": 0.34, "learning_rate": 3.94e-06, "loss": 4.7293, "step": 200 },
    { "epoch": 0.34, "learning_rate": 3.96e-06, "loss": 4.119, "step": 201 },
    { "epoch": 0.34, "learning_rate": 3.980000000000001e-06, "loss": 4.2329, "step": 202 },
    { "epoch": 0.34, "learning_rate": 4.000000000000001e-06, "loss": 4.2809, "step": 203 },
    { "epoch": 0.34, "learning_rate": 4.0200000000000005e-06, "loss": 4.1888, "step": 204 },
    { "epoch": 0.34, "learning_rate": 4.04e-06, "loss": 4.2201, "step": 205 },
    { "epoch": 0.35, "learning_rate": 4.060000000000001e-06, "loss": 4.1575, "step": 206 },
    { "epoch": 0.35, "learning_rate": 4.08e-06, "loss": 4.2356, "step": 207 },
    { "epoch": 0.35, "learning_rate": 4.1e-06, "loss": 4.1999, "step": 208 },
    { "epoch": 0.35, "learning_rate": 4.12e-06, "loss": 4.2149, "step": 209 },
    { "epoch": 0.35, "learning_rate": 4.14e-06, "loss": 4.3318, "step": 210 },
    { "epoch": 0.35, "learning_rate": 4.16e-06, "loss": 4.299, "step": 211 },
    { "epoch": 0.36, "learning_rate": 4.18e-06, "loss": 4.2981, "step": 212 },
    { "epoch": 0.36, "learning_rate": 4.2000000000000004e-06, "loss": 4.2578, "step": 213 },
    { "epoch": 0.36, "learning_rate": 4.22e-06, "loss": 4.353, "step": 214 },
    { "epoch": 0.36, "learning_rate": 4.24e-06, "loss": 4.2134, "step": 215 },
    { "epoch": 0.36, "learning_rate": 4.26e-06, "loss": 4.24, "step": 216 },
    { "epoch": 0.36, "learning_rate": 4.2800000000000005e-06, "loss": 4.1222, "step": 217 },
    { "epoch": 0.37, "learning_rate": 4.3e-06, "loss": 4.2403, "step": 218 },
    { "epoch": 0.37, "learning_rate": 4.32e-06, "loss": 4.2177, "step": 219 },
    { "epoch": 0.37, "learning_rate": 4.34e-06, "loss": 4.3008, "step": 220 },
    { "epoch": 0.37, "learning_rate": 4.360000000000001e-06, "loss": 4.3402, "step": 221 },
    { "epoch": 0.37, "learning_rate": 4.38e-06, "loss": 4.3275, "step": 222 },
    { "epoch": 0.37, "learning_rate": 4.4e-06, "loss": 4.1779, "step": 223 },
    { "epoch": 0.38, "learning_rate": 4.42e-06, "loss": 4.2863, "step": 224 },
    { "epoch": 0.38, "learning_rate": 4.440000000000001e-06, "loss": 4.4608, "step": 225 },
    { "epoch": 0.38, "learning_rate": 4.4600000000000005e-06, "loss": 4.2403, "step": 226 },
    { "epoch": 0.38, "learning_rate": 4.48e-06, "loss": 4.1853, "step": 227 },
    { "epoch": 0.38, "learning_rate": 4.5e-06, "loss": 4.2403, "step": 228 },
    { "epoch": 0.39, "learning_rate": 4.520000000000001e-06, "loss": 4.3736, "step": 229 },
    { "epoch": 0.39, "learning_rate": 4.540000000000001e-06, "loss": 4.3332, "step": 230 },
    { "epoch": 0.39, "learning_rate": 4.56e-06, "loss": 4.2064, "step": 231 },
    { "epoch": 0.39, "learning_rate": 4.58e-06, "loss": 4.2761, "step": 232 },
    { "epoch": 0.39, "learning_rate": 4.600000000000001e-06, "loss": 4.3082, "step": 233 },
    { "epoch": 0.39, "learning_rate": 4.620000000000001e-06, "loss": 4.2818, "step": 234 },
    { "epoch": 0.4, "learning_rate": 4.6400000000000005e-06, "loss": 4.4403, "step": 235 },
    { "epoch": 0.4, "learning_rate": 4.66e-06, "loss": 4.2043, "step": 236 },
    { "epoch": 0.4, "learning_rate": 4.680000000000001e-06, "loss": 4.2007, "step": 237 },
    { "epoch": 0.4, "learning_rate": 4.7e-06, "loss": 4.2285, "step": 238 },
    { "epoch": 0.4, "learning_rate": 4.7200000000000005e-06, "loss": 4.1442, "step": 239 },
    { "epoch": 0.4, "learning_rate": 4.74e-06, "loss": 4.3297, "step": 240 },
    { "epoch": 0.41, "learning_rate": 4.76e-06, "loss": 4.253, "step": 241 },
    { "epoch": 0.41, "learning_rate": 4.78e-06, "loss": 4.2696, "step": 242 },
    { "epoch": 0.41, "learning_rate": 4.800000000000001e-06, "loss": 4.2858, "step": 243 },
    { "epoch": 0.41, "learning_rate": 4.8200000000000004e-06, "loss": 4.3687, "step": 244 },
    { "epoch": 0.41, "learning_rate": 4.84e-06, "loss": 4.4333, "step": 245 },
    { "epoch": 0.41, "learning_rate": 4.86e-06, "loss": 4.476, "step": 246 },
    { "epoch": 0.42, "learning_rate": 4.880000000000001e-06, "loss": 4.4581, "step": 247 },
    { "epoch": 0.42, "learning_rate": 4.9000000000000005e-06, "loss": 4.5935, "step": 248 },
    { "epoch": 0.42, "learning_rate": 4.92e-06, "loss": 4.5562, "step": 249 },
    { "epoch": 0.42, "learning_rate": 4.94e-06, "loss": 4.5708, "step": 250 },
    { "epoch": 0.42, "learning_rate": 4.960000000000001e-06, "loss": 4.2715, "step": 251 },
    { "epoch": 0.42, "learning_rate": 4.980000000000001e-06, "loss": 4.2646, "step": 252 },
    { "epoch": 0.43, "learning_rate": 5e-06, "loss": 4.2305, "step": 253 },
    { "epoch": 0.43, "learning_rate": 5.02e-06, "loss": 4.1202, "step": 254 },
    { "epoch": 0.43, "learning_rate": 5.04e-06, "loss": 4.2932, "step": 255 },
    { "epoch": 0.43, "learning_rate": 5.060000000000001e-06, "loss": 4.2446, "step": 256 },
    { "epoch": 0.43, "learning_rate": 5.0800000000000005e-06, "loss": 4.1774, "step": 257 },
    { "epoch": 0.43, "learning_rate": 5.1e-06, "loss": 4.203, "step": 258 },
    { "epoch": 0.44, "learning_rate": 5.12e-06, "loss": 4.2629, "step": 259 },
    { "epoch": 0.44, "learning_rate": 5.140000000000001e-06, "loss": 4.2674, "step": 260 },
    { "epoch": 0.44, "learning_rate": 5.1600000000000006e-06, "loss": 4.2434, "step": 261 },
    { "epoch": 0.44, "learning_rate": 5.18e-06, "loss": 4.1899, "step": 262 },
    { "epoch": 0.44, "learning_rate": 5.2e-06, "loss": 4.2941, "step": 263 },
    { "epoch": 0.44, "learning_rate": 5.220000000000001e-06, "loss": 4.0175, "step": 264 },
    { "epoch": 0.45, "learning_rate": 5.240000000000001e-06, "loss": 4.1044, "step": 265 },
    { "epoch": 0.45, "learning_rate": 5.2600000000000005e-06, "loss": 4.2199, "step": 266 },
    { "epoch": 0.45, "learning_rate": 5.28e-06, "loss": 4.2392, "step": 267 },
    { "epoch": 0.45, "learning_rate": 5.300000000000001e-06, "loss": 4.1779, "step": 268 },
    { "epoch": 0.45, "learning_rate": 5.320000000000001e-06, "loss": 4.2911, "step": 269 },
    { "epoch": 0.45, "learning_rate": 5.3400000000000005e-06, "loss": 4.2804, "step": 270 },
    { "epoch": 0.46, "learning_rate": 5.36e-06, "loss": 4.2093, "step": 271 },
    { "epoch": 0.46, "learning_rate": 5.380000000000001e-06, "loss": 4.3397, "step": 272 },
    { "epoch": 0.46, "learning_rate": 5.400000000000001e-06, "loss": 4.2085, "step": 273 },
    { "epoch": 0.46, "learning_rate": 5.420000000000001e-06, "loss": 4.2158, "step": 274 },
    { "epoch": 0.46, "learning_rate": 5.4400000000000004e-06, "loss": 4.2876, "step": 275 },
    { "epoch": 0.46, "learning_rate": 5.460000000000001e-06, "loss": 4.2118, "step": 276 },
    { "epoch": 0.47, "learning_rate": 5.480000000000001e-06, "loss": 4.3481, "step": 277 },
    { "epoch": 0.47, "learning_rate": 5.500000000000001e-06, "loss": 4.1363, "step": 278 },
    { "epoch": 0.47, "learning_rate": 5.5200000000000005e-06, "loss": 4.3209, "step": 279 },
    { "epoch": 0.47, "learning_rate": 5.540000000000001e-06, "loss": 4.3781, "step": 280 },
    { "epoch": 0.47, "learning_rate": 5.560000000000001e-06, "loss": 4.3857, "step": 281 },
    { "epoch": 0.47, "learning_rate": 5.580000000000001e-06, "loss": 4.2594, "step": 282 },
    { "epoch": 0.48, "learning_rate": 5.600000000000001e-06, "loss": 4.161, "step": 283 },
    { "epoch": 0.48, "learning_rate": 5.620000000000001e-06, "loss": 4.1539, "step": 284 },
    { "epoch": 0.48, "learning_rate": 5.64e-06, "loss": 4.4102, "step": 285 },
    { "epoch": 0.48, "learning_rate": 5.66e-06, "loss": 4.166, "step": 286 },
    { "epoch": 0.48, "learning_rate": 5.68e-06, "loss": 4.5148, "step": 287 },
    { "epoch": 0.48, "learning_rate": 5.7e-06, "loss": 4.0003, "step": 288 },
    { "epoch": 0.49, "learning_rate": 5.72e-06, "loss": 4.2587, "step": 289 },
    { "epoch": 0.49, "learning_rate": 5.74e-06, "loss": 4.2268, "step": 290 },
    { "epoch": 0.49, "learning_rate": 5.76e-06, "loss": 4.4759, "step": 291 },
    { "epoch": 0.49, "learning_rate": 5.78e-06, "loss": 4.1496, "step": 292 },
    { "epoch": 0.49, "learning_rate": 5.8e-06, "loss": 4.3539, "step": 293 },
    { "epoch": 0.49, "learning_rate": 5.82e-06, "loss": 4.3933, "step": 294 },
    { "epoch": 0.5, "learning_rate": 5.84e-06, "loss": 4.3932, "step": 295 },
    { "epoch": 0.5, "learning_rate": 5.86e-06, "loss": 4.2008, "step": 296 },
    { "epoch": 0.5, "learning_rate": 5.8800000000000005e-06, "loss": 4.2785, "step": 297 },
    { "epoch": 0.5, "learning_rate": 5.9e-06, "loss": 4.4644, "step": 298 },
    { "epoch": 0.5, "learning_rate": 5.92e-06, "loss": 4.4931, "step": 299 },
    { "epoch": 0.5, "learning_rate": 5.94e-06, "loss": 4.7331, "step": 300 },
    { "epoch": 0.51, "learning_rate": 5.9600000000000005e-06, "loss": 4.3267, "step": 301 },
    { "epoch": 0.51, "learning_rate": 5.98e-06, "loss": 4.2696, "step": 302 },
    { "epoch": 0.51, "learning_rate": 6e-06, "loss": 4.3485, "step": 303 },
    { "epoch": 0.51, "learning_rate": 6.02e-06, "loss": 4.1293, "step": 304 },
    { "epoch": 0.51, "learning_rate": 6.040000000000001e-06, "loss": 4.3256, "step": 305 },
    { "epoch": 0.51, "learning_rate": 6.0600000000000004e-06, "loss": 4.313, "step": 306 },
    { "epoch": 0.52, "learning_rate": 6.08e-06, "loss": 4.1031, "step": 307 },
    { "epoch": 0.52, "learning_rate": 6.1e-06, "loss": 4.3191, "step": 308 },
    { "epoch": 0.52, "learning_rate": 6.120000000000001e-06, "loss": 4.2111, "step": 309 },
    { "epoch": 0.52, "learning_rate": 6.1400000000000005e-06, "loss": 4.3018, "step": 310 },
    { "epoch": 0.52, "learning_rate": 6.16e-06, "loss": 4.1658, "step": 311 },
    { "epoch": 0.52, "learning_rate": 6.18e-06, "loss": 4.2064, "step": 312 },
    { "epoch": 0.53, "learning_rate": 6.200000000000001e-06, "loss": 4.2251, "step": 313 },
    { "epoch": 0.53, "learning_rate": 6.220000000000001e-06, "loss": 4.2262, "step": 314 },
    { "epoch": 0.53, "learning_rate": 6.24e-06, "loss": 4.2471, "step": 315 },
    { "epoch": 0.53, "learning_rate": 6.26e-06, "loss": 4.2381, "step": 316 },
    { "epoch": 0.53, "learning_rate": 6.280000000000001e-06, "loss": 4.1972, "step": 317 },
    { "epoch": 0.53, "learning_rate": 6.300000000000001e-06, "loss": 4.2808, "step": 318 },
    { "epoch": 0.54, "learning_rate": 6.3200000000000005e-06, "loss": 4.1035, "step": 319 },
    { "epoch": 0.54, "learning_rate": 6.34e-06, "loss": 4.265, "step": 320 },
    { "epoch": 0.54, "learning_rate": 6.360000000000001e-06, "loss": 4.1205, "step": 321 },
    { "epoch": 0.54, "learning_rate": 6.380000000000001e-06, "loss": 4.1692, "step": 322 },
    { "epoch": 0.54, "learning_rate": 6.4000000000000006e-06, "loss": 4.1871, "step": 323 },
    { "epoch": 0.54, "learning_rate": 6.42e-06, "loss": 4.2543, "step": 324 },
    { "epoch": 0.55, "learning_rate": 6.440000000000001e-06, "loss": 4.2072, "step": 325 },
    { "epoch": 0.55, "learning_rate": 6.460000000000001e-06, "loss": 4.1326, "step": 326 },
    { "epoch": 0.55, "learning_rate": 6.480000000000001e-06, "loss": 4.4151, "step": 327 },
    { "epoch": 0.55, "learning_rate": 6.5000000000000004e-06, "loss": 4.1615, "step": 328 },
    { "epoch": 0.55, "learning_rate": 6.520000000000001e-06, "loss": 4.0782, "step": 329 },
    { "epoch": 0.55, "learning_rate": 6.540000000000001e-06, "loss": 4.3004, "step": 330 },
    { "epoch": 0.56, "learning_rate": 6.560000000000001e-06, "loss": 4.1869, "step": 331 },
    { "epoch": 0.56, "learning_rate": 6.5800000000000005e-06, "loss": 4.137, "step": 332 },
    { "epoch": 0.56, "learning_rate": 6.600000000000001e-06, "loss": 4.1984, "step": 333 },
    { "epoch": 0.56, "learning_rate": 6.620000000000001e-06, "loss": 4.2389, "step": 334 },
    { "epoch": 0.56, "learning_rate": 6.640000000000001e-06, "loss": 4.2559, "step": 335 },
    { "epoch": 0.56, "learning_rate": 6.660000000000001e-06, "loss": 4.1806, "step": 336 },
    { "epoch": 0.57, "learning_rate": 6.680000000000001e-06, "loss": 4.1488, "step": 337 },
    { "epoch": 0.57, "learning_rate": 6.700000000000001e-06, "loss": 4.2468, "step": 338 },
    { "epoch": 0.57, "learning_rate": 6.720000000000001e-06, "loss": 4.2594, "step": 339 },
    { "epoch": 0.57, "learning_rate": 6.740000000000001e-06, "loss": 4.287, "step": 340 },
    { "epoch": 0.57, "learning_rate": 6.760000000000001e-06, "loss": 4.2912, "step": 341 },
    { "epoch": 0.58, "learning_rate": 6.780000000000001e-06, "loss": 4.3932, "step": 342 },
    { "epoch": 0.58, "learning_rate": 6.800000000000001e-06, "loss": 4.418, "step": 343 },
    { "epoch": 0.58, "learning_rate": 6.820000000000001e-06, "loss": 4.2976, "step": 344 },
    { "epoch": 0.58, "learning_rate": 6.8400000000000014e-06, "loss": 4.4477, "step": 345 },
    { "epoch": 0.58, "learning_rate": 6.860000000000001e-06, "loss": 4.1469, "step": 346 },
    { "epoch": 0.58, "learning_rate": 6.88e-06, "loss": 4.2839, "step": 347 },
    { "epoch": 0.59, "learning_rate": 6.9e-06, "loss": 4.6134, "step": 348 },
    { "epoch": 0.59, "learning_rate": 6.92e-06, "loss": 4.4709, "step": 349 },
    { "epoch": 0.59, "learning_rate": 6.9400000000000005e-06, "loss": 4.7728, "step": 350 },
    { "epoch": 0.59, "learning_rate": 6.96e-06, "loss": 4.1291, "step": 351 },
    { "epoch": 0.59, "learning_rate": 6.98e-06, "loss": 4.237, "step": 352 },
    { "epoch": 0.59, "learning_rate": 7e-06, "loss": 4.2614, "step": 353 },
    { "epoch": 0.6, "learning_rate": 7.0200000000000006e-06, "loss": 4.1032, "step": 354 },
    { "epoch": 0.6, "learning_rate": 7.04e-06, "loss": 4.1884, "step": 355 },
    { "epoch": 0.6, "learning_rate": 7.06e-06, "loss": 4.2002, "step": 356 },
    { "epoch": 0.6, "learning_rate": 7.08e-06, "loss": 4.2122, "step": 357 },
    { "epoch": 0.6, "learning_rate": 7.100000000000001e-06, "loss": 4.1464, "step": 358 },
    { "epoch": 0.6, "learning_rate": 7.1200000000000004e-06, "loss": 4.1748, "step": 359 },
    { "epoch": 0.61, "learning_rate": 7.14e-06, "loss": 4.1205, "step": 360 },
    { "epoch": 0.61, "learning_rate": 7.16e-06, "loss": 4.2075, "step": 361 },
    { "epoch": 0.61, "learning_rate": 7.180000000000001e-06, "loss": 4.2108, "step": 362 },
    { "epoch": 0.61, "learning_rate": 7.2000000000000005e-06, "loss": 4.2577, "step": 363 },
    { "epoch": 0.61, "learning_rate": 7.22e-06, "loss": 4.1437, "step": 364 },
    { "epoch": 0.61, "learning_rate": 7.24e-06, "loss": 4.2128, "step": 365 },
    { "epoch": 0.62, "learning_rate": 7.260000000000001e-06, "loss": 4.1773, "step": 366 },
    { "epoch": 0.62, "learning_rate": 7.280000000000001e-06, "loss": 4.179, "step": 367 },
    { "epoch": 0.62, "learning_rate": 7.3e-06, "loss": 4.164, "step": 368 },
    { "epoch": 0.62, "learning_rate": 7.32e-06, "loss": 4.2615, "step": 369 },
    { "epoch": 0.62, "learning_rate": 7.340000000000001e-06, "loss": 4.1449, "step": 370 },
    { "epoch": 0.62, "learning_rate": 7.360000000000001e-06, "loss": 4.0527, "step": 371 },
    { "epoch": 0.63, "learning_rate": 7.3800000000000005e-06, "loss": 4.2189, "step": 372 },
    { "epoch": 0.63, "learning_rate": 7.4e-06, "loss": 4.0695, "step": 373 },
    { "epoch": 0.63, "learning_rate": 7.420000000000001e-06, "loss": 4.0884, "step": 374 },
    { "epoch": 0.63, "learning_rate": 7.440000000000001e-06, "loss": 4.2513, "step": 375 },
    { "epoch": 0.63, "learning_rate": 7.4600000000000006e-06, "loss": 4.1336, "step": 376 },
    { "epoch": 0.63, "learning_rate": 7.48e-06, "loss": 4.2286, "step": 377 },
    { "epoch": 0.64, "learning_rate": 7.500000000000001e-06, "loss": 4.2841, "step": 378 },
    { "epoch": 0.64, "learning_rate": 7.520000000000001e-06, "loss": 4.1134, "step": 379 },
    { "epoch": 0.64, "learning_rate": 7.540000000000001e-06, "loss": 4.337, "step": 380 },
    { "epoch": 0.64, "learning_rate": 7.5600000000000005e-06, "loss": 4.2637, "step": 381 },
    { "epoch": 0.64, "learning_rate": 7.58e-06, "loss": 4.1276, "step": 382 },
    { "epoch": 0.64, "learning_rate": 7.600000000000001e-06, "loss": 4.0296, "step": 383 },
    { "epoch": 0.65, "learning_rate": 7.620000000000001e-06, "loss": 4.2207, "step": 384 },
    { "epoch": 0.65, "learning_rate": 7.640000000000001e-06, "loss": 4.4024, "step": 385 },
    { "epoch": 0.65, "learning_rate": 7.660000000000001e-06, "loss": 4.1624, "step": 386 },
    { "epoch": 0.65, "learning_rate": 7.680000000000001e-06, "loss": 4.3254, "step": 387 },
    { "epoch": 0.65, "learning_rate": 7.7e-06, "loss": 4.2542, "step": 388 },
    { "epoch": 0.65, "learning_rate": 7.72e-06, "loss": 4.1433, "step": 389 },
    { "epoch": 0.66, "learning_rate": 7.74e-06, "loss": 4.1481, "step": 390 },
    { "epoch": 0.66, "learning_rate": 7.76e-06, "loss": 4.3634, "step": 391 },
    { "epoch": 0.66, "learning_rate": 7.78e-06, "loss": 4.2915, "step": 392 },
    { "epoch": 0.66, "learning_rate": 7.800000000000002e-06, "loss": 4.5435, "step": 393 },
    { "epoch": 0.66, "learning_rate": 7.820000000000001e-06, "loss": 4.3662, "step": 394 },
    { "epoch": 0.66, "learning_rate": 7.840000000000001e-06, "loss": 4.2978, "step": 395 },
    { "epoch": 0.67, "learning_rate": 7.860000000000001e-06, "loss": 4.2279, "step": 396 },
    { "epoch": 0.67, "learning_rate": 7.88e-06, "loss": 4.4285, "step": 397 },
    { "epoch": 0.67, "learning_rate": 7.9e-06, "loss": 4.6224, "step": 398 },
    { "epoch": 0.67, "learning_rate": 7.92e-06, "loss": 4.4336, "step": 399 },
    { "epoch": 0.67, "learning_rate": 7.94e-06, "loss": 4.5344, "step": 400 },
    { "epoch": 0.67, "learning_rate": 7.960000000000002e-06, "loss": 4.2421, "step": 401 },
    { "epoch": 0.68, "learning_rate": 7.980000000000002e-06, "loss": 4.1492, "step": 402 },
    { "epoch": 0.68, "learning_rate": 8.000000000000001e-06, "loss": 4.211, "step": 403 },
    { "epoch": 0.68, "learning_rate": 8.020000000000001e-06, "loss": 4.1921, "step": 404 },
    { "epoch": 0.68, "learning_rate": 8.040000000000001e-06, "loss": 4.1333, "step": 405 },
    { "epoch": 0.68, "learning_rate": 8.06e-06, "loss": 4.1795, "step": 406 },
    { "epoch": 0.68, "learning_rate": 8.08e-06, "loss": 4.1875, "step": 407 },
    { "epoch": 0.69, "learning_rate": 8.1e-06, "loss": 4.1004, "step": 408 },
    { "epoch": 0.69, "learning_rate": 8.120000000000002e-06, "loss": 4.1633, "step": 409 },
    { "epoch": 0.69, "learning_rate": 8.14e-06, "loss": 4.1125, "step": 410 },
    { "epoch": 0.69, "learning_rate": 8.16e-06, "loss": 4.1233, "step": 411 },
    { "epoch": 0.69, "learning_rate": 8.18e-06, "loss": 4.2896, "step": 412 },
    { "epoch": 0.69, "learning_rate": 8.2e-06, "loss": 4.1791, "step": 413 },
    { "epoch": 0.7, "learning_rate": 8.220000000000001e-06, "loss": 4.2289, "step": 414 },
    { "epoch": 0.7, "learning_rate": 8.24e-06, "loss": 4.2061, "step": 415 },
    { "epoch": 0.7, "learning_rate": 8.26e-06, "loss": 4.1171, "step": 416 },
    { "epoch": 0.7, "learning_rate": 8.28e-06, "loss": 4.1973, "step": 417 },
    { "epoch": 0.7, "learning_rate": 8.3e-06, "loss": 4.0318, "step": 418 },
    { "epoch": 0.7, "learning_rate": 8.32e-06, "loss": 4.2931, "step": 419 },
    { "epoch": 0.71, "learning_rate": 8.34e-06, "loss": 4.1999, "step": 420 },
    { "epoch": 0.71, "learning_rate": 8.36e-06, "loss": 3.9695, "step": 421 },
    { "epoch": 0.71, "learning_rate": 8.380000000000001e-06, "loss": 4.1124, "step": 422 },
    { "epoch": 0.71, "learning_rate": 8.400000000000001e-06, "loss": 4.2425, "step": 423 },
    { "epoch": 0.71, "learning_rate": 8.42e-06, "loss": 4.1725, "step": 424 },
    { "epoch": 0.71, "learning_rate": 8.44e-06, "loss": 4.166, "step": 425 },
    { "epoch": 0.72, "learning_rate": 8.46e-06, "loss": 4.2335, "step": 426 },
    { "epoch": 0.72, "learning_rate": 8.48e-06, "loss": 4.1832, "step": 427 },
    { "epoch": 0.72, "learning_rate": 8.5e-06, "loss": 4.1219, "step": 428 },
    { "epoch": 0.72, "learning_rate": 8.52e-06, "loss": 4.237, "step": 429 },
    { "epoch": 0.72, "learning_rate": 8.540000000000001e-06, "loss": 4.1834, "step": 430 },
    { "epoch": 0.72, "learning_rate": 8.560000000000001e-06, "loss": 4.2492, "step": 431 },
    { "epoch": 0.73, "learning_rate": 8.580000000000001e-06, "loss": 4.0503, "step": 432 },
    { "epoch": 0.73, "learning_rate": 8.6e-06, "loss": 4.2888, "step": 433 },
    { "epoch": 0.73, "learning_rate": 8.62e-06, "loss": 4.2068, "step": 434 },
    { "epoch": 0.73, "learning_rate": 8.64e-06, "loss": 4.2792, "step": 435 },
    { "epoch": 0.73, "learning_rate": 8.66e-06, "loss": 4.1248, "step": 436 },
    { "epoch": 0.73, "learning_rate": 8.68e-06, "loss": 4.0674, "step": 437 },
    { "epoch": 0.74, "learning_rate": 8.700000000000001e-06, "loss": 4.2741, "step": 438 },
    { "epoch": 0.74, "learning_rate": 8.720000000000001e-06, "loss": 4.1833, "step": 439 },
    { "epoch": 0.74, "learning_rate": 8.740000000000001e-06, "loss": 4.3683, "step": 440 },
    { "epoch": 0.74, "learning_rate": 8.76e-06, "loss": 4.2469, "step": 441 },
    { "epoch": 0.74, "learning_rate": 8.78e-06, "loss": 4.4203, "step": 442 },
    { "epoch": 0.74, "learning_rate": 8.8e-06, "loss": 4.3338, "step": 443 },
    { "epoch": 0.75, "learning_rate": 8.82e-06, "loss": 4.2684, "step": 444 },
    { "epoch": 0.75, "learning_rate": 8.84e-06, "loss": 4.0929, "step": 445 },
    { "epoch": 0.75, "learning_rate": 8.860000000000002e-06, "loss": 4.3938, "step": 446 },
    { "epoch": 0.75, "learning_rate": 8.880000000000001e-06, "loss": 4.3174, "step": 447 },
    { "epoch": 0.75, "learning_rate": 8.900000000000001e-06, "loss": 4.2561, "step": 448 },
    { "epoch": 0.75, "learning_rate": 8.920000000000001e-06, "loss": 4.444, "step": 449 },
    { "epoch": 0.76, "learning_rate": 8.94e-06, "loss": 4.5666, "step": 450 },
    { "epoch": 0.76, "learning_rate": 8.96e-06, "loss": 4.2167, "step": 451 },
    { "epoch": 0.76, "learning_rate": 8.98e-06, "loss": 4.2266, "step": 452 },
    { "epoch": 0.76, "learning_rate": 9e-06, "loss": 4.1614, "step": 453 },
    { "epoch": 0.76, "learning_rate": 9.020000000000002e-06, "loss": 4.0323, "step": 454 },
    { "epoch": 0.77, "learning_rate": 9.040000000000002e-06, "loss": 4.214, "step": 455 },
    { "epoch": 0.77, "learning_rate": 9.060000000000001e-06, "loss": 4.152, "step": 456 },
    { "epoch": 0.77, "learning_rate": 9.080000000000001e-06, "loss": 4.2426, "step": 457 },
    { "epoch": 0.77, "learning_rate": 9.100000000000001e-06, "loss": 4.2681, "step": 458 },
    { "epoch": 0.77, "learning_rate": 9.12e-06, "loss": 4.1891, "step": 459 },
    { "epoch": 0.77, "learning_rate": 9.14e-06, "loss": 4.2, "step": 460 },
    { "epoch": 0.78, "learning_rate": 9.16e-06, "loss": 4.1147, "step": 461 },
    { "epoch": 0.78, "learning_rate": 9.180000000000002e-06, "loss": 4.1357, "step": 462 },
    { "epoch": 0.78, "learning_rate": 9.200000000000002e-06, "loss": 4.1446, "step": 463 },
    { "epoch": 0.78, "learning_rate": 9.220000000000002e-06, "loss": 4.2087, "step": 464 },
    { "epoch": 0.78, "learning_rate": 9.240000000000001e-06, "loss": 4.2335, "step": 465 },
    { "epoch": 0.78, "learning_rate": 9.260000000000001e-06, "loss": 4.1698, "step": 466 },
    { "epoch": 0.79, "learning_rate": 9.280000000000001e-06, "loss": 4.1785, "step": 467 },
    { "epoch": 0.79, "learning_rate": 9.3e-06, "loss": 4.1921, "step": 468 },
    { "epoch": 0.79, "learning_rate": 9.32e-06, "loss": 4.1838, "step": 469 },
    { "epoch": 0.79, "learning_rate": 9.340000000000002e-06, "loss": 4.1536, "step": 470 },
    { "epoch": 0.79, "learning_rate": 9.360000000000002e-06, "loss": 4.081, "step": 471 },
    { "epoch": 0.79, "learning_rate": 9.38e-06, "loss": 4.1874, "step": 472 },
    { "epoch": 0.8, "learning_rate": 9.4e-06, "loss": 4.1796, "step": 473 },
    { "epoch": 0.8, "learning_rate": 9.42e-06, "loss": 4.0716, "step": 474 },
    { "epoch": 0.8, "learning_rate": 9.440000000000001e-06, "loss": 4.1163, "step": 475 },
    { "epoch": 0.8, "learning_rate": 9.460000000000001e-06, "loss": 4.1431, "step": 476 },
    { "epoch": 0.8, "learning_rate": 9.48e-06, "loss": 4.1631, "step": 477 },
    { "epoch": 0.8, "learning_rate": 9.5e-06, "loss": 4.1923, "step": 478 },
    { "epoch": 0.81, "learning_rate": 9.52e-06, "loss": 4.0699, "step": 479 },
    { "epoch": 0.81, "learning_rate": 9.54e-06, "loss": 4.0712, "step": 480 },
    { "epoch": 0.81, "learning_rate": 9.56e-06, "loss": 4.3093, "step": 481 },
    { "epoch": 0.81, "learning_rate": 9.58e-06, "loss": 4.1374, "step": 482 },
    { "epoch": 0.81, "learning_rate": 9.600000000000001e-06, "loss": 4.3051, "step": 483 },
    { "epoch": 0.81, "learning_rate": 9.620000000000001e-06, "loss": 4.2504, "step": 484 },
    { "epoch": 0.82, "learning_rate": 9.640000000000001e-06, "loss": 4.2339, "step": 485 },
    { "epoch": 0.82, "learning_rate": 9.66e-06, "loss": 4.1411, "step": 486 },
    { "epoch": 0.82, "learning_rate": 9.68e-06, "loss": 4.276, "step": 487 },
    { "epoch": 0.82, "learning_rate": 9.7e-06, "loss": 4.2906, "step": 488 },
    { "epoch": 0.82, "learning_rate": 9.72e-06, "loss": 4.216, "step": 489 },
    { "epoch": 0.82, "learning_rate": 9.74e-06, "loss": 4.1313, "step": 490 },
    { "epoch": 0.83, "learning_rate": 9.760000000000001e-06, "loss": 4.3359, "step": 491 },
    { "epoch": 0.83, "learning_rate": 9.780000000000001e-06, "loss": 4.1545, "step": 492 },
    { "epoch": 0.83, "learning_rate": 9.800000000000001e-06, "loss": 4.036, "step": 493 },
    { "epoch": 0.83, "learning_rate": 9.820000000000001e-06, "loss": 4.2527, "step": 494 },
    { "epoch": 0.83, "learning_rate": 9.84e-06, "loss": 4.3611, "step": 495 },
    { "epoch": 0.83, "learning_rate": 9.86e-06, "loss": 4.3937, "step": 496 },
    { "epoch": 0.84, "learning_rate": 9.88e-06, "loss": 4.2911, "step": 497 },
    { "epoch": 0.84, "learning_rate": 9.9e-06, "loss": 4.4337, "step": 498 },
    { "epoch": 0.84, "learning_rate": 9.920000000000002e-06, "loss": 4.4938, "step": 499 },
    { "epoch": 0.84, "learning_rate": 9.940000000000001e-06, "loss": 4.4108, "step": 500 },
    { "epoch": 0.84, "eval_loss": 4.200125694274902, "eval_runtime": 786.8447, "eval_samples_per_second": 3.358, "eval_steps_per_second": 0.281, "eval_wer": 1.9598968663228877, "step": 500 },
    { "epoch": 0.84, "learning_rate": 9.960000000000001e-06, "loss": 4.2338, "step": 501 },
    { "epoch": 0.84, "learning_rate": 9.980000000000001e-06, "loss": 4.1417, "step": 502 },
    { "epoch": 0.85, "learning_rate": 1e-05, "loss": 4.1053, "step": 503 },
    { "epoch": 0.85, "learning_rate": 9.893617021276596e-06, "loss": 4.1975, "step": 504 },
    { "epoch": 0.85, "learning_rate": 9.787234042553192e-06, "loss": 4.1617, "step": 505 },
    { "epoch": 0.85, "learning_rate": 9.680851063829787e-06, "loss": 4.2979, "step": 506 },
    { "epoch": 0.85, "learning_rate": 9.574468085106385e-06, "loss": 4.1643, "step": 507 },
    { "epoch": 0.85, "learning_rate": 9.46808510638298e-06, "loss": 4.158, "step": 508 },
    { "epoch": 0.86, "learning_rate": 9.361702127659576e-06, "loss": 4.0941, "step": 509 },
    { "epoch": 0.86, "learning_rate": 9.255319148936171e-06, "loss": 4.17, "step": 510 },
    { "epoch": 0.86, "learning_rate": 9.148936170212767e-06, "loss": 4.2423, "step": 511 },
    { "epoch": 0.86, "learning_rate": 9.042553191489362e-06, "loss": 4.1385, "step": 512 },
    { "epoch": 0.86, "learning_rate": 8.936170212765958e-06, "loss": 4.1332, "step": 513 },
    { "epoch": 0.86, "learning_rate": 8.829787234042555e-06, "loss": 4.0833, "step": 514 },
    { "epoch": 0.87, "learning_rate": 8.72340425531915e-06, "loss": 4.2107, "step": 515 },
    { "epoch": 0.87, "learning_rate": 8.617021276595746e-06, "loss": 4.1385, "step": 516 },
    { "epoch": 0.87, "learning_rate": 8.510638297872341e-06, "loss": 4.1791, "step": 517 },
    { "epoch": 0.87, "learning_rate": 8.404255319148937e-06, "loss": 4.2385, "step": 518 },
    { "epoch": 0.87, "learning_rate": 8.297872340425532e-06, "loss": 4.0488, "step": 519 },
    { "epoch": 0.87, "learning_rate": 8.191489361702128e-06, "loss": 4.1701, "step": 520 },
    { "epoch": 0.88, "learning_rate": 8.085106382978723e-06, "loss": 4.1984, "step": 521 },
    { "epoch": 0.88, "learning_rate": 7.97872340425532e-06, "loss": 4.2571, "step": 522 },
    { "epoch": 0.88, "learning_rate": 7.872340425531916e-06, "loss": 4.0696, "step": 523 },
    { "epoch": 0.88, "learning_rate": 7.765957446808511e-06, "loss": 3.9969, "step": 524 },
    { "epoch": 0.88, "learning_rate": 7.659574468085107e-06, "loss": 4.0892, "step": 525 },
    { "epoch": 0.88, "learning_rate": 7.553191489361703e-06, "loss": 4.0812, "step": 526 },
    { "epoch": 0.89, "learning_rate": 7.446808510638298e-06, "loss": 4.2357, "step": 527 },
    { "epoch": 0.89, "learning_rate": 7.340425531914894e-06, "loss": 4.1053, "step": 528 },
    { "epoch": 0.89, "learning_rate": 7.234042553191491e-06, "loss": 4.056, "step": 529 },
    { "epoch": 0.89, "learning_rate": 7.127659574468085e-06, "loss": 4.0824, "step": 530 },
    { "epoch": 0.89, "learning_rate": 7.021276595744682e-06, "loss": 4.0396, "step": 531 },
    { "epoch": 0.89, "learning_rate": 6.914893617021278e-06, "loss": 4.2931, "step": 532 },
    { "epoch": 0.9, "learning_rate": 6.808510638297873e-06, "loss": 4.1061, "step": 533 },
    { "epoch": 0.9, "learning_rate": 6.702127659574469e-06, "loss": 4.124, "step": 534 },
    { "epoch": 0.9, "learning_rate": 6.595744680851064e-06, "loss": 4.149, "step": 535 },
    { "epoch": 0.9, "learning_rate": 6.48936170212766e-06, "loss": 4.0031, "step": 536 },
    { "epoch": 0.9, "learning_rate": 6.382978723404256e-06, "loss": 4.2344, "step": 537 },
    { "epoch": 0.9, "learning_rate": 6.276595744680851e-06, "loss": 4.2397, "step": 538 },
    { "epoch": 0.91, "learning_rate": 6.170212765957447e-06, "loss": 4.0717, "step": 539 },
    { "epoch": 0.91, "learning_rate": 6.063829787234044e-06, "loss": 4.156, "step": 540 },
    { "epoch": 0.91, "learning_rate": 5.957446808510638e-06, "loss": 4.151, "step": 541 },
    { "epoch": 0.91, "learning_rate": 5.851063829787235e-06, "loss": 4.1377, "step": 542 },
    { "epoch": 0.91, "learning_rate": 5.744680851063831e-06, "loss": 4.3309, "step": 543 },
    { "epoch": 0.91, "learning_rate": 5.638297872340426e-06, "loss": 4.161, "step": 544 },
    { "epoch": 0.92, "learning_rate": 5.531914893617022e-06, "loss": 4.2825, "step": 545 },
    { "epoch": 0.92, "learning_rate": 5.425531914893617e-06, "loss": 4.4817, "step": 546 },
    { "epoch": 0.92, "learning_rate": 5.319148936170213e-06, "loss": 4.2491, "step": 547 },
    { "epoch": 0.92, "learning_rate": 5.212765957446809e-06, "loss": 4.5033, "step": 548 },
    { "epoch": 0.92, "learning_rate": 5.106382978723404e-06, "loss": 4.6914, "step": 549 },
    { "epoch": 0.92, "learning_rate": 5e-06, "loss": 4.3848, "step": 550 },
    { "epoch": 0.93, "learning_rate": 4.893617021276596e-06, "loss": 4.1878, "step": 551 },
    { "epoch": 0.93, "learning_rate": 4.787234042553192e-06, "loss": 4.1017, "step": 552 },
    { "epoch": 0.93, "learning_rate": 4.680851063829788e-06, "loss": 4.091, "step": 553 },
    { "epoch": 0.93, "learning_rate": 4.574468085106383e-06, "loss": 4.0357, "step": 554 },
    { "epoch": 0.93, "learning_rate": 4.468085106382979e-06, "loss": 4.0749, "step": 555 },
    { "epoch": 0.93, "learning_rate": 4.361702127659575e-06, "loss": 4.106, "step": 556 },
    { "epoch": 0.94, "learning_rate": 4.255319148936171e-06, "loss": 4.1974, "step": 557 },
    { "epoch": 0.94, "learning_rate": 4.148936170212766e-06, "loss": 4.2221, "step": 558 },
    { "epoch": 0.94, "learning_rate": 4.042553191489362e-06, "loss": 4.0609, "step": 559 },
    { "epoch": 0.94, "learning_rate": 3.936170212765958e-06, "loss": 4.1074, "step": 560 },
    { "epoch": 0.94, "learning_rate": 3.8297872340425535e-06, "loss": 4.4338, "step": 561 },
    { "epoch": 0.94, "learning_rate": 3.723404255319149e-06, "loss": 4.041, "step": 562 },
    { "epoch": 0.95, "learning_rate": 3.6170212765957453e-06, "loss": 4.0901, "step": 563 },
    { "epoch": 0.95, "learning_rate": 3.510638297872341e-06, "loss": 4.1839, "step": 564 },
    { "epoch": 0.95, "learning_rate": 3.4042553191489363e-06, "loss": 4.1574, "step": 565 },
    { "epoch": 0.95, "learning_rate": 3.297872340425532e-06, "loss": 4.0428, "step": 566 },
    { "epoch": 0.95, "learning_rate": 3.191489361702128e-06, "loss": 4.1384, "step": 567 },
    { "epoch": 0.96, "learning_rate": 3.0851063829787237e-06, "loss": 4.0906, "step": 568 },
    { "epoch": 0.96, "learning_rate": 2.978723404255319e-06, "loss": 3.9581, "step": 569 },
    { "epoch": 0.96, "learning_rate": 2.8723404255319155e-06, "loss": 4.1344, "step": 570 },
    { "epoch": 0.96, "learning_rate": 2.765957446808511e-06, "loss": 4.1249, "step": 571 },
    { "epoch": 0.96, "learning_rate": 2.6595744680851065e-06, "loss": 4.1715, "step": 572 },
    { "epoch": 0.96, "learning_rate": 2.553191489361702e-06, "loss": 4.2385, "step": 573 },
    { "epoch": 0.97, "learning_rate": 2.446808510638298e-06, "loss": 4.1462, "step": 574 },
    { "epoch": 0.97, "learning_rate": 2.340425531914894e-06, "loss": 4.258, "step": 575 },
    { "epoch": 0.97, "learning_rate": 2.2340425531914894e-06, "loss": 4.1278, "step": 576 },
    { "epoch": 0.97, "learning_rate": 2.1276595744680853e-06, "loss": 4.1169, "step": 577 },
    { "epoch": 0.97, "learning_rate": 2.021276595744681e-06, "loss": 4.0495, "step": 578 },
    { "epoch": 0.97, "learning_rate": 1.9148936170212767e-06, "loss": 4.004, "step": 579 },
    { "epoch": 0.98, "learning_rate": 1.8085106382978727e-06, "loss": 4.2692, "step": 580 },
    { "epoch": 0.98, "learning_rate": 1.7021276595744682e-06, "loss": 4.3664, "step": 581 },
    { "epoch": 0.98, "learning_rate": 1.595744680851064e-06, "loss": 4.2793, "step": 582 },
    { "epoch": 0.98, "learning_rate": 1.4893617021276596e-06, "loss": 4.0669, "step": 583 },
    { "epoch": 0.98, "learning_rate": 1.3829787234042555e-06, "loss": 4.2035, "step": 584 },
    { "epoch": 0.98, "learning_rate": 1.276595744680851e-06, "loss": 4.1755, "step": 585 },
    { "epoch": 0.99, "learning_rate": 1.170212765957447e-06, "loss": 4.0676, "step": 586 },
    { "epoch": 0.99, "learning_rate": 1.0638297872340427e-06, "loss": 4.2543, "step": 587 },
    { "epoch": 0.99, "learning_rate": 9.574468085106384e-07, "loss": 4.2331, "step": 588 },
    { "epoch": 0.99, "learning_rate": 8.510638297872341e-07, "loss": 4.3553, "step": 589 },
    { "epoch": 0.99, "learning_rate": 7.446808510638298e-07, "loss": 4.2031, "step": 590 },
    { "epoch": 0.99, "learning_rate": 6.382978723404255e-07, "loss": 4.4084, "step": 591 },
    { "epoch": 1.0, "learning_rate": 5.319148936170213e-07, "loss": 4.2994, "step": 592 },
    { "epoch": 1.0, "learning_rate": 4.2553191489361704e-07, "loss": 4.5066, "step": 593 },
    { "epoch": 1.0, "learning_rate": 3.1914893617021275e-07, "loss": 4.4907, "step": 594 },
    { "epoch": 1.0, "step": 594, "total_flos": 0.0, "train_loss": 4.336673439953865, "train_runtime": 5718.4377, "train_samples_per_second": 4.991, "train_steps_per_second": 0.104 }
  ],
  "max_steps": 594,
  "num_train_epochs": 1,
  "total_flos": 0.0,
  "trial_name": null,
  "trial_params": null
}